RecursivelyNILImpl.Mesa
Copyright © 1985 by Xerox Corporation. All rights reserved.
last edited On July 10, 1985 3:55:57 pm PDT by Bob Hagmann
DIRECTORY
Allocator USING[NHeaderP],
AllocatorOps USING[ReferentType, REFToNHP],
AtomPrivate,
Process,
RCMapOps USING[GetBase],
RCMap USING[Index, Base],
RecursivelyNIL,
Rope USING[RopeRep],
RTCommon USING[FetchField],
RTTypesBasicPrivate USING[MapTiTd],
SafeStorage USING[ReclaimCollectibleObjects, Type];
RecursivelyNILImpl: PROGRAM
IMPORTS AllocatorOps, Process, RCMapOps, RTCommon, RTTypesBasicPrivate, SafeStorage
EXPORTS RecursivelyNIL
SHARES Rope
= BEGIN
rcmb: RCMap.Base = RCMapOps.GetBase[].base; -- base of the reference-count map table; indexed by smashComponents
lastREF: CARDINAL = 19; -- highest index in a REFBlock's refs array (20 REFs per block)
REFBlockRec: TYPE = RECORD [
-- One block of the explicit stack of REFs awaiting processing.
-- Blocks are doubly chained; emptied blocks are kept on the next chain for reuse.
prev: REFBlock ← NIL,
next: REFBlock ← NIL,
refs: ARRAY [0..lastREF] OF REF ANY ← ALL[NIL]
];
REFBlock: TYPE = REF REFBlockRec;
phonyREFRec: TYPE = RECORD [ theREF: REF ANY ← NIL]; -- staging record used to NIL a field without corrupting reference counts
-- Given a REF ANY, process it and all reachable collectible objects from it and NIL out all REF-containing fields. If a reachable object is finalizable, NIL out the ref to it, but do not process the object itself (possible violation of the package reference counts => ZCT Disaster). The internals of ROPEs are never processed. If a CheckProc is given, it is consulted every time a field is about to be NILed. If TRUE is returned, the field is NILed and the previous REF is added to the processing. If FALSE is returned, the field is not NILed and the field's REF is not processed. A NIL checkProc means to always process.
-- Although the module name is RecursivelyNILImpl, the implementation is not recursive. This is because it would consume too much MDS space.
NILRef: PUBLIC PROC[root: REF ANY, checkProc: RecursivelyNIL.CheckProc ← NIL] = {
-- NILs every REF-containing field of root and of every collectible object reachable from it.
-- Finalizable objects, ROPE internals, and ATOM internals are never entered (see module comment).
-- Traversal is iterative, via an explicit stack of REFBlocks, to avoid consuming MDS space.
pushBlock: REFBlock ← NIL ; -- top block of the explicit stack; NIL until addRef first allocates one
pushBlockIndex: CARDINAL ← 0; -- next free slot in pushBlock.refs
currentRef: REF ANY;
nilledRefs: INT ← 0 ; -- NILed-field count; a collection is forced every 2000 to bound retained garbage
countForAbort: INT ← 0 ; -- work count between Process.CheckForAbort polls
phonyREF: REF phonyREFRec ← NEW[phonyREFRec]; -- staging cell for reference-count-correct smashing
ropeRepType: SafeStorage.Type = CODE[Rope.RopeRep];
atomRecType: SafeStorage.Type = CODE[AtomPrivate.AtomRec];
addRef: PROC [parentREF: REF ANY, refToAdd: REF ANY] RETURNS[NILit: BOOL ← TRUE] = INLINE {
-- Decides whether the field holding refToAdd should be NILed; if so, pushes refToAdd for later processing.
-- Returns FALSE (leave the field alone) for NIL, ROPEs, ATOMs, and REFs vetoed by checkProc.
refToAddType: SafeStorage.Type = AllocatorOps.ReferentType[refToAdd];
IF refToAdd = NIL OR refToAddType = ropeRepType OR refToAddType = atomRecType THEN RETURN[FALSE];
IF checkProc # NIL AND ~checkProc[parentREF, AllocatorOps.ReferentType[parentREF], refToAdd, refToAddType] THEN RETURN[FALSE];
nilledRefs ← nilledRefs +1;
IF nilledRefs > 2000 THEN {
nilledRefs ← 0 ;
SafeStorage.ReclaimCollectibleObjects[suspendMe: FALSE, traceAndSweep: FALSE]
};
countForAbort ← countForAbort +1 ;
IF countForAbort > 200 THEN {
countForAbort ← 0 ; -- reset so the abort poll runs once per 200 pushes, not on every push thereafter
Process.CheckForAbort[];
};
IF pushBlock = NIL THEN pushBlock ← NEW[REFBlockRec];
IF pushBlockIndex > lastREF THEN {
-- current block is full: advance to the next block, allocating one only if none is chained for reuse
newBlock: REFBlock ;
IF pushBlock.next = NIL THEN {
newBlock ← NEW[REFBlockRec];
pushBlock.next ← newBlock
}
ELSE newBlock ← pushBlock.next;
newBlock.prev ← pushBlock; -- pushBlock cannot be NIL here: it was dereferenced just above
pushBlockIndex ← 0;
pushBlock ← newBlock;
};
pushBlock.refs[pushBlockIndex] ← refToAdd;
pushBlockIndex ← pushBlockIndex + 1 ;
};
popRef: PROC RETURNS [refPoped: REF ANY] = {
-- Removes and returns the most recently pushed REF; returns NIL when the stack is empty.
IF pushBlockIndex = 0 THEN {
-- guard pushBlock = NIL: addRef never pushed anything (root contained no qualifying REFs)
IF pushBlock = NIL OR pushBlock.prev = NIL THEN RETURN[NIL]
ELSE {
nowBlock: REFBlock ← pushBlock.prev;
pushBlock.prev ← NIL;
pushBlock ← nowBlock;
pushBlockIndex ← lastREF; -- the previous block is full; its top element is at lastREF
refPoped ← pushBlock.refs[pushBlockIndex];
pushBlock.refs[pushBlockIndex] ← NIL ;
};
}
ELSE {
pushBlockIndex ← pushBlockIndex - 1;
refPoped ← pushBlock.refs[pushBlockIndex];
pushBlock.refs[pushBlockIndex] ← NIL ;
};
};
smashComponents: PROC[ref: REF ANY, ptr: LONG POINTER, rcmx: RCMap.Index] = {
-- Walks the reference-count map entry rcmx for the object at ptr and NILs each REF field that addRef approves.
-- Each NILing goes through phonyREF so the referent's reference count ends up correct (see oneRef case).
WITH rcmr: rcmb[rcmx] SELECT FROM
null => NULL;
oneRef => {
IF addRef[ref, LOOPHOLE[ptr+rcmr.offset, LONG POINTER TO REF ANY]^] THEN {
LOOPHOLE[phonyREF.theREF, LONG POINTER] ← LOOPHOLE[ptr+rcmr.offset, LONG POINTER TO LONG POINTER]^; -- copies the REF but does not change the reference count
LOOPHOLE[ptr+rcmr.offset, LONG POINTER TO REF ANY]^ ← NIL ; -- NILs the REF to the object, but does not decrement the reference count => reference count is now correct
phonyREF.theREF ← NIL; -- nil the REF and decrement the reference count
};
};
simple =>
FOR i: CARDINAL IN [0..rcmr.length) DO
IF rcmr.refs[i]
THEN {
IF addRef[ref, LOOPHOLE[ptr+i, LONG POINTER TO REF ANY]^] THEN {
LOOPHOLE[phonyREF.theREF, LONG POINTER] ← LOOPHOLE[ptr+i, LONG POINTER TO LONG POINTER]^; -- copies the REF but does not change the reference count
LOOPHOLE[ptr+i, LONG POINTER TO REF ANY]^ ← NIL ; -- NILs the REF to the object, but does not decrement the reference count => reference count is now correct
phonyREF.theREF ← NIL; -- nil the REF and decrement the reference count
};
};
ENDLOOP;
ref => {
IF addRef[ref, LOOPHOLE[ptr, LONG POINTER TO REF ANY]^] THEN {
LOOPHOLE[phonyREF.theREF, LONG POINTER] ← LOOPHOLE[ptr, LONG POINTER TO LONG POINTER]^; -- copies the REF but does not change the reference count
LOOPHOLE[ptr, LONG POINTER TO REF ANY]^ ← NIL ; -- NILs the REF to the object, but does not decrement the reference count => reference count is now correct
phonyREF.theREF ← NIL; -- nil the REF and decrement the reference count
};
};
nonVariant => {
FOR i: CARDINAL IN [0..rcmr.nComponents) DO
smashComponents[ref, ptr + rcmr.components[i].wordOffset, rcmr.components[i].rcmi];
ENDLOOP;
};
variant => {
-- fetch the tag value, then walk the map for the selected variant
v: CARDINAL = RTCommon.FetchField[ptr + rcmr.fdTag.wordOffset, [bitFirst: rcmr.fdTag.bitFirst, bitCount: rcmr.fdTag.bitCount]];
smashComponents[ref, ptr, rcmr.variants[v]];
};
array => {
FOR i: CARDINAL IN [0..rcmr.nElements) DO
smashComponents[ref, ptr + i * rcmr.wordsPerElement, rcmr.rcmi];
ENDLOOP;
};
sequence => {
-- fetch the runtime length, walk the common part, then each element of the sequence body
length: CARDINAL = RTCommon.FetchField[ptr+rcmr.fdLength.wordOffset, [bitFirst: rcmr.fdLength.bitFirst, bitCount: rcmr.fdLength.bitCount]];
smashComponents[ref, ptr, rcmr.commonPart];
FOR i: CARDINAL IN [0..length) DO
smashComponents[ref, ptr + rcmr.dataOffset + i * rcmr.wordsPerElement, rcmr.rcmi];
ENDLOOP;
};
ENDCASE => ERROR;
};
currentRef ← root;
WHILE currentRef # NIL DO
-- Push all REFs inside currentRef onto the stack, then pop the next object to process. Done when the stack is empty.
refType: SafeStorage.Type ← LOOPHOLE[AllocatorOps.ReferentType[currentRef], SafeStorage.Type];
nhp: Allocator.NHeaderP ← AllocatorOps.REFToNHP[currentRef];
rcmx: RCMap.Index = RTTypesBasicPrivate.MapTiTd[refType].rcmx;
ptr: LONG POINTER = LOOPHOLE[currentRef, LONG POINTER];
IF ~nhp.f THEN smashComponents[currentRef, ptr, rcmx]; -- finalizable objects (nhp.f) are not entered: doing so could violate package reference counts
currentRef ← popRef[];
ENDLOOP;
pushBlock ← NIL; -- drop the stack blocks so they can be collected
};
END.
Bob Hagmann May 20, 1985 8:29:38 am PDT
modified: convert to Cedar 6.0
Bob Hagmann July 10, 1985 3:55:57 pm PDT
modified: now will not destroy inside of ATOMs