TraceAndSweepImpl.mesa
last change by Paul Rovner, December 9, 1983 4:51 pm
This module implements a trace-and-sweep garbage collector for Cedar ("DoTraceAndSweepCollection"). It uses auxiliary storage for a reference stack. To allow subsequent incremental collection, DoTraceAndSweepCollection restores reference counts to correct values. No collectible storage is allocated while DoTraceAndSweepCollection is active, nor are any REFs in counted storage changed during that time. Client processes that attempt to do so are suspended until DoTraceAndSweepCollection finishes.
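The collection proceeds in phases (see ActuallyDoIt below): the free lists and the zero-count table are cleared and every object's reference count and mark bits are reset (AllObjects[clearRC]); REFs are then traced from the local-frame heap and from all global frames, with reachable ref-containing objects pushed on the auxiliary reference stack and scanned in turn; finally a sweep over all objects (AllObjects[visitOneObject]) reclaims unmarked blocks, rebuilds the zero-count table, and leaves reference counts consistent for the incremental collector. DoTraceAndSweepCollection is registered with the collector at module initialization (Collector.EstablishTAndSProc, below) and is invoked through SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE, traceAndSweep: TRUE], as WhoPointsTo and FindCircularGarbage do.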
DIRECTORY
Allocator: TYPE USING [bsiEscape, ExtendedHeader, HeaderP, NHeaderP, QuantumIndex, wordsPerQuantum, LastAddress, NormalHeader],
AllocatorOps: TYPE USING [EnterAndCallBack, NHPToREF, REFToNHP, TAndSFreeObject, IsValidRef, quantumMap, Reset, BlockSize],
Basics: TYPE USING [BITAND, LowHalf],
Collector: TYPE USING [EstablishTAndSProc],
DebuggerSwap: TYPE USING [CallDebugger],
LoadState: TYPE USING [Acquire, Release, local, EnumerateAllModules, ConfigID, ModuleIndex, ModuleToGlobalFrame, GlobalFrameToType],
PrincOps: TYPE USING[GlobalFrameHandle, wordsPerPage],
RCMap: TYPE USING [nullIndex],
RCMicrocodeOps: TYPE USING [OnZ, ZCTFull, RCOverflowOccurred, RCUnderflowOccurred, ASSIGNREF],
RTFrameHeapSnapshot: TYPE USING
[AcquireFrameHeapSnapshot, ReleaseFrameHeapSnapshot, MapUncountedBodies],
RTTypesBasicPrivate: TYPE USING [MapTiRcmx, MapRefs, NumberPackageRefs],
SafeStorage: TYPE USING [nullType, Type, GetReferentType, ReclaimCollectibleObjects],
VM: TYPE USING [AddressForPageNumber, Allocate, Interval, Free, PageNumberForAddress],
ZCT: TYPE USING [EnterAndCallBack, EnterRCOvAndCallBack, InnerHandleRCOverflow, InnerHandleRCUnderflow, zct, zctBlockWords, RCOvReset];
TraceAndSweepImpl: PROGRAM
IMPORTS AllocatorOps, Basics, Collector, DebuggerSwap, LoadState, RCMicrocodeOps, RTFrameHeapSnapshot, RTTypesBasicPrivate, SafeStorage, VM, ZCT
= BEGIN OPEN Allocator, AllocatorOps, SafeStorage;
Variables
nRetainedObjects: LONG CARDINAL ← 0;
number of objects retained only because REFs to them appear on the stack
rpa: ARRAY [0..maxNRPIndex] OF LONG POINTER ← ALL[NIL];
maxNRPIndex: CARDINAL = 20;
nextRpaIndex: CARDINAL ← 0;
destructiveTestEnabled: BOOL ← FALSE;
destructiveTestEnabled TRUE => do a second TandS without the stack scan, then punt
findingCircularGarbage: BOOL ← FALSE;
findingCircularGarbage TRUE => remember stuff found; don't reclaim it
refStackChain: RefStack ← NIL;
refStackFree: RefStack ← NIL;
nRefsPushed: INT ← 0;
interesting statistics
objectsSeen: LONG CARDINAL ← 0;
objectsReclaimed: LONG CARDINAL ← 0;
objectsKept: LONG CARDINAL ← 0;
nGlobalFrames: LONG CARDINAL ← 0;
nRCGlobalFrames: LONG CARDINAL ← 0;
nLocalFrames: LONG CARDINAL ← 0;
debugging aids
SuspectSeen: SIGNAL = CODE;
suspect: LONG POINTER TO UNSPECIFIED ← LOOPHOLE[LONG[-1]];
TYPEs and constants
RefStackRep: TYPE
= RECORD [
next: RefStack,
size, max: CARDINAL,
refs: SEQUENCE COMPUTED CARDINAL OF LONG POINTER
];
RefStack: TYPE = LONG POINTER TO RefStackRep;
support for WhoPointsTo and FindCircularGarbage
suspectRef: LONG POINTER TO UNSPECIFIED ← NIL; -- a LOOPHOLE'd REF
APNodes: TYPE = ARRAY [0..maxNReferers) OF NHeaderP;
suspectReferers: REF APNodes ← NEW[APNodes ← ALL[NIL]];
maxNReferers: NAT = 10;
nSuspectReferers: NAT ← 0;
AIPNodes: TYPE = ARRAY [0..maxNInnerReferers) OF NHeaderP;
innerReferers: REF AIPNodes ← NEW[AIPNodes ← ALL[NIL]];
maxNInnerReferers: NAT = 50;
nInnerReferers: NAT ← 0;
gfhToSuspectRef: PrincOps.GlobalFrameHandle ← NIL;
suspectRefFoundInLocalFrame: BOOL ← FALSE;
circularStructureFound: BOOL ← FALSE;
PROCS
Walk up the referer chain. Stop if a gfh is found, if suspectRef is found in a local frame, if a loop is found,
if more than one referer is found, or if no referers are found (changed conservative scan)
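Each pass plants the current single referer as suspectRef and runs a full trace-and-sweep; TraceLocalFrame, TraceGlobalFrame, and TraceRefsInObject (below) note wherever they find suspectRef, recording the results in suspectReferers, gfhToSuspectRef, innerReferers, and the BOOLs above.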
WhoPointsTo: PROC [nhp: NHeaderP, cycleLimit: NAT ← 20]
RETURNS[
referers: REF APNodes,
gfh: PrincOps.GlobalFrameHandle,
foundInLocalFrame, foundInCircularStructure: BOOL,
cycles: NAT ← 0] = {
FOR i: NAT IN [0..maxNReferers) DO suspectReferers[i] ← NIL ENDLOOP;
suspectReferers[0] ← nhp;
nSuspectReferers ← 1;
FOR i: NAT IN [0..maxNInnerReferers) DO innerReferers[i] ← NIL ENDLOOP;
nInnerReferers ← 0;
gfhToSuspectRef ← NIL;
suspectRefFoundInLocalFrame ← FALSE;
circularStructureFound ← FALSE;
UNTIL gfhToSuspectRef # NIL
OR circularStructureFound
OR suspectRefFoundInLocalFrame
OR nSuspectReferers > 1
OR nSuspectReferers = 0
OR cycles = cycleLimit
DO
suspectRef ← LOOPHOLE[NHPToREF[suspectReferers[0]], LONG POINTER];
FOR i: NAT IN [0..maxNReferers) DO suspectReferers[i] ← NIL ENDLOOP;
nSuspectReferers ← 0;
SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE, traceAndSweep: TRUE];
cycles ← cycles + 1;
ENDLOOP;
suspectRef ← NIL;
RETURN[
referers: suspectReferers,
gfh: gfhToSuspectRef,
foundInLocalFrame: suspectRefFoundInLocalFrame,
foundInCircularStructure: circularStructureFound,
cycles: cycles];
};
This procedure first does a standard incremental collection. Then it goes through the motions of a TandS, but does not reclaim any garbage; instead, it leaves the mark bit set. NOTE that this works only if SSExtra.useSizeToZn is TRUE. BEWARE: don't forget to clear the marks later! XXX
FindCircularGarbage: PROC = {
SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE];
findingCircularGarbage ← TRUE;
SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE, traceAndSweep: TRUE];
findingCircularGarbage ← FALSE;
};
IncRC: PROC [ref: REF] = {
dummy: REF ← NIL;
RCMicrocodeOps.ASSIGNREF[rhs: ref, lhs: @dummy
! RCMicrocodeOps.RCOverflowOccurred => {ZCT.InnerHandleRCOverflow[ref]; RETRY};
];
dummy ← NIL;
};
DecRC: PROC [ref: REF] = {
dummy: REF ← ref;
RCMicrocodeOps.ASSIGNREF[rhs: NIL, lhs: @dummy
! RCMicrocodeOps.RCUnderflowOccurred => {ZCT.InnerHandleRCUnderflow[ref]; RETRY}];
dummy ← NIL;
};
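IncRC and DecRC adjust an object's reference count by assigning the REF into, or NIL over, a local dummy REF via the counted-assignment microcode (RCMicrocodeOps.ASSIGNREF); count overflow and underflow are caught and handled with ZCT.InnerHandleRCOverflow and ZCT.InnerHandleRCUnderflow before retrying the assignment.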
local space management
ActuallyDoIt: PROC[ignoreStack: BOOL ← FALSE] = {
clearRC: PROC [nhp: NHeaderP] = {
objectsSeen ← objectsSeen + 1;
nhp.inZCT ← FALSE;
nhp.maybeOnStack ← FALSE;
Leave the f flag and the type field alone
nhp.refCount ← 0;
nhp.rcOverflowed ← FALSE;
};
Initialize
objectsSeen ← 0;
objectsKept ← objectsReclaimed ← 0;
nRCGlobalFrames ← nGlobalFrames ← nLocalFrames ← 0;
nRetainedObjects ← 0;
nextRpaIndex ← 0;
nRefsPushed ← 0;
Clear all free lists
ZCT.zct.bsiToFreeList ← ALL[NIL];
AllocatorOps.Reset[];
Clear the zero-count table
ZCT.zct.markingDecrements ← FALSE;
ZCT.zct.wp ← ZCT.zct.rp ← ZCT.zct.rp
  - Basics.BITAND[
      Basics.LowHalf[LOOPHOLE[ZCT.zct.rp, LONG CARDINAL]],
      ZCT.zctBlockWords-1
      ];
i.e. mask rp by (LastAddress-(zctBlockWords-1))
Clear all ref counts and count objects
ZCT.RCOvReset[];
AllObjects[clearRC];
trace from all local and global frames in the world
IF NOT ignoreStack THEN RTFrameHeapSnapshot.MapUncountedBodies[TraceLocalFrame];
[] ← LoadState.local.EnumerateAllModules[order: newestFirst, proc: TraceGlobalFrame];
continue marking from the ref stack and from objects
UNTIL EmptyRefStack[] DO
TraceRefsInObject[LOOPHOLE[PopRef[], REF]]
ENDLOOP;
free the space for ref stacks
FreeRefStacks[];
Free the objects
Reclaim unmarked and free objects, rebuild the zct, and remember objects retained only because of the frame heap
AllObjects[visitOneObject];
XXX make use of ordered enumeration to merge and trim free blocks
IF objectsReclaimed + objectsKept # objectsSeen THEN ERROR; -- check for consistency
}; -- end ActuallyDoIt
DoTraceAndSweepCollection: PROC = {ENABLE UNWIND => NULL;
haveAllocatorLocked: PROC = { -- here with the loader and allocator locked
haveRCOvLocked: PROC = {
here with the loader, allocator and RCOvBank locked
haveZCTLocked: PROC = {
here with the loader, allocator, RCOvBank and ZCT locked
RTFrameHeapSnapshot.AcquireFrameHeapSnapshot[];
{ ENABLE UNWIND => RTFrameHeapSnapshot.ReleaseFrameHeapSnapshot[];
ActuallyDoIt[ignoreStack: FALSE];
IF destructiveTestEnabled THEN {
ActuallyDoIt[ignoreStack: TRUE];
DebuggerSwap.CallDebugger["destructive collection finished."];
}; -- look at XXX from the debugger
RTFrameHeapSnapshot.ReleaseFrameHeapSnapshot[];
};
};
START haveRCOvLocked HERE
Acquire the lock on the ref counting machinery, do the work, then release the lock.
ZCT.EnterAndCallBack[haveZCTLocked];
};
START haveAllocatorLocked HERE
Acquire the lock on the RCOverflowBank, do the work, then release the lock.
ZCT.EnterRCOvAndCallBack[haveRCOvLocked];
}; -- end haveAllocatorLocked
START DoTraceAndSweepCollection HERE
First, acquire the lock on the loader (REFs in GF's are counted)
LoadState.local.Acquire[exclusive];
Next, acquire the lock on the allocator, do the work, then release the lock.
AllocatorOps.EnterAndCallBack[haveAllocatorLocked ! UNWIND => LoadState.local.Release[]];
Release the loader's lock
LoadState.local.Release[];
}; -- end DoTraceAndSweepCollection
this is called once for each valid REF found in the frame heap or in counted storage
(this includes reconstructing the RC for the object)
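The mark is the inZCT bit in the object's header (see Marked, SetMark, and ClearMark below). Only objects whose types can contain REFs (RefContaining) are pushed on the reference stack, and only on first encounter, so each ref-containing object is scanned at most once.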
MarkRef: PROC [ref: REF, countIt: BOOL ← TRUE] = INLINE {
IF NOT Marked[ref] THEN {
here if this REF is valid and not seen before
SetMark[ref];
IF RefContaining[ref] THEN PushRef[LOOPHOLE[ref, LONG POINTER]];
};
IF countIt THEN IncRC[ref]
};
Reclaim unmarked and free objects, rebuild the zct, and remember objects retained only because of the frame heap
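An object survives the sweep if its f flag is set or it was marked during the trace. Survivors get their mark cleared and, when the reconstructed count is zero and has not overflowed, go back on the zero-count table; survivors with a zero count and no f flag are alive only because of REFs in the frame heap and are remembered in rpa. Everything else is returned to the allocator with TAndSFreeObject.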
visitOneObject: PROC [nhp: NHeaderP] = {
IF nhp.f THEN { -- adjust refCount
FOR i: CARDINAL IN [1..RTTypesBasicPrivate.NumberPackageRefs[nhp.type]]
DO DecRC[NHPToREF[nhp]
 ! RCMicrocodeOps.ZCTFull => DebuggerSwap.CallDebugger["TAndS disaster."]];
ENDLOOP;
};
IF nhp.f OR Marked[AllocatorOps.NHPToREF[nhp]]
THEN {
objectsKept ← objectsKept + 1;
IF NOT findingCircularGarbage THEN ClearMark[nhp];
IF nhp.refCount = 0 AND NOT nhp.rcOverflowed
THEN {
IF NOT nhp.f THEN {
rpa[nextRpaIndex] ← LOOPHOLE[NHPToREF[nhp], LONG POINTER];
IF nextRpaIndex < maxNRPIndex THEN nextRpaIndex ← nextRpaIndex + 1;
nRetainedObjects ← nRetainedObjects + 1;
};
put it on zct
RCMicrocodeOps.OnZ[nhp
! RCMicrocodeOps.ZCTFull => DebuggerSwap.CallDebugger["TAndS disaster."]];
};
}
ELSE {
IF findingCircularGarbage THEN {objectsKept ← objectsKept + 1; RETURN};
objectsReclaimed ← objectsReclaimed + 1;
AllocatorOps.TAndSFreeObject[nhp];
};
};
TraceLocalFrame: PROC [d: LONG DESCRIPTOR FOR ARRAY OF WORD] = {
this procedure is used to mark refs in a local frame
reference counts are not incremented for REFs found here (countIt: FALSE), since the frame heap is not counted storage
pa: LONG POINTER TO LONG CARDINAL
= LOOPHOLE[BASE[d], LONG POINTER TO LONG CARDINAL];
nWords: CARDINAL = LENGTH[d];
nLocalFrames ← nLocalFrames + 1;
IF nWords >= SIZE[REF] THEN
FOR i: CARDINAL IN [0..nWords-SIZE[REF]] DO
addr: LONG CARDINAL = (pa+i)^;
IF addr # 0 AND AllocatorOps.IsValidRef[LOOPHOLE[addr, LONG POINTER]] THEN {
here if this REF is valid and non-NIL
ref: REF = LOOPHOLE[addr, REF];
IF suspectRef # NIL AND LOOPHOLE[addr, LONG POINTER] = suspectRef
THEN suspectRefFoundInLocalFrame ← TRUE;
MarkRef[ref: ref, countIt: FALSE];
};
ENDLOOP;
};
TraceGlobalFrame: PROC [configID: LoadState.ConfigID, moduleIndex: LoadState.ModuleIndex]
RETURNS[stop: BOOL ← FALSE] = {
gfh: PrincOps.GlobalFrameHandle
= LoadState.local.ModuleToGlobalFrame[configID, moduleIndex];
this procedure is used to mark refs from a global frame
the algorithm is essentially the same as for regular objects
type: Type ← LoadState.local.GlobalFrameToType[gfh];
procRef: PROC [ref: REF] = {
IF ref = NIL THEN RETURN;
IF suspectRef # NIL AND LOOPHOLE[ref, LONG POINTER] = suspectRef
THEN gfhToSuspectRef ← gfh;
MarkRef[ref];
};
nGlobalFrames ← nGlobalFrames + 1;
IF type # nullType
THEN {
nRCGlobalFrames ← nRCGlobalFrames + 1;
RTTypesBasicPrivate.MapRefs
[LONG[gfh], RTTypesBasicPrivate.MapTiRcmx[type], procRef];
};
RETURN[FALSE]};
TraceRefsInObject: PROC [container: REF] = {
applies procRef to each reference in the indicated object (container)
type: Type ← GetReferentType[container];
refererPushed: BOOL ← FALSE;
procRef: PROC [ref: REF] = {
IF ref = NIL THEN RETURN;
IF suspectRef # NIL
AND LOOPHOLE[ref, LONG POINTER] = suspectRef
AND NOT refererPushed
THEN {
nhp: NHeaderP = AllocatorOps.REFToNHP[container];
refererPushed ← TRUE;
IF nSuspectReferers < maxNReferers
THEN {
suspectReferers[nSuspectReferers] ← nhp;
nSuspectReferers ← nSuspectReferers + 1;
};
FOR i: NAT IN [0..nInnerReferers)
DO IF nhp = innerReferers[i] THEN {circularStructureFound ← TRUE; EXIT}
ENDLOOP;
IF NOT circularStructureFound AND nInnerReferers < maxNInnerReferers
THEN {
innerReferers[nInnerReferers] ← nhp;
nInnerReferers ← nInnerReferers + 1;
};
};
MarkRef[ref];
}; -- end procRef
DO
oldNRefsPushed: INT = nRefsPushed;
RTTypesBasicPrivate.MapRefs
[LOOPHOLE[container, LONG POINTER],
RTTypesBasicPrivate.MapTiRcmx[type],
procRef];
IF nRefsPushed-oldNRefsPushed # 1 THEN RETURN;
container ← LOOPHOLE[PopRef[], REF];
type ← GetReferentType[container];
refererPushed ← FALSE;
ENDLOOP;
};
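The DO loop avoids growing the reference stack along linear chains: when scanning an object pushes exactly one new object, that object is popped and scanned immediately in the next iteration; otherwise control returns to the outer loop in ActuallyDoIt, which pops the remaining entries.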
AllObjects: PROC [visit: PROC [NHeaderP]] = {
this proc visits all collectible objects (free AND allocated)
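The enumeration walks the quantum map: runs of unmapped quanta are skipped, and each mapped run is parsed header by header, stepping by BlockSize and converting an extended header to the normal header expected by visit.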
qi: QuantumIndex ← FIRST[QuantumIndex];
DO
Find the beginning of the next run of quanta from the quantum map
hp: HeaderP;
blockSize: INT ← 0;
UNTIL AllocatorOps.quantumMap[qi] DO
IF qi = LAST[QuantumIndex] THEN RETURN;
qi ← qi+1;
ENDLOOP;
Start parsing at the beginning of this run
FOR hp ← QuantumIndexToLP[qi], hp + blockSize
WHILE LOOPHOLE[hp, LONG CARDINAL] < LastAddress
AND quantumMap[LPToQuantumIndex[hp]]
DO
nhp: NHeaderP
= LOOPHOLE
[
IF IsExtendedBlock[hp]
THEN hp + SIZE[ExtendedHeader] - SIZE[NormalHeader]
ELSE hp,
NHeaderP
];
blockSize ← BlockSize[hp];
visit[nhp];
ENDLOOP;
IF LOOPHOLE[hp, LONG CARDINAL] >= LastAddress THEN RETURN;
qi ← LPToQuantumIndex[hp];
ENDLOOP;
};
LPToQuantumIndex: PROC[lp: LONG POINTER] RETURNS[QuantumIndex] = {
RETURN[LOOPHOLE[lp, LONG CARDINAL]/wordsPerQuantum];
};
QuantumIndexToLP: PROC[qi: QuantumIndex] RETURNS[LONG POINTER] = {
n: LONG CARDINAL ← qi;
RETURN[LOOPHOLE[n*wordsPerQuantum, LONG POINTER]];
};
IsExtendedBlock: PROC[hp: HeaderP] RETURNS[BOOL] = {
RETURN[LOOPHOLE[hp, NHeaderP].blockSizeIndex = bsiEscape];
};
Marked: PROC [ref: REF] RETURNS [BOOL] = INLINE {
determines whether ref has been seen
RETURN [REFToNHP[ref].inZCT--i.e. marked--]};
SetMark: PROC [ref: REF] = INLINE {
marks object as being seen
REFToNHP[ref].inZCT--i.e. marked-- ← TRUE};
ClearMark: PROC [nhp: NHeaderP] = INLINE {
clears object mark to ground state
nhp.inZCT--i.e. marked-- ← FALSE};
PushRef: PROC [pref: LONG POINTER] = {
pushes ref to object onto the reference stack
stack: RefStack ← refStackChain;
IF stack = NIL OR stack.size = stack.max THEN {
time to get a new stack node
IF refStackFree = NIL
oh well, nothing comes for free
THEN {
stack ← VM.AddressForPageNumber[VM.Allocate[count: 1].page];
stack.next ← NIL;
stack.size ← 0;
stack.max ← (PrincOps.wordsPerPage - (SIZE[RefStackRep])) / (SIZE[LONG POINTER]);
}
ELSE {
stack ← refStackFree;
refStackFree ← stack.next;
};
stack.next ← refStackChain;
refStackChain ← stack;
};
stack[stack.size] ← pref;
stack.size ← stack.size + 1;
nRefsPushed ← nRefsPushed + 1;
};
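Reference-stack nodes are single VM pages: each holds a RefStackRep header followed by as many LONG POINTERs as fit in the remainder of the page; emptied nodes are kept on refStackFree for reuse until FreeRefStacks returns them to VM.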
PopRef: PROC RETURNS [pref: LONG POINTER] = {
pops ref to object from the reference stack
stack: RefStack ← refStackChain;
IF stack # NIL THEN {
size: CARDINAL ← stack.size;
IF size = 0 THEN {
refStackChain ← stack.next;
stack.next ← refStackFree;
refStackFree ← stack;
IF (stack ← refStackChain) = NIL THEN RETURN [NIL];
IF (size ← stack.size) = 0 THEN ERROR;
};
size ← size - 1;
stack.size ← size;
RETURN [stack[size]];
};
RETURN [NIL];
};
EmptyRefStack: PROC RETURNS [BOOL] = INLINE {
tests reference stack for emptiness
RETURN [refStackChain = NIL]};
FreeRefStacks: PROC = {
IF refStackChain # NIL THEN ERROR; -- should never happen, but check anyway
WHILE refStackFree # NIL DO
stack: RefStack ← refStackFree.next;
VM.Free[[page: VM.PageNumberForAddress[refStackFree], count: 1]];
refStackFree ← stack
ENDLOOP;
};
RefContaining: PROC [ref: REF] RETURNS [BOOL] = INLINE {
tests for object being ref-containing
IF ref = NIL THEN RETURN[FALSE];
RETURN [RTTypesBasicPrivate.MapTiRcmx[GetReferentType[ref]] # RCMap.nullIndex];
};
START TraceAndSweepImpl HERE
Collector.EstablishTAndSProc[DoTraceAndSweepCollection]
END.