-- ExamineStorage.mesa
-- Copyright © 1985, 1986 by Xerox Corporation. All rights reserved.
-- Russ Atkinson (RRA) April 3, 1986 3:53:42 pm PST
-- Spreitzer, June 9, 1986 6:09:41 pm PDT
DIRECTORY
Allocator USING [BlockSizeIndex, bsiEscape, EHeaderP, ExtendedHeader, HeaderP, LastAddress, logPagesPerQuantum, NHeaderP, NormalHeader, QuantumIndex, RefCount],
AllocatorOps USING [AddressToQuantumIndex, bsiToSize, EnterAndCallBack, quantumMap],
Atom,
Basics,
BackStop USING [Call],
BasicTime USING [Now],
Commander USING [CommandProc, Register],
CommandTool USING [ArgumentVector, Failed, Parse],
DebuggerSwap USING [CallDebugger],
HashTable,
IO,
PrintTV USING [PrintType],
RCMap,
RefTab,
Rope USING [Fetch, Length, Match, ROPE, RopeRep],
RTTypesBasicPrivate,
RuntimeError USING [UNCAUGHT],
SafeStorage USING [GetCanonicalType, GetReferentType, nullType, ReclaimCollectibleObjects, Type, TypeIndex],
SymTab,
VM,
ZCT USING [EnterAndCallBack, EnterRCOvAndCallBack];
ExamineStorage: CEDAR PROGRAM
IMPORTS AllocatorOps, Atom, Basics, BackStop, BasicTime, Commander, CommandTool, DebuggerSwap, IO, PrintTV, Rope, RTTypesBasicPrivate, RuntimeError, SafeStorage, VM, ZCT
SHARES HashTable, Rope
= BEGIN
LORA: TYPE = LIST OF REF ANY;
ROPE: TYPE = Rope.ROPE;
LOR: TYPE = LIST OF ROPE;
STREAM: TYPE = IO.STREAM;
Type: TYPE = SafeStorage.Type;
nullType: Type = SafeStorage.nullType;
-- Command procedures
FindBadGuysProc: Commander.CommandProc = TRUSTED {
  -- [cmd: Handle] RETURNS [result: REF ← NIL, msg: ROPE ← NIL]
  -- CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
  out: STREAM = cmd.out;
  switches: Switches ← ALL[FALSE];
  argsProcessed: NAT ← 0; -- # of arguments processed
  argv: CommandTool.ArgumentVector ← CommandTool.Parse[cmd: cmd, starExpand: FALSE
    ! CommandTool.Failed => {msg ← errorMsg; GO TO failed}];
  -- When parsing the command line, be prepared for failure. The error is reported to the user.
  eachObject: InfoProc = TRUSTED {
    -- [type: SafeStorage.Type, size: INT, object: Allocator.HeaderP, objectHP: Allocator.NHeaderP] RETURNS [continue: BOOLEAN]
    header: Allocator.NormalHeader ← objectHP^;
    SELECT TRUE FROM
      type < minType => {};
      type > maxType => {};
      type > RTTypesBasicPrivate.MapTiTd.length => {};
      header.blockSizeIndex < minBsi => {};
      header.blockSizeIndex > maxBsi => {};
      header.f AND NOT allowF => {};
      NOT header.f AND NOT allowNotF => {};
      header.rcOverflowed AND NOT allowOver => {};
      NOT header.rcOverflowed AND NOT allowNotOver => {};
      header.refCount < minRC => {};
      header.refCount > maxRC => {};
      ENDCASE => {
        td: RTTypesBasicPrivate.PTypeDesc ← RTTypesBasicPrivate.MapTiTd[type];
        IF badGuys = maxBadGuyIndex
          THEN badGuysMissed ← badGuysMissed + 1
          ELSE {
            badGuysArray[badGuys] ← [objectHP, header];
            badGuys ← badGuys + 1};
        };
    RETURN [TRUE];
    };
  badGuysArray: ARRAY [0..maxBadGuyIndex) OF BadGuy;
  BadGuy: TYPE = RECORD [
    addr: Allocator.NHeaderP,
    header: Allocator.NormalHeader
    ];
  badGuys: [0..maxBadGuyIndex] ← 0;
  badGuysMissed: INT ← 0;
  maxBadGuyIndex: NAT = 64;
  EnumerateCollectableStorage[eachObject];
  IF badGuys = 0 THEN {msg ← "No bad guys found.\n"; RETURN};
  FOR i: [0..maxBadGuyIndex) IN [0..badGuys) DO
    badGuy: BadGuy ← badGuysArray[i];
    IO.PutF1[out, "At %bB: ", [cardinal[LOOPHOLE[badGuy.addr]]] ];
    PrintHeader[out, @badGuy.header];
    IO.PutRope[out, "\n"];
    ENDLOOP;
  IF badGuysMissed # 0 THEN
    IO.PutF1[out, "Bad guys missed: %g\n", [integer[badGuysMissed]] ];
  EXITS failed => {result ← $Failure};
  };
TakeHeapStatsProc: Commander.CommandProc = TRUSTED {
  -- [cmd: Handle] RETURNS [result: REF ← NIL, msg: ROPE ← NIL]
  -- CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
  out: STREAM = cmd.out;
  switches: Switches ← ALL[FALSE];
  argsProcessed: NAT ← 0; -- # of arguments processed
  argv: CommandTool.ArgumentVector ← CommandTool.Parse[cmd: cmd, starExpand: FALSE
    ! CommandTool.Failed => {msg ← errorMsg; GO TO failed}];
  -- When parsing the command line, be prepared for failure. The error is reported to the user.
  new: TypeCounts ← lastSample;
  old: TypeCounts ← lastSample;
  tc: TypeCounts ← lastSample;
  switches['g] ← TRUE;
  switches['w] ← TRUE;
  FOR i: NAT IN [1..argv.argc) DO
    arg: ROPE ← argv[i];
    IF Rope.Match["-*", arg] THEN {
      switches ← ProcessSwitches[switches, arg];
      LOOP;
      };
    -- Future home of more intriguing options
    ENDLOOP;
  IF NOT switches['r] THEN {
    -- We have to sample the new stats
    new ← InitTypeCounts[];
    IF switches['g] THEN
      -- We need a GC before taking the statistics
      SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE, traceAndSweep: FALSE];
    SampleTypeCounts[tc: new, includeFree: switches['f]];
    IF switches['d] AND old # NIL THEN tc ← DeltaTypeCounts[old, new] ELSE tc ← new;
    };
  IF tc # NIL AND NOT switches['s] THEN {
    -- We have to print the stats
    PrintTypeCounts[tc, out];
    IF switches['o] THEN PrintRank[tc: tc, out: out, highestN: typeRank, countObjects: TRUE];
    IF switches['w] THEN PrintRank[tc: tc, out: out, highestN: typeRank, countObjects: FALSE];
    };
  lastSample ← IF switches['n] THEN NIL ELSE new;
  EXITS failed => {result ← $Failure};
  };
HeapValidProc: Commander.CommandProc = TRUSTED {
  -- [cmd: Handle] RETURNS [result: REF ← NIL, msg: ROPE ← NIL]
  -- CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
  out: STREAM = cmd.out;
  switches: Switches ← ALL[FALSE];
  argsProcessed: NAT ← 0; -- # of arguments processed
  argv: CommandTool.ArgumentVector ← CommandTool.Parse[cmd: cmd, starExpand: FALSE
    ! CommandTool.Failed => {msg ← errorMsg; GO TO failed}];
  -- When parsing the command line, be prepared for failure. The error is reported to the user.
  FOR i: NAT IN [1..argv.argc) DO
    arg: ROPE ← argv[i];
    IF Rope.Match["-*", arg] THEN {
      switches ← ProcessSwitches[switches, arg];
      LOOP;
      };
    -- Future home of more intriguing options
    ENDLOOP;
  worldSwap ← switches['w];
  EnumerateCollectableStorage[NIL];
  EXITS failed => {result ← $Failure};
  };
FindCyclicTypesProc: Commander.CommandProc = TRUSTED {
  -- [cmd: Handle] RETURNS [result: REF ← NIL, msg: ROPE ← NIL]
  -- CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
  out: STREAM = cmd.out;
  switches: Switches ← ALL[FALSE];
  argsProcessed: NAT ← 0; -- # of arguments processed
  argv: CommandTool.ArgumentVector ← CommandTool.Parse[cmd: cmd, starExpand: FALSE
    ! CommandTool.Failed => {msg ← errorMsg; GO TO failed}];
  -- When parsing the command line, be prepared for failure. The error is reported to the user.
  new: TypeCounts ← lastSample;
  old: TypeCounts ← lastSample;
  tc: TypeCounts ← lastSample;
  switches['g] ← TRUE;
  switches['w] ← TRUE;
  FOR i: NAT IN [1..argv.argc) DO
    arg: ROPE ← argv[i];
    IF Rope.Match["-*", arg] THEN {
      switches ← ProcessSwitches[switches, arg];
      LOOP;
      };
    -- Future home of more intriguing options
    ENDLOOP;
  IF NOT switches['r] THEN {
    -- We have to sample the new stats
    base: BasePtr = SetupBase[];
    {ENABLE UNWIND => CleanupBase[base];
    sawATrue, sawAFalse: BOOL ← FALSE;
    Cyclic: UNSAFE PROC [nhp: Allocator.NHeaderP] RETURNS [cyclic: BOOL] --VertexTest-- = {
      cyclic ← ReadFlags[base, nhp].cyclic;
      IF cyclic THEN sawATrue ← TRUE ELSE sawAFalse ← TRUE;
      };
    IF switches['g] THEN
      -- We need a GC before taking the statistics
      SafeStorage.ReclaimCollectibleObjects[];
    EnumerateEdges[base: base, Startworthy: True, Transparent: True, IsGoal: False, ConsumeGoal: NIL, cumulative: TRUE];
    new ← InitTypeCounts[];
    SampleTypeCounts[tc: new, includeFree: FALSE, Filter: Cyclic];
    IF switches['d] AND old # NIL THEN tc ← DeltaTypeCounts[old, new] ELSE tc ← new;
    }--end base--;
    CleanupBase[base];
    };
  IF NOT switches['s] THEN {
    PrintTypeCounts[tc, out, "\n-- Only cyclic objects are counted --"];
    IF switches['o] THEN PrintRank[tc: tc, out: out, highestN: typeRank, countObjects: TRUE];
    IF switches['w] THEN PrintRank[tc: tc, out: out, highestN: typeRank, countObjects: FALSE];
    };
  lastSample ← IF switches['n] THEN NIL ELSE new;
  EXITS failed => {result ← $Failure};
  };
True: PROC [nhp: Allocator.NHeaderP] RETURNS [BOOL] --VertexTest-- = {RETURN [TRUE]};
False: PROC [nhp: Allocator.NHeaderP] RETURNS [BOOL] --VertexTest-- = {RETURN [FALSE]};
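-- True and False are the trivial VertexTests; FindCyclicTypesProc above passes them to EnumerateEdges when every object is startworthy and transparent and nothing is a goal.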
-- Interpreter procedures
TypeSet: TYPE = REF TypeSetPrivate;
TypeSetPrivate: TYPE = RECORD [members: PACKED SEQUENCE length: TypeIndex OF BOOL];
TypeList: TYPE = LIST OF Type;
FromList: PROC [list: TypeList] RETURNS [ts: TypeSet] = TRUSTED {
  ts ← NEW [TypeSetPrivate[RTTypesBasicPrivate.MapTiTd.length+64]];
  FOR ti: INT IN [0 .. ts.length) DO ts[ti] ← FALSE ENDLOOP;
  FOR list ← list, list.rest WHILE list # NIL DO
    type: Type = SafeStorage.GetCanonicalType[list.first];
    ts[type] ← TRUE;
    ENDLOOP;
  };
HeaderSet: TYPE = REF HeaderSetPrivate;
HeaderSetPrivate: TYPE = RECORD [
  length: INT ← 0,
  headers: SEQUENCE size: NAT OF Allocator.NHeaderP
  ];
FindSome: PROC [goalType: Type, upTo: INT ← 5] RETURNS [headers: HeaderSet] = {
  eachObject: UNSAFE PROC [type: Type, size: INT, object: Allocator.HeaderP, objectHP: Allocator.NHeaderP] RETURNS [continue: BOOLEAN ← TRUE] --InfoProc-- = TRUSTED {
    IF goalType = type THEN {
      headers[headers.length] ← objectHP;
      headers.length ← headers.length + 1;
      continue ← headers.length # headers.size;
      };
    };
  headers ← NEW [HeaderSetPrivate[upTo]];
  headers.length ← 0;
  EnumerateCollectableStorage[eachObject];
  };
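-- For use from the interpreter; e.g. (a hypothetical call), FindSome[goalType: CODE[Rope.RopeRep], upTo: 10] collects the headers of up to 10 live rope objects.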
aList: LORA = LIST[$Foo];
aRopeList: LOR = LIST["a rope"];
aPropList: Atom.PropList = Atom.PutPropOnList[NIL, $Foo, $bar];
basicBores: TypeList = LIST[
  CODE[Rope.RopeRep],
  SafeStorage.GetReferentType[aRopeList],
  CODE[Atom.DottedPairNode],
  SafeStorage.GetReferentType[aPropList],
  SafeStorage.GetReferentType[aList]
  ];
moreBores: TypeList =
  CONS[CODE[HashTable.TableRec],
  CONS[CODE[HashTable.Seq],
  CONS[CODE[HashTable.NodeRep],
  CONS[CODE[SymTab.SymTabRep],
  CONS[CODE[SymTab.Seq],
  CONS[CODE[SymTab.NodeRep],
  CONS[CODE[RefTab.RefTabRep],
  CONS[CODE[RefTab.Seq],
  CONS[CODE[RefTab.NodeRep],
  basicBores]]]]]]]]];
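-- basicBores and moreBores name common "boring" rope, atom, and table types; presumably they are meant as the transparentList argument of ExamineRefs below, so that traces pass through such objects rather than start from them.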
ExamineRefs: PROC [goalList, transparentList: TypeList, log: STREAM, gcFirst, wordRank: BOOL ← TRUE, objectRank: BOOL ← FALSE] = TRUSTED {
  goals: TypeSet = FromList[goalList];
  transparents: TypeSet = FromList[transparentList];
  tc: TypeCounts = InitTypeCounts[];
  base: BasePtr = SetupBase[];
  {ENABLE UNWIND => CleanupBase[base];
  Startworthy: UNSAFE PROC [nhp: Allocator.NHeaderP] RETURNS [start: BOOL] --VertexTest-- = {
    start ← NOT Transparent[nhp];
    };
  Transparent: UNSAFE PROC [nhp: Allocator.NHeaderP] RETURNS [is: BOOL] --VertexTest-- = {
    is ← transparents[SafeStorage.GetCanonicalType[nhp.type]];
    };
  IsGoal: UNSAFE PROC [nhp: Allocator.NHeaderP] RETURNS [is: BOOL] --VertexTest-- = {
    is ← goals[SafeStorage.GetCanonicalType[nhp.type]];
    };
  ConsumeGoal: UNSAFE PROC [startNHP, goalNHP: Allocator.NHeaderP] --GoalConsumer-- = {
    size: LONG CARDINAL;
    type: Type = startNHP.type;
    IF goalNHP.blockSizeIndex = Allocator.bsiEscape
      THEN {
        ehp: Allocator.EHeaderP ← LOOPHOLE[goalNHP + SIZE[Allocator.NormalHeader] - SIZE[Allocator.ExtendedHeader]];
        SELECT ehp.sizeTag FROM
          words => size ← ehp.extendedSize;
          pages => size ← VM.WordsForPages[ehp.extendedSize];
          ENDCASE => {Crash[goalNHP]; RETURN};
        }
      ELSE {
        size ← AllocatorOps.bsiToSize[goalNHP.blockSizeIndex];
        };
    tc.objects ← tc.objects + 1;
    tc.words ← tc.words + size;
    IF type < tc.max THEN TRUSTED {
      tcp: LONG POINTER TO TypeCount ← @tc[type];
      IF tcp.objects = 0
        THEN {
          -- First object for this type, so bump the type count
          tcp.objects ← 1; tcp.words ← size; tc.types ← tc.types + 1;
          }
        ELSE {
          -- Previous objects for this type, just bump the counts
          tcp.objects ← tcp.objects + 1; tcp.words ← tcp.words + size;
          };
      };
    };
  IF gcFirst THEN SafeStorage.ReclaimCollectibleObjects[];
  EnumerateEdges[base: base, Startworthy: Startworthy, Transparent: Transparent, IsGoal: IsGoal, ConsumeGoal: ConsumeGoal, cumulative: FALSE];
  }--end base--;
  CleanupBase[base];
  PrintTypeCounts[tc, log];
  IF objectRank THEN PrintRank[tc: tc, out: log, highestN: typeRank, countObjects: TRUE];
  IF wordRank THEN PrintRank[tc: tc, out: log, highestN: typeRank, countObjects: FALSE];
  };
-- Bitmap utils
Flags: TYPE = RECORD [
  marked: BOOL ← FALSE,
  -- marked => is or has been in the list of things to trace
  stacked: BOOL ← FALSE,
  -- stacked => is a member of the current path being traced
  -- On the second pass, if any, the meaning is negated.
  cyclic: BOOL ← FALSE,
  legal: BOOL ← FALSE
  -- legal => contains a REF
  ];
nullFlags: Flags = [];
FlagsArrayPtr: TYPE = LONG POINTER TO FlagsArray;
FlagsArray: TYPE = PACKED ARRAY FlagIndex OF Flags;
BasePtr: TYPE = LONG POINTER TO BaseArray;
BaseArray: TYPE = ARRAY BaseIndex OF FlagsArrayPtr;
FlagMod: NAT = 4096;
FlagIndex: TYPE = [0..FlagMod);
logFlagMod: NAT = 12;
BaseMod: NAT = 4096;
BaseIndex: TYPE = [0..BaseMod);
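-- The two-level map (as used by ReadFlags/WriteFlags below): a header address A is always even, so its low bit is dropped; the flags for A live at base[A/2^(logFlagMod+1)][(A/2) MOD FlagMod].
-- For example, A = 140000B maps to base[6][0]; each FlagsArray therefore covers 2^13 = 20000B words of address space, and the arrays are allocated lazily by WriteFlags.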
CyclicRef: UNSAFE PROC [base: BasePtr, ref: REF] RETURNS [BOOL] = UNCHECKED {
  nhp: Allocator.NHeaderP = LOOPHOLE[ref, Allocator.NHeaderP] - SIZE[Allocator.NormalHeader];
  RETURN [ReadFlags[base, nhp].cyclic];
  };
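-- A REF addresses the object body, which starts right after its NormalHeader, so subtracting SIZE[Allocator.NormalHeader] (here and in ExploreFrom below) recovers the header address.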
ReadFlags: UNSAFE PROC [base: BasePtr, nhp: Allocator.NHeaderP] RETURNS [Flags] = UNCHECKED {
  ln: Basics.LongNumber = LOOPHOLE[nhp];
  ptr: FlagsArrayPtr = base[Basics.DoubleShiftRight[ln, logFlagMod+1].lo];
  IF ln.lc >= Allocator.LastAddress THEN Crash[nhp];
  IF ln.lo MOD 2 = 1 THEN Crash[nhp];
  IF ptr = NIL THEN RETURN [nullFlags];
  RETURN [ptr[(ln.lo / 2) MOD FlagMod]];
  };
WriteFlags: UNSAFE PROC [base: BasePtr, nhp: Allocator.NHeaderP, flags: Flags] = UNCHECKED {
  ln: Basics.LongNumber = LOOPHOLE[nhp];
  xHi: NAT = Basics.DoubleShiftRight[ln, logFlagMod+1].lo;
  ptr: FlagsArrayPtr ← base[xHi];
  IF ln.lc >= Allocator.LastAddress THEN Crash[nhp];
  IF ln.lo MOD 2 = 1 THEN Crash[nhp];
  IF ptr = NIL THEN {
    -- Sigh, we need to allocate a new goodie
    interval: VM.Interval ← VM.SimpleAllocate[VM.PagesForWords[SIZE[FlagsArray]]];
    base[xHi] ← ptr ← VM.AddressForPageNumber[interval.page];
    ptr^ ← ALL[nullFlags];
    };
  ptr[(ln.lo / 2) MOD FlagMod] ← flags;
  };
SetupBase: UNSAFE PROC RETURNS [base: BasePtr] = UNCHECKED {
  interval: VM.Interval ← VM.SimpleAllocate[VM.PagesForWords[SIZE[BaseArray]]];
  base ← VM.AddressForPageNumber[interval.page];
  base^ ← ALL[NIL];
  };
CleanupBase: UNSAFE PROC [base: BasePtr] = UNCHECKED {
  pages: NAT = VM.PagesForWords[SIZE[FlagsArray]];
  FOR i: NAT IN BaseIndex DO
    IF base[i] # NIL THEN {
      page: VM.PageNumber = VM.PageNumberForAddress[base[i]];
      base[i] ← NIL;
      VM.Free[ [page, pages] ];
      };
    ENDLOOP;
  VM.Free[ [VM.PageNumberForAddress[base], VM.PagesForWords[SIZE[BaseArray]] ] ];
  };
-- Utilities
Switches: TYPE = PACKED ARRAY CHAR['a..'z] OF BOOL;
TypeIndex: TYPE = [0..LAST[SafeStorage.TypeIndex]-256];
LowRC: NAT = LAST[Allocator.RefCount]/4;
MidRC: NAT = LAST[Allocator.RefCount]/2;
HighRC: NAT = LAST[Allocator.RefCount];
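-- Reference-count buckets for the statistics: exact counts in [0..LowRC], then the ranges (LowRC..MidRC] and (MidRC..HighRC], with overflowed counts tallied separately (see SampleTypeCounts and PrintTypeCounts below).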
TypeCounts: TYPE = REF TypeCountsRep;
TypeCountsRep: TYPE = RECORD [
  types: INT ← 0,
  objects: INT ← 0,
  words: INT ← 0,
  lowRC: ARRAY [0..LowRC] OF INT ← ALL[0],
  midRC: INT ← 0,
  highRC: INT ← 0,
  overRC: INT ← 0,
  inZCT: INT ← 0,
  inZCT0: INT ← 0,
  counts: SEQUENCE max: TypeIndex OF TypeCount
  ];
TypeCount: TYPE = RECORD [
  objects: INT,
  words: INT
  ];
TypeAccum: TYPE = REF TypeAccumRep;
TypeAccumRep: TYPE = RECORD [
  SEQUENCE max: TypeIndex OF TypeAccumEntry
  ];
TypeAccumEntry: TYPE = RECORD [
  type: TypeIndex ← 0,
  count: INT ← 0
  ];
InitTypeCounts: PROC RETURNS [tc: TypeCounts] = TRUSTED {
  index: TypeIndex ← RTTypesBasicPrivate.MapTiTd.length;
  WHILE index > 0 DO
    IF RTTypesBasicPrivate.MapTiTd[index-1] # NIL THEN EXIT;
    index ← index-1;
    ENDLOOP;
  tc ← NEW[TypeCountsRep[index+64]];
  FOR i: TypeIndex IN [0..index+64) DO
    tc[i] ← [0, 0];
    ENDLOOP;
  };
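-- The table extends 64 slots past the last non-NIL type descriptor; the slack presumably covers types registered after the table is sized (FromList above uses the same allowance).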
SampleTypeCounts: PROC [tc: TypeCounts, includeFree: BOOL, Filter: VertexTest ← True] = TRUSTED {
  eachObject: InfoProc = TRUSTED {
    -- [type: SafeStorage.Type, size: INT, object: Allocator.HeaderP, objectHP: Allocator.NHeaderP] RETURNS [continue: BOOLEAN]
    trueZero: BOOL ← FALSE;
    IF NOT includeFree AND type = SafeStorage.nullType THEN RETURN [TRUE];
    IF NOT Filter[objectHP] THEN RETURN [TRUE];
    tc.objects ← tc.objects + 1;
    tc.words ← tc.words + size;
    IF type < tc.max THEN TRUSTED {
      tcp: LONG POINTER TO TypeCount ← @tc[type];
      IF tcp.objects = 0
        THEN {
          -- First object for this type, so bump the type count
          tcp.objects ← 1; tcp.words ← size; tc.types ← tc.types + 1;
          }
        ELSE {
          -- Previous objects for this type, just bump the counts
          tcp.objects ← tcp.objects + 1; tcp.words ← tcp.words + size;
          };
      IF type = SafeStorage.nullType THEN RETURN [TRUE];
      };
    -- Take stats on reference counts
    SELECT TRUE FROM
      objectHP.rcOverflowed => tc.overRC ← tc.overRC + 1;
      ENDCASE =>
        SELECT objectHP.refCount FROM
          0 => {
            tc.lowRC[0] ← tc.lowRC[0] + 1;
            trueZero ← TRUE;
            };
          <= LowRC => tc.lowRC[objectHP.refCount] ← tc.lowRC[objectHP.refCount] + 1;
          <= MidRC => tc.midRC ← tc.midRC + 1;
          ENDCASE => tc.highRC ← tc.highRC + 1;
    IF objectHP.inZCT THEN {
      tc.inZCT ← tc.inZCT + 1;
      IF trueZero THEN tc.inZCT0 ← tc.inZCT0 + 1;
      };
    continue ← TRUE;
    };
  EnumerateCollectableStorage[eachObject];
  };
DeltaTypeCounts: PROC [old, new: TypeCounts] RETURNS [delta: TypeCounts] = {
  -- Takes the difference in statistics between an old and a new sample. We return a new object to avoid destroying the old and new stats objects.
  delta ← NEW[TypeCountsRep[new.max]];
  delta.types ← new.types - old.types;
  delta.objects ← new.objects - old.objects;
  delta.words ← new.words - old.words;
  FOR i: [0..LowRC] IN [0..LowRC] DO
    delta.lowRC[i] ← new.lowRC[i] - old.lowRC[i];
    ENDLOOP;
  delta.midRC ← new.midRC - old.midRC;
  delta.highRC ← new.highRC - old.highRC;
  delta.overRC ← new.overRC - old.overRC;
  delta.inZCT ← new.inZCT - old.inZCT;
  delta.inZCT0 ← new.inZCT0 - old.inZCT0;
  FOR i: TypeIndex IN [0..new.max) DO
    oldTypeCount: TypeCount ← IF i < old.max THEN old[i] ELSE [0, 0];
    newTypeCount: TypeCount ← new[i];
    delta[i] ← [
      objects: newTypeCount.objects - oldTypeCount.objects,
      words: newTypeCount.words - oldTypeCount.words];
    ENDLOOP;
  };
PrintTypeCounts: PROC [tc: TypeCounts, out: STREAM, msg: ROPE ← NIL] = {
  -- Prints the type counts in a suitably narrow fashion. Note that we have to contend with negative values for objects and words, since this routine also prints out deltas.
  IF msg # NIL THEN IO.PutRope[out, msg];
  IO.PutF1[out, "\n-- Heap storage statistics (%g) --\n", [time[BasicTime.Now[]]] ];
  IO.PutF[out, " types: %g, objects: %g, words: %g", [integer[tc.types]], [integer[tc.objects]], [integer[tc.words]] ];
  IF tc.objects > 0 AND tc.words > 0 THEN {
    -- The words/object average makes sense (more or less)
    avg: INT ← (tc.words+tc.objects/2) / tc.objects;
    IO.PutF1[out, " (words/obj: %g)", [integer[avg]] ];
    };
  FOR i: NAT IN [0..LowRC] DO
    IF i MOD 4 = 0
      THEN IO.PutF[out, "\n RC - %g: %g", [integer[i]], [integer[tc.lowRC[i]]] ]
      ELSE IO.PutF[out, ", %g: %g", [integer[i]], [integer[tc.lowRC[i]]] ];
    ENDLOOP;
  IO.PutF[out, "\n RC IN [%g..%g]: %g",
    [integer[LowRC+1]], [integer[MidRC]], [integer[tc.midRC]] ];
  IO.PutF[out, ", IN [%g..%g]: %g",
    [integer[MidRC+1]], [integer[HighRC]], [integer[tc.highRC]] ];
  IO.PutF1[out, ", overflow: %g\n", [integer[tc.overRC]] ];
  IO.PutF[out, " inZCT: %g, inZCT0: %g\n", [integer[tc.inZCT]], [integer[tc.inZCT0]] ];
  };
PrintRank: PROC [tc: TypeCounts, out: STREAM, highestN: NAT ← 16, countObjects: BOOL] = {
  IF highestN # 0 THEN {
    highestAccum: TypeAccum ← NEW[TypeAccumRep[highestN]];
    worstRank: TypeIndex ← highestN-1;
    worstCount: INT ← 0;
    IO.PutRope[out,
      IF countObjects THEN "\n-- Rank by objects --\n" ELSE "\n-- Rank by words --\n"];
    FOR i: NAT IN [0..highestN) DO
      highestAccum[i] ← [0, 0];
      ENDLOOP;
    FOR type: TypeIndex IN [0..tc.max) DO
      tCount: TypeCount ← tc[type];
      count: INT ← ABS[IF countObjects THEN tCount.objects ELSE tCount.words];
      IF count > worstCount THEN {
        WHILE worstRank > 0 DO
          IF count <= highestAccum[worstRank-1].count THEN EXIT;
          highestAccum[worstRank] ← highestAccum[worstRank-1];
          worstRank ← worstRank - 1;
          ENDLOOP;
        highestAccum[worstRank] ← [type, count];
        worstRank ← highestN-1;
        worstCount ← highestAccum[worstRank].count;
        };
      ENDLOOP;
    FOR rank: NAT IN [0..highestN) DO
      type: TypeIndex ← highestAccum[rank].type;
      words: INT ← tc[type].words;
      objects: INT ← tc[type].objects;
      IF words = 0 AND objects = 0 THEN EXIT;
      IO.PutF[out, " rank %g, words: %g, objects: %g",
        [integer[rank]], [integer[words]], [integer[objects]] ];
      IO.PutF1[out, ", type: %bB\n ", [cardinal[type]] ];
      PrintType[out, type];
      IO.PutRope[out, "\n"];
      ENDLOOP;
    };
  };
PrintType: PROC [out: STREAM, type: TypeIndex] = {
  -- This routine prints a type defensively.
  innerPrint: PROC = {
    PrintTV.PrintType[type: [type], put: out, depth: typeDepth, width: typeWidth, verbose: typeVerbose];
    };
  msg: ROPE ← NIL;
  IF type = 0 THEN msg ← "(free)" ELSE msg ← BackStop.Call[innerPrint];
  IF msg # NIL THEN IO.PutRope[out, msg];
  };
ProcessSwitches: PROC [swIn: Switches, arg: ROPE] RETURNS [switches: Switches] = {
  sense: BOOL ← TRUE;
  switches ← swIn;
  FOR index: INT IN [0..Rope.Length[arg]) DO
    char: CHAR ← Rope.Fetch[arg, index];
    SELECT char FROM
      '- => LOOP;
      '~ => {sense ← NOT sense; LOOP};
      IN ['a..'z] => switches[char] ← sense;
      IN ['A..'Z] => switches[char + ('a-'A)] ← sense;
      ENDCASE;
    sense ← TRUE;
    ENDLOOP;
  };
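-- Example (per the loop above): "-ab~c" sets switches['a] and switches['b] TRUE and switches['c] FALSE; '~ inverts the sense for the next switch letter only, and upper case is folded to lower.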
PrintHeader: PROC [out: STREAM, nhp: Allocator.NHeaderP] = TRUSTED {
  IO.PutF1[out, "[inZCT: %g", [rope[IF nhp.inZCT THEN "TRUE" ELSE "FALSE"]] ];
  IO.PutF1[out, ", maybeOnStack: %g", [rope[IF nhp.maybeOnStack THEN "TRUE" ELSE "FALSE"]] ];
  IO.PutF1[out, "\n bsi: %g", IF nhp.blockSizeIndex = Allocator.bsiEscape THEN [rope["bsiEscape"]] ELSE [cardinal[nhp.blockSizeIndex]] ];
  IO.PutF1[out, ", f: %g", [rope[IF nhp.f THEN "TRUE" ELSE "FALSE"]] ];
  IF nhp.f THEN {
    td: RTTypesBasicPrivate.PTypeDesc ← RTTypesBasicPrivate.MapTiTd[nhp.type];
    IF td.numberPackageRefs > nhp.refCount THEN {
      -- Remember this guy!
      IO.PutF1[out, " (prc: %g)", [integer[td.numberPackageRefs]] ];
      };
    };
  IO.PutF1[out, ", refCount: %g", [cardinal[nhp.refCount]] ];
  IF nhp.rcOverflowed THEN IO.PutRope[out, " (overflowed)"];
  IO.PutF1[out, "\n typePad: %g", [cardinal[nhp.typePad]] ];
  IO.PutF1[out, ", type: %g (", [cardinal[nhp.type]] ];
  PrintType[out, nhp.type];
  IO.PutRope[out, ")]"];
  };
InfoProc: TYPE = UNSAFE PROC [type: SafeStorage.Type, size: INT, object: Allocator.HeaderP, objectHP: Allocator.NHeaderP] RETURNS [continue: BOOLEAN ← TRUE];
EnumerateCollectableStorage: PROC [callBack: InfoProc ← NIL] = TRUSTED {
  -- This procedure enumerates all of the objects in the heap. All interesting monitor locks are held while doing this. Each object is verified as it is examined. An optional callback procedure can examine each object as well.
  haveAllocatorLocked: PROC = TRUSTED {
    haveRCOvLocked: PROC = TRUSTED {
      haveZCTLocked: PROC = TRUSTED {
        qx: Allocator.QuantumIndex ← FIRST[Allocator.QuantumIndex];
        lag1: Allocator.NHeaderP ← NIL;
        lag2: Allocator.NHeaderP ← NIL;
        lag3: Allocator.NHeaderP ← NIL;
        lag4: Allocator.NHeaderP ← NIL;
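        -- lag1..lag4 are written but never read; presumably they retain the last four headers visited so they can be inspected from the debugger after a Crash.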
        DO
          hp: Allocator.HeaderP ← NIL;
          blockSize: LONG CARDINAL ← 0;
          -- Scan for the next start of a quantum run
          WHILE NOT AllocatorOps.quantumMap[qx] DO
            IF qx = LAST[Allocator.QuantumIndex] THEN RETURN;
            qx ← qx + 1;
            ENDLOOP;
          hp ← Basics.DoubleShiftLeft[[lc[qx]], Allocator.logPagesPerQuantum+VM.logWordsPerPage].lp;
          DO
            start: Allocator.QuantumIndex ← qx;
            nhp: Allocator.NHeaderP ← LOOPHOLE[hp];
            IF LOOPHOLE[hp, LONG CARDINAL] >= Allocator.LastAddress THEN RETURN;
            IF nhp.blockSizeIndex = Allocator.bsiEscape
              THEN {
                ehp: Allocator.EHeaderP ← LOOPHOLE[hp];
                nhp ← nhp + (SIZE[Allocator.ExtendedHeader] - SIZE[Allocator.NormalHeader]);
                SELECT ehp.sizeTag FROM
                  words => blockSize ← ehp.extendedSize;
                  pages => blockSize ← VM.WordsForPages[ehp.extendedSize];
                  ENDCASE => {Crash[nhp]; RETURN};
                }
              ELSE {
                blockSize ← AllocatorOps.bsiToSize[nhp.blockSizeIndex];
                };
            IF blockSize = 0 THEN {Crash[nhp]; RETURN};
            IF blockSize >= Allocator.LastAddress THEN {Crash[nhp]; RETURN};
            IF Basics.LowHalf[blockSize] MOD 2 = 1 THEN {Crash[nhp]; RETURN};
            IF Basics.LowHalf[blockSize] MOD VM.wordsPerPage = 0 THEN {
              -- hp must be on a page boundary!
              ln: Basics.LongNumber ← [lp[hp]];
              IF ln.lo MOD VM.wordsPerPage # 0 THEN {Crash[nhp]; RETURN};
              };
            IF callBack # NIL THEN {
              -- There is a user routine to call. The user can force a crash (soft or hard) by setting foundNhp.
              IF NOT callBack[nhp.type, blockSize, hp, nhp
                ! RuntimeError.UNCAUGHT => Crash[nhp]] THEN RETURN;
              IF foundNhp # NIL THEN {Crash[nhp]; RETURN};
              };
            hp ← hp + blockSize;
            qx ← AllocatorOps.AddressToQuantumIndex[LOOPHOLE[hp]];
            FOR qq: Allocator.QuantumIndex IN (start..qx) DO
              IF NOT AllocatorOps.quantumMap[qq] THEN {Crash[nhp]; RETURN};
              ENDLOOP;
            IF qx = LAST[Allocator.QuantumIndex] THEN RETURN;
            IF NOT AllocatorOps.quantumMap[qx] THEN EXIT;
            lag4 ← lag3;
            lag3 ← lag2;
            lag2 ← lag1;
            lag1 ← nhp;
            ENDLOOP;
          ENDLOOP;
        };
      ZCT.EnterAndCallBack[haveZCTLocked];
      };
    ZCT.EnterRCOvAndCallBack[haveRCOvLocked];
    };
  AllocatorOps.EnterAndCallBack[haveAllocatorLocked];
  IF foundNhp # NIL THEN ERROR CrashError[foundNhp];
  };
foundNhp: Allocator.NHeaderP ← NIL;
worldSwap: BOOL ← TRUE;
Crash: PROC [nhp: Allocator.NHeaderP] = TRUSTED {
  foundNhp ← nhp;
  IF worldSwap
    THEN DebuggerSwap.CallDebugger["Kosher it's not!"L]
    ELSE ERROR CrashError[nhp];
  };
CrashError: ERROR [nhp: Allocator.NHeaderP] = CODE;
VertexTest: TYPE = UNSAFE PROC [nhp: Allocator.NHeaderP] RETURNS [BOOL];
GoalConsumer: TYPE = UNSAFE PROC [startNHP, goalNHP: Allocator.NHeaderP];
EnumerateEdges: UNSAFE PROC [base: BasePtr, Startworthy, Transparent, IsGoal: VertexTest, ConsumeGoal: GoalConsumer, cumulative: BOOL] = TRUSTED {
  objects: INT ← 0;
  countObject: InfoProc = {
    -- [type: SafeStorage.Type, size: INT, object: Allocator.HeaderP, objectHP: Allocator.NHeaderP] RETURNS [continue: BOOLEAN]
    pr: LONG POINTER ← objectHP+SIZE[Allocator.NormalHeader];
    SELECT TRUE FROM
      type = nullType => {};
      type = CODE[ROPE] => {};
      type > RTTypesBasicPrivate.MapTiTd.length => {};
      objectHP.refCount = 0 => {};
      ENDCASE => {
        -- Immediately cull things that only refer forwards.
        rcmx: RCMap.Index = RTTypesBasicPrivate.MapTiRcmx[type];
        SELECT rcmx FROM
          FIRST[RCMap.Index], LAST[RCMap.Index] => {};
          ENDCASE => {
            -- Worth enumerating
            eachRef: UNSAFE PROC [p: REF] = {
              IF p # NIL THEN {
                IF none THEN {
                  none ← FALSE;
                  WriteFlags[base, objectHP, [legal: TRUE]];
                  };
                };
              };
            none: BOOL ← TRUE;
            RTTypesBasicPrivate.MapRefs[pr, rcmx, eachRef];
            };
        objects ← objects + 1;
        };
    RETURN [TRUE];
    };
  stackSpace: VM.Interval;
  EnumerateCollectableStorage[countObject];
  stackSpace ← VM.SimpleAllocate[VM.PagesForWords[SIZE[Allocator.NHeaderP]*(objects+100)]];
  {ENABLE UNWIND => VM.Free[stackSpace];
  stackStart: LONG POINTER TO Allocator.NHeaderP = VM.AddressForPageNumber[stackSpace.page];
  stackEnd: LONG POINTER TO Allocator.NHeaderP = stackStart + SIZE[Allocator.NHeaderP]*(objects+99);
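  -- One stack slot per candidate object counted above, plus slack; stackEnd is the last usable slot, and reaching it is treated as stack overflow (see the Crash in ExploreFrom below).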
  trace: InfoProc = {
    -- [type: SafeStorage.Type, size: INT, object: Allocator.HeaderP, objectHP: Allocator.NHeaderP] RETURNS [continue: BOOLEAN]
    ff: Flags ← ReadFlags[base, objectHP];
    IF ff.legal AND NOT ff.marked AND Startworthy[objectHP] THEN {
      ExploreFrom[objectHP, TRUE];
      IF NOT cumulative THEN ExploreFrom[objectHP, FALSE];
      };
    RETURN [TRUE];
    };
  ExploreFrom: UNSAFE PROC [start: Allocator.NHeaderP, firstPass: BOOL] = {
    Marked: PROC [flags: Flags] RETURNS [marked: BOOL] = CHECKED INLINE {marked ← flags.marked = firstPass};
    Mark: PROC [ofl: Flags] RETURNS [nfl: Flags] = CHECKED INLINE {nfl ← ofl; nfl.marked ← firstPass};
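    -- Marked and Mark interpret the marked bit relative to the pass: on the first pass marked=TRUE means "seen"; on the second pass the sense is inverted, so the flags need not be cleared between passes.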
    stackP: LONG POINTER TO Allocator.NHeaderP ← stackStart;
    stackP^ ← start;
    DO
      p: Allocator.NHeaderP ← stackP^;
      type: Type = p.type;
      rcmx: RCMap.Index = RTTypesBasicPrivate.MapTiRcmx[type];
      eachRef: UNSAFE PROC [p: REF] = {
        IF p # NIL THEN {
          nhp: Allocator.NHeaderP ← LOOPHOLE[p, Allocator.NHeaderP] - SIZE[Allocator.NormalHeader];
          ff: Flags ← ReadFlags[base, nhp];
          IF Marked[ff]
            THEN {
              -- We've seen it before.
              IF NOT firstPass THEN NULL -- on the second pass, don't worry about detecting cycles
              ELSE IF ff.stacked
                THEN {
                  -- It's in the current path, and thus is part of a cycle.
                  stackQ: LONG POINTER TO Allocator.NHeaderP ← stackP;
                  IF NOT ff.cyclic THEN {
                    ff.cyclic ← TRUE;
                    WriteFlags[base, nhp, ff];
                    };
                  -- We also have to mark all objects that are in this cycle.
                  DO
                    pp: Allocator.NHeaderP = stackQ^;
                    pf: Flags = ReadFlags[base, pp];
                    IF pp = nhp THEN EXIT;
                    IF stackQ = stackStart THEN EXIT;
                    -- Writing ff here is safe: every stacked entry on the path carries the same marked, stacked, and legal bits as nhp; only cyclic changes.
                    IF pf.stacked AND NOT pf.cyclic THEN WriteFlags[base, pp, ff];
                    stackQ ← stackQ - SIZE[Allocator.NHeaderP];
                    ENDLOOP;
                  }
                ELSE NULL -- it's already queued to be traced, or has already been traced; in either case, nothing needs to be done about it here
              }
            ELSE {
              -- Never seen before
              IF IsGoal[nhp] THEN ConsumeGoal[start, nhp];
              IF ff.legal AND Transparent[nhp] THEN {
                -- We want to trace through it. Just mark it as being seen and push it on the stack. Don't mark it as 'stacked' yet, since we are not yet tracing from it.
                ff ← Mark[ff];
                stackP ← stackP + SIZE[Allocator.NHeaderP];
                IF stackP = stackEnd THEN Crash[nhp];
                stackP^ ← nhp;
                WriteFlags[base, nhp, ff];
                };
              };
          };
        };
      pf: Flags ← ReadFlags[base, p];
      IF NOT pf.legal THEN Crash[p];
      pf.stacked ← TRUE;
      pf ← Mark[pf];
      WriteFlags[base, p, pf];
      RTTypesBasicPrivate.MapRefs[p+SIZE[Allocator.NormalHeader], rcmx, eachRef];
      -- Now scan down the stack for an entry that has been pushed but not yet traced from (its 'stacked' flag is still clear), clearing the 'stacked' flag of each completed entry as we pop it. If we reach the bottom of the stack, we are done.
      DO
        pp: Allocator.NHeaderP ← stackP^;
        ff: Flags ← ReadFlags[base, pp];
        IF NOT ff.stacked THEN EXIT;
        ff.stacked ← FALSE;
        WriteFlags[base, pp, ff];
        IF stackP = stackStart THEN RETURN;
        stackP ← stackP - SIZE[Allocator.NHeaderP];
        ENDLOOP;
      -- At this point stackP points at a header from which we have not yet enumerated; the outer loop traces from it next.
      ENDLOOP;
    }--end ExploreFrom--;
  EnumerateCollectableStorage[trace];
  }--end stackSpace--;
  VM.Free[stackSpace];
  }--end EnumerateEdges--;