-- TraceAndSweepImpl.mesa
-- Trace-and-sweep garbage collector for Cedar.  Reformatted for review; the
-- original tokens are unchanged, only comments and layout were restored.

DIRECTORY
  Allocator USING [bsiEscape, ExtendedHeader, HeaderP, LastAddress, logPagesPerQuantum, NHeaderP, NormalHeader, QuantumIndex, wordsPerQuantum],
  AllocatorOps USING [AddressToQuantumIndex, BlockSize, EnterAndCallBack, IsValidRef, NHPToREF, quantumMap, REFToNHP, Reset, TAndSFreeObject],
  Basics USING [BITAND, CARD, DoubleShiftLeft, LowHalf],
  Collector USING [EstablishTAndSProc],
  DebuggerSwap USING [CallDebugger],
  LoadState USING [Acquire, ConfigID, EnumerateAllModules, GlobalFrameToType, local, ModuleIndex, ModuleToGlobalFrame, Release],
  PrincOps USING [GlobalFrameHandle, logWordsPerPage, wordsPerPage],
  RCMap USING [nullIndex],
  RCMicrocodeOps USING [ASSIGNREF, OnZ, RCOverflowOccurred, RCUnderflowOccurred, ZCTFull],
  RTFrameHeapSnapshot USING [AcquireFrameHeapSnapshot, MapUncountedBodies, ReleaseFrameHeapSnapshot],
  RTTypesBasicPrivate USING [MapRefs, MapTiRcmx, NumberPackageRefs],
  SafeStorage USING [GetReferentType, nullType, ReclaimCollectibleObjects, Type],
  VM USING [AddressForPageNumber, Free, PageNumberForAddress, SimpleAllocate],
  ZCT USING [EnterAndCallBack, EnterRCOvAndCallBack, InnerHandleRCOverflow, InnerHandleRCUnderflow, RCOvReset, zct, zctBlockWords];

TraceAndSweepImpl: PROGRAM
  IMPORTS AllocatorOps, Basics, Collector, DebuggerSwap, LoadState, RCMicrocodeOps, RTFrameHeapSnapshot, RTTypesBasicPrivate, SafeStorage, VM, ZCT
  = BEGIN OPEN Allocator;

-- Variables

CARD: TYPE = Basics.CARD;
Type: TYPE = SafeStorage.Type;
nullType: Type = SafeStorage.nullType;

global: REF GlobalRep _ NEW[GlobalRep];
GlobalRep: TYPE = RECORD [
  -- per-collection statistics, reset by ActuallyDoIt
  objectsSeen: CARD _ 0,
  objectsReclaimed: CARD _ 0,
  objectsKept: CARD _ 0,
  nGlobalFrames: CARD _ 0,
  nRCGlobalFrames: CARD _ 0,
  nLocalFrames: CARD _ 0,
  -- number of objects retained only because REFs appear on the stack
  nRetainedObjects: CARD _ 0,
  nRefsPushed: CARD _ 0,
  -- support for WhoPointsTo and FindCircularGarbage
  suspect: LONG POINTER _ LOOPHOLE[LONG[-1]],
  suspectRef: LONG POINTER _ NIL, -- a LOOPHOLE'd REF
  -- destructiveTestEnabled TRUE => do a second TandS without the stack scan, then punt
  destructiveTestEnabled: BOOL _ FALSE,
  -- findingCircularGarbage TRUE => remember stuff found, don't reclaim it
  findingCircularGarbage: BOOL _ FALSE,
  -- IF TRUE, then logs finalizable objects with counts that are too small
  -- IF FALSE, then crashes for finalizable objects with counts that are too small
  forgiving: BOOL _ TRUE,
  suspectRefFoundInLocalFrame: BOOL _ FALSE,
  circularStructureFound: BOOL _ FALSE,
  forgivingLog: ForgivingLog _ NIL,
  forgivingLogCount: INT _ 0,
  forgivingLogIndex: [0..ForgivingLogEntries) _ 0,
  refStackChain: RefStack _ NIL,
  refStackFree: RefStack _ NIL,
  gfhToSuspectRef: PrincOps.GlobalFrameHandle _ NIL,
  nextRpaIndex: CARDINAL _ 0,
  rpa: ARRAY [0..maxNRPIndex] OF LONG POINTER _ ALL[NIL]
  ];
maxNRPIndex: CARDINAL = 20;

SuspectSeen: SIGNAL = CODE;

-- ForgivingLog declarations

ForgivingLogPages: NAT = 4;
ForgivingLog: TYPE = LONG POINTER TO ForgivingLogRep;
ForgivingLogRep: TYPE = ARRAY [0..ForgivingLogEntries) OF ForgivingLogEntry;
ForgivingLogEntry: TYPE = RECORD [
  nhp: Allocator.NHeaderP,
  header: Allocator.NormalHeader
  ];
ForgivingLogEntries: NAT = (ForgivingLogPages*PrincOps.wordsPerPage)/SIZE[ForgivingLogEntry];

RefStackRep: TYPE = RECORD [
  next: RefStack _ NIL,
  size: CARDINAL _ 0,
  max: CARDINAL _ RefStackMax,
  refs: SEQUENCE COMPUTED CARDINAL OF LONG POINTER
  ];
RefStack: TYPE = LONG POINTER TO RefStackRep;
RefStackPages: NAT = 4;
-- Leave room for the header info
RefStackMax: NAT = (PrincOps.wordsPerPage*RefStackPages - (SIZE[RefStackRep])) / (SIZE[LONG POINTER]);

APNodes: TYPE = ARRAY [0..maxNReferers) OF NHeaderP;
suspectReferers: REF APNodes _ NEW[APNodes _ ALL[NIL]];
maxNReferers: NAT = 10;
nSuspectReferers: NAT _ 0;
AIPNodes: TYPE = ARRAY [0..maxNInnerReferers) OF NHeaderP;
innerReferers: REF AIPNodes _ NEW[AIPNodes _ ALL[NIL]];
maxNInnerReferers: NAT = 50;
nInnerReferers: NAT _ 0;

-- PROCS

WhoPointsTo: PROC [nhp: NHeaderP, cycleLimit: NAT _ 20]
  RETURNS [referers: REF APNodes, gfh: PrincOps.GlobalFrameHandle, foundInLocalFrame, foundInCircularStructure: BOOL, cycles: NAT _ 0] = {
  -- Look up the referer chain for the object at nhp by running repeated
  -- trace-and-sweep collections with global.suspectRef set.
  FOR i: NAT IN [0..maxNReferers) DO suspectReferers[i] _ NIL ENDLOOP;
  suspectReferers[0] _ nhp;
  nSuspectReferers _ 1;
  FOR i: NAT IN [0..maxNInnerReferers) DO innerReferers[i] _ NIL ENDLOOP;
  nInnerReferers _ 0;
  global.gfhToSuspectRef _ NIL;
  global.suspectRefFoundInLocalFrame _ FALSE;
  global.circularStructureFound _ FALSE;
  -- Stop if a gfh is found, or if foundInLocalFrame, or if a loop is found,
  -- or if more than 1 referer is found, or if no referers are found.
  UNTIL global.gfhToSuspectRef # NIL OR global.circularStructureFound OR
    global.suspectRefFoundInLocalFrame OR nSuspectReferers > 1 OR nSuspectReferers = 0 OR cycles = cycleLimit DO
    global.suspectRef _ LOOPHOLE[AllocatorOps.NHPToREF[suspectReferers[0]], LONG POINTER];
    FOR i: NAT IN [0..maxNReferers) DO suspectReferers[i] _ NIL ENDLOOP;
    nSuspectReferers _ 0;
    SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE, traceAndSweep: TRUE];
    cycles _ cycles + 1;
    ENDLOOP;
  global.suspectRef _ NIL;
  RETURN[
    referers: suspectReferers,
    gfh: global.gfhToSuspectRef,
    foundInLocalFrame: global.suspectRefFoundInLocalFrame,
    foundInCircularStructure: global.circularStructureFound,
    cycles: cycles];
  };

FindCircularGarbage: PROC = {
  -- First does a standard incremental collection.  Then goes through the
  -- motions of a TandS, but does not reclaim any garbage; it leaves the
  -- marked bit set.
  SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE];
  global.findingCircularGarbage _ TRUE;
  SafeStorage.ReclaimCollectibleObjects[suspendMe: TRUE, traceAndSweep: TRUE];
  global.findingCircularGarbage _ FALSE;
  };

ActuallyDoIt: PROC [ignoreStack: BOOL _ FALSE] = {
  clearRC: PROC [nhp: NHeaderP] = {
    -- Leave the f flag and the type field alone.
    global.objectsSeen _ global.objectsSeen + 1;
    nhp.inZCT _ FALSE;
    nhp.maybeOnStack _ FALSE;
    nhp.refCount _ 0;
    nhp.rcOverflowed _ FALSE;
    };
  -- Initialize
  global.objectsSeen _ 0;
  global.objectsKept _ global.objectsReclaimed _ 0;
  global.nRCGlobalFrames _ global.nGlobalFrames _ global.nLocalFrames _ 0;
  global.nRetainedObjects _ 0;
  global.nextRpaIndex _ 0;
  global.nRefsPushed _ 0;
  -- Clear all free lists
  ZCT.zct.bsiToFreeList _ ALL[NIL];
  AllocatorOps.Reset[];
  -- Clear the zero-count table: mask rp by (zctBlockWords-1), i.e. round the
  -- read pointer down to the start of its zct block and make wp coincide.
  ZCT.zct.markingDecrements _ FALSE;
  ZCT.zct.rp _ ZCT.zct.wp _ ZCT.zct.rp - Basics.BITAND[
    Basics.LowHalf[LOOPHOLE[ZCT.zct.rp, CARD]], ZCT.zctBlockWords-1
    ];
  ZCT.RCOvReset[];
  -- Clear all ref counts and count objects
  AllObjects[clearRC];
  -- Trace from all local and global frames in the world
  IF NOT ignoreStack THEN RTFrameHeapSnapshot.MapUncountedBodies[TraceLocalFrame];
  [] _ LoadState.local.EnumerateAllModules[order: newestFirst, proc: TraceGlobalFrame];
  -- Continue marking from the ref stack and from objects
  UNTIL global.refStackChain = NIL DO TraceRefsInObject[LOOPHOLE[PopRef[], REF]] ENDLOOP;
  -- RRA: trace from all objects with finalization and RC = 0; otherwise we
  -- get situations where subsidiary refs are visible from the object when it
  -- is taken from the finalization queue, but the subsidiary objects have
  -- been reclaimed!
  AllObjects[fudgeFinalized];
  -- We have to trace from objects found in that last sweep.
  UNTIL global.refStackChain = NIL DO TraceRefsInObject[LOOPHOLE[PopRef[], REF]] ENDLOOP;
  FreeRefStacks[];
  -- Free the objects: reclaim unmarked and free stuff, rebuild the zct and
  -- remember stuff retained due only to the frame heap.
  AllObjects[visitOneObject];
  IF global.objectsReclaimed + global.objectsKept # global.objectsSeen THEN ERROR;
  };

DoTraceAndSweepCollection: PROC = {
  haveAllocatorLocked: PROC = {
    -- here with the loader and allocator locked
    haveRCOvLocked: PROC = {
      -- here with the loader, allocator and RCOvBank locked
      haveZCTLocked: PROC = {
        -- here with the loader, allocator, RCOvBank and ZCT locked
        RTFrameHeapSnapshot.AcquireFrameHeapSnapshot[];
        { ENABLE UNWIND => RTFrameHeapSnapshot.ReleaseFrameHeapSnapshot[];
          ActuallyDoIt[ignoreStack: FALSE];
          IF global.destructiveTestEnabled THEN {
            ActuallyDoIt[ignoreStack: TRUE];
            DebuggerSwap.CallDebugger["destructive collection finished."L];
            };
          RTFrameHeapSnapshot.ReleaseFrameHeapSnapshot[];
          };
        };
      -- START haveRCOvLocked HERE
      -- Acquire the lock on the ref counting machinery, do the work, then
      -- release the lock.
      ZCT.EnterAndCallBack[haveZCTLocked];
      };
    -- START haveAllocatorLocked HERE
    -- Acquire the lock on the RCOverflowBank, do the work, then release the lock.
    ZCT.EnterRCOvAndCallBack[haveRCOvLocked];
    };
  -- START DoTraceAndSweepCollection HERE
  -- First, acquire the lock on the loader (REFs in GF's are counted)
  LoadState.local.Acquire[exclusive];
  -- Next, acquire the lock on the allocator, do the work, then release the lock.
  AllocatorOps.EnterAndCallBack[haveAllocatorLocked ! UNWIND => LoadState.local.Release[]];
  -- Release the loader's lock
  LoadState.local.Release[];
  };

fudgeFinalized: PROC [nhp: NHeaderP] = {
  -- Trace from an object with finalization enabled and with RC = 0.  We have
  -- already traced out all objects that started from local or global frames.
  header: Allocator.NormalHeader _ nhp^;  -- this sample is useful for debugging
  SELECT TRUE FROM
    header.f AND NOT header.inZCT => {
      -- This object has finalization enabled, and has never been seen!  So we
      -- trace out references from it now.  However, it is not proper to
      -- increment the RC for this visit.
      ref: REF _ AllocatorOps.NHPToREF[nhp];
      nhp.inZCT _ TRUE;
      IF RTTypesBasicPrivate.MapTiRcmx[SafeStorage.GetReferentType[ref]] # RCMap.nullIndex THEN PushRef[LOOPHOLE[ref, LONG POINTER]];
      };
    ENDCASE;
  };

visitOneObject: PROC [nhp: NHeaderP] = {
  -- Reclaim unmarked and free stuff, rebuild the zct and remember stuff
  -- retained due only to the frame heap.
  header: Allocator.NormalHeader _ nhp^;  -- this sample is useful for debugging
  SELECT TRUE FROM
    NOT header.f AND NOT header.inZCT => {
      -- This one is NOT being retained (and has no finalization)
      IF global.findingCircularGarbage
        THEN global.objectsKept _ global.objectsKept + 1
        ELSE {global.objectsReclaimed _ global.objectsReclaimed + 1; AllocatorOps.TAndSFreeObject[nhp]};
      RETURN;
      };
    header.f => {
      -- This object has finalization enabled, so adjust refCount to reflect
      -- the package count.
      packageCount: NAT _ RTTypesBasicPrivate.NumberPackageRefs[nhp.type];
      dummy: REF _ AllocatorOps.NHPToREF[nhp];
      FOR i: CARDINAL IN [1..packageCount] DO
        IF nhp.refCount = 0 AND NOT nhp.rcOverflowed THEN {
          -- RRA: We are trying to decrement a reference count for a
          -- finalizable object that already has a reference count of 0.  The
          -- most likely cause is that a permanent reference is not being
          -- maintained to the object.  If only a circularity is holding the
          -- count high, then this bug may ONLY surface during trace and
          -- sweep.  We keep a log of this stuff to allow the world to
          -- continue.  If we are not global.forgiving we crash and burn
          -- immediately.
          IF global.forgivingLog = NIL THEN global.forgivingLog _ VM.AddressForPageNumber[VM.SimpleAllocate[ForgivingLogPages].page];
          global.forgivingLog[global.forgivingLogIndex] _ ForgivingLogEntry[nhp, header];
          global.forgivingLogCount _ global.forgivingLogCount + 1;
          IF NOT global.forgiving THEN DebuggerSwap.CallDebugger["Unforgivable!"L];
          IF global.forgivingLogIndex = ForgivingLogEntries-1
            THEN global.forgivingLogIndex _ 0
            ELSE global.forgivingLogIndex _ global.forgivingLogIndex + 1;
          EXIT;
          };
        -- Decrement the reference count
        RCMicrocodeOps.ASSIGNREF[rhs: NIL, lhs: @dummy !
          RCMicrocodeOps.RCUnderflowOccurred => {ZCT.InnerHandleRCUnderflow[dummy]; RETRY}];
        ENDLOOP;
      header _ nhp^;  -- resample to make it all consistent
      };
    ENDCASE;
  -- At this point we keep the object in play
  global.objectsKept _ global.objectsKept + 1;
  IF NOT global.findingCircularGarbage THEN nhp.inZCT _ FALSE;  -- clear the mark
  IF header.refCount = 0 AND NOT header.rcOverflowed THEN {
    -- The reference count is 0.
    IF NOT header.f THEN {
      global.rpa[global.nextRpaIndex] _ LOOPHOLE[AllocatorOps.NHPToREF[nhp], LONG POINTER];
      IF global.nextRpaIndex < maxNRPIndex THEN global.nextRpaIndex _ global.nextRpaIndex + 1;
      global.nRetainedObjects _ global.nRetainedObjects + 1;
      };
    -- put it on the zct
    RCMicrocodeOps.OnZ[nhp ! RCMicrocodeOps.ZCTFull => DebuggerSwap.CallDebugger["TAndS disaster."L]];
    };
  };

TraceLocalFrame: PROC [d: LONG DESCRIPTOR FOR ARRAY OF WORD] = {
  -- mark refs in a local frame
  pa: LONG POINTER TO LONG POINTER = LOOPHOLE[BASE[d], LONG POINTER TO LONG POINTER];
  nWords: CARDINAL = LENGTH[d];
  global.nLocalFrames _ global.nLocalFrames + 1;
  IF nWords >= SIZE[REF] THEN
    FOR i: CARDINAL IN [0..nWords-SIZE[REF]] DO
      addr: LONG POINTER = (pa+i)^;
      IF addr # NIL AND AllocatorOps.IsValidRef[addr] THEN {
        -- this REF is valid, non-NIL
        ref: REF = LOOPHOLE[addr, REF];
        nhp: Allocator.NHeaderP _ AllocatorOps.REFToNHP[ref];
        IF global.suspectRef # NIL AND addr = global.suspectRef THEN global.suspectRefFoundInLocalFrame _ TRUE;
        IF NOT nhp.inZCT THEN {
          -- this REF is valid and not seen before
          nhp.inZCT _ TRUE;
          IF RTTypesBasicPrivate.MapTiRcmx[SafeStorage.GetReferentType[ref]] # RCMap.nullIndex THEN PushRef[LOOPHOLE[ref, LONG POINTER]];
          };
        };
      ENDLOOP;
  };

TraceGlobalFrame: PROC [configID: LoadState.ConfigID, moduleIndex: LoadState.ModuleIndex] RETURNS [stop: BOOL _ FALSE] = {
  -- this procedure is used to mark refs from a global frame;
  -- the algorithm is essentially the same as for regular objects
  gfh: PrincOps.GlobalFrameHandle = LoadState.local.ModuleToGlobalFrame[configID, moduleIndex];
  type: Type _ LoadState.local.GlobalFrameToType[gfh];
  procRef: PROC [ref: REF] = {
    SELECT LOOPHOLE[ref, LONG POINTER] FROM
      NIL => RETURN;
      global.suspectRef => global.gfhToSuspectRef _ gfh;
      ENDCASE;
    {
      nhp: Allocator.NHeaderP _ AllocatorOps.REFToNHP[ref];
      dummy: REF _ NIL;
      IF NOT nhp.inZCT THEN {
        -- this REF is valid and not seen before; mark the object as being seen
        nhp.inZCT _ TRUE;
        IF RTTypesBasicPrivate.MapTiRcmx[SafeStorage.GetReferentType[ref]] # RCMap.nullIndex THEN PushRef[LOOPHOLE[ref, LONG POINTER]];
        };
      -- Increment the reference count
      RCMicrocodeOps.ASSIGNREF[rhs: ref, lhs: @dummy !
        RCMicrocodeOps.RCOverflowOccurred => {ZCT.InnerHandleRCOverflow[ref]; RETRY};
        ];
      };
    };
  global.nGlobalFrames _ global.nGlobalFrames + 1;
  IF type # nullType THEN {
    global.nRCGlobalFrames _ global.nRCGlobalFrames + 1;
    RTTypesBasicPrivate.MapRefs[LONG[gfh], RTTypesBasicPrivate.MapTiRcmx[type], procRef];
    };
  RETURN[FALSE];
  };

TraceRefsInObject: PROC [container: REF] = {
  -- traces all references from the given reference
  type: Type _ SafeStorage.GetReferentType[container];
  refererPushed: BOOL _ FALSE;
  procRef: PROC [ref: REF] = {
    SELECT LOOPHOLE[ref, LONG POINTER] FROM
      NIL => RETURN;
      global.suspectRef => IF NOT refererPushed THEN {
        nhp: NHeaderP = AllocatorOps.REFToNHP[container];
        refererPushed _ TRUE;
        IF nSuspectReferers < maxNReferers THEN {
          suspectReferers[nSuspectReferers] _ nhp;
          nSuspectReferers _ nSuspectReferers + 1;
          };
        FOR i: NAT IN [0..nInnerReferers) DO
          IF nhp = innerReferers[i] THEN GO TO circular;
          ENDLOOP;
        IF NOT global.circularStructureFound AND nInnerReferers < maxNInnerReferers THEN {
          innerReferers[nInnerReferers] _ nhp;
          nInnerReferers _ nInnerReferers + 1;
          };
        EXITS circular => global.circularStructureFound _ TRUE;
        };
      ENDCASE;
    {
      nhp: Allocator.NHeaderP _ AllocatorOps.REFToNHP[ref];
      dummy: REF _ NIL;
      IF NOT nhp.inZCT THEN {
        -- this REF is valid and not seen before; mark the object as being seen
        nhp.inZCT _ TRUE;
        IF RTTypesBasicPrivate.MapTiRcmx[SafeStorage.GetReferentType[ref]] # RCMap.nullIndex THEN PushRef[LOOPHOLE[ref, LONG POINTER]];
        };
      -- Increment the reference count
      RCMicrocodeOps.ASSIGNREF[rhs: ref, lhs: @dummy !
        RCMicrocodeOps.RCOverflowOccurred => {ZCT.InnerHandleRCOverflow[ref]; RETRY};
        ];
      };
    };
  DO
    oldNRefsPushed: CARD = global.nRefsPushed;
    RTTypesBasicPrivate.MapRefs[LOOPHOLE[container, LONG POINTER], RTTypesBasicPrivate.MapTiRcmx[type], procRef];
    IF global.nRefsPushed # oldNRefsPushed + 1 THEN RETURN;
    container _ LOOPHOLE[PopRef[], REF];
    type _ SafeStorage.GetReferentType[container];
    refererPushed _ FALSE;
    ENDLOOP;
  };

AllObjects: PROC [visit: PROC [NHeaderP]] = {
  -- this proc visits all collectible objects (free AND allocated)
  qi: QuantumIndex _ FIRST[QuantumIndex];
  DO
    hp: HeaderP;
    blockSize: INT _ 0;
    -- Find the beginning of the next run of quanta from the quantum map
    UNTIL AllocatorOps.quantumMap[qi] DO
      IF qi = LAST[QuantumIndex] THEN RETURN;
      qi _ qi+1;
      ENDLOOP;
    -- Start parsing at the beginning of this run
    FOR hp _ QuantumIndexToLP[qi], hp + blockSize WHILE LOOPHOLE[hp, CARD] < LastAddress AND AllocatorOps.quantumMap[LPToQuantumIndex[hp]] DO
      nhp: NHeaderP _ LOOPHOLE[hp];
      IF nhp.blockSizeIndex = bsiEscape THEN nhp _ nhp + (SIZE[ExtendedHeader] - SIZE[NormalHeader]);
      blockSize _ AllocatorOps.BlockSize[hp];
      visit[nhp];
      ENDLOOP;
    IF LOOPHOLE[hp, CARD] >= LastAddress THEN RETURN;
    qi _ LPToQuantumIndex[hp];
    ENDLOOP;
  };

-- The shift distance = Log2[wordsPerPage*pagesPerQuantum]
logWordsPerQuantum: NAT = PrincOps.logWordsPerPage+Allocator.logPagesPerQuantum;

LPToQuantumIndex: PROC [lp: LONG POINTER] RETURNS [QuantumIndex] = INLINE {
  RETURN[AllocatorOps.AddressToQuantumIndex[LOOPHOLE[lp]]];
  };

QuantumIndexToLP: PROC [qi: QuantumIndex] RETURNS [LONG POINTER] = INLINE {
  RETURN [Basics.DoubleShiftLeft[[pair[lo: qi, hi: 0]], logWordsPerQuantum].lp];
  };

PushRef: PROC [pref: LONG POINTER] = {
  -- pushes ref to object onto the reference stack
  stack: RefStack _ global.refStackChain;
  IF stack = NIL OR stack.size = stack.max THEN {
    -- time to get a new stack node
    IF global.refStackFree = NIL
      THEN {
        -- oh well, nothing comes for free
        stack _ VM.AddressForPageNumber[VM.SimpleAllocate[RefStackPages].page];
        stack.next _ NIL;
        stack.size _ 0;
        stack.max _ RefStackMax;
        }
      ELSE {stack _ global.refStackFree; global.refStackFree _ stack.next; };
    stack.next _ global.refStackChain;
    global.refStackChain _ stack;
    };
  stack[stack.size] _ pref;
  stack.size _ stack.size + 1;
  global.nRefsPushed _ global.nRefsPushed + 1;
  };

PopRef: PROC RETURNS [pref: LONG POINTER] = {
  -- pops ref to object from the reference stack
  stack: RefStack _ global.refStackChain;
  IF stack # NIL THEN {
    size: CARDINAL _ stack.size;
    IF size = 0 THEN {
      -- current node exhausted; move it to the free list and try the next one
      global.refStackChain _ stack.next;
      stack.next _ global.refStackFree;
      global.refStackFree _ stack;
      IF (stack _ global.refStackChain) = NIL THEN RETURN [NIL];
      IF (size _ stack.size) = 0 THEN ERROR;
      };
    size _ size - 1;
    stack.size _ size;
    RETURN [stack[size]];
    };
  RETURN [NIL];
  };

FreeRefStacks: PROC = {
  IF global.refStackChain # NIL THEN ERROR; -- should never happen, but check anyway
  WHILE global.refStackFree # NIL DO
    stack: RefStack _ global.refStackFree.next;
    VM.Free[[page: VM.PageNumberForAddress[global.refStackFree], count: RefStackPages]];
    global.refStackFree _ stack
    ENDLOOP;
  };

-- START TraceAndSweepImpl HERE
Collector.EstablishTAndSProc[DoTraceAndSweepCollection]

END.

-- TraceAndSweepImpl.mesa
-- Copyright c 1985, 1986 by Xerox Corporation.  All rights reserved.
-- Russ Atkinson (RRA) May 7, 1986 7:43:04 pm PDT
-- This module implements a trace-and-sweep garbage collector for Cedar
-- ("DoTraceAndSweepCollection").  It uses auxiliary storage for a reference
-- stack.  To allow subsequent incremental collection, DoTraceAndSweepCollection
-- restores reference counts to correct values.  No collectible storage is
-- allocated while DoTraceAndSweepCollection is active, nor are any REFs
-- changed in counted storage during that time.  Client processes that attempt
-- to do so are suspended until after DoTraceAndSweepCollection finishes.
Stop if a gfh is found, or if foundInLocalFrame, or if a loop is found, or if more than 1 referer is found, or if no referers are found (changed conservative scan). This procedure first does a standard incremental collection. Then it goes through the motions of a TandS, but does not reclaim any garbage. Instead, it leaves the marked bit set. Leave the f flag and the type field alone. Initialize. Clear all free lists. Clear the zero-count table: mask rp by (LastAddress-(zctBlockWords-1)). Clear all ref counts and count objects. Trace from all local and global frames in the world. Continue marking from the ref stack and from objects. RRA: trace from all objects with finalization and RC = 0; otherwise we get situations where subsidiary refs are visible from the object when it is taken from the finalization queue, but the subsidiary objects have been reclaimed! We have to trace from objects found in that last sweep. Free the objects. Reclaim unmarked and free stuff, rebuild the zct and remember stuff retained due only to the frame heap. Here with the loader and allocator locked. Here with the loader, allocator and RCOvBank locked. Here with the loader, allocator, RCOvBank and ZCT locked. START haveRCOvLocked HERE. Acquire the lock on the ref counting machinery, do the work, then release the lock. START haveAllocatorLocked HERE. Acquire the lock on the RCOverflowBank, do the work, then release the lock. START DoTraceAndSweepCollection HERE. First, acquire the lock on the loader (REFs in GF's are counted). Next, acquire the lock on the allocator, do the work, then release the lock. Release the loader's lock. Trace from an object with finalization enabled and with RC = 0. We have already traced out all objects that started from local or global frames. This sample is useful for debugging. This object has finalization enabled, and has never been seen! So we trace out references from it now. However, it is not proper to increment the RC for this visit. 
Reclaim unmarked and free stuff, rebuild the zct and remember stuff retained due only to the frame heap. This sample is useful for debugging. This one is NOT being retained (and has no finalization). This object has finalization enabled, so adjust refCount to reflect the package count. RRA: We are trying to decrement a reference count for a finalizable object that already has a reference count of 0. The most likely cause of this is that a permanent reference is not being maintained to the object. If only a circularity is holding the count high, then this bug may ONLY surface during trace and sweep. We keep a log of this stuff to allow the world to continue. If we are not global.forgiving, this fault makes us crash and burn immediately. Decrement the reference count. Resample to make it all consistent. At this point we keep the object in play. Clear the mark. The reference count is 0; put it on the zct. Mark refs in a local frame. If this REF is valid and non-NIL. This REF is valid and not seen before. This procedure is used to mark refs from a global frame; the algorithm is essentially the same as for regular objects. Mark the object as being seen. This REF is valid and not seen before. Increment the reference count. Traces all references from the given reference. Mark the object as being seen. 
this REF is valid and not seen before Increment the reference count this proc visits all collectible objects (free AND allocated) Find the beginning of the next run of quanta from the quantum map Start parsing at the beginning of this run The shift distance = Log2[wordsPerPage*pagesPerQuantum] pushes ref to object onto the reference stack time to get a new stack node oh well, nothing comes for free pops ref to object from the reference stack START TraceAndSweepImpl HERE ΚΨ˜codešœ™Kšœ Οmœ7™BK™.—K˜Kšœύ™ύ˜šΟk ˜ Kšœ žœ~˜Kšœ žœz˜ŒKšœžœžœžœ˜6Kšœ žœ˜%Kšœ žœ˜"Kšœ žœo˜~Kšœ žœ4˜BKšœžœ ˜Kšœžœž œ9˜XKšœžœJ˜cKšœžœ)˜BKšœ žœ>˜OKšžœžœD˜LKšžœžœx˜K˜——šΟnœž˜Kšžœ‚žœž˜Kšœžœžœ ˜K˜Kšžœžœ žœ˜šœžœ˜Kšœ&˜&—K˜šœ ™ K˜Kšœžœ žœ ˜'šœ žœžœ˜Kšœ žœ˜Kšœžœ˜Kšœ žœ˜Kšœžœ˜Kšœžœ˜Kšœžœ˜šœžœ˜Kšœ;™;—Kšœ žœ˜Kš œ žœžœžœžœ˜+šœ žœžœžœΟc˜4Kšœ/™/—šœžœžœ˜&Kšœžœ7™R—šœžœžœ˜&Kšœžœ*™E—šœ žœžœ˜Kšžœžœ>™EKšžœžœE™M—Kšœžœžœ˜*Kšœžœžœ˜%Kšœžœ˜!Kšœžœ˜Kšœ0˜0Kšœžœ˜Kšœžœ˜Kšœ.žœ˜2Kšœžœ˜Kš œžœžœžœžœžœžœ˜6K˜—šœ žœ˜K˜—KšŸ œžœžœ˜K˜šœ™KšŸœžœ˜Kš œžœžœžœžœ˜5Kšœžœžœžœ˜Lšœžœžœ˜"Kšœ˜Kšœ˜K˜—KšŸœžœ-žœ˜]K˜—šœ žœžœ˜Kšœžœ˜Kšœžœ˜Kšœžœ˜Kš œžœžœžœžœžœž˜0Kšœ˜—š œ žœžœžœžœ ˜-KšŸ œžœ˜š Ÿ œžœ+žœžœžœžœ˜fKšœ™——K˜Kšœ žœžœžœ ˜4Kš œžœ žœ žœžœ˜7Kšœžœ˜Kšœžœ˜K˜Kšœ žœžœžœ ˜:Kš œžœ žœ žœžœ˜7Kšœžœ˜Kšœžœ˜K˜——šœ™K˜šŸ œžœžœžœ žœXžœ žœ ˜ΑKšœΆ™ΆKš žœžœžœžœžœžœ˜DKšœ˜K˜K˜Kš žœžœžœžœžœžœ˜GK˜K˜Kšœžœ˜Kšœ%žœ˜+Kšœ žœ˜&K˜šžœžœžœžœ$žœžœžœžœ˜΄Kšœžœ,žœžœ˜VKš žœžœžœžœžœžœ˜DK˜Kšœ1žœžœ˜LK˜Kšžœ˜—Kšœžœ˜šžœ˜Kšœ˜Kšœ˜Kšœ6˜6Kšœ8˜8K˜—K˜K™—šŸœžœ˜Kšœͺ™ͺKšœ1žœ˜7Kšœ žœ˜%Kšœ1žœžœ˜LKšœ žœ˜&K˜K˜—šŸ œžœžœžœ˜2šΟbœžœ˜!Kšœ,˜,Kšœ žœ˜Kšœžœ˜Kšœ)™)Kšœ˜Kšœžœ˜Kšœ˜K™—Kšœ ™ Kšœ˜Kšœ1˜1KšœH˜HKšœ˜Kšœ˜K˜šœ˜Kšœ™—Kšžœžœžœ˜!šœ˜Kšœ™—Kšžœžœ˜"šžœ˜ Kšœžœ˜ šœžœ˜ šœ žœ˜Kšœžœžœ žœ˜+Kšžœ˜Kšœ˜——Kšœ*™*—K˜Kšœ&™&Kšžœ ˜šœ˜Kšœ7™7—Kšžœžœ žœ9˜PšœU˜UKšœ4™4—šžœžœž˜#Kšœžœ žœ˜*Kšžœ˜K˜—šœ˜Kšœε™ε—šžœžœž˜#Kšœžœ žœ˜*Kšžœ˜Kšœ7™7—K˜K˜K™šœ™K™c—Kšœ˜KšžœCžœžœ˜PKšœ˜K˜—šŸœžœ˜#š‘œžœ˜Kšœ)™)š‘œžœ˜Kšœ3™3š‘ œžœ˜Kšœ8™8Kšœ/˜/šœžœžœ3˜BKšœžœ˜!šžœžœ˜'Kšœžœ˜ 
K˜?Kšœ˜—K˜/K˜—K˜—šžœž™KšœS™S—Kšžœ!˜$K˜—šžœž™KšœK™K—Kšžœ&˜)Kšœ˜—K™šžœž™$Kšœ@™@—šœ#˜#KšœL™L—šœ4žœ˜YKšœ™—Kšœ˜Kšœ˜K˜—š‘œžœ˜(K™‘šœ&˜&Kšœ#™#—šžœžœž˜šœ žœžœ˜"Kšœ¦™¦Kšœžœ˜&Kšœ žœ˜Kš žœSžœ žœžœžœ˜K˜—Kšžœ˜—K˜K˜—š‘œžœ˜(K™cšœ&˜&Kšœ#™#—šžœžœž˜šžœ žœžœ˜&Kšœ8™8šžœ˜ Kšžœ,˜0Kšžœ\˜`—Kšžœ˜K˜—šœ ˜ KšœV™VKšœžœ3˜DKšœžœ˜(šžœžœžœž˜'šžœžœžœžœ˜3Kšžœ™žœ§™Ηšžœžœž˜!Kšœžœžœ)˜Y—KšœO˜OKšœ8˜8Kšžœžœžœ-˜Išžœ1˜3Kšžœ˜!Kšžœ9˜=—Kšžœ˜K˜—Kšœ™šœž œžœ ˜.šœ)˜)Kšžœ žœ˜+——Kšžœ˜—šœ˜Kšœ"™"—K˜—Kšžœ˜K˜—Kšœ(™(K˜Kšœ-˜-šžœžœž˜)Kšœ™Kšœ žœ˜—šžœžœžœžœ˜9Kšœ™šžœžœ žœ˜Kšœ"žœžœžœ˜UKšžœ#žœ/˜XKšœ6˜6K˜—K™ šœ˜KšœK˜K—Kšœ˜—K˜K˜—šŸœžœžœž œžœžœžœžœ˜@Kšœ™Kšœžœžœžœžœžœžœžœžœžœžœžœžœ˜SKšœžœžœ˜Kšœ/˜/šžœ žœžœžœ˜š žœžœžœ žœžœž˜+Kšœžœžœ ˜šžœžœžœžœ˜6Kšœ™Kšœžœžœžœ˜Kšœ5˜5šžœžœžœž˜