<> <> <> <> <> <> <>

DIRECTORY
  AssociativeCache,
  Basics USING [BITSHIFT],
  CacheModels,
  Convert,
  DragOpsCross USING [Half, Word, ZerosWord],
  DragOpsCrossUtils USING [AddDelta, HalfShift, HalfToCard, HalfXor, LowHalf, SingleWordShiftRight, WordToHalves],
  IO,
  Rope;

AssociativeCacheImpl: CEDAR PROGRAM
  IMPORTS Basics, Convert, DragOpsCrossUtils, IO, Rope
  EXPORTS AssociativeCache
  = BEGIN OPEN DragOpsCrossUtils;

Word: TYPE = DragOpsCross.Word;
Half: TYPE = DragOpsCross.Half;
ZerosWord: Word = DragOpsCross.ZerosWord;

<>

Cache: TYPE = CacheModels.Cache;

maxQuadsPerLine: NAT = 64;
maxLines: NAT = 16*1024;
wordsPerPage: CARDINAL = CacheModels.wordsPerPage;

QuadIndex: TYPE = [0..maxQuadsPerLine);

PageEntry: TYPE = REF PageEntryRep;
PageEntryRep: TYPE = RECORD [
  next: PageEntry _ NIL,
  pageAddr: Word _ ZerosWord,
  useCount: INT _ 0
  ];

QuadBitVector: TYPE = PACKED ARRAY QuadIndex OF BOOL;
None: QuadBitVector = ALL[FALSE];

HashEntry: TYPE = REF HashEntryRep;
HashEntryRep: TYPE = RECORD [
  next: HashEntry _ NIL,
  lineAddr: Word _ ZerosWord,
  index: NAT _,
  chunkPresent: QuadBitVector _ None,
  dirty: QuadBitVector _ None,
  referenced: QuadBitVector _ None
  ];

CacheData: TYPE = REF CacheDataRep;
CacheDataRep: TYPE = RECORD [
  stats: CacheStats _ [],
  hashVector: HashVector _ NIL,
  pageEntryCount: INT _ 0,
  pageList: PageEntry _ NIL,
  freePageList: PageEntry _ NIL,
  victimIndex: CARDINAL _ 0,
  lru: BOOL _ FALSE,
  rover: NAT _ 0,
  quadsPerLine: NAT _ 2,
  wordsPerQuad: NAT _ 4,
  logWordsPerQuad: NAT,
  realCache: CacheModels.Cache _ NIL,
  mapCache: CacheModels.Cache _ NIL,
  lineTable: SEQUENCE linesInCache: NAT OF HashEntry
  ];

CacheStats: TYPE = RECORD [
  probes: INT _ 0,
  readProbes: INT _ 0,
  writeProbes: INT _ 0,
  writeMisses: INT _ 0,
  writeClean: INT _ 0,
  chunkMisses: INT _ 0,
  lineMisses: INT _ 0,
  jumpMisses: INT _ 0,
  mapMisses: INT _ 0,
  dirtyWrites: INT _ 0,
  missCount: ARRAY [1..maxQuadsPerLine] OF INT _ ALL[0]
  ];

HashLim: CARDINAL = 512;
HashVector: TYPE = REF HashVectorRep;
HashVectorRep: TYPE = ARRAY [0..HashLim) OF HashEntry;

LogBaseTwo: PROC [n: NAT] RETURNS [NAT] = {
  IF n = 1 THEN RETURN[0] ELSE RETURN[1+LogBaseTwo[n/2]];
  };

NewCache: PUBLIC PROC [lines: NAT _ 100, quadsPerLine: NAT _ 2, wordsPerQuad: NAT _ 4,
  lru: BOOL _ FALSE, realCache, mapCache: CacheModels.Cache _ NIL]
  RETURNS [cache: CacheModels.Cache] = {
  <>
  IF quadsPerLine > maxQuadsPerLine THEN quadsPerLine _ maxQuadsPerLine;
  IF lines > maxLines THEN lines _ maxLines;
  cache _ NEW[CacheModels.CacheRec _ [
    private: NEW[CacheDataRep[lines] _ [
      quadsPerLine: quadsPerLine,
      wordsPerQuad: wordsPerQuad,
      logWordsPerQuad: LogBaseTwo[wordsPerQuad],
      lru: lru,
      realCache: realCache,
      mapCache: mapCache,
      lineTable: NULL]],
    fetch: Fetch,
    store: Store,
    reset: Reset,
    flush: Flush,
    print: Print,
    data: NIL]];
  Reset[cache];
  };

Reset: PUBLIC CacheModels.CacheResetProc -- [cache: CacheModels.Cache] -- = {
  <>
  private: CacheData = NARROW[cache.private];
  private.pageList _ NIL;
  private.hashVector _ NEW[HashVectorRep _ ALL[NIL]];
  private.victimIndex _ 0;
  private.pageEntryCount _ 0;
  FOR i: NAT IN [0..private.linesInCache) DO
    private[i] _ NEW[HashEntryRep _ [index: i]];
    ENDLOOP;
  private.stats _ [];  -- zero out statistics
  IF private.realCache # NIL THEN private.realCache.reset[private.realCache];
  IF private.mapCache # NIL THEN private.mapCache.reset[private.mapCache];
  };
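<<Added usage sketch, not part of the original module: one plausible way a
client could build a cache with NewCache and drive it through the record
fields installed above (fetch, store, print). The stream argument and the
128/4/4 geometry are hypothetical.>>
<<
SampleUse: PROC [stream: IO.STREAM] = {
  cache: CacheModels.Cache = NewCache[lines: 128, quadsPerLine: 4, wordsPerQuad: 4, lru: TRUE];
  cache.fetch[cache, ZerosWord];  -- read probe of address 0
  cache.store[cache, AddDelta[1, ZerosWord]];  -- write probe of address 1, in the same line
  cache.print[cache, stream, "sample"];  -- report the statistics accumulated above
  };
>>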
Flush: PUBLIC CacheModels.CacheFlushProc -- [cache: CacheModels.Cache] -- = {
  <>
  private: CacheData = NARROW[cache.private];
  private.pageList _ NIL;
  private.hashVector^ _ ALL[NIL];
  private.victimIndex _ 0;
  private.pageEntryCount _ 0;
  FOR i: NAT IN [0..private.linesInCache) DO
    private[i]^ _ [index: i];
    ENDLOOP;
  IF private.realCache # NIL THEN private.realCache.flush[private.realCache];
  IF private.mapCache # NIL THEN private.mapCache.flush[private.mapCache];
  };

Print: PUBLIC CacheModels.CachePrintProc -- [cache: CacheModels.Cache, stream: STREAM, name: ROPE, resetStatistics: BOOL _ TRUE] -- = {
  Realn: PROC [r: REAL, n: NAT] RETURNS [IO.Value] = {
    RETURN [[rope[Convert.RopeFromReal[r, n]]]]};
  private: CacheData = NARROW[cache.private];
  quadMisses: INT _ private.stats.chunkMisses;
  misses: INT _ quadMisses + private.stats.lineMisses;
  probes: INT _ private.stats.probes;
  rProbes: REAL _ probes;
  rMisses: REAL _ misses;
  stream.PutF[
    "\nStats for %g (associative, lines: %g, quads/line: %g, words/quad: %g)\n probes: %g",
    [rope[IF private.lru THEN Rope.Cat[name, " (pseudo-lru)"] ELSE name]],
    [integer[private.linesInCache]],
    [integer[private.quadsPerLine]],
    [integer[private.wordsPerQuad]],
    [integer[probes]]];
  IF probes = 0 THEN RETURN;
  stream.PutF[
    " (read: %g, write: %g)\n",
    [integer[private.stats.readProbes]],
    [integer[private.stats.writeProbes]]];
  stream.PutF[
    " write misses: %g, writes to clean quads: %g\n",
    [integer[private.stats.writeMisses]],
    [integer[private.stats.writeClean]]];
  stream.PutF[
    " misses: %g (%g (%g%%) existing line)\n",
    [integer[misses]],
    [integer[private.stats.chunkMisses]],
    IF misses = 0 THEN [rope["-"]] ELSE Realn[(quadMisses/rMisses)*100, 3]];
  IF private.stats.jumpMisses # 0 THEN
    stream.PutF[
      " misses caused by jumps: %g (%g%% of probes)\n",
      [integer[private.stats.jumpMisses]],
      Realn[(private.stats.jumpMisses/rProbes)*100, 3]];
  IF probes # 0 THEN
    stream.PutF[" miss rate: %g%%\n", Realn[(misses/rProbes)*100, 3]];
  stream.PutF[
    " map misses: %g (%g%%)\n dirty writes: %g\n",
    [integer[private.stats.mapMisses]],
    Realn[(private.stats.mapMisses/rProbes)*100, 3],
    [integer[private.stats.dirtyWrites]]];
  IF misses # 0 THEN {
    -- the occupancy percentages divide by rMisses, so skip them when there were no misses
    stream.PutF[" line occupancy:\n"];
    FOR i: INT IN [1..private.quadsPerLine] DO
      stream.PutF[" %g quad(s)/line: %g\n",
        [integer[i]],
        Realn[(private.stats.missCount[i]/rMisses)*100, 3]];
      ENDLOOP;
    };
  };

Fetch: PUBLIC CacheModels.CacheFetchProc -- [cache: CacheModels.Cache, addr: DragOpsCross.Word, fromJump: BOOL _ FALSE] -- = {
  Access[cache, addr, fromJump, FALSE];
  };

Store: PUBLIC CacheModels.CacheStoreProc -- [cache: CacheModels.Cache, addr: DragOpsCross.Word] -- = {
  Access[cache, addr, FALSE, TRUE];
  };
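<<Added note on the hash used by Access below: the two halves of the line
address are folded together with XOR, the high byte of that half-word is then
folded into the low byte, and the result is reduced MOD HashLim (512), so
only the low 9 bits select a bucket. A worked example, assuming 16-bit
halves 1234H and 0ABCDH:
  1234H XOR 0ABCDH = 0B9F9H;
  0B9F9H XOR 000B9H = 0B940H;  -- fold in the half-word shifted right 8
  0B940H MOD 200H = 140H;      -- bucket 320
Folding the high byte down mixes high-order address bits into the 9 bits
that survive the MOD.>>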
Access: PROC [cache: Cache, addr: Word, fromJump, write: BOOL] = {
  data: CacheData = NARROW[cache.private];
  oldEntry: BOOL _ TRUE;
  victim: CARDINAL _ data.victimIndex;
  wordsPerLine: NAT = data.quadsPerLine * data.wordsPerQuad;
  indexInLine: NAT = HalfToCard[LowHalf[addr]] MOD wordsPerLine;
  chunk: QuadIndex = Basics.BITSHIFT[indexInLine, -data.logWordsPerQuad];
  lineAddr: Word = AddDelta[-INT[indexInLine], addr];
  hashIndex: CARDINAL;
  hashEntry: HashEntry;
  halfHash: Half _ HalfXor[WordToHalves[lineAddr][0], WordToHalves[lineAddr][1]];
  data.stats.probes _ data.stats.probes + 1;
  IF write
    THEN data.stats.writeProbes _ data.stats.writeProbes + 1
    ELSE data.stats.readProbes _ data.stats.readProbes + 1;
  halfHash _ HalfXor[halfHash, HalfShift[halfHash, -8]];
  hashIndex _ HalfToCard[halfHash] MOD HashLim;
  hashEntry _ data.hashVector[hashIndex];
  <>
  WHILE hashEntry # NIL DO
    IF hashEntry.lineAddr = lineAddr THEN {
      IF data.lru THEN hashEntry.referenced[chunk] _ TRUE;
      IF write THEN {
        IF NOT hashEntry.dirty[chunk] THEN data.stats.writeClean _ data.stats.writeClean + 1;
        hashEntry.dirty[chunk] _ TRUE;
        };
      IF ~hashEntry.chunkPresent[chunk] THEN {
        nPresentBits: NAT _ 0;
        IF data.realCache # NIL THEN data.realCache.fetch[data.realCache, addr];
        IF write THEN data.stats.writeMisses _ data.stats.writeMisses + 1;
        IF fromJump THEN data.stats.jumpMisses _ data.stats.jumpMisses + 1;
        data.stats.chunkMisses _ data.stats.chunkMisses + 1;
        hashEntry.chunkPresent[chunk] _ TRUE;
        <>
        FOR i: INT IN [0..data.quadsPerLine) DO
          IF hashEntry.chunkPresent[i] THEN nPresentBits _ nPresentBits+1;
          ENDLOOP;
        data.stats.missCount[nPresentBits] _ data.stats.missCount[nPresentBits]+1;
        };
      RETURN;
      };
    hashEntry _ hashEntry.next;
    ENDLOOP;
  <>
  IF fromJump THEN data.stats.jumpMisses _ data.stats.jumpMisses + 1;
  data.stats.lineMisses _ data.stats.lineMisses + 1;
  IF write THEN data.stats.writeMisses _ data.stats.writeMisses + 1;
  data.stats.missCount[1] _ data.stats.missCount[1]+1;
  {
    <>
    indexInPage: CARDINAL = HalfToCard[LowHalf[lineAddr]] MOD wordsPerPage;
    pageAddr: Word = AddDelta[-INT[indexInPage], lineAddr];
    pageId: Word _ SingleWordShiftRight[pageAddr, CacheModels.logWordsPerPage];
    FOR pageEntry: PageEntry _ data.pageList, pageEntry.next WHILE pageEntry # NIL DO
      IF pageEntry.pageAddr = pageAddr THEN GOTO Found;
      ENDLOOP;
    data.stats.mapMisses _ data.stats.mapMisses + 1;
    IF data.mapCache # NIL THEN data.mapCache.fetch[data.mapCache, pageId];
    EXITS Found => NULL;
    };
  {
    <>
    hashEntry _ data.lineTable[victim];
    IF data.lru THEN {
      WHILE hashEntry.referenced # None DO
        hashEntry.referenced _ None;
        victim _ victim+1;
        IF victim = data.linesInCache THEN victim _ 0;
        hashEntry _ data.lineTable[victim];
        ENDLOOP;
      data.victimIndex _ victim;
      };
    };
  {
    <>
    lag: PageEntry _ NIL;
    oldLineAddr: Word = hashEntry.lineAddr;
    oldIndexInPage: CARDINAL = HalfToCard[LowHalf[oldLineAddr]] MOD wordsPerPage;
    oldPageAddr: Word = AddDelta[-INT[oldIndexInPage], oldLineAddr];
    headHashEntry: HashEntry;
    oldHashIndex: CARDINAL;
    oldHalfHash: Half _ HalfXor[
      WordToHalves[oldLineAddr][0], WordToHalves[oldLineAddr][1]];
    oldHalfHash _ HalfXor[oldHalfHash, HalfShift[oldHalfHash, -8]];
    oldHashIndex _ HalfToCard[oldHalfHash] MOD HashLim;
    headHashEntry _ data.hashVector[oldHashIndex];
    <>
    IF headHashEntry = hashEntry
      THEN data.hashVector[oldHashIndex] _ hashEntry.next
      ELSE WHILE headHashEntry # NIL DO
        IF hashEntry = headHashEntry.next THEN {
          headHashEntry.next _ hashEntry.next; EXIT};
        headHashEntry _ headHashEntry.next;
        ENDLOOP;
    <>
    FOR pageEntry: PageEntry _ data.pageList, pageEntry.next WHILE pageEntry # NIL DO
      IF pageEntry.pageAddr = oldPageAddr THEN {
        <>
        IF (pageEntry.useCount _ pageEntry.useCount - 1) <= 0 THEN {
          <>
          IF lag = NIL
            THEN data.pageList _ pageEntry.next
            ELSE lag.next _ pageEntry.next;
          data.pageEntryCount _ data.pageEntryCount - 1;
          pageEntry.next _ data.freePageList;
          data.freePageList _ pageEntry;
          };
        EXIT
        };
      lag _ pageEntry;
      ENDLOOP;
    IF hashEntry.dirty # None THEN {
      FOR i: NAT IN [0..data.quadsPerLine) DO
        IF hashEntry.dirty[i] THEN {
          IF data.realCache # NIL THEN data.realCache.store[data.realCache, AddDelta[i, oldLineAddr]];
          data.stats.dirtyWrites _ data.stats.dirtyWrites + 1;
          };
        ENDLOOP;
      hashEntry.dirty _ None;
      };
    };
  <>
  {
    indexInPage: CARDINAL = HalfToCard[LowHalf[lineAddr]] MOD wordsPerPage;
    pageAddr: Word = AddDelta[-INT[indexInPage], lineAddr];
    pageEntry: PageEntry _ data.pageList;
    <>
    hashEntry.next _ data.hashVector[hashIndex];
    data.hashVector[hashIndex] _ hashEntry;
    hashEntry.lineAddr _ lineAddr;
    hashEntry.chunkPresent _ None;
    hashEntry.chunkPresent[chunk] _ TRUE;
    IF data.realCache # NIL THEN data.realCache.fetch[data.realCache, addr];
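    -- At this point the victim line has been rechained into the hash bucket
    -- for the new lineAddr and holds only the chunk that missed. The loop
    -- below credits the page containing lineAddr with one more resident
    -- line; if that page is not yet on pageList, a PageEntry is taken from
    -- freePageList (or freshly allocated) for it.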
    WHILE pageEntry # NIL DO
      IF pageEntry.pageAddr = pageAddr THEN {
        <>
        pageEntry.useCount _ pageEntry.useCount + 1;
        GO TO oldEntry;
        };
      pageEntry _ pageEntry.next;
      ENDLOOP;
    <>
    data.pageEntryCount _ data.pageEntryCount + 1;
    pageEntry _ data.freePageList;
    IF pageEntry = NIL
      THEN pageEntry _ NEW[PageEntryRep]
      ELSE data.freePageList _ pageEntry.next;
    pageEntry^ _ [next: data.pageList, pageAddr: pageAddr, useCount: 1];
    data.pageList _ pageEntry;
    EXITS oldEntry => NULL;
    };
  IF write THEN {
    IF NOT hashEntry.dirty[chunk] THEN data.stats.writeClean _ data.stats.writeClean + 1;
    hashEntry.dirty[chunk] _ TRUE;
    };
  <>
  victim _ victim + 1;
  data.victimIndex _ IF victim = data.linesInCache THEN 0 ELSE victim;
  };

END.

<>
<>
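<<Added note on victim selection, not in the original file: when lru = TRUE
the WHILE loop in Access implements a second-chance clock rather than true
LRU. Stripped of the record plumbing, the loop behaves like this, where
referenced is the per-line vector of reference bits:

  WHILE lineTable[victim].referenced # None DO
    lineTable[victim].referenced _ None;  -- clear the bits: one more chance
    victim _ victim + 1;
    IF victim = linesInCache THEN victim _ 0;  -- wrap the clock hand
    ENDLOOP;

A line is evicted only when the hand comes back to it with no probe having
set any of its reference bits in the interim; with lru = FALSE the hand
simply advances one line per miss (see the end of Access).>>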