<> <> <> <> <>

DIRECTORY
  BTree USING [Compare, Entry, EntryFromRecord, EntrySize, EntSize, Error, Key, PageNumber, Record, ReferencePage, ReleasePage, UpdateState, UpdateType],
  BTreeInternal USING [AdjustTreeState, AllocatePage, AssignRefESR, BackUpAndRemoveEntry, BackUpOneEntry, BTreeEntry, BTreeEntrySize, BTreePagePtr, Bug, Compare, entry0Offset, entry1Offset, EntryFromRecord, EntryOrdinal, entryOverhead, EntrySize, EntryTable, EntryTableRec, EntSeqRecord, FreePage, freePageMarker, GetDefaultPathStk, GetHeapAndTable, Heap, HeapIndex, InsertRecords, Lock, maxLevelsInTree, nilOffset, nilPage, PathEntryLE, PathStk, PathStkEntry, PathStkIndex, PathStkObject, PathToMaxDescendant, ReferencePage, ReferenceStack, ReleasePage, RemoveEntry, RepairOffsets, ReturnDefaultPathStk, ReturnHeapAndTable, Tree, TreeObject, Unlock, WriteStatePage],
  PrincOpsUtils USING [LongCopy, LongMove];

BTreeWrite: PROGRAM
  IMPORTS BTree, BTreeInternal, PrincOpsUtils
  EXPORTS BTree, BTreeInternal = {
  OPEN BTree, BTreeInternal;

  CARD: TYPE = LONG CARDINAL;

  <>

  Tree: TYPE = REF TreeObject;
  TreeObject: PUBLIC TYPE = BTreeInternal.TreeObject;
  PathStk: TYPE = REF PathStkObject;
  PathStkObject: PUBLIC TYPE = BTreeInternal.PathStkObject;
  PathStkEntryPtr: TYPE = LONG POINTER TO PathStkEntry;
  BTreeEntryPtr: TYPE = LONG POINTER TO BTreeEntry;
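  <<DeleteKey deletes the entry whose key exactly matches key, if one exists, and returns whether it was found. Its local procedure FatherMayNeedWork is the bottom-up clean-up step: it first drains any pending EntSeqRecords into the page at the top of the path stack, discards the root page if it has become empty (reducing the tree depth), and otherwise, when the page has dropped below tree.prettyFull, folds the page's remaining entries, together with a separating father entry, into an adjacent brother page (the right brother when the father entry is not the last one on its page, otherwise the left brother) and frees the page. Its result reports whether the father's level may now need the same treatment.>>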
  DeleteKey: PUBLIC SAFE PROC [tree: Tree, key: Key, pathStk: PathStk _ NIL, useExistingPath: BOOL _ FALSE] RETURNS [found: BOOL] = TRUSTED {

    FatherMayNeedWork: PROC RETURNS [needsWork: BOOL] = {
      <>
      pagePtr, otherPtr: BTreePagePtr;
      fatherPSE: PathStkEntryPtr;
      fatherFreeWords: CARDINAL;
      pse _ @pathStk.path[pathStk.top];
      IF pse.eslFront=NIL THEN needsWork _ FALSE
      ELSE {
        tree.InsertRecords[pathStk];
        needsWork _ pathStk.top#0 AND pathStk.path[pathStk.top-1].eslFront#NIL;
        };
      pagePtr _ tree.ReferencePage[pse.pageNumber];
      IF pathStk.top=1 AND pagePtr.freeWords=tree.maxFreeWords THEN {
        <>
        tree.state.rootPage _ pagePtr.minPage;
        tree.state.depth _ tree.state.depth-1;
        tree.ReleasePage[pse.pageNumber];
        tree.FreePage[pse.pageNumber];
        RETURN [FALSE];
        };
      IF pathStk.top=1 OR tree.maxFreeWords-pagePtr.freeWords >= tree.prettyFull THEN {
        tree.ReleasePage[pse.pageNumber];
        RETURN;
        };
      <>
      AppendEntSeqRecord[pse: pse, esr: MakeEntSeqRecord[entSeq: @pagePtr.entries, length: tree.maxFreeWords-pagePtr.freeWords]];
      fatherPSE _ @pathStk.path[pathStk.top-1];
      otherPtr _ tree.ReferencePage[fatherPSE.pageNumber];
      fatherFreeWords _ otherPtr.freeWords;
      tree.ReleasePage[fatherPSE.pageNumber];
      IF fatherPSE.offset < nilOffset+(tree.state.pageSize-fatherFreeWords) OR fatherPSE.eslFront#NIL THEN {
        <>
        rtBroPg: PageNumber;
        esr: REF EntSeqRecord;
        [esr: esr, grPage: rtBroPg] _ tree.RemoveEntry[fatherPSE];
        AppendEntSeqRecord[pse: pse, esr: esr];
        otherPtr _ tree.ReferencePage[fatherPSE.pageNumber, write];
        IF otherPtr[fatherPSE.lastOffset].grPage # pse.pageNumber THEN ERROR Bug[mcCreightWasWrong];
        otherPtr[fatherPSE.lastOffset].grPage _ rtBroPg;
        tree.ReleasePage[fatherPSE.pageNumber];
        otherPtr _ tree.ReferencePage[rtBroPg, write];
        otherPtr.minPage _ pagePtr.minPage;
        tree.ReleasePage[rtBroPg];
        tree.ReleasePage[pse.pageNumber];
        tree.FreePage[pse.pageNumber];
        pse.pageNumber _ rtBroPg;
        pse.offset _ entry1Offset;
        pse.lastOffset _ entry0Offset;
        pse.nextToLastOffset _ nilOffset;
        }
      ELSE {
        <>
        esr: REF EntSeqRecord;
        tree.ReleasePage[pse.pageNumber];
        [esr: esr] _ tree.BackUpAndRemoveEntry[fatherPSE];
        PushEntSeqRecord[pse: pse, esr: esr];
        tree.FreePage[pse.pageNumber];
        otherPtr _ tree.ReferencePage[fatherPSE.pageNumber];
        pse.pageNumber _ otherPtr[fatherPSE.lastOffset].grPage;
        tree.ReleasePage[fatherPSE.pageNumber];
        pagePtr _ tree.ReferencePage[pse.pageNumber];
        pse.offset _ nilOffset+(tree.state.pageSize-pagePtr.freeWords);
        pse.lastOffset _ entry1Offset;
        pse.nextToLastOffset _ entry0Offset;
        tree.ReleasePage[pse.pageNumber];
        tree.RepairOffsets[pse];
        };
      IF pse.eslFront#NIL THEN tree.InsertRecords[pathStk];
      RETURN [TRUE];
      };

    pathStkWasNil: BOOL _ pathStk=NIL;
    pse: PathStkEntryPtr;
    tree.Lock[update];
    IF pathStkWasNil THEN {
      IF useExistingPath THEN ERROR Error[nilPathStk];
      pathStk _ tree.GetDefaultPathStk[];
      };
    {
      <>
      ENABLE UNWIND => {
        IF pathStkWasNil THEN tree.ReturnDefaultPathStk[pathStk];
        tree.Unlock[];
        };
      origStkTop: PathStkIndex;
      pagePtr: BTreePagePtr _ NIL;
      descendantPg: PageNumber;
      simpleDelete: BOOL;
      <>
      equal: BOOL _ tree.PathEntryLE[key: key, pathStk: pathStk, useExistingPath: useExistingPath].equal;
      IF ~equal THEN {
        IF pathStkWasNil THEN tree.ReturnDefaultPathStk[pathStk];
        tree.Unlock[];
        RETURN [FALSE];
        };
      origStkTop _ pathStk.top;
      [ptr: pagePtr, pse: pse] _ tree.ReferenceStack[pathStk];
      descendantPg _ pagePtr[pse.nextToLastOffset].grPage;
      simpleDelete _ descendantPg=nilPage AND tree.maxFreeWords - (pagePtr.freeWords + tree.BTreeEntrySize[@pagePtr[pse.lastOffset]]) >= tree.prettyFull;
      tree.ReleasePage[pse.pageNumber];
      tree.version _ tree.version+1;
      <>
      <>
      tree.BackUpOneEntry[pse]; -- pse.offset should index deletion victim
      IF simpleDelete THEN {
        tree.AdjustTreeState[update: unchanged, deltaEntryCount: -1];
        pagePtr _ tree.ReferencePage[pse.pageNumber, write];
        [] _ tree.RemoveEntry[pse];
        tree.ReleasePage[pse.pageNumber, IF tree.longUpdate THEN unchanged ELSE endOfUpdate];
        }
      ELSE {
        tree.AdjustTreeState[update: startOfUpdate, deltaEntryCount: -1];
        <>
        tree.PathToMaxDescendant[pathStk: pathStk, page: descendantPg];
        IF pathStk.top > origStkTop THEN {
          dpse: PathStkEntryPtr = @pathStk.path[pathStk.top];
          leafESR: REF EntSeqRecord _ tree.BackUpAndRemoveEntry[dpse].esr;
          [grPage: leafESR.entSeqP.grPage] _ tree.RemoveEntry[pse];
          <>
          AppendEntSeqRecord[pse: pse, esr: leafESR];
          }
        ELSE [] _ tree.RemoveEntry[pse];
        tree.GetHeapAndTable[pathStk];
        DO
          needsWork: BOOL _ FatherMayNeedWork[ ! UNWIND => tree.ReturnHeapAndTable[pathStk]];
          IF pathStk.top=0 OR (~needsWork AND pathStk.top<=origStkTop) THEN EXIT ELSE pathStk.top _ pathStk.top-1;
          ENDLOOP;
        pathStk.top _ 0;
        tree.ReturnHeapAndTable[pathStk];
        tree.AdjustTreeState[update: endOfUpdate, deltaEntryCount: 0];
        };
      };
    IF pathStkWasNil THEN tree.ReturnDefaultPathStk[pathStk];
    tree.Unlock[];
    RETURN [TRUE];
    };
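  <<UpdateRecord inserts or replaces the entry for key by copying the caller's Record; it is a thin wrapper that passes a ProduceEntry procedure to UpdateEntry. UpdateEntry does the real work: it locates the key with PathEntryLE, checks updateType against whether an entry was found, and then either overwrites an existing entry of the same size in place, slides the page contents apart when the new entry fits in the page's free space, or queues the new entry as an EntSeqRecord for InsertRecords to place, splitting pages as needed. The caller's Proc must produce an entry of exactly the promised size and key, which CallEntryProc verifies.>>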
  UpdateRecord: PUBLIC SAFE PROC [tree: Tree, key: Key, pathStk: PathStk _ NIL, useExistingPath: BOOL _ FALSE, record: Record, updateType: UpdateType _ insertOrReplace] = TRUSTED {
    ProduceEntry: PROC [entry: Entry] = {
      PrincOpsUtils.LongCopy[to: entry, from: tree.EntryFromRecord[record], nwords: words];
      };
    words: EntSize = tree.EntrySize[tree.EntryFromRecord[record]];
    UpdateEntry[tree: tree, key: key, pathStk: pathStk, useExistingPath: useExistingPath, words: words, Proc: ProduceEntry, updateType: updateType];
    };

  UpdateEntry: PUBLIC PROC [tree: Tree, key: Key, pathStk: PathStk _ NIL, useExistingPath: BOOL _ FALSE, words: EntSize, Proc: UNSAFE PROC [entry: Entry], updateType: UpdateType _ insertOrReplace] = {
    CallEntryProc: PROC [entry: Entry] = {
      Proc[entry];
      IF tree.EntrySize[entry]#words OR tree.Compare[key, entry]#equal THEN ERROR Error[wrongEntryProduced];
      };
    pathStkWasNil: BOOL _ pathStk=NIL;
    tree.Lock[update];
    IF pathStkWasNil THEN {
      IF useExistingPath THEN ERROR Error[nilPathStk];
      pathStk _ tree.GetDefaultPathStk[];
      };
    <>
    {
      ENABLE UNWIND => {
        IF pathStkWasNil THEN tree.ReturnDefaultPathStk[pathStk];
        tree.Unlock[];
        };
      leafStkTop: PathStkIndex;
      equal: BOOL;
      pse: PathStkEntryPtr;
      pagePtr: BTreePagePtr _ NIL;
      foundEntSize: CARDINAL _ 0; -- zero means there is not an existing entry with this key
      IF CARDINAL[words+entryOverhead] NOT IN [1+entryOverhead..tree.maxFreeWords] THEN ERROR Error[entrySizesWrong];
      [equal: equal, depth: leafStkTop] _ tree.PathEntryLE[key: key, pathStk: pathStk, useExistingPath: useExistingPath];
      IF equal THEN {
        IF updateType=insert THEN ERROR Error[wrongUpdateType];
        [pse: pse, ptr: pagePtr] _ tree.ReferenceStack[pathStk];
        foundEntSize _ tree.EntrySize[@pagePtr[pse.lastOffset].entry];
        tree.ReleasePage[pse.pageNumber];
        }
      ELSE IF updateType=replace THEN ERROR Error[wrongUpdateType];
      <>
      <<1. If replacing an existing entry of the same size, just overwrite it.>>
      <<2. If the new entry fits on the page (after removing the old entry if any), just slide up the entries beyond the insertion point and insert the new entry.>>
      <<3. Otherwise, leave the new entry as an EntSeqRecord at the appropriate stack level, and let InsertRecords cope with the problem.>>
      <>
      tree.version _ tree.version+1; -- invalidate existing PathStks that refer to this tree
      pse _ @pathStk.path[pathStk.top];
      IF words=foundEntSize THEN {
        <>
        tree.AdjustTreeState[update: unchanged, deltaEntryCount: 0];
        pagePtr _ tree.ReferencePage[pse.pageNumber, write];
        CallEntryProc[@pagePtr[pse.lastOffset].entry];
        tree.ReleasePage[pse.pageNumber, IF tree.longUpdate THEN unchanged ELSE endOfUpdate];
        }
      ELSE {
        removedEntGrPage: PageNumber _ nilPage;
        newEntryFits: BOOL _ FALSE;
        IF foundEntSize=0 THEN {
          <>
          pathStk.top _ leafStkTop;
          pse _ @pathStk.path[pathStk.top];
          };
        <>
        IF pathStk.top>0 THEN {
          pagePtr _ tree.ReferencePage[pse.pageNumber];
          newEntryFits _ CARDINAL[words+entryOverhead] <= CARDINAL[pagePtr.freeWords + (IF foundEntSize=0 THEN 0 ELSE foundEntSize+entryOverhead)];
          tree.ReleasePage[pse.pageNumber];
          };
        tree.AdjustTreeState[update: IF newEntryFits THEN unchanged ELSE startOfUpdate, deltaEntryCount: IF foundEntSize=0 THEN 1 ELSE 0];
        IF pathStk.top>0 THEN pagePtr _ tree.ReferencePage[pse.pageNumber, write];
        IF foundEntSize#0 THEN
          <>
          [grPage: removedEntGrPage] _ tree.BackUpAndRemoveEntry[pse];
        IF newEntryFits THEN {
          <>
          entPtr: BTreeEntryPtr _ @pagePtr[pse.offset];
          PrincOpsUtils.LongMove[to: entPtr+words+entryOverhead, from: entPtr, nwords: nilOffset+(tree.state.pageSize-pagePtr.freeWords)-pse.offset];
          CallEntryProc[@entPtr.entry];
          entPtr.grPage _ removedEntGrPage;
          pagePtr.freeWords _ pagePtr.freeWords - (words+entryOverhead);
          tree.ReleasePage[pse.pageNumber, IF tree.longUpdate THEN unchanged ELSE endOfUpdate];
          }
        ELSE {
          <>
          esr: REF EntSeqRecord _ NEW[EntSeqRecord[words+entryOverhead]];
          esr.entSeqP _ LOOPHOLE[BASE[DESCRIPTOR[esr.entSeq]]];
          esr.entSeqLen _ words+entryOverhead;
          CallEntryProc[@esr.entSeqP.entry];
          esr.entSeqP.grPage _ removedEntGrPage;
          AppendEntSeqRecord[pse: pse, esr: esr];
          IF pathStk.top>0 THEN tree.ReleasePage[pse.pageNumber];
          tree.GetHeapAndTable[pathStk];
          WHILE pathStk.path[pathStk.top].eslFront#NIL DO
            tree.InsertRecords[pathStk ! UNWIND => tree.ReturnHeapAndTable[pathStk]];
            IF pathStk.top=0 THEN EXIT ELSE pathStk.top _ pathStk.top-1;
            ENDLOOP;
          tree.ReturnHeapAndTable[pathStk];
          tree.AdjustTreeState[update: endOfUpdate, deltaEntryCount: 0];
          };
        };
      };
    IF pathStkWasNil THEN tree.ReturnDefaultPathStk[pathStk];
    tree.Unlock[];
    };
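  <<A minimal client sketch of the public operations defined above (illustrative only; myTree, myKey, and myRecord are hypothetical values obtained elsewhere through the BTree interface):>>
  <<  UpdateRecord[tree: myTree, key: myKey, record: myRecord, updateType: insertOrReplace];>>
  <<  IF DeleteKey[tree: myTree, key: myKey].found THEN ... -- the entry was present and has now been removed>>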
  SetUpdateInProgress: PUBLIC SAFE PROC [tree: Tree, updateInProgress: BOOL] = TRUSTED {
    tree.Lock[update];
    tree.longUpdate _ updateInProgress;
    tree.AdjustTreeState[update: IF updateInProgress THEN startOfUpdate ELSE endOfUpdate, deltaEntryCount: 0];
    tree.Unlock[];
    };

  <>

  AdjustTreeState: PUBLIC PROC [tree: Tree, update: UpdateState, deltaEntryCount: INTEGER] = {
    IF tree.maintainRecomputableState THEN {
      <>
      IF tree.state.entryCount#LAST[CARD] THEN tree.state.entryCount _ tree.state.entryCount+deltaEntryCount;
      IF update#unchanged AND ~(tree.state.updateInProgress AND tree.longUpdate) THEN {
        tree.state.updateInProgress _ update=startOfUpdate OR tree.longUpdate;
        tree.WriteStatePage[update: update];
        };
      }
    ELSE IF deltaEntryCount#0 AND tree.state.entryCount#LAST[CARD] THEN {
      <>
      tree.state.entryCount _ LAST[CARD];
      tree.WriteStatePage[];
      };
    };
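  <<InsertRecords empties the list of pending EntSeqRecords at the current path-stack level into the tree. It first tabulates the cumulative entry sizes (AppendEntSeqLengths), then: at level 0 it allocates a new root page and deepens the tree; otherwise, if the pending words fit in the page's free space, it opens a hole at pse.offset and deposits them there; otherwise it folds the page's existing entries into the pending list and calls ComplexInsertRecords to split the entries across two pages, falling back to HairyInsertRecords for a three-page split.>>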
  InsertRecords: PUBLIC PROC [tree: Tree, pathStk: PathStk] = {
    pse: PathStkEntryPtr = @pathStk.path[pathStk.top];
    IF pse.eslFront#NIL THEN {
      pathStk.entryTable.length _ 0;
      pathStk.entryTable.map[0].cumEntSize _ 0;
      FOR esr: REF EntSeqRecord _ pse.eslFront, esr.fwdP UNTIL esr=NIL DO
        AppendEntSeqLengths[tree: tree, pathStk: pathStk, esr: esr];
        ENDLOOP;
      IF pathStk.top=0 THEN {
        <>
        wordsToInsert: CARDINAL = EntryIntervalSize[pathStk: pathStk, leftFather: 0];
        newRootPage: PageNumber = tree.AllocatePage[];
        pagePtr: BTreePagePtr = tree.ReferencePage[newRootPage, write];
        IF tree.state.depth >= maxLevelsInTree THEN ERROR Error[depthExceeded];
        pagePtr.minPage _ pathStk.path[0].leastSon;
        tree.ReleasePage[newRootPage];
        IF wordsToInsert > tree.maxFreeWords THEN ERROR Bug[newRootOverflow];
        WritePage[tree: tree, pse: @pathStk.path[0], number: newRootPage, words: wordsToInsert];
        tree.state.rootPage _ newRootPage;
        tree.state.depth _ tree.state.depth+1;
        }
      ELSE {
        pagePtr: BTreePagePtr = tree.ReferencePage[pse.pageNumber, write];
        tailBlkPtr: BTreeEntryPtr = @pagePtr[pse.offset];
        tailBlkLen: CARDINAL = (nilOffset+tree.state.pageSize-pagePtr.freeWords)-pse.offset;
        wordsToInsert: CARDINAL = EntryIntervalSize[pathStk: pathStk];
        IF wordsToInsert<=pagePtr.freeWords THEN {
          <>
          PrincOpsUtils.LongMove[to: tailBlkPtr+wordsToInsert, from: tailBlkPtr, nwords: nilOffset+(tree.state.pageSize-pagePtr.freeWords)-pse.offset];
          DepositESL[tree: tree, pse: pse, block: tailBlkPtr, length: wordsToInsert];
          pagePtr.freeWords _ pagePtr.freeWords-wordsToInsert;
          tree.ReleasePage[pse.pageNumber];
          }
        ELSE {
          <>
          rtBroPg1: PageNumber;
          esr: REF EntSeqRecord _ MakeEntSeqRecord[@pagePtr[entry1Offset], pse.offset-entry1Offset];
          PushEntSeqRecord[pse: pse, esr: esr];
          PushEntSeqLengths[tree: tree, pathStk: pathStk, esr: esr];
          esr _ MakeEntSeqRecord[tailBlkPtr, tailBlkLen];
          AppendEntSeqRecord[pse: pse, esr: esr];
          AppendEntSeqLengths[tree: tree, pathStk: pathStk, esr: esr];
          tree.ReleasePage[pse.pageNumber];
          rtBroPg1 _ ComplexInsertRecords[tree: tree, pathStk: pathStk];
          IF rtBroPg1#nilPage THEN HairyInsertRecords[tree: tree, pathStk: pathStk, rtBroPg1: rtBroPg1];
          };
        };
      IF pse.eslFront#NIL THEN ERROR Bug[entriesLeftOver];
      };
    };
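  <<The procedures below manage the EntSeqRecord lists (ESLs) hung off each path-stack entry: MakeEntSeqRecord copies a contiguous run of entries out of a page, AppendEntSeqRecord and PushEntSeqRecord add a record at the rear or front of the list, and RemoveEntry extracts the next entry either from the page itself or from the front of the ESL, returning its grPage and replacing that pointer in the extracted entry by the minPage of the subtree it led to. AllocatePage and FreePage maintain the free-page chain threaded through minPage and marked by freePageMarker.>>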
  MakeEntSeqRecord: PUBLIC PROC [entSeq: BTreeEntryPtr, length: CARDINAL] RETURNS [esr: REF EntSeqRecord] = {
    IF length=0 THEN RETURN [NIL];
    esr _ NEW[EntSeqRecord[length]];
    esr.entSeqP _ LOOPHOLE[BASE[DESCRIPTOR[esr.entSeq]]];
    esr.entSeqLen _ length;
    PrincOpsUtils.LongCopy[to: esr.entSeqP, from: entSeq, nwords: length];
    };

  AppendEntSeqRecord: PUBLIC PROC [pse: PathStkEntryPtr, esr: REF EntSeqRecord] = {
    IF esr#NIL THEN {
      esr.fwdP _ NIL;
      IF pse.eslRear=NIL THEN AssignRefESR[@pse.eslFront, esr] ELSE pse.eslRear.fwdP _ esr;
      AssignRefESR[@pse.eslRear, esr];
      };
    };

  PushEntSeqRecord: PUBLIC PROC [pse: PathStkEntryPtr, esr: REF EntSeqRecord] = {
    IF esr#NIL THEN {
      esr.fwdP _ pse.eslFront;
      AssignRefESR[@pse.eslFront, esr];
      IF pse.eslRear=NIL THEN AssignRefESR[@pse.eslRear, esr];
      };
    };

  RemoveEntry: PUBLIC PROC [tree: Tree, pse: PathStkEntryPtr, ignoreESL: BOOL _ FALSE] RETURNS [esr: REF EntSeqRecord _ NIL, grPage: PageNumber] = {
    IF ignoreESL OR pse.eslFront=NIL THEN {
      <>
      pagePtr: BTreePagePtr = tree.ReferencePage[pse.pageNumber, write];
      entSize: CARDINAL = tree.BTreeEntrySize[@pagePtr[pse.offset]];
      esr _ MakeEntSeqRecord[entSeq: @pagePtr[pse.offset], length: entSize];
      pagePtr.freeWords _ pagePtr.freeWords+entSize;
      PrincOpsUtils.LongCopy[to: @pagePtr[pse.offset], from: @pagePtr[pse.offset]+entSize, nwords: nilOffset+(tree.state.pageSize-pagePtr.freeWords)-pse.offset];
      tree.ReleasePage[pse.pageNumber];
      }
    ELSE {
      <>
      entSize: CARDINAL = tree.BTreeEntrySize[pse.eslFront.entSeqP];
      esr _ NEW[EntSeqRecord[entSize]];
      esr.entSeqP _ LOOPHOLE[BASE[DESCRIPTOR[esr.entSeq]]];
      esr.entSeqLen _ entSize;
      DepositESL[tree: tree, pse: pse, block: esr.entSeqP, length: entSize];
      };
    grPage _ esr.entSeqP.grPage;
    esr.entSeqP.grPage _ nilPage;
    IF grPage # nilPage THEN {
      pagePtr: BTreePagePtr = tree.ReferencePage[grPage];
      esr.entSeqP.grPage _ pagePtr.minPage;
      tree.ReleasePage[grPage];
      };
    };

  BackUpAndRemoveEntry: PUBLIC PROC [tree: Tree, pse: PathStkEntryPtr] RETURNS [esr: REF EntSeqRecord, grPage: PageNumber] = {
    tree.BackUpOneEntry[pse];
    [esr: esr, grPage: grPage] _ tree.RemoveEntry[pse: pse, ignoreESL: TRUE];
    };

  AllocatePage: PUBLIC SAFE PROC [tree: Tree] RETURNS [number: PageNumber] = TRUSTED {
    pagePtr: BTreePagePtr;
    IF tree.state.firstFreePage=nilPage THEN {
      number _ (tree.state.greatestPage _ tree.state.greatestPage+1);
      pagePtr _ tree.ReferencePage[number, new];
      }
    ELSE {
      number _ tree.state.firstFreePage;
      pagePtr _ tree.ReferencePage[number, write];
      IF pagePtr.freeWords#freePageMarker THEN ERROR Bug[pageNotFree];
      tree.state.firstFreePage _ pagePtr.minPage;
      };
    pagePtr.freeWords _ tree.maxFreeWords;
    tree.ReleasePage[number];
    };

  FreePage: PUBLIC SAFE PROC [tree: Tree, number: PageNumber] = TRUSTED {
    pagePtr: BTreePagePtr = tree.ReferencePage[number, write];
    IF pagePtr.freeWords=freePageMarker THEN ERROR Bug[pageAlreadyFree];
    pagePtr.freeWords _ freePageMarker;
    pagePtr.minPage _ tree.state.firstFreePage;
    tree.state.firstFreePage _ number;
    tree.ReleasePage[number];
    };

  LongMove: PUBLIC PROC [to, from: LONG POINTER, nWords: CARDINAL] = {
    PrincOpsUtils.LongMove[to: to, from: from, nwords: nWords];
    };

  <>

  AppendEntSeqLengths: PROC [tree: Tree, pathStk: PathStk, esr: REF EntSeqRecord] = {
    <>
    IF esr#NIL THEN {
      entryTable: REF EntryTable _ pathStk.entryTable;
      index: EntryOrdinal _ entryTable.length;
      wordsLeft: CARDINAL _ esr.entSeqLen;
      entry: BTreeEntryPtr _ esr.entSeqP;
      WHILE wordsLeft>0 DO
        entrySize: CARDINAL = tree.BTreeEntrySize[entry];
        index _ index+1;
        IF index >= entryTable.maxLength THEN ERROR Bug[tooManyEntriesInPage];
        entryTable.map[index].cumEntSize _ entryTable.map[index-1].cumEntSize+entrySize;
        entry _ entry+entrySize;
        wordsLeft _ wordsLeft-entrySize;
        ENDLOOP;
      entryTable.length _ index;
      };
    };
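  <<AppendEntSeqLengths (above) and PushEntSeqLengths (below) maintain pathStk.entryTable, which holds the cumulative sizes of the pending entries: map[0].cumEntSize is 0 and map[i].cumEntSize is the total size in words of entries 1 through i. EntryIntervalSize[leftFather: l, rightFather: r], defined below, is then map[r-1].cumEntSize-map[l].cumEntSize, the words occupied by the entries strictly between two prospective father entries; rightFather=0 means one past the last entry. For example, with three pending entries of (hypothetical) sizes 4, 6, and 5 words, the table holds 0, 4, 10, 15, so EntryIntervalSize[leftFather: 1, rightFather: 4] = 15-4 = 11, the combined size of entries 2 and 3.>>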
  PushEntSeqLengths: PROC [tree: Tree, pathStk: PathStk, esr: REF EntSeqRecord] = {
    <>
    IF esr#NIL THEN {
      entryTable: REF EntryTable = pathStk.entryTable;
      oldLen: EntryOrdinal = entryTable.length;
      tempFirstOldIndex: EntryOrdinal = entryTable.maxLength-oldLen;
      newLen: EntryOrdinal;
      delta: CARDINAL;
      <>
      PrincOpsUtils.LongMove[to: @entryTable.map[tempFirstOldIndex], from: @entryTable.map[1], nwords: oldLen*SIZE[EntryTableRec]];
      <>
      entryTable.length _ 0;
      AppendEntSeqLengths[tree: tree, pathStk: pathStk, esr: esr];
      newLen _ entryTable.length;
      IF newLen >= tempFirstOldIndex THEN ERROR Bug[tooManyEntriesInPage];
      entryTable.length _ newLen+oldLen;
      <>
      delta _ entryTable.map[newLen].cumEntSize;
      FOR i: EntryOrdinal IN [0..oldLen) DO
        entryTable.map[newLen+1+i].cumEntSize _ entryTable.map[tempFirstOldIndex+i].cumEntSize+delta;
        ENDLOOP;
      };
    };

  DepositESL: PROC [tree: Tree, pse: PathStkEntryPtr, block: BTreeEntryPtr, length: CARDINAL] = {
    <>
    WHILE length#0 AND pse.eslFront#NIL DO
      esr: REF EntSeqRecord _ pse.eslFront;
      entSeqP: BTreeEntryPtr = esr.entSeqP;
      IF esr.entSeqLen <= length THEN {
        PrincOpsUtils.LongCopy[to: block, from: entSeqP, nwords: esr.entSeqLen];
        block _ block+esr.entSeqLen;
        length _ length-esr.entSeqLen;
        AssignRefESR[@pse.eslFront, esr.fwdP];
        esr.fwdP _ NIL;
        }
      ELSE {
        firstEntSize: CARDINAL = tree.BTreeEntrySize[entSeqP];
        IF firstEntSize <= length THEN {
          PrincOpsUtils.LongCopy[to: block, from: entSeqP, nwords: firstEntSize];
          block _ block+firstEntSize;
          length _ length-firstEntSize;
          esr.entSeqP _ entSeqP+firstEntSize;
          esr.entSeqLen _ esr.entSeqLen-firstEntSize;
          }
        ELSE ERROR Bug[depositESL]; -- block would end in middle of entry
        };
      ENDLOOP;
    IF length#0 THEN ERROR Bug[depositESL]; -- ESL exhausted
    IF pse.eslFront=NIL THEN AssignRefESR[@pse.eslRear, NIL];
    };

  EntryIntervalSize: PROC [pathStk: PathStk, leftFather, rightFather: EntryOrdinal _ 0] RETURNS [words: CARDINAL] = {
    <>
    IF rightFather=0 THEN rightFather _ pathStk.entryTable.length+1;
    RETURN [pathStk.entryTable.map[rightFather-1].cumEntSize - pathStk.entryTable.map[leftFather].cumEntSize];
    };

  <>
  <>
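  <<ComplexInsertRecords is called when the pending entries no longer fit on one page: it obtains a second page (a right or left brother with room, otherwise a freshly allocated page), then searches the feasible split points for a father entry that divides the pending entries into two page-sized pieces, tracking the smallest candidate father entry in bestFatherIndex/bestFatherSize. It apparently returns nilPage when two pages sufficed; otherwise it returns the second page so that HairyInsertRecords can divide the entries across three pages instead. HairyInsertRecords keeps a heap of candidate father entries, ordered by size via TrickleDown, to pick the cheapest pair of break points, then writes the three pages (WritePage and two WriteRightBrother calls) and pushes the two father entries onto the father's ESL.>>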
  ComplexInsertRecords: PROC [tree: Tree, pathStk: PathStk] RETURNS [rtBroPg1: PageNumber] = {
    <>
    pse: PathStkEntryPtr = @pathStk.path[pathStk.top];
    fatherPSE: PathStkEntryPtr = @pathStk.path[pathStk.top-1];
    entryTable: REF EntryTable = pathStk.entryTable;
    oneBrotherEnough: BOOL _ FALSE;
    fatherIndex, bestFatherIndex: EntryOrdinal;
    bestFatherSize: CARDINAL _ tree.maxFreeWords+1;
    fatherPSE.leastSon _ pse.pageNumber; -- in case this is the root page splitting
    rtBroPg1 _ nilPage;
    IF pathStk.top>1 THEN {
      rtBroPg1 _ FindRightBrother[tree: tree, pathStk: pathStk, spaceNeeded: -tree.maxFreeWords];
      IF rtBroPg1=nilPage THEN
        <>
        rtBroPg1 _ FindLeftBrother[tree: tree, pathStk: pathStk, spaceNeeded: -tree.maxFreeWords];
      };
    IF rtBroPg1=nilPage THEN rtBroPg1 _ tree.AllocatePage[];
    <>
    IF entryTable.length<3 THEN ERROR Bug[tooFewEntries];
    <>
    fatherIndex _ FillLeftPage[tree: tree, pathStk: pathStk];
    <>
    DO
      pl0, pl1, fatherSize: CARDINAL;
      pl1 _ EntryIntervalSize[pathStk: pathStk, leftFather: fatherIndex];
      IF pl1 > tree.maxFreeWords THEN EXIT;
      pl0 _ EntryIntervalSize[pathStk: pathStk, rightFather: fatherIndex];
      IF pl0=0 OR pl0+pl1 > tree.maxFreeWords+tree.awfullyFull THEN EXIT;
      <>
      fatherSize _ IndexedEntrySize[pathStk: pathStk, index: fatherIndex];
      IF fatherSize>

    TrickleDown: PROC [emptyIndex: HeapIndex, entry: EntryOrdinal] = {
      sonSize: CARDINAL = IndexedEntrySize[pathStk: pathStk, index: entry];
      son: HeapIndex _ emptyIndex;
      DO
        father: HeapIndex _ son/2;
        fatherEnt: EntryOrdinal;
        IF father<=0 THEN EXIT;
        fatherEnt _ heap.entries[father];
        IF IndexedEntrySize[pathStk: pathStk, index: fatherEnt] <= sonSize THEN EXIT;
        heap.entries[son] _ fatherEnt;
        entryTable.map[fatherEnt].heapPos _ son;
        son _ father;
        ENDLOOP;
      heap.entries[son] _ entry;
      entryTable.map[entry].heapPos _ son;
      };

    entryTable: REF EntryTable = pathStk.entryTable;
    heap: REF Heap = pathStk.heap;
    pse: PathStkEntryPtr = @pathStk.path[pathStk.top];
    fatherPSE: PathStkEntryPtr = @pathStk.path[pathStk.top-1]; -- father's pse
    rtBroPg2: PageNumber;
    fatherIndex, fatherIndex2, bestFatherIndex, bestFatherIndex2: EntryOrdinal;
    minFeasIndex, maxFeasIndex: EntryOrdinal;
    bestFatherSizeSum: CARDINAL _ 2*tree.maxFreeWords + 1;
    twoBrothersEnough: BOOL _ FALSE;
    breakSize1, breakSize2, totalSize: CARDINAL;
    fatherESR: REF EntSeqRecord;
    <>
    fatherIndex _ FillLeftPage[tree: tree, pathStk: pathStk];
    fatherIndex2 _ FillLeftPage[tree: tree, pathStk: pathStk, leftFather: fatherIndex];
    <>
    rtBroPg2 _ FindRightBrother[tree: tree, pathStk: pathStk, spaceNeeded: EntryIntervalSize[pathStk: pathStk, leftFather: fatherIndex2] + 2*tree.breathingSpace];
    IF rtBroPg2=nilPage THEN {
      <>
      fe2: EntryOrdinal = FillRightPage[tree: tree, pathStk: pathStk];
      fe: EntryOrdinal = FillRightPage[tree: tree, pathStk: pathStk, rightFather: fe2];
      rtBroPg2 _ FindLeftBrother[tree: tree, pathStk: pathStk, spaceNeeded: EntryIntervalSize[pathStk: pathStk, leftFather: 0, rightFather: fe] + 2*tree.breathingSpace];
      IF rtBroPg2=nilPage THEN rtBroPg2 _ tree.AllocatePage[] -- still no luck, allocate new page
      ELSE {
        <>
        fatherIndex _ FillLeftPage[tree: tree, pathStk: pathStk];
        fatherIndex2 _ FillLeftPage[tree: tree, pathStk: pathStk, leftFather: fatherIndex];
        };
      };
    IF entryTable.length < 5 THEN ERROR Bug[tooFewEntries];
    <>
    <>
    heap.length _ 0;
    maxFeasIndex _ fatherIndex2;
    WHILE EntryIntervalSize[pathStk: pathStk, leftFather: maxFeasIndex] <= tree.fairlyFull DO maxFeasIndex _ maxFeasIndex-1; ENDLOOP;
    minFeasIndex _ maxFeasIndex+1;
    WHILE EntryIntervalSize[pathStk: pathStk, rightFather: fatherIndex] > (IF twoBrothersEnough THEN tree.prettyFull ELSE 0) DO
      WHILE EntryIntervalSize[pathStk: pathStk, leftFather: fatherIndex, rightFather: minFeasIndex-1] > 0 AND EntryIntervalSize[pathStk: pathStk, leftFather: minFeasIndex-1] <= tree.maxFreeWords DO
        minFeasIndex _ minFeasIndex-1;
        IF minFeasIndex <= maxFeasIndex THEN {
          <>
          heap.length _ heap.length+1;
          TrickleDown[emptyIndex: heap.length, entry: minFeasIndex];
          };
        ENDLOOP;
      WHILE EntryIntervalSize[pathStk: pathStk, leftFather: fatherIndex, rightFather: maxFeasIndex] > tree.maxFreeWords DO
        IF maxFeasIndex >= minFeasIndex THEN {
          <>
          heapPos: HeapIndex = entryTable.map[maxFeasIndex].heapPos;
          heap.length _ heap.length-1;
          IF heapPos <= heap.length THEN {
            replacementEntry: EntryOrdinal = heap.entries[heap.length+1];
            IF IndexedEntrySize[pathStk: pathStk, index: replacementEntry] <= IndexedEntrySize[pathStk: pathStk, index: maxFeasIndex] THEN TrickleDown[emptyIndex: heapPos, entry: replacementEntry]
            ELSE {
              <>
              emptyIndex: HeapIndex _ heapPos;
              entrySize: CARDINAL = IndexedEntrySize[pathStk: pathStk, index: replacementEntry];
              DO
                son: HeapIndex _ emptyIndex*2;
                sonEntry: EntryOrdinal;
                IF son > heap.length THEN EXIT;
                sonEntry _ heap.entries[son];
                IF son < heap.length AND IndexedEntrySize[pathStk: pathStk, index: heap.entries[son+1]] < IndexedEntrySize[pathStk: pathStk, index: sonEntry] THEN {
                  son _ son+1;
                  sonEntry _ heap.entries[son];
                  };
                IF IndexedEntrySize[pathStk: pathStk, index: sonEntry] >= entrySize THEN EXIT;
                heap.entries[emptyIndex] _ sonEntry;
                entryTable.map[sonEntry].heapPos _ emptyIndex;
                emptyIndex _ son;
                ENDLOOP;
              heap.entries[emptyIndex] _ replacementEntry;
              entryTable.map[replacementEntry].heapPos _ emptyIndex;
              };
            };
          };
        maxFeasIndex _ maxFeasIndex-1;
        ENDLOOP;
      IF heap.length>0 THEN {
        fatherSizeSum: CARDINAL;
        fatherIndex2 _ heap.entries[1];
        fatherSizeSum _ IndexedEntrySize[pathStk: pathStk, index: fatherIndex] + IndexedEntrySize[pathStk: pathStk, index: fatherIndex2];
        IF fatherSizeSum>
    breakSize1 _ EntryIntervalSize[pathStk: pathStk, rightFather: bestFatherIndex];
    breakSize2 _ EntryIntervalSize[pathStk: pathStk, rightFather: bestFatherIndex2];
    totalSize _ EntryIntervalSize[pathStk: pathStk];
    WritePage[tree: tree, pse: pse, number: pse.pageNumber, words: breakSize1];
    fatherESR _ WriteRightBrother[tree: tree, pse: pse, rtBroPg: rtBroPg1, words: breakSize2-breakSize1];
    PushEntSeqRecord[pse: fatherPSE, esr: WriteRightBrother[tree: tree, pse: pse, rtBroPg: rtBroPg2, words: totalSize-breakSize2]];
    PushEntSeqRecord[pse: fatherPSE, esr: fatherESR];
    };
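  <<FindRightBrother and FindLeftBrother look for an existing neighbor page that can absorb the overflow. Each checks that the brother would still have spaceNeeded free words after also taking the separating father entry; if so, the father entry and the brother's entries are removed and merged into this level's pending list (appended for a right brother, pushed in front for a left brother, with the page pointers patched accordingly), and the brother's page number is returned for reuse as the second target page. They return nilPage when no suitable brother exists.>>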
  FindRightBrother: PROC [tree: Tree, pathStk: PathStk, spaceNeeded: INTEGER] RETURNS [rtBroPg: PageNumber] = {
    <>
    pse: PathStkEntryPtr = @pathStk.path[pathStk.top];
    fatherPSE: PathStkEntryPtr = @pathStk.path[pathStk.top-1];
    fatherEntSize: CARDINAL;
    pagePtr: BTreePagePtr;
    fatherESR, rtBroESR: REF EntSeqRecord;
    IF fatherPSE.eslFront=NIL THEN {
      pagePtr _ tree.ReferencePage[fatherPSE.pageNumber];
      IF fatherPSE.offset = nilOffset+(tree.state.pageSize-pagePtr.freeWords) THEN {
        <>
        tree.ReleasePage[fatherPSE.pageNumber];
        RETURN [nilPage];
        };
      fatherEntSize _ tree.BTreeEntrySize[@pagePtr[fatherPSE.offset]];
      rtBroPg _ pagePtr[fatherPSE.offset].grPage;
      tree.ReleasePage[fatherPSE.pageNumber];
      }
    ELSE {
      fatherEntSize _ tree.BTreeEntrySize[fatherPSE.eslFront.entSeqP];
      rtBroPg _ fatherPSE.eslFront.entSeqP.grPage;
      };
    pagePtr _ tree.ReferencePage[rtBroPg];
    IF LOOPHOLE[pagePtr.freeWords-fatherEntSize, INTEGER] < spaceNeeded THEN {
      <>
      tree.ReleasePage[rtBroPg];
      RETURN [nilPage];
      };
    rtBroESR _ MakeEntSeqRecord[entSeq: @pagePtr.entries, length: tree.maxFreeWords-pagePtr.freeWords];
    tree.ReleasePage[rtBroPg];
    [esr: fatherESR] _ tree.RemoveEntry[pse: fatherPSE];
    AppendEntSeqLengths[tree: tree, pathStk: pathStk, esr: fatherESR];
    AppendEntSeqRecord[pse: pse, esr: fatherESR];
    AppendEntSeqLengths[tree: tree, pathStk: pathStk, esr: rtBroESR];
    AppendEntSeqRecord[pse: pse, esr: rtBroESR];
    };

  FindLeftBrother: PROC [tree: Tree, pathStk: PathStk, spaceNeeded: INTEGER] RETURNS [ltBroPg: PageNumber] = {
    <>
    pse: PathStkEntryPtr = @pathStk.path[pathStk.top];
    fatherPSE: PathStkEntryPtr = @pathStk.path[pathStk.top-1];
    fatherPagePtr, ltBroPagePtr, rtBroPagePtr: BTreePagePtr;
    fatherESR, ltBroESR: REF EntSeqRecord;
    fatherEntSize: CARDINAL;
    rtBroOfLtBroPg: PageNumber;
    IF fatherPSE.offset <= entry1Offset THEN RETURN [nilPage];
    fatherPagePtr _ tree.ReferencePage[fatherPSE.pageNumber];
    ltBroPg _ fatherPagePtr[fatherPSE.nextToLastOffset].grPage;
    rtBroOfLtBroPg _ fatherPagePtr[fatherPSE.lastOffset].grPage;
    fatherEntSize _ tree.BTreeEntrySize[@fatherPagePtr[fatherPSE.lastOffset]];
    tree.ReleasePage[fatherPSE.pageNumber];
    ltBroPagePtr _ tree.ReferencePage[ltBroPg];
    IF LOOPHOLE[ltBroPagePtr.freeWords-fatherEntSize, INTEGER] < spaceNeeded THEN {
      tree.ReleasePage[ltBroPg];
      RETURN [nilPage];
      };
    ltBroESR _ MakeEntSeqRecord[entSeq: @ltBroPagePtr.entries, length: tree.maxFreeWords-ltBroPagePtr.freeWords];
    fatherPagePtr _ tree.ReferencePage[fatherPSE.pageNumber, write];
    fatherPagePtr[fatherPSE.nextToLastOffset].grPage _ rtBroOfLtBroPg;
    tree.ReleasePage[fatherPSE.pageNumber];
    [esr: fatherESR] _ tree.BackUpAndRemoveEntry[pse: fatherPSE];
    rtBroPagePtr _ tree.ReferencePage[rtBroOfLtBroPg, write];
    fatherESR.entSeqP.grPage _ rtBroPagePtr.minPage;
    rtBroPagePtr.minPage _ ltBroPagePtr.minPage;
    tree.ReleasePage[rtBroOfLtBroPg];
    tree.ReleasePage[ltBroPg];
    PushEntSeqLengths[tree: tree, pathStk: pathStk, esr: fatherESR];
    PushEntSeqRecord[pse: pse, esr: fatherESR];
    PushEntSeqLengths[tree: tree, pathStk: pathStk, esr: ltBroESR];
    PushEntSeqRecord[pse: pse, esr: ltBroESR];
    };
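  <<The remaining helpers do the actual page writing and split-point selection: WritePage deposits the next words of the pending list into a page and sets its freeWords; WriteRightBrother first removes the next father entry (whose old grPage becomes the new page's minPage), writes the rest of the piece into rtBroPg, and returns the father entry with its grPage now pointing at rtBroPg; IndexedEntrySize gives the size of a single pending entry; and FillLeftPage/FillRightPage move a trial father index as far as possible while the entries on one side of it still fit on a page.>>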
  WriteRightBrother: PROC [tree: Tree, pse: PathStkEntryPtr, rtBroPg: PageNumber, words: CARDINAL] RETURNS [fatherESR: REF EntSeqRecord] = {
    <>
    pagePtr: BTreePagePtr;
    minPage: PageNumber;
    [esr: fatherESR, grPage: minPage] _ tree.RemoveEntry[pse: pse];
    words _ words-fatherESR.entSeqLen;
    pagePtr _ tree.ReferencePage[rtBroPg, write];
    pagePtr.minPage _ minPage;
    tree.ReleasePage[rtBroPg];
    WritePage[tree: tree, pse: pse, number: rtBroPg, words: words];
    fatherESR.entSeqP.grPage _ rtBroPg;
    };

  WritePage: PROC [tree: Tree, pse: PathStkEntryPtr, number: PageNumber, words: CARDINAL] = {
    <>
    pagePtr: BTreePagePtr = tree.ReferencePage[number, write];
    DepositESL[tree: tree, pse: pse, block: @pagePtr.entries, length: words];
    pagePtr.freeWords _ tree.maxFreeWords-words;
    tree.ReleasePage[number];
    };

  IndexedEntrySize: PROC [pathStk: PathStk, index: EntryOrdinal] RETURNS [words: CARDINAL] = INLINE {
    RETURN [EntryIntervalSize[pathStk: pathStk, leftFather: index-1, rightFather: index+1]];
    };

  FillLeftPage: PROC [tree: Tree, pathStk: PathStk, leftFather, rightFather: EntryOrdinal _ 0] RETURNS [midFather: EntryOrdinal] = {
    <>
    IF rightFather=0 THEN rightFather _ pathStk.entryTable.length+1;
    midFather _ leftFather+2;
    WHILE midFather<rightFather-2 AND EntryIntervalSize[pathStk: pathStk, leftFather: leftFather, rightFather: midFather+1] <= tree.maxFreeWords DO midFather _ midFather+1; ENDLOOP;
    };

  FillRightPage: PROC [tree: Tree, pathStk: PathStk, leftFather, rightFather: EntryOrdinal _ 0] RETURNS [midFather: EntryOrdinal] = {
    IF rightFather=0 THEN rightFather _ pathStk.entryTable.length+1;
    midFather _ rightFather-2;
    WHILE midFather>leftFather+2 AND EntryIntervalSize[pathStk: pathStk, leftFather: midFather-1, rightFather: rightFather] <= tree.maxFreeWords DO midFather _ midFather-1; ENDLOOP;
    };

  }.