DIRECTORY
  Basics USING [Comparison, PartialComparison, UnsafeBlock],
  BasicTime USING [GMT, Now, nullGMT],
  CedarProcess USING [DoWithPriority, Priority],
  Commander USING [CommandProc, Handle, Register],
  CommanderOps USING [Failed, NextArgument],
  DFUtilities USING [DirectoryItem, FileItem, Filter, IncludeItem, ParseFromStream, ProcessItemProc, UsingList],
  FileNames, IO, IOClasses, PFS, PFSNames, PFSPrefixMap,
  Process USING [CheckForAbort, GetPriority, Pause, Priority, SecondsToTicks],
  RedBlackTree USING [Compare, Create, Delete, DestroyTable, EachNode, EnumerateIncreasing, GetKey, Insert, Lookup, Table],
  Rope, SharedErrors, TrickleChargeServer;

TrickleChargeServerImpl: CEDAR MONITOR
  IMPORTS BasicTime, CedarProcess, Commander, CommanderOps, DFUtilities, FileNames, IO, IOClasses, PFS, PFSNames, PFSPrefixMap, Process, RedBlackTree, Rope, SharedErrors
  EXPORTS TrickleChargeServer
  = BEGIN

GMT: TYPE = BasicTime.GMT;
ROPE: TYPE = Rope.ROPE;
STREAM: TYPE = IO.STREAM;
PATH: TYPE = PFS.PATH;
PFSErrorReportProc: TYPE = TrickleChargeServer.PFSErrorReportProc;
TCReportProc: TYPE = TrickleChargeServer.TCReportProc;

Switches: TYPE = PACKED ARRAY CHAR['a..'z] OF BOOL;

TCType: TYPE = { fullDirectory, oneDF, allDFs, individualDFs };
TCInfoList: TYPE = LIST OF TCInfo;
TCInfo: TYPE = REF TCInfoRec;
TCInfoRec: TYPE = RECORD[
  prefix, fromTranslation, toTranslation: ROPE,
  fromTranslationPath: PATH,
  otherArg: ROPE,  -- for oneDF and allDFs
  prefixPath: PATH,
  fromPTable, toPTable: PFSPrefixMap.PrefixTableList,
  switches: Switches,  -- switches in effect at argument capture time
  useCopy: BOOL ¬ TRUE,  -- rather than retrieve
  tcType: TCType ¬ fullDirectory,
  fullCmd: ROPE,  -- for TCReportProc to use
  tcReportHasBeenCalled: BOOL ¬ FALSE  -- has a TCReportProc been called, if there is one(s)
  ];

FileEntry: TYPE = REF FileEntryRep;
FileEntryRep: TYPE = RECORD [
  fullSrcName: ROPE,
  fullSrcNamePath: PATH,
  uid: PFS.UniqueID,
  len: INT,
  state: FileEntryState
  ];
FileEntryState: TYPE = {init, fetching, storing, moved};

dirDoc: ROPE = " srcDir dstDir moves files from srcDir to dstDir -a fileName: append output messages to fileName -b: bestEfforts (ignore unknown prefixes) -d: debug (inhibits file transfer) -o fileName: output messages to new fileName -r: retrieve rather than copy -v: verify (no transfers, messages only for missing files) -x: debugging output ";
allDfsDoc: ROPE = "prefixMapEntry fromTranslation toTranslation {subdirForDfs} moves files from srcDir to dstDir -a fileName: append output messages to fileName -b: bestEfforts (ignore unknown prefixes) -d: debug (inhibits file transfer) -m: move dfs even if errors -o fileName: output messages to new fileName -r: retrieve rather than copy -v: verify (no transfers, messages only for missing files) -x: debugging output ";
oneDfDoc: ROPE = "prefixMapEntry fromTranslation toTranslation subDirAndDfName moves files from srcDir to dstDir -a fileName: append output messages to fileName -b: bestEfforts (ignore unknown prefixes) -d: debug (inhibits file transfer) -l: localDF (assumes df name is local or full {not prefixMapEntry relative}) -m: move dfs even if errors -o fileName: output messages to new fileName -r: retrieve rather than copy -v: verify (no transfers, messages only for missing files) -x: debugging output ";
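
-- Example invocation (all names hypothetical, for illustration only; the doc ropes above list the available switches):
--   TrickleChargeAllDFs -a TC.log somePrefix /remote/src/ /local/dst/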

courierMaxRetries: NAT ¬ 150;
courierRetrySeconds: NAT ¬ 5*60;
maxRetries: NAT ¬ 10;
retrySeconds: NAT ¬ 20;
repeatSeconds: NAT ¬ 1800;
secondsBetweenMoves: NAT ¬ 0;
maxPauseTime: NAT = 1800;

registeredPEProcsList: LIST OF PFSErrorReportProc ¬ NIL;
TCReportProcRec: TYPE ~ RECORD[proc: TCReportProc, callIfOK: BOOL];
registeredTCProcsList: LIST OF TCReportProcRec ¬ NIL;

tempDFPath: PATH ~ PFS.PathFromRope["/tmp/TrickleChargeTemp.df"];
bangH: ROPE ~ "!H";

DoIt: PROC [tcInfo: TCInfo, table: RedBlackTree.Table, out: STREAM] = {
  bestEfforts: BOOL ¬ tcInfo.switches['b];
  doFlush: BOOL ¬ tcInfo.switches['a] OR tcInfo.switches['o];
  debug: BOOL ¬ tcInfo.switches['d];
  verify: BOOL ¬ tcInfo.switches['v];
  localDF: BOOL ¬ tcInfo.switches['l];
  outputForDebugging: BOOL ¬ tcInfo.switches['x];
  useRetrieve: BOOL ¬ tcInfo.switches['r];
  moveDfsEvenIfErrors: BOOL ¬ tcInfo.switches['m];
  enumerateForDfFiles: BOOL ¬ tcInfo.tcType = allDFs;
  dfList: LIST OF FileEntry ¬ NIL;
  errorsEncountered: BOOL ¬ FALSE;
  processingImports: BOOL ¬ tcInfo.switches['e];
  filter: DFUtilities.Filter ¬ [FALSE, all, all, IF ( processingImports ) THEN all ELSE defining];

  EachEntry: RedBlackTree.EachNode = {
    WITH data SELECT FROM
      entry: FileEntry => {
        IF entry.state # moved THEN {
          ext: ROPE ¬ FindExt[entry.fullSrcName];
          IF Rope.Equal[ext, "df", FALSE] THEN dfList ¬ CONS[entry, dfList] ELSE MoveFile[entry];
          };
        RETURN;
        };
      ENDCASE => ERROR;
    };

  MoveFile: PROC [entry: FileEntry] = {
    isOnDst: BOOL ¬ FALSE;
    dstBytes: INT ¬ 0;
    dstUID: PFS.UniqueID ¬ PFS.nullUniqueID;
    fullDstPath: PATH ¬ PFSPrefixMap.Translate[entry.fullSrcNamePath, tcInfo.toPTable];
    Process.CheckForAbort[];
    [uniqueID: dstUID, bytes: dstBytes] ¬ PFS.FileInfo[fullDstPath ! PFS.Error => CONTINUE ];
    IF EqualUIDs[dstUID, entry.uid] AND dstBytes = entry.len THEN {
      isOnDst ¬ TRUE;
      filesAlreadyThere ¬ CheckCount[filesAlreadyThere, out, doFlush];
      };
    SELECT TRUE FROM
      debug => {
        ShowEntry[out, entry];
        IF isOnDst THEN out.PutRope[" (already on destination)\n"];
        out.PutF1["~~fullDstPath: %g\n", [rope[PFS.RopeFromPath[fullDstPath]]] ];
        };
      isOnDst => { entry.state ¬ moved; };
      verify => {
        ShowEntry[out, entry];
        out.PutRope[" (NOT on destination)\n"];
        };
      ENDCASE => {
        dstName: ROPE = PFS.RopeFromPath[fullDstPath];
        retriedCount: INT ¬ 0;
        Yes: PFS.NameConfirmProc ~ { RETURN[TRUE] };
        DO
          ENABLE PFS.Error => {
            sleepSec: NAT ¬ 0;
            out.PutF["PFS.Error when storing %g\n %g\n", [rope[dstName]], [rope[error.explanation]] ];
            IF doFlush THEN out.Flush[];
            SELECT error.code FROM
              $connectionRejected => sleepSec ¬ retrySeconds;
              $CrRPC => sleepSec ¬ courierRetrySeconds;
              $quotaExceeded, $volumeFull => REJECT;
              ENDCASE;
            FOR pL: LIST OF PFSErrorReportProc ¬ GetPEProcsList[], pL.rest UNTIL pL=NIL DO
              pL.first[entry.fullSrcName, dstName, error, sleepSec];
              ENDLOOP;
            SELECT error.code FROM
              $connectionRejected => IF retriedCount < maxRetries THEN {
                retriedCount ¬ retriedCount + 1;
                Process.Pause[Process.SecondsToTicks[retrySeconds]];
                LOOP;
                };
              $CrRPC => IF retriedCount < courierMaxRetries THEN {
                retriedCount ¬ retriedCount + 1;
                Process.Pause[Process.SecondsToTicks[courierRetrySeconds]];
                LOOP;
                };
              $connectionTimedOut => IF retriedCount < maxRetries THEN {
                retriedCount ¬ retriedCount + 1;
                LOOP;
                };
              ENDCASE;
            GO TO failed;
            };
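          -- The ENABLE handler above retries transient errors ($connectionRejected, $CrRPC,
          -- $connectionTimedOut) a bounded number of times, pausing between attempts, and
          -- otherwise gives up via GO TO failed.  The transfer below is either a direct PFS.Copy
          -- (the default) or, when retrieve was requested, a pipe from a Fetcher (PFS.Retrieve
          -- into one end) to a Storer that drains the other end into the destination stream.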
          IF NOT useRetrieve
            THEN {
              PFS.Copy[from: entry.fullSrcNamePath, to: fullDstPath, wantedUniqueID: entry.uid, confirmProc: Yes];
              EXIT
              }
            ELSE {
              outputStream: IO.STREAM ~ PFS.StreamOpen[fileName: fullDstPath, accessOptions: create, wantedUniqueID: entry.uid !
                PFS.Error => IF error.group = user THEN {
                  out.PutF["Cannot create %s: %s\n", [rope[dstName]], [rope[error.explanation]]];
                  IF doFlush THEN out.Flush[];
                  GO TO failed;
                  }];
              pipeBufferLength: INT ~ 2048;
              toPipe, fromPipe: IO.STREAM;
              Fetcher: PROC ~ {
                pfsRemoteStreamProc: PFS.RetrieveConfirmProc ~ { RETURN [toPipe]; };
                PFS.Retrieve[name: entry.fullSrcNamePath, wantedUniqueID: entry.uid, proc: pfsRemoteStreamProc];
                IO.Close[toPipe];
                };
              Storer: PROC ~ TRUSTED {
                buffer: PACKED ARRAY [0..pipeBufferLength) OF CHAR;
                block: Basics.UnsafeBlock ¬ [base: LOOPHOLE[LONG[@buffer]], startIndex: 0, count: pipeBufferLength];
                count: INT;
                DO
                  count ¬ fromPipe.UnsafeGetBlock[block];
                  IF count < pipeBufferLength
                    THEN {
                      block.count ¬ count;
                      outputStream.UnsafePutBlock[block];
                      outputStream.Close[];
                      RETURN;
                      }
                    ELSE outputStream.UnsafePutBlock[block];
                  ENDLOOP;
                };
              [toPipe, fromPipe] ¬ IOClasses.CreatePipe[pipeBufferLength];
              TRUSTED { SharedErrors.Fork[LIST[Storer, Fetcher]]; EXIT };
              };
          ENDLOOP;
        entry.state ¬ moved;
        filesMoved ¬ filesMoved + 1;
        out.PutF["Moved %g\n to %g\n", [rope[PFS.RopeFromPath[entry.fullSrcNamePath]]], [rope[PFS.RopeFromPath[fullDstPath]]] ];
        IF doFlush THEN out.Flush[];
        IF secondsBetweenMoves # 0 THEN Process.Pause[Process.SecondsToTicks[secondsBetweenMoves]];
        EXITS failed => { errorsEncountered ¬ TRUE };
        };
    };
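
  -- VisitEntry is the visitor handed to VisitClosure for the DF-driven cases: it records each
  -- referenced file in the red-black table (keyed by full source name) unless the table already
  -- holds it with the same UniqueID, skips names outside tcInfo.prefixPath as well as abandoned
  -- "fs.13.*" files, and asks PFS.FileInfo (through the from-translation) for the real uid and length.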

  VisitEntry: PROC [name: PATH, uid: PFS.UniqueID] = {
    new: FileEntry ¬ NIL;
    bytes: INT ¬ 0;
    isa: BOOL;
    suffix: PATH ¬ NIL;
    ropeName: ROPE ¬ PFS.RopeFromPath[name];
    xFromName, fileInfoName: PATH;
    Process.CheckForAbort[];
    WITH RedBlackTree.Lookup[table, ropeName] SELECT FROM
      entry: FileEntry => IF EqualUIDs[entry.uid, uid] THEN RETURN;
      ENDCASE;
    [isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.prefixPath, name];
    IF NOT isa OR Rope.Match["fs.13.*", PFSNames.ComponentRope[PFSNames.ShortName[name]] ] THEN {
      IF reportIgnoring THEN out.PutF1["\t**Ignoring the file %g\n", [rope[ropeName]] ];
      RETURN;
      };
    xFromName ¬ PFSPrefixMap.Translate[name, tcInfo.fromPTable];
    [fullFName: fileInfoName, uniqueID: uid, bytes: bytes] ¬ PFS.FileInfo[name: xFromName, wantedUniqueID: uid !
      PFS.Error => IF ( error.code = $unknownFile ) OR ( error.code = $unknownUniqueID ) OR ( error.code = $unknownCreatedTime ) OR ( error.code = $unknownServer AND bestEfforts )
        THEN {
          out.PutF["\n*PFS.Error[%g]\n\tasking for %g(%g)\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[xFromName]]],
            IF uid.egmt.gmt = BasicTime.nullGMT THEN [rope["nullGMT"]] ELSE [time[uid.egmt.gmt]] ];
          IF doFlush THEN out.Flush[];
          fileInfoName ¬ NIL;
          CONTINUE;
          }
        ELSE REJECT];
    IF fileInfoName = NIL THEN RETURN;
    WITH RedBlackTree.Lookup[table, ropeName] SELECT FROM
      entry: FileEntry => {
        IF EqualUIDs[entry.uid, uid] THEN RETURN;
        [] ¬ RedBlackTree.Delete[table, ropeName];
        };
      ENDCASE;
    new ¬ NEW[FileEntryRep ¬ [
      fullSrcName: ropeName, fullSrcNamePath: fileInfoName, uid: uid, len: bytes, state: init]];
    RedBlackTree.Insert[table, new, ropeName];
    filesSeenDuringEnumeration ¬ CheckCount[filesSeenDuringEnumeration, out, doFlush];
    bytesSeenDuringEnumeration ¬ bytesSeenDuringEnumeration + bytes;
    };

  VisitClosure: PROC [tcInfo: TCInfo, dfName: PATH, uid: PFS.UniqueID, visitor: PROC [name: PATH, uid: PFS.UniqueID], usingList: REF DFUtilities.UsingList ¬ NIL] = {
    ENABLE PFS.Error => IF ( error.code = $unknownServer ) OR ( error.code = $unknownFile ) THEN {
      out.PutF["\n****PFS.Error[%g], in dfFile: %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ];
      IF doFlush THEN out.Flush[];
      };
    EachItem: DFUtilities.ProcessItemProc = {
      WITH item SELECT FROM
        dir: REF DFUtilities.DirectoryItem => prefix ¬ PFS.PathFromRope[dir.path1];
        file: REF DFUtilities.FileItem => {
          name: PATH = PFSNames.ExpandName[PFS.PathFromRope[file.name], prefix];
          visitor[name, UIDFromGMT[file.date.gmt]];
          };
        incl: REF DFUtilities.IncludeItem => {
          thisUID: PFS.UniqueID = UIDFromGMT[incl.date.gmt];
          path1: PATH ¬ PFS.PathFromRope[incl.path1];
          visitor[path1, thisUID];
          VisitClosure[tcInfo, path1, thisUID, visitor];
          };
        ENDCASE => { i: INT ¬ 0; };  -- handy for setting breakpoints - (bj)
      };
    prefix: PATH ¬ NIL;
    in: STREAM;
    fileNamePath: PATH ¬ PFSPrefixMap.Translate[dfName, tcInfo.fromPTable];
    IF NOT useRetrieve
      THEN in ¬ PFS.StreamOpen[ fileName: fileNamePath, wantedUniqueID: uid !
        PFS.Error => IF error.code = $unknownFile
          THEN {
            out.PutF["PFS.Error[%g] - from dfFile %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ];
            IF doFlush THEN out.Flush[];
            in ¬ NIL;
            CONTINUE;
            }
          ELSE REJECT ]
      ELSE {
        ok: BOOL ¬ DoTCRetrieve[from: fileNamePath, to: tempDFPath, wantedUniqueID: uid, out: out, doFlush: doFlush];
        IF ok THEN in ¬ PFS.StreamOpen[tempDFPath];
        };
    IF in#NIL THEN {
      filter.list ¬ usingList;
      DFUtilities.ParseFromStream[in, EachItem, filter  -- global variable, hack for now! - (bj)
        ! UNWIND => in.Close[]];
      in.Close[]
      };
    };

  filesMoved: INT ¬ 0;
  filesInCache: INT ¬ 0;
  filesAlreadyThere: INT ¬ 0;
  filesSeenDuringEnumeration: INT ¬ 0;
  bytesSeenDuringEnumeration: INT ¬ 0;

  RedBlackTree.DestroyTable[table];  -- clear the table from the last run
  IF tcInfo.otherArg # NIL
    THEN out.PutF["\n\tMoving files (%g) from %g to %g\n", [rope[tcInfo.otherArg]], [rope[tcInfo.fromTranslation]], [rope[tcInfo.toTranslation]] ]
    ELSE out.PutF["\n\tMoving files from %g to %g\n", [rope[tcInfo.fromTranslation]], [rope[tcInfo.toTranslation]] ];
  out.PutF1["\n***** Building file table at %g\n", [time[BasicTime.Now[]]] ];
  IF doFlush THEN out.Flush[];
  SELECT tcInfo.tcType FROM
    oneDF => {
      IF localDF
        THEN VisitClosure[tcInfo, PFS.PathFromRope[tcInfo.otherArg.Concat[bangH]], PFS.nullUniqueID, VisitEntry]
        ELSE VisitClosure[tcInfo, PFS.PathFromRope[tcInfo.prefix.Cat[tcInfo.otherArg, bangH]], PFS.nullUniqueID, VisitEntry];
      };
    allDFs => {
      EachDfFile: PFS.InfoProc = {
        dstUID: PFS.UniqueID ¬ PFS.nullUniqueID;
        doTheEnumerate: BOOL ¬ FALSE;
        isa: BOOL;
        suffix: PATH;
        Process.CheckForAbort[];
        IF outputForDebugging THEN out.PutF1["~~%g\n", [rope[PFS.RopeFromPath[fullFName]]] ];
        [isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.fromTranslationPath, fullFName];
        IF NOT isa THEN {
          out.PutF["\t*** %g is not a prefix of %g => ignoring\n", [rope[tcInfo.fromTranslation]], [rope[PFS.RopeFromPath[fullFName]]] ];
          IF doFlush THEN out.Flush[];
          RETURN;
          };
        [uniqueID: dstUID] ¬ PFS.FileInfo[PFSPrefixMap.Translate[fullFName, tcInfo.toPTable] !
          PFS.Error => IF error.group=user THEN { doTheEnumerate ¬ TRUE; CONTINUE }];
        IF NOT EqualUIDs[dstUID, uniqueID] THEN doTheEnumerate ¬ TRUE;
        IF doTheEnumerate THEN VisitClosure[tcInfo: tcInfo, dfName: fullFName, uid: PFS.nullUniqueID, visitor: VisitEntry];
        };
      who: ROPE ~ tcInfo.prefix.Concat["/Top/*.df!H"];
      IF outputForDebugging THEN out.PutF1["$$doing %g\n", [rope[who]] ];
      PFS.EnumerateForInfo[PFSPrefixMap.Translate[PFS.PathFromRope[who], tcInfo.fromPTable], EachDfFile];
      };
    fullDirectory => {  -- Trickling a whole directory.
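      -- EachFile (below) recursively enumerates subdirectories (skipping "." and "..") and tables
      -- every file under tcInfo.prefixPath whose UniqueID differs from what is already recorded.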
      bangH: PATH ~ PFS.PathFromRope["*!h"];
      depth: INT ¬ 1;
      EachFile: PFS.InfoProc = {
        isa: BOOL;
        suffix: PATH;
        ropeName: ROPE;
        Process.CheckForAbort[];
        IF fileType=PFS.tDirectory THEN {
          newPath: PATH;
          short: ROPE ¬ PFSNames.ComponentRope[PFSNames.ShortName[fullFName]];
          IF short.Equal["."] OR short.Equal[".."] THEN RETURN;
          newPath ¬ PFSNames.Cat[fullFName, bangH];
          PFS.EnumerateForInfo[newPath, EachFile];
          RETURN;
          };
        [isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.prefixPath, fullFName];
        IF isa THEN {
          new: FileEntry ¬ NIL;
          ropeName ¬ PFS.RopeFromPath[fullFName];
          WITH RedBlackTree.Lookup[table, ropeName] SELECT FROM
            entry: FileEntry => {
              IF EqualUIDs[entry.uid, uniqueID] THEN RETURN;
              [] ¬ RedBlackTree.Delete[table, ropeName];
              };
            ENDCASE;
          new ¬ NEW[FileEntryRep ¬ [
            fullSrcName: ropeName, fullSrcNamePath: PFSPrefixMap.Translate[fullFName, tcInfo.fromPTable], uid: uniqueID, len: bytes, state: init]];
          RedBlackTree.Insert[table, new, ropeName];
          filesSeenDuringEnumeration ¬ CheckCount[filesSeenDuringEnumeration, out, doFlush];
          bytesSeenDuringEnumeration ¬ bytesSeenDuringEnumeration + bytes;
          };
        };
      PFS.EnumerateForInfo[PFS.PathFromRope[tcInfo.fromTranslation.Concat["*!h"]], EachFile];
      };
    ENDCASE => {
      out.PutRope["***unknown case - exiting\n"];
      RETURN;
      };

  out.PutF["\nEnumerated new files: %g, bytes: %g\n", [integer[filesSeenDuringEnumeration]], [integer[bytesSeenDuringEnumeration]] ];
  out.PutF1["\n***** Moving files at %g\n", [time[BasicTime.Now[]]] ];
  IF doFlush THEN out.Flush[];
  RedBlackTree.EnumerateIncreasing[table, EachEntry];
  IF errorsEncountered AND NOT moveDfsEvenIfErrors
    THEN {
      msg: ROPE ~ "There were errors so no df's will be moved\n";
      out.PutRope[msg];
      NotifyTCReportProcs[IO.PutFR["%g (cmd: %g)", [rope[msg]], [rope[tcInfo.fullCmd]] ], TRUE];
      tcInfo.tcReportHasBeenCalled ¬ TRUE;
      }
    ELSE {
      IF dfList # NIL THEN {
        IF errorsEncountered AND moveDfsEvenIfErrors THEN {
          msg: ROPE ~ "\n***There were errors; df's will be moved anyway\n";
          out.PutRope[msg];
          NotifyTCReportProcs[ IO.PutFR["%g (cmd: %g)", [rope[msg]], [rope[tcInfo.fullCmd]] ], TRUE];
          tcInfo.tcReportHasBeenCalled ¬ TRUE;
          };
        out.PutRope["\n Starting to move df's\n"];
        FOR entryList: LIST OF FileEntry ¬ dfList, entryList.rest WHILE entryList # NIL DO
          MoveFile[entryList.first];
          ENDLOOP;
        };
      };
  out.PutF1["\n{Done at %g}\n", [time[BasicTime.Now[]]] ];
  out.PutF["\tFiles moved: %g, alreadyRemote: %g\n\n", [integer[filesMoved]], [integer[filesAlreadyThere]] ];
  IF doFlush THEN out.Flush[];
  };
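
-- DoTCRetrieve copies the file 'from' (at the wanted UniqueID) to the local path 'to' by forking
-- a Fetcher and a Storer joined by an IOClasses pipe; any PFS.Error is reported on out and the
-- result is ok = FALSE.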

DoTCRetrieve: PROC[from, to: PATH, wantedUniqueID: PFS.UniqueID, out: STREAM, doFlush: BOOL] RETURNS[ok: BOOL ¬ TRUE] = {
  ENABLE PFS.Error => {
    out.PutF1["PFS.Error from DoTCRetrieve: %g\n", [rope[error.explanation]] ];
    IF doFlush THEN out.Flush[];
    GOTO failed
    };
  outputStream: IO.STREAM ~ PFS.StreamOpen[fileName: to, accessOptions: create !
    PFS.Error => IF error.group = user THEN {
      out.PutF["Cannot create %s: %s\n", [rope[PFS.RopeFromPath[to]]], [rope[error.explanation]]];
      IF doFlush THEN out.Flush[];
      GO TO failed;
      }];
  pipeBufferLength: INT ~ 2048;
  toPipe, fromPipe: IO.STREAM;
  Fetcher: PROC ~ {
    pfsRemoteStreamProc: PFS.RetrieveConfirmProc ~ { RETURN [toPipe]; };
    PFS.Retrieve[name: from, wantedUniqueID: wantedUniqueID, proc: pfsRemoteStreamProc];
    IO.Close[toPipe];
    };
  Storer: PROC ~ TRUSTED {
    buffer: PACKED ARRAY [0..pipeBufferLength) OF CHAR;
    block: Basics.UnsafeBlock ¬ [base: LOOPHOLE[LONG[@buffer]], startIndex: 0, count: pipeBufferLength];
    count: INT;
    DO
      count ¬ fromPipe.UnsafeGetBlock[block];
      IF count < pipeBufferLength
        THEN {
          block.count ¬ count;
          outputStream.UnsafePutBlock[block];
          outputStream.Close[];
          RETURN;
          }
        ELSE outputStream.UnsafePutBlock[block];
      ENDLOOP;
    };
  [toPipe, fromPipe] ¬ IOClasses.CreatePipe[pipeBufferLength];
  TRUSTED { SharedErrors.Fork[LIST[Storer, Fetcher]] };
  EXITS failed => ok ¬ FALSE;
  };

EqualUIDs: PROC[uid1, uid2: PFS.UniqueID] RETURNS[BOOL] = {
  RETURN[ ( uid1.egmt.gmt = uid2.egmt.gmt ) ];  -- all for now
  };

UIDFromGMT: PROC [gmt: BasicTime.GMT] RETURNS [PFS.UniqueID] ~ INLINE {
  RETURN [[egmt: [gmt: gmt, usecs: 0]]]
  };

CheckCount: PROC[num: INT, out: STREAM, doFlush: BOOL] RETURNS[res: INT] = {
  IF ( res ¬ num + 1 ) MOD 10 = 0 THEN
    IF res MOD 100 = 0
      THEN {
        out.PutF1["(%g) ", [integer[res]] ];
        IF doFlush THEN out.Flush[];
        }
      ELSE out.PutChar['.];
  };

FindExt: PROC[name: ROPE] RETURNS[ext: ROPE] = {
  short: ROPE ¬ FileNames.GetShortName[name, TRUE];
  pos: INT ¬ Rope.FindBackward[short, "."];
  IF pos = -1 THEN RETURN[short];
  RETURN[Rope.Substr[short, pos+1]];
  };

ShowTable: PROC [out: STREAM, table: RedBlackTree.Table] = {
  EachEntry: RedBlackTree.EachNode = {
    WITH data SELECT FROM
      entry: FileEntry => ShowEntry[out, entry];
      ENDCASE => ERROR;
    };
  RedBlackTree.EnumerateIncreasing[table, EachEntry];
  };

ShowEntry: PROC [out: STREAM, entry: FileEntry] = {
  IO.PutF[out, "[name: %g, date: %g, len: %g, state: ", [rope[PFS.RopeFromPath[entry.fullSrcNamePath]]], [time[entry.uid.egmt.gmt]], [integer[entry.len]] ];
  SELECT entry.state FROM
    init => out.PutRope["init]\n"];
    fetching => out.PutRope["fetching]\n"];
    storing => out.PutRope["storing]\n"];
    moved => out.PutRope["moved]\n"];
    ENDCASE;
  };

GetKey: RedBlackTree.GetKey = {
  RETURN [data];
  };

Compare: RedBlackTree.Compare = {
  key: ROPE ¬ NIL;
  WITH k SELECT FROM
    ent: FileEntry => key ¬ ent.fullSrcName;
    rope: ROPE => key ¬ rope;
    ENDCASE => ERROR;
  WITH data SELECT FROM
    ent: FileEntry => RETURN [Rope.Compare[key, ent.fullSrcName, FALSE]];
    ENDCASE;
  ERROR;
  };

ConvertPCom: PROC[pCom: Basics.PartialComparison] RETURNS[Basics.Comparison] = {
  SELECT pCom FROM
    incomparable, less => RETURN[less];
    equal => RETURN[equal];
    greater => RETURN[greater];
    ENDCASE => RETURN[less];
  };

AllDfsCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[allDFs, cmd] };
OneDfCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[oneDF, cmd] };
IndDfCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[individualDFs, cmd] };
DirCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[fullDirectory, cmd] };
RetAllDfsCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[allDFs, cmd, TRUE] };
RetOneDfCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[oneDF, cmd, TRUE] };
RetIndDfCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[individualDFs, cmd, TRUE] };
RetDirCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[fullDirectory, cmd, TRUE] };
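
-- Common is the shared driver for the command procedures above: it parses the switches and the
-- prefix / fromTranslation / toTranslation (plus optional extra) arguments, optionally redirects
-- messages to a log file, builds the prefix tables, and then runs DoIt (or IndividualDFs) at
-- background priority.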

Common: PROC[tcType: TCType, cmd: Commander.Handle, useRetrieve: BOOL ¬ FALSE] RETURNS[result: REF, msg: ROPE] = {
  ProcessSwitches: PROC [arg: ROPE] RETURNS[Switches]= {
    sense: BOOL ¬ TRUE;
    switches: Switches ¬ ALL[FALSE];
    FOR index: INT IN [0..Rope.Length[arg]) DO
      char: CHAR ¬ Rope.Fetch[arg, index];
      SELECT char FROM
        '- => LOOP;
        '~ => {sense ¬ NOT sense; LOOP};
        'a, 'A => { outFileName ¬ CommanderOps.NextArgument[cmd]; switches[char] ¬ sense };
        'o, 'O => { outFileName ¬ CommanderOps.NextArgument[cmd]; switches[char] ¬ sense };
        IN ['a..'z] => switches[char] ¬ sense;
        IN ['A..'Z] => switches[char + ('a-'A)] ¬ sense;
        ENDCASE;
      sense ¬ TRUE;
      ENDLOOP;
    RETURN[switches];
    };
  out: STREAM ¬ cmd.out;
  outFileName: ROPE;
  oldPriority: Process.Priority ¬ Process.GetPriority[];
  table: RedBlackTree.Table ¬ RedBlackTree.Create[getKey: GetKey, compare: Compare];
  this: TCInfo ¬ NEW[TCInfoRec];
  this.tcType ¬ tcType;
  DO
    arg: ROPE ¬ CommanderOps.NextArgument[cmd ! CommanderOps.Failed => { msg ¬ errorMsg; GO TO failed } ];
    ch: CHAR;
    Process.CheckForAbort[];
    IF arg = NIL THEN EXIT;
    ch ¬ Rope.Fetch[arg, 0];
    SELECT TRUE FROM
      ( ch = '- ) AND ( arg.Length[] = 2 ) =>  -- switch
        this.switches ¬ ProcessSwitches[arg];
      ( ch = '{ ) => LOOP;  -- ignore
      ( ch = '} ) => LOOP;  -- ignore
      ( ch = '$ ) => LOOP;  -- ignore
      ENDCASE => {  -- translations or other things
        IF this.prefix = NIL THEN {
          this.prefix ¬ arg;
          this.prefixPath ¬ PFS.PathFromRope[this.prefix];
          LOOP;
          };
        IF this.fromTranslation = NIL THEN {
          this.fromTranslation ¬ arg;
          this.fromTranslationPath ¬ PFS.PathFromRope[arg];
          LOOP;
          };
        IF this.toTranslation = NIL THEN {
          this.toTranslation ¬ arg;
          LOOP;
          };
        SELECT TRUE FROM
          ( this.otherArg # NIL) => {
            msg ¬ IO.PutFR1["Extra argument (%g)", [rope[arg]] ];
            GO TO failed;
            };
          ENDCASE => this.otherArg ¬ arg;
        };
    ENDLOOP;
  IF ( this.prefix = NIL ) OR ( this.fromTranslation = NIL ) OR ( this.toTranslation = NIL ) THEN {
    msg ¬ "Not enough arguments given.\n";
    GOTO failed
    };
  IF useRetrieve THEN this.switches['r] ¬ TRUE;
  IF outFileName # NIL THEN {
    outPath: PFS.PATH ~ PFS.PathFromRope[outFileName];
    outStream: STREAM;
    appending: BOOL ~ this.switches['a];
    outStream ¬ PFS.StreamOpen[outPath, IF appending THEN $append ELSE $create !
      PFS.Error => { out.PutRope[error.explanation]; CONTINUE} ];
    IF outStream # NIL THEN {
      out.PutF["Messages will be %g %g\n", [rope[IF appending THEN "appended to" ELSE "written on"]], [rope[outFileName]] ];
      out ¬ outStream;
      };
    };
  {
  Action: PROC = {
    this.fromPTable ¬ PFSPrefixMap.InsertIntoNewPTable[ PFS.PathFromRope[this.prefix], PFS.PathFromRope[this.fromTranslation] ];
    this.toPTable ¬ PFSPrefixMap.InsertIntoNewPTable[ PFS.PathFromRope[this.prefix], PFS.PathFromRope[this.toTranslation] ];
    this.toPTable ¬ PFSPrefixMap.Insert[ PFS.PathFromRope[this.fromTranslation], PFS.PathFromRope[this.toTranslation], this.toPTable ];
    this.fullCmd ¬ Rope.Cat[cmd.command, " ", cmd.commandLine];
    IF this.tcType = individualDFs
      THEN IndividualDFs[this, table, out]
      ELSE DoIt[this, table, out !
        PFS.Error => {
          msg: ROPE ~ IO.PutFR["PFS.Error[%g], stopping this round.\n\t\t(at %g)\n\n", [rope[error.explanation]], [time[BasicTime.Now[]]] ];
          out.PutRope[msg];
          IF outFileName # NIL THEN out.Flush[];
          NotifyTCReportProcs[IO.PutFR["%g (cmd: %g)\n", [rope[msg]], [rope[this.fullCmd]] ], TRUE];
          this.tcReportHasBeenCalled ¬ TRUE;
          CONTINUE;
          };
        ];
    IF outFileName # NIL THEN out.Close[];
    };
  CedarProcess.DoWithPriority[background, Action];
  IF NOT this.tcReportHasBeenCalled THEN
    NotifyTCReportProcs[IO.PutFR1["TrickleCharge (%g) finished OK\n", [rope[this.fullCmd]] ], FALSE];
  };
  EXITS failed => {result ¬ $Failure};
  };

IndividualDFs: PROC[tcInfo: TCInfo, table: RedBlackTree.Table, out: STREAM] = {
  doFlush: BOOL ¬ tcInfo.switches['a] OR tcInfo.switches['o];
  dfList: LIST OF ROPE ¬ NIL;
  filter: DFUtilities.Filter ¬ [FALSE, all, all, defining];

  VisitClosure1: PROC [tcInfo: TCInfo, dfName: PATH, uid: PFS.UniqueID, visitor: PROC [name: PATH, uid: PFS.UniqueID], usingList: REF DFUtilities.UsingList ¬ NIL] = {
    ENABLE PFS.Error => IF ( error.code = $unknownServer ) OR ( error.code = $unknownFile ) THEN {
      out.PutF["\n****PFS.Error[%g], in dfFile: %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ];
      IF doFlush THEN out.Flush[];
      };
    EachItem1: DFUtilities.ProcessItemProc = {
      WITH item SELECT FROM
        dir: REF DFUtilities.DirectoryItem => prefix ¬ PFS.PathFromRope[dir.path1];
        file: REF DFUtilities.FileItem => {
          out.PutF1["\n*** fileName found in IndividualDF driver file (%g)", [rope[file.name]]];
          out.PutRope["\n\tit will be ignored\n"];
          };
        incl: REF DFUtilities.IncludeItem => dfList ¬ CONS[incl.path1, dfList];
        ENDCASE => { i: INT ¬ 0; };  -- handy for setting breakpoints - (bj)
      };
    prefix: PATH ¬ NIL;
    in: STREAM;
    fileNamePath: PATH ¬ PFSPrefixMap.Translate[dfName, tcInfo.fromPTable];
    IF NOT tcInfo.switches['r]
      THEN in ¬ PFS.StreamOpen[ fileName: fileNamePath, wantedUniqueID: uid !
        PFS.Error => IF error.code = $unknownFile
          THEN {
            out.PutF["PFS.Error[%g] - from dfFile %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ];
            IF doFlush THEN out.Flush[];
            in ¬ NIL;
            CONTINUE;
            }
          ELSE REJECT ]
      ELSE {
        ok: BOOL ¬ DoTCRetrieve[from: fileNamePath, to: tempDFPath, wantedUniqueID: uid, out: out, doFlush: doFlush];
        IF ok THEN in ¬ PFS.StreamOpen[tempDFPath];
        };
    IF in#NIL THEN {
      filter.list ¬ usingList;
      DFUtilities.ParseFromStream[in, EachItem1, filter  -- global variable, hack for now! - (bj)
        ! UNWIND => in.Close[]];
      in.Close[]
      };
    };

  IF tcInfo.switches['l]
    THEN VisitClosure1[tcInfo, PFS.PathFromRope[tcInfo.otherArg.Concat[bangH]], PFS.nullUniqueID, NIL]
    ELSE VisitClosure1[tcInfo, PFS.PathFromRope[tcInfo.prefix.Cat[tcInfo.otherArg, bangH]], PFS.nullUniqueID, NIL];
  tcInfo.tcType ¬ oneDF;  -- doing one df
  tcInfo.switches['l] ¬ TRUE;  -- make it look like a local df (this is a full name)
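  -- Each included df collected by VisitClosure1 is now trickled as its own oneDF pass;
  -- a PFS.Error in one df is reported and the loop continues with the next entry.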
  FOR rL: LIST OF ROPE ¬ dfList, rL.rest UNTIL rL = NIL DO
    tcInfo.otherArg ¬ rL.first;
    DoIt[tcInfo, table, out !
      PFS.Error => {
        msg: ROPE ~ IO.PutFR["PFS.Error[%g]; problems with %g.\n\t\t(at %g)\n\n", [rope[error.explanation]], [rope[rL.first]], [time[BasicTime.Now[]]] ];
        out.PutRope[msg];
        IF doFlush THEN out.Flush[];
        NotifyTCReportProcs[msg, TRUE];
        tcInfo.tcReportHasBeenCalled ¬ TRUE;
        CONTINUE;
        };
      ];
    ENDLOOP;
  };

Path: PROC[pt: PFS.PATH] RETURNS[rp: ROPE] ~ { RETURN[PFS.RopeFromPath[pt] ] };

Translate: PROC[name: PATH, pTable: PFSPrefixMap.PrefixTableList] RETURNS [rp: ROPE] = {
  RETURN[PFS.RopeFromPath[PFSPrefixMap.Translate[name, pTable]]]
  };

NotifyTCReportProcs: PROC[msg: ROPE, isError: BOOL] ¬ {
  FOR tL: LIST OF TCReportProcRec ¬ registeredTCProcsList, tL.rest UNTIL tL = NIL DO
    IF isError OR tL.first.callIfOK THEN tL.first.proc[msg, NOT isError];
    ENDLOOP;
  };

reportIgnoring: BOOL ¬ FALSE;
TCReportIgnores: ENTRY Commander.CommandProc ~ { reportIgnoring ¬ TRUE };
TCDontReportIgnores: ENTRY Commander.CommandProc ~ { reportIgnoring ¬ FALSE };

RegisterPFSErrorReportProc: PUBLIC ENTRY PROC[proc: PFSErrorReportProc] = {
  FOR pL: LIST OF PFSErrorReportProc ¬ registeredPEProcsList, pL.rest UNTIL pL=NIL DO
    IF pL.first = proc THEN RETURN;
    ENDLOOP;
  registeredPEProcsList ¬ CONS[proc, registeredPEProcsList];
  };

GetPEProcsList: ENTRY PROC RETURNS[pL: LIST OF PFSErrorReportProc] = {
  RETURN[registeredPEProcsList]
  };

RegisterTCReportProc: PUBLIC ENTRY PROC[proc: TCReportProc, callIfOK: BOOL] = {
  FOR pL: LIST OF TCReportProcRec ¬ registeredTCProcsList, pL.rest UNTIL pL=NIL DO
    IF pL.first.proc = proc THEN RETURN;
    ENDLOOP;
  registeredTCProcsList ¬ CONS[[proc, callIfOK], registeredTCProcsList];
  };

GetTCProcsList: ENTRY PROC RETURNS[pL: LIST OF TCReportProcRec] = {
  RETURN[registeredTCProcsList]
  };

Commander.Register[ key: "TrickleChargeOneDF", proc: OneDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "TrickleChargeByIndividualDF", proc: IndDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "TrickleChargeAllDFs", proc: AllDfsCmdProc, doc: allDfsDoc, interpreted: TRUE];
Commander.Register[ key: "TrickleChargeDirectory", proc: DirCmdProc, doc: dirDoc, interpreted: TRUE];
Commander.Register[ key: "RetTrickleChargeOneDF", proc: RetOneDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "RetTrickleChargeByIndividualDF", proc: RetIndDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "RetTrickleChargeAllDFs", proc: RetAllDfsCmdProc, doc: allDfsDoc, interpreted: TRUE];
Commander.Register[ key: "RetTrickleChargeDirectory", proc: RetDirCmdProc, doc: dirDoc, interpreted: TRUE];
Commander.Register[ key: "TCOne", proc: OneDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "TCInd", proc: IndDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "TCAll", proc: AllDfsCmdProc, doc: allDfsDoc, interpreted: TRUE];
Commander.Register[ key: "TCDir", proc: DirCmdProc, doc: dirDoc, interpreted: TRUE];
Commander.Register[ key: "RTCOne", proc: RetOneDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "RTCInd", proc: RetIndDfCmdProc, doc: oneDfDoc, interpreted: TRUE];
Commander.Register[ key: "RTCAll", proc: RetAllDfsCmdProc, doc: allDfsDoc, interpreted: TRUE];
Commander.Register[ key: "RTCDir", proc: RetDirCmdProc, doc: dirDoc, interpreted: TRUE];
Commander.Register[ key: "TCReportIgnores", proc: TCReportIgnores];
Commander.Register[ key: "TCDontReportIgnores", proc: TCDontReportIgnores];

END.

TrickleChargeServerImpl.mesa
Copyright © 1985, 1988, 1990, 1991, 1992 by Xerox Corporation. All rights reserved.
Russ Atkinson (RRA) June 20, 1985 12:54:23 pm PDT
Dave Rumph, September 13, 1988 6:28:12 pm PDT
Bob Hagmann July 18, 1985 4:38:42 pm PDT
Rick Beach, January 23, 1986 1:37:04 pm PST
Eric Nickell February 25, 1986 12:10:10 pm PST
Carl Hauser, October 20, 1987 2:20:57 pm PDT
Bill Jackson (bj) April 14, 1988 8:41:57 pm PDT
Willie-Sue, October 19, 1988 4:20:11 pm PDT
Last edited by: Mik Lamming - January 26, 1989 4:09:53 pm GMT
Mik Lamming, January 27, 1989 10:17:50 am GMT
Willie-Sue Orr, March 28, 1990 4:29:38 pm PST
Willie-s, July 21, 1992 1:09 pm PDT

Implementation points:
1. Version numbers should be retained in order to keep the validity of the hints in DF files.
2. Don't move files that are already there.

Types
  includes version number; really just create date of the file for now
  byte count of the file (useful redundancy)
  indicates the state of the file (obvious)

Documentation

Option variables
  # of times to retry CrRPC error
  wait a while before retrying CrRPC error
  # of times to retry connectionRejected from STP
  # of seconds between retry attempts
  # of seconds between repeats (when using the R switch)
  # of seconds to wait after a file transfer (to keep IFS load down)

Command Procedures

A Programmer's quick hack
  if you want to experiment, you can try other values here! - (bj)

[data: RedBlackTree.UserData] RETURNS [stop: BOOL _ FALSE]
  called for each item in the table to do the moving. df files are collected and moved last as a "commit" of the successful completion of the tricklecharge pass.
  The remote file is already present, so we don't need to move it
  We are verifying stuff and the entry is NOT on the destination
  Sigh, we actually have to ship the bits
  It may be worth retrying later
  It may be worth retrying later
  Retry, this time establishing the connection first

This procedure is used to visit each file in a simple DF closure, where the imports are NOT followed, but the inclusions ARE followed.
  This is to deal with generated files that have been "abandoned" while being written - the name starts with fs.13.
  imports: REF DFUtilities.ImportsItem => {  -- this stuff is for me - (bj)
    IF ( processingImports ) THEN {
      file: ROPE _ TranslateHost[tcInfo, imports.path1];
      IF ( NOT imports.exported ) THEN { ERROR };
      IF imports.exported THEN  -- fix to make export chasing work
        SELECT imports.form FROM
          exports => {  the Exports Imports Using ALL case
            visitor[file, thisUID];
            VisitClosure[tcInfo, file, thisUID, visitor];
            };
          list => {  the Exports Imports Using [list] case
            IF ( imports.list = NIL ) THEN ERROR;  -- interface claims this can't happen
            visitor[file, imports.date.gmt];
            VisitClosure[tcInfo, file, thisUID, visitor, imports.list];
            };
          all => { ERROR };
          ENDCASE => { ERROR };
      };
    };
  for now, simply copy the df file to /tmp. Is there something better to do??

The mainline of DoIt
  Phase1, build up data base. Don't move any files.
  Trickling a df file together with the files it controls
    out.PutF["oneDF\n"];
  Trickling df files in a directory together with the files controlled by those df files.
[fullFName: attachedTo: PATH, uniqueID: UniqueID, bytes: INT, mutability: Mutability, fileType: FileType] RETURNS [continue: BOOL]
  make sure we get who from the correct place

[fullFName: attachedTo: PATH, uniqueID: UniqueID, bytes: INT, mutability: Mutability, fileType: FileType] RETURNS [continue: BOOL]
  IF ( depth _ depth + 1 ) > 10 THEN {
    out.PutF["depth: %g, path: %g\n", [integer[depth]], [rope[PFS.RopeFromPath[newPath]]] ];
    RETURN;
    };
  IF Rope.Match["!*", PFS.RopeFromPath[suffix]] THEN RETURN;
  This is likely to be the controlling file entry for an IFS (or it could just be a bogus file to be ignored)
  out.PutF["fulldirectory\n"];

Phase2, move files. Don't change the entries (except for the 'moved' field).
Phase2 1/2: move df files last.

out is for messages
  out.PutF["**in DoTCRetrieve, from: %g, to: %g\n", [rope[PFS.RopeFromPath[from]]], [rope[PFS.RopeFromPath[to]]] ];

RETURN[ ( uid1.egmt.time = uid2.egmt.time ) AND ( uid1.egmt.usecs = uid2.egmt.usecs ) AND ( uid1.host.a = uid2.host.a ) AND ( uid1.host.a = uid2.host.a ) ];

[data: RedBlackTree.UserData] RETURNS [stop: BOOL _ FALSE]
[data: RedBlackTree.UserData] RETURNS [RedBlackTree.Key]
[k: RedBlackTree.Key, data: RedBlackTree.UserData] RETURNS [Basics.Comparison]
  ent: FileEntry => {
    pCom: PBasics.PartialComparison _ PFSNames.Compare[key, ent.fullSrcNamePath];
    RETURN [ConvertPCom[pCom]];
    };

ParseTimeReference: PROC [ref: ROPE] RETURNS [valid, inRange: BOOL _ TRUE] ~ {
  IF ref=NIL THEN RETURN [TRUE, TRUE];
  {
    ENABLE ANY => GOTO Fail;
    pos1, pos2, pos3: INT;
    pos1 _ Rope.Find[s1: ref, s2: "("];
    IF pos1=-1 THEN GOTO Fail;
    pos2 _ Rope.Find[s1: ref, s2: "..", pos1: pos1+1];
    IF pos2=-1 THEN GOTO Fail;
    pos3 _ Rope.Find[s1: ref, s2: ")", pos1: pos2+2];
    inRange _ Tempus.PackedToSeconds[Tempus.Parse[rope: Rope.Substr[base: ref, start: pos1+1, len: pos2-pos1-1], search: FALSE].time] >
      Tempus.PackedToSeconds[Tempus.Parse[rope: Rope.Substr[base: ref, start: pos2+2, len: pos3-pos2-2], search: FALSE].time];
    EXITS Fail => RETURN [FALSE, FALSE]
    };
  };

[cmd: Handle] RETURNS [result: REF _ NIL, msg: ROPE _ NIL]
CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
  When parsing the command line, be prepared for failure. The error is reported to the user
  for now, simply copy the df file to /tmp. Is there something better to do??

Initialization

Rick Beach, January 23, 1986 1:34:16 pm PST
  changes to: action (local of TrickleCommandProc) changed timeLeft: NAT to INT because timeLeft could go negative!
Carl Hauser, February 3, 1987 10:44:11 am PST
  Register the command in the local directory, not ///Commands/
Last edited by: Mik Lamming - January 25, 1989 7:36:42 pm GMT
  Removed ERROR clause that makes export chasing fail.
  changes to: imports (local of EachItem, local of VisitClosure, local of DoIt)