DIRECTORY Basics USING [Comparison, PartialComparison, UnsafeBlock], BasicTime USING [GMT, Now, nullGMT], CedarProcess USING [DoWithPriority, Priority], Commander USING [CommandProc, Handle, Register], CommanderOps USING [Failed, NextArgument], DFUtilities USING [DirectoryItem, FileItem, Filter, IncludeItem, ParseFromStream, ProcessItemProc, UsingList], FileNames, IO, PFS, PFSNames, PFSPrefixMap, Process USING [CheckForAbort, GetPriority, Pause, Priority, SecondsToTicks], RedBlackTree USING [Compare, Create, Delete, DestroyTable, EachNode, EnumerateIncreasing, GetKey, Insert, Lookup, Table], Rope; TCAttachmentsImpl: CEDAR MONITOR IMPORTS BasicTime, CedarProcess, Commander, CommanderOps, DFUtilities, FileNames, IO, PFS, PFSNames, PFSPrefixMap, Process, RedBlackTree, Rope = BEGIN GMT: TYPE = BasicTime.GMT; ROPE: TYPE = Rope.ROPE; STREAM: TYPE = IO.STREAM; PATH: TYPE = PFS.PATH; Switches: TYPE = PACKED ARRAY CHAR['a..'z] OF BOOL; TCType: TYPE = { fullDirectory, oneDF, allDFs, individualDFs }; TCInfoList: TYPE = LIST OF TCInfo; TCInfo: TYPE = REF TCInfoRec; TCInfoRec: TYPE = RECORD[ prefix, fromTranslation, toTranslation: ROPE, fromTranslationPath: PATH, otherArg: ROPE, -- for oneDF and allDFs prefixPath: PATH, fromPTable, toPTable: PFSPrefixMap.PrefixTableList, switches: Switches, -- switches in effect at argument capture time useCopy: BOOL ¬ TRUE, -- rather than retrieve tcType: TCType ¬ fullDirectory ]; FileEntry: TYPE = REF FileEntryRep; FileEntryRep: TYPE = RECORD [ fullSrcName: ROPE, fullSrcNamePath: PATH, uid: PFS.UniqueID, len: INT, state: FileEntryState ]; FileEntryState: TYPE = {init, fetching, storing, moved}; dirDoc: ROPE = " srcDir dstDir moves files from srcDir to dstDir -a fileName: append output messages to fileName -b: bestEfforts (ignore unknown prefixes) -d: debug (inhibits file transfer) -o fileName: output messages to new fileName -r: retrieve rather than copy -v: verify (no transfers, messages only for missing files) -x: debugging output "; allDfsDoc: ROPE = "prefixMapEntry fromTranslation toTranslation {subdirForDfs} moves files from srcDir to dstDir -a fileName: append output messages to fileName -b: bestEfforts (ignore unknown prefixes) -d: debug (inhibits file transfer) -m: move dfs even if errors -o fileName: output messages to new fileName -r: retrieve rather than copy -v: verify (no transfers, messages only for missing files) -x: debugging output "; oneDfDoc: ROPE = "prefixMapEntry fromTranslation toTranslation subDirAndDfName moves files from srcDir to dstDir -a fileName: append output messages to fileName -b: bestEfforts (ignore unknown prefixes) -d: debug (inhibits file transfer) -l: localDF (assumes df name is local or full ({not prefixMapEntry relative}) -m: move dfs even if errors -o fileName: output messages to new fileName -r: retrieve rather than copy -v: verify (no transfers, messages only for missing files) -x: debugging output "; courierMaxRetries: NAT ¬ 150; courierRetrySeconds: NAT ¬ 5*60; maxRetries: NAT ¬ 10; retrySeconds: NAT ¬ 20; repeatSeconds: NAT ¬ 1800; secondsBetweenMoves: NAT ¬ 0; maxPauseTime: NAT = 1800; tempDFPath: PATH ~ PFS.PathFromRope["/tmp/TrickleChargeTemp.df"]; bangH: ROPE ~ "!H"; DoIt: PROC [tcInfo: TCInfo, table: RedBlackTree.Table, out: STREAM] = { bestEfforts: BOOL ¬ tcInfo.switches['b]; doFlush: BOOL ¬ tcInfo.switches['a] OR tcInfo.switches['o]; debug: BOOL ¬ tcInfo.switches['d]; verify: BOOL ¬ tcInfo.switches['v]; localDF: BOOL ¬ tcInfo.switches['l]; outputForDebugging: BOOL ¬ tcInfo.switches['x]; useRetrieve: BOOL ¬ 
tcInfo.switches['r]; moveDfsEvenIfErrors: BOOL ¬ tcInfo.switches['m]; enumerateForDfFiles: BOOL ¬ tcInfo.tcType = allDFs; dfList: LIST OF FileEntry ¬ NIL; errorsEncountered: BOOL ¬ FALSE; processingImports: BOOL ¬ tcInfo.switches['e]; filter: DFUtilities.Filter ¬ [FALSE, all, all, IF ( processingImports ) THEN all ELSE defining]; EachEntry: RedBlackTree.EachNode = { WITH data SELECT FROM entry: FileEntry => { IF entry.state # moved THEN { ext: ROPE ¬ FindExt[entry.fullSrcName]; IF Rope.Equal[ext, "df", FALSE] THEN dfList ¬ CONS[entry, dfList] ELSE MoveFile[entry]; }; RETURN; }; ENDCASE => ERROR; }; MoveFile: PROC [entry: FileEntry] = { isOnDst: BOOL ¬ FALSE; dstBytes: INT ¬ 0; dstUID: PFS.UniqueID ¬ PFS.nullUniqueID; fullDstPath: PATH ¬ PFSPrefixMap.Translate[entry.fullSrcNamePath, tcInfo.toPTable]; Process.CheckForAbort[]; [uniqueID: dstUID, bytes: dstBytes] ¬ PFS.FileInfo[fullDstPath ! PFS.Error => CONTINUE ]; IF EqualUIDs[dstUID, entry.uid] AND dstBytes = entry.len THEN { isOnDst ¬ TRUE; filesAlreadyThere ¬ CheckCount[filesAlreadyThere, out, doFlush]; }; SELECT TRUE FROM debug => { ShowEntry[out, entry]; IF isOnDst THEN out.PutRope[" (already on destination)\n"]; out.PutF1["~~fullDstPath: %g\n", [rope[PFS.RopeFromPath[fullDstPath]]] ]; }; isOnDst => { entry.state ¬ moved; }; verify => { ShowEntry[out, entry]; out.PutRope[" (NOT on destination)\n"]; }; ENDCASE => { dstName: ROPE = PFS.RopeFromPath[fullDstPath]; retriedCount: INT ¬ 0; Yes: PFS.NameConfirmProc ~ { RETURN[TRUE] }; DO ENABLE PFS.Error => { sleepSec: NAT ¬ 0; out.PutF["PFS.Error when storing %g\n %g\n", [rope[dstName]], [rope[error.explanation]] ]; IF doFlush THEN out.Flush[]; SELECT error.code FROM $connectionRejected => sleepSec ¬ retrySeconds; $CrRPC => sleepSec ¬ courierRetrySeconds; $quotaExceeded, $volumeFull => REJECT; ENDCASE; SELECT error.code FROM $connectionRejected => IF retriedCount < maxRetries THEN { retriedCount ¬ retriedCount + 1; Process.Pause[Process.SecondsToTicks[retrySeconds]]; LOOP; }; $CrRPC => IF retriedCount < courierMaxRetries THEN { retriedCount ¬ retriedCount + 1; Process.Pause[Process.SecondsToTicks[courierRetrySeconds]]; LOOP; }; $connectionTimedOut => IF retriedCount < maxRetries THEN { retriedCount ¬ retriedCount + 1; LOOP; }; ENDCASE; GO TO failed; }; IF NOT useRetrieve THEN { [] ¬ PFS.Attach[attachedFile: entry.fullSrcNamePath, attachment: fullDstPath, wantedUniqueID: entry.uid, remoteCheck: TRUE]; EXIT }; ENDLOOP; entry.state ¬ moved; filesMoved ¬ filesMoved + 1; out.PutF["Attached %g\n to %g\n", [rope[PFS.RopeFromPath[fullDstPath]]], [rope[PFS.RopeFromPath[entry.fullSrcNamePath]]] ]; IF doFlush THEN out.Flush[]; IF secondsBetweenMoves # 0 THEN Process.Pause[Process.SecondsToTicks[secondsBetweenMoves]]; EXITS failed => { errorsEncountered ¬ TRUE }; }; }; VisitEntry: PROC [name: PATH, uid: PFS.UniqueID] = { new: FileEntry ¬ NIL; bytes: INT ¬ 0; isa: BOOL; suffix: PATH ¬ NIL; ropeName: ROPE ¬ PFS.RopeFromPath[name]; xFromName, fileInfoName: PATH; Process.CheckForAbort[]; WITH RedBlackTree.Lookup[table, ropeName] SELECT FROM entry: FileEntry => IF EqualUIDs[entry.uid, uid] THEN RETURN; ENDCASE; [isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.prefixPath, name]; IF NOT isa OR Rope.Match["fs.13.*", PFSNames.ComponentRope[PFSNames.ShortName[name]] ] THEN { IF reportIgnoring THEN out.PutF1["\t**Ignoring the file %g\n", [rope[ropeName]] ]; RETURN; }; xFromName ¬ PFSPrefixMap.Translate[name, tcInfo.fromPTable]; [fullFName: fileInfoName, uniqueID: uid, bytes: bytes] ¬ PFS.FileInfo[name: xFromName, 
wantedUniqueID: uid ! PFS.Error => IF ( error.code = $unknownFile ) OR ( error.code = $unknownUniqueID ) OR ( error.code = $unknownCreatedTime ) OR ( error.code = $unknownServer AND bestEfforts ) THEN { out.PutF["\n*PFS.Error[%g]\n\tasking for %g(%g)\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[xFromName]]], IF uid.egmt.gmt = BasicTime.nullGMT THEN [rope["nullGMT"]] ELSE [time[uid.egmt.gmt]] ]; IF doFlush THEN out.Flush[]; fileInfoName ¬ NIL; CONTINUE; } ELSE REJECT]; IF fileInfoName = NIL THEN RETURN; WITH RedBlackTree.Lookup[table, ropeName] SELECT FROM entry: FileEntry => { IF EqualUIDs[entry.uid, uid] THEN RETURN; [] ¬ RedBlackTree.Delete[table, ropeName]; }; ENDCASE; new ¬ NEW[FileEntryRep ¬ [ fullSrcName: ropeName, fullSrcNamePath: fileInfoName, uid: uid, len: bytes, state: init]]; RedBlackTree.Insert[table, new, ropeName]; filesSeenDuringEnumeration ¬ CheckCount[filesSeenDuringEnumeration, out, doFlush]; bytesSeenDuringEnumeration ¬ bytesSeenDuringEnumeration + bytes; }; VisitClosure: PROC [tcInfo: TCInfo, dfName: PATH, uid: PFS.UniqueID, visitor: PROC [name: PATH, uid: PFS.UniqueID], usingList: REF DFUtilities.UsingList ¬ NIL] = { ENABLE PFS.Error => IF ( error.code = $unknownServer ) OR ( error.code = $unknownFile ) THEN { out.PutF["\n****PFS.Error[%g], in dfFile: %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ]; IF doFlush THEN out.Flush[]; }; EachItem: DFUtilities.ProcessItemProc = { WITH item SELECT FROM dir: REF DFUtilities.DirectoryItem => prefix ¬ PFS.PathFromRope[dir.path1]; file: REF DFUtilities.FileItem => { name: PATH = PFSNames.ExpandName[PFS.PathFromRope[file.name], prefix]; visitor[name, UIDFromGMT[file.date.gmt]]; }; incl: REF DFUtilities.IncludeItem => { thisUID: PFS.UniqueID = UIDFromGMT[incl.date.gmt]; path1: PATH ¬ PFS.PathFromRope[incl.path1]; visitor[path1, thisUID]; VisitClosure[tcInfo, path1, thisUID, visitor]; }; ENDCASE => { i: INT ¬ 0; }; -- handy for setting breakpoints - (bj) }; prefix: PATH ¬ NIL; in: STREAM; fileNamePath: PATH ¬ PFSPrefixMap.Translate[dfName, tcInfo.fromPTable]; in ¬ PFS.StreamOpen[ fileName: fileNamePath, wantedUniqueID: uid ! PFS.Error => IF error.code = $unknownFile THEN { out.PutF["PFS.Error[%g] - from dfFile %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ]; IF doFlush THEN out.Flush[]; in ¬ NIL; CONTINUE; } ELSE REJECT ]; IF in#NIL THEN { filter.list ¬ usingList; DFUtilities.ParseFromStream[in, EachItem, filter -- global variable, hack for now! - (bj) ! 
UNWIND => in.Close[]]; in.Close[] }; }; filesMoved: INT ¬ 0; filesInCache: INT ¬ 0; filesAlreadyThere: INT ¬ 0; filesSeenDuringEnumeration: INT ¬ 0; bytesSeenDuringEnumeration: INT ¬ 0; RedBlackTree.DestroyTable[table]; -- clear the table from the last run IF tcInfo.otherArg # NIL THEN out.PutF["\n\tMoving files (%g) from %g to %g\n", [rope[tcInfo.otherArg]], [rope[tcInfo.fromTranslation]], [rope[tcInfo.toTranslation]] ] ELSE out.PutF["\n\tMoving files from %g to %g\n", [rope[tcInfo.fromTranslation]], [rope[tcInfo.toTranslation]] ]; out.PutF1["\n***** Building file table at %g\n", [time[BasicTime.Now[]]] ]; IF doFlush THEN out.Flush[]; SELECT tcInfo.tcType FROM oneDF => { IF localDF THEN VisitClosure[tcInfo, PFS.PathFromRope[tcInfo.otherArg.Concat[bangH]], PFS.nullUniqueID, VisitEntry] ELSE VisitClosure[tcInfo, PFS.PathFromRope[tcInfo.prefix.Cat[tcInfo.otherArg, bangH]], PFS.nullUniqueID, VisitEntry]; }; allDFs => { EachDfFile: PFS.InfoProc = { dstUID: PFS.UniqueID ¬ PFS.nullUniqueID; doTheEnumerate: BOOL ¬ FALSE; isa: BOOL; suffix: PATH; Process.CheckForAbort[]; IF outputForDebugging THEN out.PutF1["~~%g\n", [rope[PFS.RopeFromPath[fullFName]]] ]; [isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.fromTranslationPath, fullFName]; IF NOT isa THEN { out.PutF["\t*** %g is not a prefix of %g => ignoring\n", [rope[tcInfo.fromTranslation]], [rope[PFS.RopeFromPath[fullFName]]] ]; IF doFlush THEN out.Flush[]; RETURN; }; [uniqueID: dstUID] ¬ PFS.FileInfo[PFSPrefixMap.Translate[fullFName, tcInfo.toPTable] ! PFS.Error => IF error.group=user THEN { doTheEnumerate ¬ TRUE; CONTINUE }]; IF NOT EqualUIDs[dstUID, uniqueID] THEN doTheEnumerate ¬ TRUE; IF doTheEnumerate THEN VisitClosure[tcInfo: tcInfo, dfName: fullFName, uid: PFS.nullUniqueID, visitor: VisitEntry]; }; who: ROPE ~ tcInfo.prefix.Concat["/Top/*.df!H"]; IF outputForDebugging THEN out.PutF1["$$doing %g\n", [rope[who]] ]; PFS.EnumerateForInfo[PFSPrefixMap.Translate[PFS.PathFromRope[who], tcInfo.fromPTable], EachDfFile]; }; fullDirectory => { -- Trickling a whole directory. 
bangH: PATH ~ PFS.PathFromRope["*!h"]; depth: INT ¬ 1; EachFile: PFS.InfoProc = { isa: BOOL; suffix: PATH; ropeName: ROPE; Process.CheckForAbort[]; IF fileType=PFS.tDirectory THEN { newPath: PATH; short: ROPE ¬ PFSNames.ComponentRope[PFSNames.ShortName[fullFName]]; IF short.Equal["."] OR short.Equal[".."] THEN RETURN; newPath ¬ PFSNames.Cat[fullFName, bangH]; PFS.EnumerateForInfo[newPath, EachFile]; RETURN; }; [isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.prefixPath, fullFName]; IF isa THEN { new: FileEntry ¬ NIL; ropeName ¬ PFS.RopeFromPath[fullFName]; WITH RedBlackTree.Lookup[table, ropeName] SELECT FROM entry: FileEntry => { IF EqualUIDs[entry.uid, uniqueID] THEN RETURN; [] ¬ RedBlackTree.Delete[table, ropeName]; }; ENDCASE; new ¬ NEW[FileEntryRep ¬ [ fullSrcName: ropeName, fullSrcNamePath: PFSPrefixMap.Translate[fullFName, tcInfo.fromPTable], uid: uniqueID, len: bytes, state: init]]; RedBlackTree.Insert[table, new, ropeName]; filesSeenDuringEnumeration ¬ CheckCount[filesSeenDuringEnumeration, out, doFlush]; bytesSeenDuringEnumeration ¬ bytesSeenDuringEnumeration + bytes; }; }; PFS.EnumerateForInfo[PFS.PathFromRope[tcInfo.fromTranslation.Concat["*!h"]], EachFile]; }; ENDCASE => { out.PutRope["***unknown case - exiting\n"]; RETURN; }; out.PutF["\nEnumerated new files: %g, bytes: %g\n", [integer[filesSeenDuringEnumeration]], [integer[bytesSeenDuringEnumeration]] ]; out.PutF1["\n***** Moving files at %g\n", [time[BasicTime.Now[]]] ]; IF doFlush THEN out.Flush[]; RedBlackTree.EnumerateIncreasing[table, EachEntry]; IF errorsEncountered AND NOT moveDfsEvenIfErrors THEN out.PutRope["There were errors so no df's will be moved\n"] ELSE { IF dfList # NIL THEN { IF errorsEncountered AND moveDfsEvenIfErrors THEN out.PutRope["\n***There were errors; df's will be moved anyway\n"]; out.PutRope["\n Starting to move df's\n"]; FOR entryList: LIST OF FileEntry ¬ dfList, entryList.rest WHILE entryList # NIL DO MoveFile[entryList.first]; ENDLOOP; }; }; out.PutF1["\n{Done at %g}\n", [time[BasicTime.Now[]]] ]; out.PutF["\tFiles moved: %g, alreadyRemote: %g\n\n", [integer[filesMoved]], [integer[filesAlreadyThere]] ]; IF doFlush THEN out.Flush[]; }; EqualUIDs: PROC[uid1, uid2: PFS.UniqueID] RETURNS[BOOL] = { RETURN[ ( uid1.egmt.gmt = uid2.egmt.gmt ) ]; -- all for now }; UIDFromGMT: PROC [gmt: BasicTime.GMT] RETURNS [PFS.UniqueID] ~ INLINE { RETURN [[egmt: [gmt: gmt, usecs: 0]]] }; CheckCount: PROC[num: INT, out: STREAM, doFlush: BOOL] RETURNS[res: INT] = { IF ( res ¬ num + 1 ) MOD 10 = 0 THEN IF res MOD 100 = 0 THEN { out.PutF1["(%g) ", [integer[res]] ]; IF doFlush THEN out.Flush[]; } ELSE out.PutChar['.]; }; FindExt: PROC[name: ROPE] RETURNS[ext: ROPE] = { short: ROPE ¬ FileNames.GetShortName[name, TRUE]; pos: INT ¬ Rope.FindBackward[short, "."]; IF pos = -1 THEN RETURN[short]; RETURN[Rope.Substr[short, pos+1]]; }; ShowTable: PROC [out: STREAM, table: RedBlackTree.Table] = { EachEntry: RedBlackTree.EachNode = { WITH data SELECT FROM entry: FileEntry => ShowEntry[out, entry]; ENDCASE => ERROR; }; RedBlackTree.EnumerateIncreasing[table, EachEntry]; }; ShowEntry: PROC [out: STREAM, entry: FileEntry] = { IO.PutF[out, "[name: %g, date: %g, len: %g, state: ", [rope[PFS.RopeFromPath[entry.fullSrcNamePath]]], [time[entry.uid.egmt.gmt]], [integer[entry.len]] ]; SELECT entry.state FROM init => out.PutRope["init]\n"]; fetching => out.PutRope["fetching]\n"]; storing => out.PutRope["storing]\n"]; moved => out.PutRope["moved]\n"]; ENDCASE; }; GetKey: RedBlackTree.GetKey = { RETURN [data]; }; Compare: RedBlackTree.Compare = { 
key: ROPE ¬ NIL; WITH k SELECT FROM ent: FileEntry => key ¬ ent.fullSrcName; rope: ROPE => key ¬ rope; ENDCASE => ERROR; WITH data SELECT FROM ent: FileEntry => RETURN [Rope.Compare[key, ent.fullSrcName, FALSE]]; ENDCASE; ERROR; }; ConvertPCom: PROC[pCom: Basics.PartialComparison] RETURNS[Basics.Comparison] = { SELECT pCom FROM incomparable, less => RETURN[less]; equal => RETURN[equal]; greater => RETURN[greater]; ENDCASE => RETURN[less]; }; AllDfsCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[allDFs, cmd] }; OneDfCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[oneDF, cmd] }; IndDfCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[individualDFs, cmd] }; DirCmdProc: Commander.CommandProc = { [result, msg] ¬ Common[fullDirectory, cmd] }; Common: PROC[tcType: TCType, cmd: Commander.Handle, useRetrieve: BOOL ¬ FALSE] RETURNS[result: REF, msg: ROPE] = { ProcessSwitches: PROC [arg: ROPE] RETURNS[Switches]= { sense: BOOL ¬ TRUE; switches: Switches ¬ ALL[FALSE]; FOR index: INT IN [0..Rope.Length[arg]) DO char: CHAR ¬ Rope.Fetch[arg, index]; SELECT char FROM '- => LOOP; '~ => {sense ¬ NOT sense; LOOP}; 'a, 'A => { outFileName ¬ CommanderOps.NextArgument[cmd]; switches[char] ¬ sense }; 'o, 'O => { outFileName ¬ CommanderOps.NextArgument[cmd]; switches[char] ¬ sense }; IN ['a..'z] => switches[char] ¬ sense; IN ['A..'Z] => switches[char + ('a-'A)] ¬ sense; ENDCASE; sense ¬ TRUE; ENDLOOP; RETURN[switches]; }; out: STREAM ¬ cmd.out; outFileName: ROPE; oldPriority: Process.Priority ¬ Process.GetPriority[]; table: RedBlackTree.Table ¬ RedBlackTree.Create[getKey: GetKey, compare: Compare]; this: TCInfo ¬ NEW[TCInfoRec]; this.tcType ¬ tcType; DO arg: ROPE ¬ CommanderOps.NextArgument[cmd ! CommanderOps.Failed => { msg ¬ errorMsg; GO TO failed } ]; ch: CHAR; Process.CheckForAbort[]; IF arg = NIL THEN EXIT; ch ¬ Rope.Fetch[arg, 0]; SELECT TRUE FROM ( ch = '- ) AND ( arg.Length[] = 2 ) => -- switch this.switches ¬ ProcessSwitches[arg]; ( ch = '{ ) => LOOP; -- ignore ( ch = '} ) => LOOP; -- ignore ( ch = '$ ) => LOOP; -- ignore ENDCASE => { -- translations or other things IF this.prefix = NIL THEN { this.prefix ¬ arg; this.prefixPath ¬ PFS.PathFromRope[this.prefix]; LOOP; }; IF this.fromTranslation = NIL THEN { this.fromTranslation ¬ arg; this.fromTranslationPath ¬ PFS.PathFromRope[arg]; LOOP; }; IF this.toTranslation = NIL THEN { this.toTranslation ¬ arg; LOOP; }; SELECT TRUE FROM ( this.otherArg # NIL) => { msg ¬ IO.PutFR1["Extra argument (%g)", [rope[arg]] ]; GO TO failed; }; ENDCASE => this.otherArg ¬ arg; }; ENDLOOP; IF ( this.prefix = NIL ) OR ( this.fromTranslation = NIL ) OR ( this.toTranslation = NIL ) THEN { msg ¬ "Not enough arguments given.\n"; GOTO failed }; IF useRetrieve THEN this.switches['r] ¬ TRUE; IF outFileName # NIL THEN { outPath: PFS.PATH ~ PFS.PathFromRope[outFileName]; outStream: STREAM; appending: BOOL ~ this.switches['a]; outStream ¬ PFS.StreamOpen[outPath, IF appending THEN $append ELSE $create ! 
PFS.Error => { out.PutRope[error.explanation]; CONTINUE} ]; IF outStream # NIL THEN { out.PutF["Messages will be %g %g\n", [rope[IF appending THEN "appended to" ELSE "written on"]], [rope[outFileName]] ]; out ¬ outStream; }; }; { Action: PROC = { this.fromPTable ¬ PFSPrefixMap.InsertIntoNewPTable[ PFS.PathFromRope[this.prefix], PFS.PathFromRope[this.fromTranslation] ]; this.toPTable ¬ PFSPrefixMap.InsertIntoNewPTable[ PFS.PathFromRope[this.prefix], PFS.PathFromRope[this.toTranslation] ]; this.toPTable ¬ PFSPrefixMap.Insert[ PFS.PathFromRope[this.fromTranslation], PFS.PathFromRope[this.toTranslation], this.toPTable ]; IF this.tcType = individualDFs THEN IndividualDFs[this, table, out] ELSE DoIt[this, table, out ! PFS.Error => { out.PutF["PFS.Error[%g], stopping this round.\n\t\t(at %g)\n\n", [rope[error.explanation]], [time[BasicTime.Now[]]] ]; IF outFileName # NIL THEN out.Flush[]; CONTINUE; }; ]; IF outFileName # NIL THEN out.Close[]; }; CedarProcess.DoWithPriority[background, Action]; }; EXITS failed => {result ¬ $Failure}; }; IndividualDFs: PROC[tcInfo: TCInfo, table: RedBlackTree.Table, out: STREAM] = { doFlush: BOOL ¬ tcInfo.switches['a] OR tcInfo.switches['o]; dfList: LIST OF ROPE ¬ NIL; filter: DFUtilities.Filter ¬ [FALSE, all, all, defining]; VisitClosure1: PROC [tcInfo: TCInfo, dfName: PATH, uid: PFS.UniqueID, visitor: PROC [name: PATH, uid: PFS.UniqueID], usingList: REF DFUtilities.UsingList ¬ NIL] = { ENABLE PFS.Error => IF ( error.code = $unknownServer ) OR ( error.code = $unknownFile ) THEN { out.PutF["\n****PFS.Error[%g], in dfFile: %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ]; IF doFlush THEN out.Flush[]; }; EachItem1: DFUtilities.ProcessItemProc = { WITH item SELECT FROM dir: REF DFUtilities.DirectoryItem => prefix ¬ PFS.PathFromRope[dir.path1]; file: REF DFUtilities.FileItem => { out.PutF1["\n*** fileName found in IndividualDF driver file (%g)", [rope[file.name]]]; out.PutRope["\n\tit will be ignored\n"]; }; incl: REF DFUtilities.IncludeItem => dfList ¬ CONS[incl.path1, dfList]; ENDCASE => { i: INT ¬ 0; }; -- handy for setting breakpoints - (bj) }; prefix: PATH ¬ NIL; in: STREAM; fileNamePath: PATH ¬ PFSPrefixMap.Translate[dfName, tcInfo.fromPTable]; in ¬ PFS.StreamOpen[ fileName: fileNamePath, wantedUniqueID: uid ! PFS.Error => IF error.code = $unknownFile THEN { out.PutF["PFS.Error[%g] - from dfFile %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ]; IF doFlush THEN out.Flush[]; in ¬ NIL; CONTINUE; } ELSE REJECT ]; IF in#NIL THEN { filter.list ¬ usingList; DFUtilities.ParseFromStream[in, EachItem1, filter -- global variable, hack for now! - (bj) ! UNWIND => in.Close[]]; in.Close[] }; }; IF tcInfo.switches['l] THEN VisitClosure1[tcInfo, PFS.PathFromRope[tcInfo.otherArg.Concat[bangH]], PFS.nullUniqueID, NIL] ELSE VisitClosure1[tcInfo, PFS.PathFromRope[tcInfo.prefix.Cat[tcInfo.otherArg, bangH]], PFS.nullUniqueID, NIL]; tcInfo.tcType ¬ oneDF; -- doing one df tcInfo.switches['l] ¬ TRUE; -- make it look like a local df (this is a full name FOR rL: LIST OF ROPE ¬ dfList, rL.rest UNTIL rL = NIL DO tcInfo.otherArg ¬ rL.first; DoIt[tcInfo, table, out ! 
PFS.Error => { out.PutF["PFS.Error[%g]; problems with %g.\n\t\t(at %g)\n\n", [rope[error.explanation]], [rope[rL.first]], [time[BasicTime.Now[]]] ]; IF doFlush THEN out.Flush[]; CONTINUE; }; ]; ENDLOOP; }; Path: PROC[pt: PFS.PATH] RETURNS[rp: ROPE] ~ { RETURN[PFS.RopeFromPath[pt] ] }; Translate: PROC[name: PATH, pTable: PFSPrefixMap.PrefixTableList] RETURNS [rp: ROPE] = { RETURN[PFS.RopeFromPath[PFSPrefixMap.Translate[name, pTable]]] }; reportIgnoring: BOOL ¬ FALSE; ATReportIgnores: ENTRY Commander.CommandProc ~ { reportIgnoring ¬ TRUE }; ATDontReportIgnores: ENTRY Commander.CommandProc ~ { reportIgnoring ¬ FALSE }; Commander.Register[ key: "AttachOneDF", proc: OneDfCmdProc, doc: oneDfDoc, interpreted: TRUE]; Commander.Register[ key: "AttachAllDFs", proc: AllDfsCmdProc, doc: allDfsDoc, interpreted: TRUE]; Commander.Register[ key: "AttachDirectory", proc: DirCmdProc, doc: dirDoc, interpreted: TRUE]; Commander.Register[ key: "ATReportIgnores", proc: ATReportIgnores]; Commander.Register[ key: "ATDontReportIgnores", proc: ATDontReportIgnores]; END.
TCAttachmentsImpl.mesa
Copyright © 1985, 1988, 1990, 1991, 1992 by Xerox Corporation. All rights reserved.
Russ Atkinson (RRA) June 20, 1985 12:54:23 pm PDT
Dave Rumph, September 13, 1988 6:28:12 pm PDT
Bob Hagmann July 18, 1985 4:38:42 pm PDT
Rick Beach, January 23, 1986 1:37:04 pm PST
Eric Nickell February 25, 1986 12:10:10 pm PST
Carl Hauser, October 20, 1987 2:20:57 pm PDT
Bill Jackson (bj) April 14, 1988 8:41:57 pm PDT
Willie-Sue, October 19, 1988 4:20:11 pm PDT
Last edited by: Mik Lamming - January 26, 1989 4:09:53 pm GMT
Mik Lamming, January 27, 1989 10:17:50 am GMT
Willie-Sue Orr, March 28, 1990 4:29:38 pm PST
Willie-s, August 10, 1992 4:44 pm PDT
Implementation points:
1. Version numbers should be retained in order to keep the validity of the hints in DF files.
2. Don't move files that are already there.
Types
includes version number
really just create date of the file for now
byte count of the file (useful redundancy)
indicates the state of the file (obvious)
Documentation
Option variables
# of times to retry CrRPC error
wait a while before retrying CrRPC error
# of times to retry connectionRejected from STP
# of seconds between retry attempts
# of seconds between repeats (when using the R switch)
# of seconds to wait after a file transfer (to keep IFS load down)
Command Procedures
A Programmer's quick hack
if you want to experiment, you can try other values here! - (bj)
[data: RedBlackTree.UserData] RETURNS [stop: BOOL _ FALSE]
called for each item in the table to do the moving. df files are collected and moved last as a "commit" of the successful completion of the tricklecharge pass.
The remote file is already present, so we don't need to move it
We are verifying stuff and the entry is NOT on the destination
Sigh, we actually have to ship the bits
It may be worth retrying later
It may be worth retrying later
Retry, this time establishing the connection first
This procedure is used to visit each file in a simple DF closure, where the imports are NOT followed, but the inclusions ARE followed.
This is to deal with generated files that have been "abandoned" while being written - the name starts with fs.13.
imports: REF DFUtilities.ImportsItem => { -- this stuff is for me - (bj)
  IF ( processingImports ) THEN {
    file: ROPE _ TranslateHost[tcInfo, imports.path1];
    IF ( NOT imports.exported ) THEN { ERROR };
    IF imports.exported THEN -- fix to make export chasing work
      SELECT imports.form FROM
        exports => { -- the Exports Imports Using ALL case
          visitor[file, thisUID]; VisitClosure[tcInfo, file, thisUID, visitor]; };
        list => { -- the Exports Imports Using [list] case
          IF ( imports.list = NIL ) THEN ERROR; -- interface claims this can't happen
          visitor[file, imports.date.gmt]; VisitClosure[tcInfo, file, thisUID, visitor, imports.list]; };
        all => { ERROR };
        ENDCASE => { ERROR };
    };
  };
The mainline of DoIt
Phase1, build up data base. Don't move any files.
Trickling a df file together with the files it controls
out.PutF["oneDF\n"];
Trickling df files in a directory together with the files controlled by those df files.
[fullFName: attachedTo: PATH, uniqueID: UniqueID, bytes: INT, mutability: Mutability, fileType: FileType] RETURNS [continue: BOOL]
make sure we get who from the correct place
[fullFName: attachedTo: PATH, uniqueID: UniqueID, bytes: INT, mutability: Mutability, fileType: FileType] RETURNS [continue: BOOL]
IF ( depth _ depth + 1 ) > 10 THEN { out.PutF["depth: %g, path: %g\n", [integer[depth]], [rope[PFS.RopeFromPath[newPath]]] ]; RETURN; };
IF Rope.Match["!*", PFS.RopeFromPath[suffix]] THEN RETURN;
This is likely to be the controlling file entry for an IFS (or it could just be a bogus file to be ignored)
out.PutF["fulldirectory\n"];
Phase2, move files. Don't change the entries (except for the 'moved' field).
Phase2 1/2: move df files last.
RETURN[ ( uid1.egmt.time = uid2.egmt.time ) AND ( uid1.egmt.usecs = uid2.egmt.usecs ) AND ( uid1.host.a = uid2.host.a ) AND ( uid1.host.a = uid2.host.a ) ];
[data: RedBlackTree.UserData] RETURNS [stop: BOOL _ FALSE]
[data: RedBlackTree.UserData] RETURNS [RedBlackTree.Key]
[k: RedBlackTree.Key, data: RedBlackTree.UserData] RETURNS [Basics.Comparison]
ent: FileEntry => { pCom: Basics.PartialComparison _ PFSNames.Compare[key, ent.fullSrcNamePath]; RETURN [ConvertPCom[pCom]]; };
[cmd: Handle] RETURNS [result: REF _ NIL, msg: ROPE _ NIL]
CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
When parsing the command line, be prepared for failure. The error is reported to the user
Initialization
Rick Beach, January 23, 1986 1:34:16 pm PST
changes to: action (local of TrickleCommandProc) changed timeLeft: NAT to INT because timeLeft could go negative!
Carl Hauser, February 3, 1987 10:44:11 am PST
Register the command in the local directory, not ///Commands/
Last edited by: Mik Lamming - January 25, 1989 7:36:42 pm GMT
Removed ERROR clause that makes export chasing fail.
changes to: imports (local of EachItem, local of VisitClosure, local of DoIt)
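Example invocations (illustrative only; the server and directory names below are hypothetical, and trailing-slash conventions should be checked against a real prefix map; only the argument order and switch letters are taken from Common and the doc ropes above):
  AttachAllDFs /Cedar /OriginalHost/Cedar /BackupHost/Cedar -b
  AttachOneDF /Cedar/ /OriginalHost/Cedar/ /BackupHost/Cedar/ Top/SomePackage.df
  AttachDirectory /Cedar/Users/Example/ /OriginalHost/Cedar/Users/Example/ /BackupHost/Cedar/Users/Example/ -v
In each case the first three arguments are, in order, the prefix map entry, the translation to read from, and the translation to write to; any fourth argument becomes otherArg (a subdirectory or df name, depending on the command).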