-- DoIt: one trickle-charge pass.  Phase 1 enumerates the source (one df,
-- all dfs, or a whole directory) into the red-black table; phase 2 moves
-- each file; df files are held back and moved last as a "commit".
DoIt:
PROC [tcInfo: TCInfo, table: RedBlackTree.Table, out:
STREAM] = {
-- Decode the command-line switches into named flags.
bestEfforts: BOOL ¬ tcInfo.switches['b];
doFlush: BOOL ¬ tcInfo.switches['a] OR tcInfo.switches['o];
debug: BOOL ¬ tcInfo.switches['d];
verify: BOOL ¬ tcInfo.switches['v];
localDF: BOOL ¬ tcInfo.switches['l];
outputForDebugging: BOOL ¬ tcInfo.switches['x];
useRetrieve: BOOL ¬ tcInfo.switches['r];
moveDfsEvenIfErrors: BOOL ¬ tcInfo.switches['m];
enumerateForDfFiles: BOOL ¬ tcInfo.tcType = allDFs;
-- df files discovered during phase 2 are queued here and moved last.
dfList: LIST OF FileEntry ¬ NIL;
errorsEncountered: BOOL ¬ FALSE;
A Programmer's quick hack
processingImports: BOOL ¬ tcInfo.switches['e];
-- Filter for DFUtilities.ParseFromStream: chase imports only when the
-- 'e switch was given, otherwise visit only defining entries.
filter: DFUtilities.Filter ¬ [
FALSE, all, all,
IF ( processingImports )
THEN all
ELSE defining];
if you want to experiment, you can try other values here! - (bj)
-- EachEntry: phase-2 visitor over the table; moves plain files now and
-- defers df files onto dfList.
EachEntry: RedBlackTree.EachNode = {
[data: RedBlackTree.UserData] RETURNS [stop: BOOL ← FALSE]
called for each item in the table to do the moving.
WITH data
SELECT
FROM
entry: FileEntry => {
IF entry.state # moved
THEN {
df files are collected and moved last as a "commit" of the successful completion of the tricklecharge pass.
ext: ROPE ¬ FindExt[entry.fullSrcName];
-- Case-insensitive extension test ("df" vs "DF" etc.).
IF Rope.Equal[ext, "df",
FALSE]
THEN dfList ¬
CONS[entry, dfList]
ELSE MoveFile[entry];
};
RETURN;
};
ENDCASE => ERROR;
};
-- MoveFile: ship one file to the destination unless an identical copy
-- (same unique id AND same length) is already there.  Transient
-- transport errors are retried; permanent failure sets
-- errorsEncountered via the failed exit.
MoveFile:
PROC [entry: FileEntry] = {
isOnDst: BOOL ¬ FALSE;
dstBytes: INT ¬ 0;
dstUID: PFS.UniqueID ¬ PFS.nullUniqueID;
fullDstPath: PATH ¬ PFSPrefixMap.Translate[entry.fullSrcNamePath, tcInfo.toPTable];
Process.CheckForAbort[];
-- Probe the destination; a PFS.Error just means "not there yet".
[uniqueID: dstUID, bytes: dstBytes] ¬
PFS.FileInfo[fullDstPath
! PFS.Error => CONTINUE
];
IF EqualUIDs[dstUID, entry.uid]
AND dstBytes = entry.len
THEN {
isOnDst ¬ TRUE;
filesAlreadyThere ¬ CheckCount[filesAlreadyThere, out, doFlush];
};
SELECT
TRUE
FROM
debug => {
-- Debug mode: show what would happen; nothing is moved.
ShowEntry[out, entry];
IF isOnDst THEN out.PutRope[" (already on destination)\n"];
out.PutF1["~~fullDstPath: %g\n", [rope[PFS.RopeFromPath[fullDstPath]]] ];
};
isOnDst => {
The remote file is already present, so we don't need to move it
entry.state ¬ moved;
};
verify => {
We are verifying stuff and the entry is NOT on the destination
ShowEntry[out, entry];
out.PutRope[" (NOT on destination)\n"];
};
ENDCASE => {
Sigh, we actually have to ship the bits
dstName: ROPE = PFS.RopeFromPath[fullDstPath];
retriedCount: INT ¬ 0;
Yes: PFS.NameConfirmProc ~ { RETURN[TRUE] };
-- Retry loop: PFS.Errors with retryable codes pause and LOOP (up to
-- a per-code retry limit); anything else exits via failed.
DO
ENABLE
PFS.Error => {
sleepSec: NAT ¬ 0;
out.PutF["PFS.Error when storing %g\n %g\n",
[rope[dstName]], [rope[error.explanation]] ];
IF doFlush THEN out.Flush[];
-- NOTE(review): sleepSec is assigned here but never read; the
-- second SELECT below pauses with the constants directly --
-- confirm this first SELECT is only kept for the REJECT arm.
SELECT error.code
FROM
$connectionRejected => sleepSec ¬ retrySeconds;
$CrRPC => sleepSec ¬ courierRetrySeconds;
$quotaExceeded, $volumeFull => REJECT;
ENDCASE;
SELECT error.code
FROM
$connectionRejected =>
It may be worth retrying later
IF retriedCount < maxRetries
THEN {
retriedCount ¬ retriedCount + 1;
Process.Pause[Process.SecondsToTicks[retrySeconds]];
LOOP;
};
$CrRPC =>
It may be worth retrying later
IF retriedCount < courierMaxRetries
THEN {
retriedCount ¬ retriedCount + 1;
Process.Pause[Process.SecondsToTicks[courierRetrySeconds]];
LOOP;
};
$connectionTimedOut =>
Retry, this time establishing the connection first
IF retriedCount < maxRetries
THEN {
retriedCount ¬ retriedCount + 1;
LOOP;
};
ENDCASE;
GO TO failed;
};
IF
NOT useRetrieve
THEN {
[] ¬ PFS.Attach[attachedFile: entry.fullSrcNamePath, attachment: fullDstPath, wantedUniqueID: entry.uid, remoteCheck: TRUE];
EXIT
};
-- NOTE(review): when useRetrieve is TRUE no arm of this loop body
-- copies the file or EXITs, so the DO..ENDLOOP would spin --
-- confirm whether a retrieve-based transfer path is missing here.
ENDLOOP;
entry.state ¬ moved;
filesMoved ¬ filesMoved + 1;
out.PutF["Attached %g\n to %g\n", [rope[PFS.RopeFromPath[fullDstPath]]], [rope[PFS.RopeFromPath[entry.fullSrcNamePath]]] ];
IF doFlush THEN out.Flush[];
-- Optional throttle between moves.
IF secondsBetweenMoves # 0 THEN
Process.Pause[Process.SecondsToTicks[secondsBetweenMoves]];
EXITS failed => { errorsEncountered ¬ TRUE };
};
};
-- VisitEntry: phase-1 visitor; records one file (name + uid) in the
-- table unless an entry with the same uid is already present.  Files
-- outside tcInfo.prefixPath and abandoned fs.13.* temporaries are
-- ignored.
VisitEntry:
PROC [name:
PATH, uid:
PFS.UniqueID] = {
This procedure is used to visit each file in a simple DF closure, where the imports are NOT followed, but the inclusions ARE followed.
new: FileEntry ¬ NIL;
bytes: INT ¬ 0;
isa: BOOL;
suffix: PATH ¬ NIL;
ropeName: ROPE ¬ PFS.RopeFromPath[name];
xFromName, fileInfoName: PATH;
Process.CheckForAbort[];
-- Fast path: already recorded with the same uid => nothing to do.
WITH RedBlackTree.Lookup[table, ropeName]
SELECT
FROM
entry: FileEntry => IF EqualUIDs[entry.uid, uid] THEN RETURN;
ENDCASE;
[isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.prefixPath, name];
IF
NOT isa
OR Rope.Match["fs.13.*", PFSNames.ComponentRope[PFSNames.ShortName[name]] ]
THEN {
This is to deal with generated files that have been "abandoned" while being written - the name starts with fs.13.
IF reportIgnoring THEN out.PutF1["\t**Ignoring the file %g\n", [rope[ropeName]] ];
RETURN;
};
-- Ask the source file system for the definitive name, uid and length;
-- tolerated error codes leave fileInfoName NIL so we skip the file.
xFromName ¬ PFSPrefixMap.Translate[name, tcInfo.fromPTable];
[fullFName: fileInfoName, uniqueID: uid, bytes: bytes] ¬
PFS.FileInfo[name: xFromName, wantedUniqueID: uid !
PFS.Error =>
IF ( error.code = $unknownFile )
OR ( error.code = $unknownUniqueID )
OR ( error.code = $unknownCreatedTime )
OR ( error.code = $unknownServer
AND bestEfforts )
THEN {
out.PutF["\n*PFS.Error[%g]\n\tasking for %g(%g)\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[xFromName]]], IF uid.egmt.gmt = BasicTime.nullGMT THEN [rope["nullGMT"]] ELSE [time[uid.egmt.gmt]] ];
IF doFlush THEN out.Flush[];
fileInfoName ¬ NIL;
CONTINUE;
}
ELSE REJECT];
IF fileInfoName = NIL THEN RETURN;
-- Re-check with the uid FileInfo returned; delete any stale entry for
-- this name before inserting the fresh one.
WITH RedBlackTree.Lookup[table, ropeName]
SELECT
FROM
entry: FileEntry => {
IF EqualUIDs[entry.uid, uid] THEN RETURN;
[] ¬ RedBlackTree.Delete[table, ropeName];
};
ENDCASE;
new ¬
NEW[FileEntryRep ¬ [
fullSrcName: ropeName,
fullSrcNamePath: fileInfoName,
uid: uid,
len: bytes,
state: init]];
RedBlackTree.Insert[table, new, ropeName];
filesSeenDuringEnumeration ¬ CheckCount[filesSeenDuringEnumeration, out, doFlush];
bytesSeenDuringEnumeration ¬ bytesSeenDuringEnumeration + bytes;
};
-- VisitClosure: parse a df file and apply visitor to every file it
-- names; recurses on Include items and (with the 'e switch) on
-- exported Imports items.  usingList restricts the parse via filter.
VisitClosure:
PROC [tcInfo: TCInfo, dfName:
PATH, uid:
PFS.UniqueID,
visitor:
PROC [name:
PATH, uid:
PFS.UniqueID],
usingList:
REF DFUtilities.UsingList ¬
NIL] = {
-- Unknown server/file during the walk is reported, not fatal.
ENABLE
PFS.Error =>
IF ( error.code = $unknownServer )
OR
( error.code = $unknownFile )
THEN {
out.PutF["\n****PFS.Error[%g], in dfFile: %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ];
IF doFlush THEN out.Flush[];
};
EachItem: DFUtilities.ProcessItemProc = {
WITH item
SELECT
FROM
-- Directory items establish the prefix for subsequent file names.
dir: REF DFUtilities.DirectoryItem => prefix ¬ PFS.PathFromRope[dir.path1];
file:
REF DFUtilities.FileItem => {
name: PATH = PFSNames.ExpandName[PFS.PathFromRope[file.name], prefix];
visitor[name, UIDFromGMT[file.date.gmt]];
};
incl:
REF DFUtilities.IncludeItem => {
thisUID: PFS.UniqueID = UIDFromGMT[incl.date.gmt];
path1: PATH ¬ PFS.PathFromRope[incl.path1];
visitor[path1, thisUID];
VisitClosure[tcInfo, path1, thisUID, visitor];
};
imports: REF DFUtilities.ImportsItem => { -- this stuff is for me - (bj)
IF ( processingImports ) THEN {
file: ROPE ← TranslateHost[tcInfo, imports.path1];
IF ( NOT imports.exported ) THEN { ERROR };
IF imports.exported THEN -- fix to make export chasing work
SELECT imports.form FROM
exports => {
the Exports Imports Using ALL case
-- NOTE(review): thisUID here is not declared in this arm; the
-- uid for this import is presumably UIDFromGMT of
-- imports.date.gmt -- confirm what thisUID resolves to.
visitor[file, thisUID];
VisitClosure[tcInfo, file, thisUID, visitor];
};
list => {
the Exports Imports Using [list] case
IF ( imports.list = NIL ) THEN ERROR; -- interface claims this can't happen
-- NOTE(review): passes the raw gmt where the other arms pass
-- a UniqueID built with UIDFromGMT -- confirm.
visitor[file, imports.date.gmt];
VisitClosure[tcInfo, file, thisUID, visitor, imports.list];
};
all => { ERROR };
ENDCASE => { ERROR };
};
};
ENDCASE => { i: INT ¬ 0; }; -- handy for setting breakpoints - (bj)
};
prefix: PATH ¬ NIL;
in: STREAM;
fileNamePath: PATH ¬ PFSPrefixMap.Translate[dfName, tcInfo.fromPTable];
-- Open the df; an unknown file is reported and yields in = NIL.
in ¬
PFS.StreamOpen[
fileName: fileNamePath,
wantedUniqueID: uid
!
PFS.Error =>
IF error.code = $unknownFile
THEN {
out.PutF["PFS.Error[%g] - from dfFile %g\n", [rope[error.explanation]],
[rope[PFS.RopeFromPath[dfName]]] ];
IF doFlush THEN out.Flush[];
in ¬ NIL;
CONTINUE;
}
ELSE REJECT
];
IF in#
NIL
THEN {
filter.list ¬ usingList;
DFUtilities.ParseFromStream[in, EachItem, filter
-- global variable, hack for now! - (bj)
! UNWIND => in.Close[]];
in.Close[]
};
};
The mainline of DoIt
-- Per-run counters, reported at the end of the pass.
filesMoved: INT ¬ 0;
filesInCache: INT ¬ 0;
filesAlreadyThere: INT ¬ 0;
filesSeenDuringEnumeration: INT ¬ 0;
bytesSeenDuringEnumeration: INT ¬ 0;
RedBlackTree.DestroyTable[table]; -- clear the table from the last run
IF tcInfo.otherArg #
NIL
THEN
out.PutF["\n\tMoving files (%g) from %g to %g\n", [rope[tcInfo.otherArg]], [rope[tcInfo.fromTranslation]], [rope[tcInfo.toTranslation]] ]
ELSE out.PutF["\n\tMoving files from %g to %g\n", [rope[tcInfo.fromTranslation]], [rope[tcInfo.toTranslation]] ];
Phase1, build up data base. Don't move any files.
out.PutF1["\n***** Building file table at %g\n", [time[BasicTime.Now[]]] ];
IF doFlush THEN out.Flush[];
SELECT tcInfo.tcType
FROM
oneDF => {
Trickling a df file together with the files it controls
out.PutF["oneDF\n"];
-- 'l means otherArg is already a full (local) name; otherwise it is
-- taken relative to tcInfo.prefix.
IF localDF
THEN VisitClosure[tcInfo,
PFS.PathFromRope[tcInfo.otherArg.Concat[bangH]],
PFS.nullUniqueID, VisitEntry]
ELSE VisitClosure[tcInfo, PFS.PathFromRope[tcInfo.prefix.Cat[tcInfo.otherArg, bangH]], PFS.nullUniqueID, VisitEntry];
};
allDFs => {
Trickling df files in a directory together with the files controlled by those df files.
-- EachDfFile: for each Top df, visit its closure only when the
-- destination copy is missing or has a different uid.
EachDfFile:
PFS.InfoProc = {
[fullFName: attachedTo: PATH, uniqueID: UniqueID, bytes: INT, mutability: Mutability, fileType: FileType] RETURNS [continue: BOOL]
dstUID: PFS.UniqueID ¬ PFS.nullUniqueID;
doTheEnumerate: BOOL ¬ FALSE;
isa: BOOL;
suffix: PATH;
Process.CheckForAbort[];
IF outputForDebugging THEN
out.PutF1["~~%g\n", [rope[PFS.RopeFromPath[fullFName]]] ];
[isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.fromTranslationPath, fullFName];
IF
NOT isa
THEN {
out.PutF["\t*** %g is not a prefix of %g => ignoring\n",
[rope[tcInfo.fromTranslation]], [rope[PFS.RopeFromPath[fullFName]]] ];
IF doFlush THEN out.Flush[];
RETURN;
};
-- A user-class PFS.Error on the destination probe means the df is
-- not there, so its closure must be enumerated.
[uniqueID: dstUID] ¬ PFS.FileInfo[PFSPrefixMap.Translate[fullFName, tcInfo.toPTable]
! PFS.Error => IF error.group=user THEN
{ doTheEnumerate ¬ TRUE; CONTINUE }];
IF NOT EqualUIDs[dstUID, uniqueID] THEN doTheEnumerate ¬ TRUE;
IF doTheEnumerate THEN VisitClosure[tcInfo: tcInfo, dfName: fullFName, uid: PFS.nullUniqueID, visitor: VisitEntry];
};
who: ROPE ~ tcInfo.prefix.Concat["/Top/*.df!H"];
IF outputForDebugging THEN out.PutF1["$$doing %g\n", [rope[who]] ];
make sure we get who from the correct place
PFS.EnumerateForInfo[PFSPrefixMap.Translate[PFS.PathFromRope[who], tcInfo.fromPTable], EachDfFile];
};
fullDirectory => {
-- Trickling a whole directory.
bangH: PATH ~ PFS.PathFromRope["*!h"];
depth: INT ¬ 1;
-- EachFile: recursive enumeration; subdirectories recurse (bounded
-- by depth), plain files under the prefix go into the table.
EachFile:
PFS.InfoProc = {
[fullFName: attachedTo: PATH, uniqueID: UniqueID, bytes: INT, mutability: Mutability, fileType: FileType] RETURNS [continue: BOOL]
isa: BOOL;
suffix: PATH;
ropeName: ROPE;
Process.CheckForAbort[];
IF fileType=
PFS.tDirectory
THEN {
newPath: PATH;
short: ROPE ¬ PFSNames.ComponentRope[PFSNames.ShortName[fullFName]];
IF short.Equal["."] OR short.Equal[".."] THEN RETURN;
newPath ¬ PFSNames.Cat[fullFName, bangH];
-- NOTE(review): depth is incremented but never decremented on the
-- way back out, so this bounds the total number of directories
-- visited rather than the nesting depth -- confirm intent.
IF ( depth ← depth + 1 ) > 10 THEN {
out.PutF["depth: %g, path: %g\n", [integer[depth]], [rope[PFS.RopeFromPath[newPath]]] ];
RETURN;
};
PFS.EnumerateForInfo[newPath, EachFile];
RETURN;
};
[isa, suffix] ¬ PFSNames.IsAPrefix[tcInfo.prefixPath, fullFName];
IF isa
THEN {
new: FileEntry ¬ NIL;
ropeName ¬ PFS.RopeFromPath[fullFName];
-- Replace any stale entry (different uid) for this name.
WITH RedBlackTree.Lookup[table, ropeName]
SELECT
FROM
entry: FileEntry => {
IF EqualUIDs[entry.uid, uniqueID] THEN RETURN;
[] ¬ RedBlackTree.Delete[table, ropeName];
};
ENDCASE;
IF Rope.Match["!*", PFS.RopeFromPath[suffix]] THEN RETURN;
This is likely to be the controlling file entry for an IFS
(or it could just be a bogus file to be ignored)
new ¬ NEW[FileEntryRep ¬ [
fullSrcName: ropeName,
fullSrcNamePath: PFSPrefixMap.Translate[fullFName, tcInfo.fromPTable],
uid: uniqueID,
len: bytes,
state: init]];
RedBlackTree.Insert[table, new, ropeName];
filesSeenDuringEnumeration ¬ CheckCount[filesSeenDuringEnumeration, out, doFlush];
bytesSeenDuringEnumeration ¬ bytesSeenDuringEnumeration + bytes;
};
};
out.PutF["fulldirectory\n"];
PFS.EnumerateForInfo[PFS.PathFromRope[tcInfo.fromTranslation.Concat["*!h"]], EachFile];
};
ENDCASE => {
out.PutRope["***unknown case - exiting\n"];
RETURN;
};
out.PutF["\nEnumerated new files: %g, bytes: %g\n",
[integer[filesSeenDuringEnumeration]], [integer[bytesSeenDuringEnumeration]] ];
Phase2, move files. Don't change the entries (except for the 'moved' field).
out.PutF1["\n***** Moving files at %g\n", [time[BasicTime.Now[]]] ];
IF doFlush THEN out.Flush[];
RedBlackTree.EnumerateIncreasing[table, EachEntry];
Phase2 1/2: move df files last.
-- df files are the commit record: moving them only after every plain
-- file succeeded (unless 'm overrides) keeps the destination coherent.
IF errorsEncountered
AND
NOT moveDfsEvenIfErrors
THEN
out.PutRope["There were errors so no df's will be moved\n"]
ELSE {
IF dfList #
NIL
THEN {
IF errorsEncountered AND moveDfsEvenIfErrors THEN
out.PutRope["\n***There were errors; df's will be moved anyway\n"];
out.PutRope["\n Starting to move df's\n"];
FOR entryList:
LIST
OF FileEntry ¬ dfList, entryList.rest
WHILE entryList #
NIL
DO
MoveFile[entryList.first];
ENDLOOP;
};
};
out.PutF1["\n{Done at %g}\n", [time[BasicTime.Now[]]] ];
out.PutF["\tFiles moved: %g, alreadyRemote: %g\n\n",
[integer[filesMoved]], [integer[filesAlreadyThere]] ];
IF doFlush THEN out.Flush[];
};
-- Common: shared command-line driver for the trickle-charge commands.
-- Parses switches and the positional arguments (prefix, fromTranslation,
-- toTranslation, optional otherArg), optionally redirects output to a
-- log file, then runs DoIt or IndividualDFs at background priority.
Common:
PROC[tcType: TCType, cmd: Commander.Handle, useRetrieve:
BOOL ¬
FALSE]
RETURNS[result:
REF, msg:
ROPE] = {
[cmd: Handle] RETURNS [result: REF ← NIL, msg: ROPE ← NIL]
CommandObject = [in, out, err: STREAM, commandLine, command: ROPE, ...]
-- ProcessSwitches: turn a "-xyz" argument into a Switches array.
-- '~ negates the sense of the following switch; 'a and 'o also consume
-- the next command argument as the output (log) file name.
ProcessSwitches:
PROC [arg:
ROPE]
RETURNS[Switches]= {
sense: BOOL ¬ TRUE;
switches: Switches ¬ ALL[FALSE];
FOR index:
INT
IN [0..Rope.Length[arg])
DO
char: CHAR ¬ Rope.Fetch[arg, index];
SELECT char
FROM
'- => LOOP;
'~ => {sense ¬ NOT sense; LOOP};
-- NOTE(review): for 'A and 'O the raw upper-case char indexes
-- switches, unlike the ['A..'Z] arm below which folds to lower
-- case -- confirm that -A / -O set the intended slot.
'a, 'A => { outFileName ¬ CommanderOps.NextArgument[cmd]; switches[char] ¬ sense };
'o, 'O => { outFileName ¬ CommanderOps.NextArgument[cmd]; switches[char] ¬ sense };
IN ['a..'z] => switches[char] ¬ sense;
IN ['A..'Z] => switches[char + ('a-'A)] ¬ sense;
ENDCASE;
sense ¬ TRUE;
ENDLOOP;
RETURN[switches];
};
out: STREAM ¬ cmd.out;
outFileName: ROPE;
oldPriority: Process.Priority ¬ Process.GetPriority[];
table: RedBlackTree.Table ¬ RedBlackTree.Create[getKey: GetKey, compare: Compare];
this: TCInfo ¬ NEW[TCInfoRec];
this.tcType ¬ tcType;
-- Argument loop: two-character "-x" arguments are switches; {, } and $
-- are ignored; remaining arguments fill prefix, fromTranslation,
-- toTranslation, then otherArg, in that order.
DO
arg:
ROPE ¬ CommanderOps.NextArgument[cmd ! CommanderOps.Failed => { msg ¬ errorMsg;
GO
TO failed } ];
When parsing the command line, be prepared for failure. The error is reported to the user
ch: CHAR;
Process.CheckForAbort[];
IF arg = NIL THEN EXIT;
ch ¬ Rope.Fetch[arg, 0];
SELECT
TRUE
FROM
( ch = '- )
AND ( arg.Length[] = 2 ) =>
-- switch
this.switches ¬ ProcessSwitches[arg];
( ch = '{ ) => LOOP; -- ignore
( ch = '} ) => LOOP; -- ignore
( ch = '$ ) => LOOP; -- ignore
ENDCASE => {
-- translations or other things
IF this.prefix =
NIL
THEN {
this.prefix ¬ arg;
this.prefixPath ¬ PFS.PathFromRope[this.prefix];
LOOP;
};
IF this.fromTranslation =
NIL
THEN {
this.fromTranslation ¬ arg;
this.fromTranslationPath ¬ PFS.PathFromRope[arg];
LOOP;
};
IF this.toTranslation =
NIL
THEN {
this.toTranslation ¬ arg;
LOOP;
};
-- A fifth positional argument is an error.
SELECT
TRUE
FROM
( this.otherArg #
NIL) => {
msg ¬ IO.PutFR1["Extra argument (%g)", [rope[arg]] ];
GO TO failed;
};
ENDCASE => this.otherArg ¬ arg;
};
ENDLOOP;
IF ( this.prefix = NIL ) OR ( this.fromTranslation = NIL ) OR ( this.toTranslation = NIL ) THEN
{ msg ¬ "Not enough arguments given.\n"; GOTO failed };
IF useRetrieve THEN this.switches['r] ¬ TRUE;
-- Optionally redirect messages to a log file ('a appends, 'o creates);
-- on open failure we keep writing to the commander stream.
IF outFileName #
NIL
THEN {
outPath: PFS.PATH ~ PFS.PathFromRope[outFileName];
outStream: STREAM;
appending: BOOL ~ this.switches['a];
outStream ¬ PFS.StreamOpen[outPath, IF appending THEN $append ELSE $create ! PFS.Error => {
out.PutRope[error.explanation]; CONTINUE} ];
IF outStream #
NIL
THEN {
out.PutF["Messages will be %g %g\n",
[rope[IF appending THEN "appended to" ELSE "written on"]],
[rope[outFileName]] ];
out ¬ outStream;
};
};
{
-- Action: build the from/to prefix-translation tables, then run the
-- selected command; a PFS.Error only stops this round.
Action:
PROC = {
this.fromPTable ¬ PFSPrefixMap.InsertIntoNewPTable[
PFS.PathFromRope[this.prefix],
PFS.PathFromRope[this.fromTranslation] ];
this.toPTable ¬ PFSPrefixMap.InsertIntoNewPTable[
PFS.PathFromRope[this.prefix],
PFS.PathFromRope[this.toTranslation] ];
-- Also map fromTranslation directly to toTranslation so already-
-- translated source names still reach the destination.
this.toPTable ¬ PFSPrefixMap.Insert[
PFS.PathFromRope[this.fromTranslation],
PFS.PathFromRope[this.toTranslation],
this.toPTable ];
IF this.tcType = individualDFs
THEN IndividualDFs[this, table, out]
ELSE DoIt[this, table, out
!
PFS.Error => {
out.PutF["PFS.Error[%g], stopping this round.\n\t\t(at %g)\n\n", [rope[error.explanation]], [time[BasicTime.Now[]]] ];
IF outFileName # NIL THEN out.Flush[];
CONTINUE;
};
];
IF outFileName # NIL THEN out.Close[];
};
CedarProcess.DoWithPriority[background, Action];
};
EXITS
failed => {result ¬ $Failure};
};
-- IndividualDFs: read a driver df whose Include items name the df files
-- to trickle, then run a normal oneDF pass (DoIt) once per included df.
-- File items found in the driver are reported and ignored.
IndividualDFs:
PROC[tcInfo: TCInfo, table: RedBlackTree.Table, out:
STREAM] = {
doFlush: BOOL ¬ tcInfo.switches['a] OR tcInfo.switches['o];
-- Accumulates the paths of the included dfs, in reverse order of
-- appearance in the driver file.
dfList: LIST OF ROPE ¬ NIL;
filter: DFUtilities.Filter ¬ [FALSE, all, all, defining];
-- VisitClosure1: parse the driver df, pushing each Include item's path
-- onto dfList.  The visitor parameter mirrors DoIt's VisitClosure for
-- symmetry but is never called by EachItem1 (callers pass NIL).
VisitClosure1:
PROC [tcInfo: TCInfo, dfName:
PATH, uid:
PFS.UniqueID,
visitor:
PROC [name:
PATH, uid:
PFS.UniqueID],
usingList:
REF DFUtilities.UsingList ¬
NIL] = {
-- Unknown server/file during the walk is reported, not fatal.
ENABLE
PFS.Error =>
IF ( error.code = $unknownServer )
OR
( error.code = $unknownFile )
THEN {
out.PutF["\n****PFS.Error[%g], in dfFile: %g\n", [rope[error.explanation]], [rope[PFS.RopeFromPath[dfName]]] ];
IF doFlush THEN out.Flush[];
};
EachItem1: DFUtilities.ProcessItemProc = {
WITH item
SELECT
FROM
dir: REF DFUtilities.DirectoryItem => prefix ¬ PFS.PathFromRope[dir.path1];
file:
REF DFUtilities.FileItem => {
out.PutF1["\n*** fileName found in IndividualDF driver file (%g)", [rope[file.name]]];
out.PutRope["\n\tit will be ignored\n"];
};
incl:
REF DFUtilities.IncludeItem =>
dfList ¬ CONS[incl.path1, dfList];
ENDCASE => { i: INT ¬ 0; }; -- handy for setting breakpoints - (bj)
};
prefix: PATH ¬ NIL;
in: STREAM;
fileNamePath: PATH ¬ PFSPrefixMap.Translate[dfName, tcInfo.fromPTable];
-- Open the driver df; an unknown file is reported and yields in = NIL.
in ¬
PFS.StreamOpen[
fileName: fileNamePath,
wantedUniqueID: uid
!
PFS.Error =>
IF error.code = $unknownFile
THEN {
out.PutF["PFS.Error[%g] - from dfFile %g\n", [rope[error.explanation]],
[rope[PFS.RopeFromPath[dfName]]] ];
IF doFlush THEN out.Flush[];
in ¬ NIL;
CONTINUE;
}
ELSE REJECT
];
IF in#
NIL
THEN {
filter.list ¬ usingList;
DFUtilities.ParseFromStream[in, EachItem1, filter
-- global variable, hack for now! - (bj)
! UNWIND => in.Close[]];
in.Close[]
};
};
-- Collect the df list from the driver ('l means otherArg is already a
-- full local name; otherwise it is relative to tcInfo.prefix).
IF tcInfo.switches['l]
THEN VisitClosure1[tcInfo,
PFS.PathFromRope[tcInfo.otherArg.Concat[bangH]],
PFS.nullUniqueID,
NIL]
ELSE VisitClosure1[tcInfo, PFS.PathFromRope[tcInfo.prefix.Cat[tcInfo.otherArg, bangH]], PFS.nullUniqueID, NIL];
-- Reconfigure tcInfo so each collected df runs as a local oneDF pass.
tcInfo.tcType ¬ oneDF; -- doing one df
tcInfo.switches['l] ¬ TRUE; -- make it look like a local df (this is a full name
FOR rL:
LIST
OF
ROPE ¬ dfList, rL.rest
UNTIL rL =
NIL
DO
tcInfo.otherArg ¬ rL.first;
-- A PFS.Error aborts only this df's pass, not the whole list.
DoIt[tcInfo, table, out
!
PFS.Error => {
out.PutF["PFS.Error[%g]; problems with %g.\n\t\t(at %g)\n\n", [rope[error.explanation]], [rope[rL.first]], [time[BasicTime.Now[]]] ];
IF doFlush THEN out.Flush[];
CONTINUE;
};
];
ENDLOOP;
};