UnReleaseTool.mesa
Copyright © 1985, 1986 by Xerox Corporation. All rights reserved.
Russ Atkinson, March 12, 1985 3:30:26 pm PST
Doug Wyatt, May 13, 1986 3:20:44 pm PDT
DIRECTORY
BasicTime,
Commander,
DefaultRemoteNames,
DFOperations,
DFUtilities,
FS,
FSBackdoor,
IO,
MessageWindow,
Process,
Rope,
SymTab,
ViewerIO;
UnReleaseTool: CEDAR PROGRAM
IMPORTS BasicTime, Commander, DefaultRemoteNames, DFUtilities, FS, FSBackdoor, IO, MessageWindow, Process, Rope, SymTab, ViewerIO
= BEGIN
CommentItem: TYPE = DFUtilities.CommentItem;
Date: TYPE = DFUtilities.Date;
DirectoryItem: TYPE = DFUtilities.DirectoryItem;
FileItem: TYPE = DFUtilities.FileItem;
Filter: TYPE = DFUtilities.Filter;
ImportsItem: TYPE = DFUtilities.ImportsItem;
IncludeItem: TYPE = DFUtilities.IncludeItem;
LORA: TYPE = LIST OF REF ANY;
ROPE: TYPE = Rope.ROPE;
STREAM: TYPE = IO.STREAM;
WhiteSpaceItem: TYPE = DFUtilities.WhiteSpaceItem;
copyAll: BOOL ← FALSE;
If TRUE, this switch forces all files to be copied, even though it would save space to just point backwards from the DF file.
UnReleaseToolCommand: Commander.CommandProc = {
debugging, moving: BOOL ← FALSE;
inV,outV: STREAM ← NIL;
inStream: STREAM ← NIL;
dfTable: SymTab.Ref ← NIL;
dfFile: ROPE ← "CurrentCedar.df";
mapPrefix: ROPE ← "Cedar";
releaseDirectory: ROPE ← NIL;
releaseHost: ROPE ← NIL;
unDir: ROPE ← "[Indigo]<Cedar5.2>";
ris: STREAM = IO.RIS[cmd.commandLine];
sourceMapName,symbolsMapName: ROPE ← NIL;
Cleanup: PROC = {
IF inStream # NIL THEN {IO.Close[inStream]; inStream ← NIL};
IF inV # NIL THEN {IO.Close[inV]; inV ← NIL};
IF outV # NIL THEN {IO.Close[outV]; outV ← NIL};
};
TimedMessage: PROC [msg: ROPE] = {
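-- msg is an IO.PutF format string containing a single %g, which is filled in with the current time.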
IO.PutF[outV, msg, [time[BasicTime.Now[]]]];
};
WITH cmd.procData.clientData SELECT FROM
list: LIST OF REF ANY => {
WHILE list # NIL DO
SELECT list.first FROM
$debug => debugging ← TRUE;
$move => moving ← TRUE;
ENDCASE;
list ← list.rest;
ENDLOOP;
};
ENDCASE;
The command line has the following items (all optional):
unDir dfFile mapPrefix releaseHost releaseDirectory
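-- Hypothetical example: "UnRelease [Indigo]<Cedar5.2> CurrentCedar.df Cedar Indigo <Cedar6.0>"; trailing items that are omitted presumably keep the defaults declared above.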
unDir ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
dfFile ← DefaultExtension[
IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token, ".df"];
mapPrefix ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
releaseHost ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
releaseDirectory ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
inStream ← FS.StreamOpen[dfFile];
[inV,outV] ← ViewerIO.CreateViewerStreams[
"UnReleaseTool.log", NIL, "UnReleaseTool.log", FALSE];
{ENABLE
UNWIND => {
TimedMessage["\n\n**** Aborting at %g ****\n"];
Cleanup[];
};
IF moving THEN {
list: DFTableEntryList ← NIL;
TimedMessage["\n\nFile Moving Phase of UnReleaseTool starting at %g\n"];
[dfTable, list] ← BuildDFTable[dfFile, outV, unDir];
TimedMessage["\nDF table built at %g\n"];
MoveFiles[dfTable, list, outV, unDir, debugging];
TimedMessage["\n\nFile Moving Phase of UnReleaseTool ending at %g\n"];
};
};
Cleanup[];
};
showInfo: BOOL ← FALSE;
Interact: DFOperations.InteractionProc = {
[interaction: REF, clientData: REF]
RETURNS [abort: BOOL ← FALSE, abortMessageForLog: ROPE ← NIL, response: REF ← NIL]
out: STREAM = NARROW[clientData];
IF showInfo THEN
WITH interaction SELECT FROM
info: REF DFOperations.InfoInteraction => {
IO.PutRope[out, info.message];
};
dfInfo: REF DFOperations.DFInfoInteraction => {
IO.PutRope[out, dfInfo.message];
};
abort: REF DFOperations.AbortInteraction => {
Process.CheckForAbort[];
};
ENDCASE;
};
DefaultExtension: PROC [name: ROPE, ext: ROPE] RETURNS [ROPE] = {
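-- Appends ext when name carries no !version and does not already end in ext: for example, DefaultExtension["Foo", ".df"] yields "Foo.df", while "Foo.df" and "Foo.df!3" come back unchanged.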
len: INT = Rope.Length[name];
eLen: INT = Rope.Length[ext];
pos, bang, dot: INT ← len;
WHILE pos > 0 DO
posM: INT = pos-1;
SELECT Rope.Fetch[name, posM] FROM
'! => bang ← dot ← posM;
'. => dot ← posM;
'>, '/, '] => EXIT;
ENDCASE;
pos ← posM;
ENDLOOP;
IF bang = len
AND (bang-dot # eLen OR Rope.Run[name, dot, ext, 0, FALSE] # eLen)
THEN name ← Rope.Concat[name, ext];
RETURN [name];
};
DFTableEntry: TYPE = REF DFTableEntryRep;
DFTableEntryList: TYPE = LIST OF DFTableEntry;
DFTableEntryRep: TYPE = RECORD [
sourceName: ROPE,
tempName: ROPE,
destName: ROPE,
time: BasicTime.GMT
];
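-- sourceName is the version-stripped full name of the DF file (also its key in the table); tempName is the local temporary copy written by MoveDFContents; destName is where the file will finally live (under unDir when one is supplied); time is the distinct create time assigned in BuildDFTable.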
BuildDFTable: PROC
[topDF: ROPE, log: STREAM, unDir: ROPE ← NIL]
RETURNS [tab: SymTab.Ref ← NIL, list: DFTableEntryList ← NIL] = {
Build a table of source DF names and entries. Each entry gets a distinct desired create time, which is bounded by the start time of the procedure and the start time plus N seconds, where N is the number of DF files.
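-- Assuming BasicTime.Update[now, index] advances now by index seconds, the first DF file gets create time now, the second now plus one second, and so on, so each temp DF file ends up with a distinct create time.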
now: BasicTime.GMT = BasicTime.Now[];
index: INT ← 0;
tail: DFTableEntryList ← NIL;
EachDF: PROC [dfName: ROPE, dfDate: Date ← []] = {
EachDFPass[dfName, dfDate];
};
EachDFPass: PROC [dfName: ROPE, dfDate: Date ← []] = {
inStream: STREAM ← NIL;
currentDir: REF DirectoryItem ← NIL;
EachItem: DFUtilities.ProcessItemProc = {
stop ← FALSE;
WITH item SELECT FROM
dirItem: REF DirectoryItem => {
currentDir ← dirItem;
};
inclItem: REF IncludeItem => {
EachDF[inclItem.path1, inclItem.date];
};
ENDCASE;
};
Cleanup: PROC = {
IF inStream # NIL THEN {IO.Close[inStream]; inStream ← NIL};
};
First check to see if we have seen this dfName before.
entry: DFTableEntry ← NIL;
stripped: ROPE = RemoveVersion[dfName];
realName: ROPE ← NIL;
key: ROPE ← NIL;
[realName, dfDate] ← MakeExplicit[dfName, dfDate];
IF realName = NIL THEN {
IO.PutF[log, "\n**** DF file not found: %g", [rope[dfName]]];
GO TO skipIt
};
inStream ← FS.StreamOpen[dfName ← realName];
key ← RemoveVersion[dfName];
WITH SymTab.Fetch[tab, key].val SELECT FROM
e: DFTableEntry => {
This file has a previous entry (put there on the previous pass)
entry ← e;
};
ENDCASE => {
This setup is only needed the first time we see an entry for this file
destName: ROPE ← dfName;
tempName: ROPE ← Rope.Concat["Temp.", ShortName[dfName]];
IF unDir # NIL THEN {
destName ← Rope.Concat[unDir, RemoveRoot[destName]];
};
entry ← NEW[DFTableEntryRep ← [
sourceName: key,
tempName: tempName,
destName: destName,
time: BasicTime.Update[now, index]
]];
IO.PutF[log, "\nDF table entry for: %g", [rope[dfName]]];
IO.PutF[log, "\n (temp: %g, dest: %g)", [rope[tempName]], [rope[destName]]];
index ← index + 1;
IF tail = NIL
THEN list ← tail ← LIST[entry]
ELSE {tail.rest ← LIST[entry]; tail ← tail.rest};
[] ← SymTab.Store[tab, key, entry];
};
{ENABLE UNWIND => Cleanup[];
filter: Filter ← [filterA: source, filterB: public, filterC: defining];
This is the most restrictive filter we can use to get just the Includes clauses.
DFUtilities.ParseFromStream[
inStream, EachItem, filter
! DFUtilities.SyntaxError => {
IO.PutF[log, "\n**** Syntax Error: %g", [rope[reason]]];
CONTINUE;
}];
};
Cleanup[];
EXITS skipIt => {};
};
tab ← SymTab.Create[151, FALSE];
EachDF[topDF];
};
MakeExplicit: PROC
[name: ROPE, date: Date] RETURNS [realName: ROPE ← NIL, realDate: Date] = {
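-- Resolves name (or, failing that, its version-stripped form) through FS.FileInfo into a full FName and an explicit create date; realName is NIL if neither lookup succeeds.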
realDate.format ← explicit;
[fullFName: realName, created: realDate.gmt] ← FS.FileInfo[name, date.gmt
! FS.Error => IF error.group # bug THEN CONTINUE];
IF realName = NIL THEN
[fullFName: realName, created: realDate.gmt] ← FS.FileInfo[RemoveVersion[name], date.gmt
! FS.Error => IF error.group # bug THEN CONTINUE];
};
RemoveVersion: PROC [name: ROPE] RETURNS [ROPE] = {
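-- Strips a trailing !version, if any: for example, "Foo.df!3" becomes "Foo.df"; names without a version are returned unchanged.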
pos: INT ← Rope.Length[name];
WHILE (pos ← pos - 1) > 0 DO
SELECT Rope.Fetch[name, pos] FROM
'! => RETURN [Rope.Flatten[name, 0, pos]];
'>, '], '. => EXIT;
ENDCASE;
ENDLOOP;
RETURN [name];
};
ShortName: PROC [name: ROPE] RETURNS [ROPE] = {
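-- Returns the simple file name, with directory and version removed: for example, ShortName["[Host]<Dir>Foo.df!3"] yields "Foo.df".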
pos: INT ← Rope.Length[name];
bang: INT ← Rope.Length[name];
WHILE (pos ← pos - 1) > 0 DO
SELECT Rope.Fetch[name, pos] FROM
'! => bang ← pos;
'>, '] => RETURN [Rope.Flatten[name, pos+1, bang-pos-1]];
ENDCASE;
ENDLOOP;
RETURN [Rope.Flatten[name, 0, bang]];
};
RemoveRoot: PROC [name: ROPE] RETURNS [ROPE] = {
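-- Strips the version plus everything up through the first '>' (the host and top-level directory): for example, "[Indigo]<Cedar5.2>Top>Foo.df!2" becomes "Top>Foo.df".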
pos: INT ← Rope.SkipTo[name ← RemoveVersion[name], 0, ">"];
IF pos = Rope.Length[name] THEN RETURN [name];
RETURN [Rope.Flatten[name, pos+1]];
};
GetPrefix: PROC [fileName: ROPE] RETURNS [ROPE] = {
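-- Returns everything up through the last ']' or '>' in fileName (its directory prefix), or NIL when there is no such prefix.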
pos: INT ← Rope.Length[fileName];
WHILE pos > 0 DO
pos ← pos - 1;
SELECT Rope.Fetch[fileName, pos] FROM
'], '> => EXIT;
ENDCASE;
ENDLOOP;
IF pos = 0 THEN RETURN [NIL];
RETURN [Rope.Flatten[fileName, 0, pos+1]];
};
FindDFName: PROC
[table: SymTab.Ref, name: ROPE, date: Date]
RETURNS [destName: ROPE, destDate: Date, tempName: ROPE ← NIL] = {
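-- Looks up the version-stripped name in the DF table: for a DF file being unreleased, it returns the new destination name, the entry's assigned create time, and its temp name; otherwise the stripped name and original date come back with tempName = NIL.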
destName ← RemoveVersion[name];
destDate ← date;
WITH SymTab.Fetch[table, destName].val SELECT FROM
entry: DFTableEntry => {
destName ← entry.destName;
destDate.format ← explicit;
destDate.gmt ← entry.time;
tempName ← entry.tempName;
};
ENDCASE;
};
FindNonDF: PROC
[table: SymTab.Ref, name: ROPE, date: Date]
RETURNS [realName: ROPE ← NIL, realDate: Date ← []] = {
stripped: ROPE ← RemoveVersion[name];
WITH SymTab.Fetch[table, stripped].val SELECT FROM
entry: DFTableEntry => {
In this case, the file is a DF file in our table, so we return NIL to indicate that it should NOT be moved!
};
ENDCASE => {
[realName, realDate] ← MakeExplicit[name, date];
};
};
MoveFiles: PROC
[table: SymTab.Ref, list: DFTableEntryList, log: STREAM, unDir: ROPE, debugging: BOOL] = {
totalCount: INT ← 0;
FOR each: DFTableEntryList ← list, each.rest WHILE each # NIL DO
totalCount ← MoveDFContents[each.first.sourceName, table, log, unDir, totalCount, debugging];
ENDLOOP;
FOR each: DFTableEntryList ← list, each.rest WHILE each # NIL DO
totalCount ← MoveDFTemp[each.first, log, totalCount, debugging];
ENDLOOP;
};
MoveDFContents: PROC
[dfName: ROPE, table: SymTab.Ref, log: STREAM, unDir: ROPE, count: INT, debugging: BOOL]
RETURNS [INT ← 0] = {
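-- Rewrites the DF file dfName into its temp file: directory and file entries are redirected to unDir (and files are copied there when they do not already come from the current release directory), while Imports and Includes of DF files being unreleased are repointed at their new destinations. Returns the updated running count of files checked (0 if the DF file could not be processed).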
entry: DFTableEntry ← NIL;
inStream,outStream: STREAM ← NIL;
currentDirItem: REF DirectoryItem;
forcedDirItem: REF DirectoryItem;
currentSourcePrefix: ROPE ← NIL;
listHead,listTail: LORA ← NIL;
lastDirItem: LORA ← NIL;
systemPrefix: ROPE = DefaultRemoteNames.Get[].current;
systemPrefixLen: INT = Rope.Length[systemPrefix];
movingThisDir: BOOL ← FALSE;
AddToList: PROC [item: REF] = {
This procedure adds a new item to the list we keep. We splice out directory entries that have no files associated with them. This enables us to change our minds about where files will be stored as we encounter the files.
new: LORA ← LIST[item];
IF listTail = NIL THEN listHead ← new ELSE listTail.rest ← new;
listTail ← new;
WITH item SELECT FROM
dirItem: REF DirectoryItem => {
IF lastDirItem # NIL
THEN {
Splice out directory items that have no files.
lastDirItem.first ← item;
lastDirItem.rest ← NIL;
listTail ← lastDirItem;
}
ELSE lastDirItem ← new;
};
file: REF FileItem => {
lastDirItem ← NIL
};
imports: REF ImportsItem => {
lastDirItem ← NIL
};
include: REF IncludeItem => {
lastDirItem ← NIL
};
ENDCASE;
IF lastDirItem = NIL THEN FlushList[];
};
oneBlankLine: REF WhiteSpaceItem ~ NEW[WhiteSpaceItem ← [lines: 1]];
FlushList: PROC = {
Flush all items in the list out to the stream
FOR each: LORA ← listHead, each.rest WHILE each # NIL DO
IF each # NIL THEN {
item: REF ~ each.first;
DFUtilities.WriteItemToStream[outStream, item];
WITH item SELECT FROM
x: REF DirectoryItem => DFUtilities.WriteItemToStream[outStream, oneBlankLine];
ENDCASE;
};
ENDLOOP;
Take apart the list piece by piece to avoid bogus retention!
listTail ← NIL;
WHILE listHead # NIL DO
lag: LORA ← listHead;
listHead ← lag.rest;
lag.rest ← NIL;
ENDLOOP;
};
EachItem: DFUtilities.ProcessItemProc = {
WITH item SELECT FROM
dirItem: REF DirectoryItem => {
currentSourcePrefix ← dirItem.path1;
dirItem.path2 ← NIL;
dirItem.path2IsCameFrom ← FALSE;
IF unDir # NIL THEN {
dirItem.path2 ← Rope.Concat[unDir, RemoveRoot[currentSourcePrefix]];
};
currentDirItem ← dirItem;
IF copyAll
THEN movingThisDir ← TRUE
ELSE movingThisDir ← Rope.Run[currentSourcePrefix, 0, systemPrefix, 0, FALSE] # systemPrefixLen;
IF movingThisDir THEN {
The contents of this directory must be moved to the new release directory, since the source is not from the current release directory.
dirItem.path1 ← dirItem.path2;
};
dirItem.path2 ← NIL; -- abolish ReleaseAs clauses (DKW)
};
file: REF FileItem => {
date: Date ← file.date;
sourceName: ROPE ← Rope.Concat[currentSourcePrefix, file.name];
noVersion: ROPE ← file.name ← RemoveVersion[file.name];
destExists: BOOL ← FALSE;
realName,tempName: ROPE ← NIL;
[realName, date] ← FindNonDF[table, sourceName, file.date];
IF forcedDirItem # NIL THEN {
AddToList[currentDirItem];
forcedDirItem ← NIL;
};
IF realName = NIL THEN {
destName: ROPE ← NIL;
newPrefix: ROPE ← NIL;
The file requested is either a DF file (which does NOT get moved yet) or it does not exist, in which case we write a message to the log.
[destName, date, tempName] ← FindDFName[table, sourceName, date];
IF NOT Rope.Match["*.df", destName, FALSE] THEN {
IO.PutF[log, "\n**** File not found: %g", [rope[sourceName]]];
GO TO skip1
};
IO.PutF[log, "\nFuture copy %g (%g)\n to %g",
[rope[sourceName]], [rope[tempName]], [rope[destName]]];
file.date ← date;
newPrefix ← GetPrefix[destName];
IF newPrefix # NIL THEN {
!! At this point we need to put out a new directory entry if the destination of the DF file is NOT the same as the directory indicated by the current directory entry.
forcedDirItem ← NEW[DirectoryItem ← [
path1: newPrefix,
path2: NIL,
path2IsCameFrom: FALSE,
exported: TRUE,
readOnly: FALSE]];
AddToList[forcedDirItem];
};
GO TO skip1;
};
At this point, realName is the real long name of the sourceFile, and date is the real explicit date of the source file. We only move the file if it is not coming from the current release. This lets us gracefully get rid of very elderly directories.
file.date ← date;
IF movingThisDir THEN {
newName: ROPE ← Rope.Concat[unDir, RemoveRoot[realName]];
FileCopy[realName, date, newName, log, debugging];
};
MessageWindow.Append[
IO.PutFR[" %g files checked.", [integer[count ← count + 1]]], TRUE];
EXITS skip1 => {};
};
imports: REF ImportsItem => {
destName, tempName: ROPE;
destDate: Date;
[destName, destDate, tempName] ← FindDFName[table, imports.path1, imports.date];
IF tempName = NIL
THEN {
This is an imported file that is NOT being unreleased.
[destName, destDate] ← MakeExplicit[imports.path1, imports.date];
IF destName = NIL
THEN IO.PutF[log, "\n**** File not found: %g", [rope[imports.path1]]]
ELSE {imports.path1 ← destName; imports.date ← destDate};
}
ELSE {
This is an imported file that is being unreleased.
imports.path2 ← NIL;
imports.path1 ← destName;
imports.date ← [format: notEqual];
};
};
include: REF IncludeItem => {
destName, tempName: ROPE;
destDate: Date;
[destName, destDate, tempName] ← FindDFName[table, include.path1, include.date];
IF tempName = NIL
THEN {
This is an included file that is NOT being unreleased.
[destName, destDate] ← MakeExplicit[include.path1, include.date];
IF destName = NIL
THEN IO.PutF[log, "\n**** File not found: %g", [rope[include.path1]]]
ELSE {include.path1 ← destName; include.date ← destDate};
}
ELSE {
This is an included file that is being unreleased.
include.path2IsCameFrom ← FALSE;
include.path2 ← NIL;
include.path1 ← destName;
include.date ← [format: notEqual];
};
};
ENDCASE;
AddToList[item]
};
WITH SymTab.Fetch[table, dfName].val SELECT FROM
e: DFTableEntry => entry ← e;
ENDCASE => {
IO.PutF[log, "\n**** DF file not in table: %g", [rope[dfName]]];
GO TO quit
};
inStream ← FS.StreamOpen[dfName
! FS.Error =>
IF error.group # bug THEN {
IO.PutF[log, "\n**** DF file not found: %g", [rope[error.explanation]]];
GO TO quit
};
];
outStream ← FS.StreamOpen[entry.tempName, create
! FS.Error =>
IF error.group # bug THEN {
IO.PutF[log, "\n**** temp DF file not opened: %g", [rope[error.explanation]]];
IO.Close[inStream];
GO TO quit
};
];
IO.PutF[log, "\n\nMoving contents of %g (%g)", [rope[dfName]], [rope[entry.tempName]]];
DFUtilities.ParseFromStream[inStream, EachItem, [comments: TRUE]
! DFUtilities.SyntaxError => {
IO.PutF[log, "\n**** Syntax Error: %g", [rope[reason]]];
CONTINUE;
}];
FlushList[];
IO.Close[inStream];
FS.SetByteCountAndCreatedTime[FS.OpenFileFromStream[outStream], -1, entry.time];
IO.Close[outStream];
RETURN [count];
EXITS quit => {};
};
MoveDFTemp: PROC
[entry: DFTableEntry, log: STREAM, count: INT, debugging: BOOL] RETURNS [INT] = {
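-- Copies the rewritten temp DF file to its final destination (skipped when debugging) and bumps the count shown in the message window.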
IO.PutF[log, "\nCopy %g (%g)\n to %g",
[rope[entry.sourceName]], [rope[entry.tempName]], [rope[entry.destName]]];
IF debugging
THEN
IO.PutRope[log, "\n (not copied, debugging)"]
ELSE
[] ← FS.Copy[entry.tempName, entry.destName
! FS.Error =>
IF error.group # bug THEN {
IO.PutF[log, "\n**** Copy failed: %g", [rope[error.explanation]]];
CONTINUE;
};
];
MessageWindow.Append[
IO.PutFR[" %g files moved.", [integer[count ← count + 1]]], TRUE];
RETURN [count];
};
IsInFileCache: PROC [name: ROPE, date: Date] RETURNS [inCache: BOOL ← FALSE] = {
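-- Uses FSBackdoor.EnumerateCacheForInfo to ask whether the local file cache already holds a nonempty version of name with the given create time (any nonempty version is accepted when date.gmt is nullGMT).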
cacheChecker: FSBackdoor.InfoProc = {
[fullGName: ROPE, created: BasicTime.GMT, bytes: INT, keep: CARDINAL]
RETURNS [continue: BOOL]
IF bytes > 0 THEN {
IF date.gmt # BasicTime.nullGMT THEN {
We will only accept a specific date
IF created # date.gmt THEN RETURN [TRUE];
};
At this point we will either accept anything, or we have a match on the time.
inCache ← TRUE;
RETURN [FALSE];
};
RETURN [TRUE];
};
FSBackdoor.EnumerateCacheForInfo[cacheChecker, NIL, name];
};
FileCopy: PROC
[sourceName: ROPE, date: Date, destName: ROPE, log: STREAM, debugging: BOOL] = {
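-- Copies sourceName to destName unless the destination already exists. Unless the source is already in the local cache, it is first copied to a local temp file ("UnReleaseTool.Temp$") so the cache is not cluttered by a remote-to-remote copy; when debugging, only the log messages are written.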
tempName: ROPE ← "UnReleaseTool.Temp$";
gmt: BasicTime.GMT;
realDestName: ROPE ← NIL;
[fullFName: realDestName, created: gmt] ← FS.FileInfo[destName, date.gmt
! FS.Error => IF error.group # bug THEN CONTINUE];
IO.PutF[log, "\nCopy %g\n to %g", [rope[sourceName]], [rope[destName]]];
IF realDestName # NIL THEN {
IO.PutRope[log, "\n (not copied, already exists)"];
RETURN;
};
IF date.gmt # BasicTime.nullGMT AND IsInFileCache[sourceName, date]
THEN {
No need to copy to a temp file, since we already have this file in our cache. Therefore, we just make the tempName the real source name.
tempName ← sourceName;
IO.PutRope[log, " (from cache) "];
}
ELSE {
It pays to copy this file to a temp file first, since that means we don't have to mess up our current file cache with entries that are better flushed. This would not have to happen if FS did the right thing with remote to remote copies!
IF NOT debugging THEN
tempName ← FS.Copy[sourceName, tempName
! FS.Error =>
IF error.group # bug THEN {
IO.PutF[log, "\n**** Copy failed: %g", [rope[error.explanation]]];
GO TO bugOut;
};
];
};
IF debugging
THEN {
No copy, just reporting
IO.PutRope[log, "\n (not copied, debugging)"];
}
ELSE {
Really do the copy
destName ← FS.Copy[tempName, destName
! FS.Error =>
IF error.group # bug THEN {
IO.PutF[log, "\n**** Copy failed: %g", [rope[error.explanation]]];
GO TO bugOut;
}];
};
EXITS bugOut => {};
};
Commander.Register[
"TestUnRelease",
UnReleaseToolCommand,
"tests the file moving phase of the UnReleaseTool without moving files. The command line has, in order (all optional): <unDir> <dfFile> <mapPrefix> <releaseHost> <releaseDirectory>",
LIST[$debug, $move]];
Commander.Register[
"UnRelease",
UnReleaseToolCommand,
"moves the files in a release to the prerelease directory. We assume that the version maps are valid, and that the files have been verified (although no state is retained between verification and moving). The command line has, in order (all optional): <unDir> <dfFile> <mapPrefix> <releaseHost> <releaseDirectory>",
LIST[$move]];
END.