-- UnReleaseToolDriver.mesa
-- Copyright © 1985 by Xerox Corporation. All rights reserved.
-- Russ Atkinson, January 28, 1985 4:29:38 pm PST
DIRECTORY
BasicTime,
Commander,
DefaultRemoteNames,
DFOperations,
DFUtilities,
FS,
IO,
MessageWindow,
ProcessExtras,
ReleaseToolVerify,
Rope,
SymTab,
ViewerIO;
UnReleaseToolDriver: CEDAR PROGRAM
IMPORTS BasicTime, Commander, DefaultRemoteNames, DFUtilities, FS, IO, MessageWindow, ProcessExtras, ReleaseToolVerify, Rope, SymTab, ViewerIO
= BEGIN
CommentItem: TYPE = DFUtilities.CommentItem;
Date: TYPE = DFUtilities.Date;
DirectoryItem: TYPE = DFUtilities.DirectoryItem;
FileItem: TYPE = DFUtilities.FileItem;
Filter: TYPE = DFUtilities.Filter;
ImportsItem: TYPE = DFUtilities.ImportsItem;
IncludeItem: TYPE = DFUtilities.IncludeItem;
LORA: TYPE = LIST OF REF ANY;
ROPE: TYPE = Rope.ROPE;
STREAM: TYPE = IO.STREAM;
WhiteSpaceItem: TYPE = DFUtilities.WhiteSpaceItem;
copyAll: BOOL ← FALSE;
-- If TRUE, this switch forces all files to be copied, even though it would save space to just point backwards from the DF file.
UnReleaseToolCommand: Commander.CommandProc = {
debugging, moving: BOOL ← FALSE;
inV,outV: STREAM ← NIL;
inStream: STREAM ← NIL;
dfTable: SymTab.Ref ← NIL;
dfFile: ROPE ← "CurrentCedar.df";
mapPrefix: ROPE ← "Cedar";
releaseDirectory: ROPE ← NIL;
releaseHost: ROPE ← NIL;
unDir: ROPE ← "[Indigo]<Cedar5.2>";
ris: STREAM = IO.RIS[cmd.commandLine];
sourceMapName,symbolsMapName: ROPE ← NIL;
bcdCache: ReleaseToolVerify.BcdCache ← NIL;
Cleanup: PROC = {
IF bcdCache # NIL THEN {ReleaseToolVerify.FlushBcdCache[bcdCache]; bcdCache ← NIL};
IF inStream # NIL THEN {IO.Close[inStream]; inStream ← NIL};
IF inV # NIL THEN {IO.Close[inV]; inV ← NIL};
IF outV # NIL THEN {IO.Close[outV]; outV ← NIL};
};
TimedMessage: PROC [msg: ROPE] = {
IO.PutF[outV, msg, [time[BasicTime.Now[]]]];
};
WITH cmd.procData.clientData SELECT FROM
list: LIST OF REF ANY => {
WHILE list # NIL DO
SELECT list.first FROM
$debug => debugging ← TRUE;
$move => moving ← TRUE;
ENDCASE;
list ← list.rest;
ENDLOOP;
};
ENDCASE;
-- The command line has the following items (all optional):
--   unDir dfFile mapPrefix releaseHost releaseDirectory
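-- unDir is the destination (prerelease) directory and dfFile the top-level DF file; as written here, mapPrefix, releaseHost, and releaseDirectory are parsed but not otherwise used by the file-moving phase. Omitted arguments keep the defaults declared above.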
unDir ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
dfFile ← DefaultExtension[
IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token, ".df"];
mapPrefix ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
releaseHost ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
releaseDirectory ← IO.GetTokenRope[ris, IO.IDProc ! IO.EndOfStream => CONTINUE].token;
inStream ← FS.StreamOpen[dfFile];
[inV,outV] ← ViewerIO.CreateViewerStreams[
"UnReleaseTool.log", NIL, "UnReleaseTool.log", FALSE];
{
ENABLE UNWIND => {
TimedMessage["\n\n**** Aborting at %g ****\n"];
Cleanup[];
};
IF moving THEN {
list: DFTableEntryList ← NIL;
TimedMessage["\n\nFile Moving Phase of UnReleaseTool starting at %g\n"];
[dfTable, list] ← BuildDFTable[dfFile, outV, unDir];
TimedMessage["\nDF table built at %g\n"];
MoveFiles[dfTable, list, outV, unDir, debugging];
TimedMessage["\n\nFile Moving Phase of UnReleaseTool ending at %g\n"];
};
};
Cleanup[];
};
showInfo: BOOL ← FALSE;
Interact: DFOperations.InteractionProc = {
-- [interaction: REF, clientData: REF]
-- RETURNS [abort: BOOL ← FALSE, abortMessageForLog: ROPE ← NIL, response: REF ← NIL]
out: STREAM = NARROW[clientData];
IF showInfo THEN
WITH interaction SELECT FROM
info: REF DFOperations.InfoInteraction => {
IO.PutRope[out, info.message];
};
dfInfo: REF DFOperations.DFInfoInteraction => {
IO.PutRope[out, dfInfo.message];
};
abort: REF DFOperations.AbortInteraction => {
ProcessExtras.CheckForAbort[];
};
ENDCASE;
};
DefaultExtension: PROC [name: ROPE, ext: ROPE] RETURNS [ROPE] = {
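-- Appends ext to name unless name already ends with ext (case-insensitive) or carries an explicit !version.
-- Illustrative (names are only examples): DefaultExtension["CurrentCedar", ".df"] yields "CurrentCedar.df"; DefaultExtension["CurrentCedar.df", ".df"] is returned unchanged.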
len: INT = Rope.Length[name];
eLen: INT = Rope.Length[ext];
pos, bang, dot: INT ← len;
WHILE pos > 0 DO
posM: INT = pos-1;
SELECT Rope.Fetch[name, posM] FROM
'! => bang ← dot ← posM;
'. => dot ← posM;
'>, '/, '] => EXIT;
ENDCASE;
pos ← posM;
ENDLOOP;
IF bang = len AND (bang-dot # eLen OR Rope.Run[name, dot, ext, 0, FALSE] # eLen)
THEN name ← Rope.Concat[name, ext];
RETURN [name];
};
DFTableEntry: TYPE = REF DFTableEntryRep;
DFTableEntryList: TYPE = LIST OF DFTableEntry;
DFTableEntryRep: TYPE = RECORD [
sourceName: ROPE,
tempName: ROPE,
destName: ROPE,
time: BasicTime.GMT
];
BuildDFTable: PROC [topDF: ROPE, log: STREAM, unDir: ROPE ← NIL]
RETURNS [tab: SymTab.Ref ← NIL, list: DFTableEntryList ← NIL] = {
-- Build a table of source DF names and entries. Each entry gets a distinct desired create time, which is bounded by the start time of the procedure and the start time plus N seconds, where N is the number of DF files.
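-- Illustrative: if the procedure starts at 12:00:00 and three DF files are entered, BasicTime.Update[now, index] gives them create times of 12:00:00, 12:00:01, and 12:00:02 (index increments once per new entry).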
now: BasicTime.GMT = BasicTime.Now[];
index: INT ← 0;
tail: DFTableEntryList ← NIL;
EachDF: PROC [dfName: ROPE, dfDate: Date ← []] = {
EachDFPass[dfName, dfDate];
};
EachDFPass: PROC [dfName: ROPE, dfDate: Date ← []] = {
inStream: STREAM ← NIL;
currentDir: REF DirectoryItem ← NIL;
EachItem: DFUtilities.ProcessItemProc = {
stop ← FALSE;
WITH item SELECT FROM
dirItem: REF DirectoryItem => {
currentDir ← dirItem;
};
inclItem: REF IncludeItem => {
EachDF[inclItem.path1, inclItem.date];
};
ENDCASE;
};
Cleanup: PROC = {
IF inStream # NIL THEN {IO.Close[inStream]; inStream ← NIL};
};
-- First check to see if we have seen this dfName before.
entry: DFTableEntry ← NIL;
stripped: ROPE = RemoveVersion[dfName];
realName: ROPE ← NIL;
key: ROPE ← NIL;
[realName, dfDate] ← MakeExplicit[dfName, dfDate];
IF realName = NIL THEN {
IO.PutF[log, "\n**** DF file not found: %g", [rope[dfName]]];
GO TO skipIt
};
inStream ← FS.StreamOpen[dfName ← realName];
key ← RemoveVersion[dfName];
WITH SymTab.Fetch[tab, key].val SELECT FROM
e: DFTableEntry => {
-- This file has a previous entry (put there on the previous pass)
entry ← e;
};
ENDCASE => {
-- Only need to do this work if this is the first entry for this file
destName: ROPE ← dfName;
tempName: ROPE ← Rope.Concat["Temp.", ShortName[dfName]];
IF unDir # NIL THEN {
destName ← Rope.Concat[unDir, RemoveRoot[destName]];
};
entry ← NEW[DFTableEntryRep ← [
sourceName: key,
tempName: tempName,
destName: destName,
time: BasicTime.Update[now, index]
]];
IO.PutF[log, "\nDF table entry for: %g", [rope[dfName]]];
IO.PutF[log, "\n (temp: %g, dest: %g)", [rope[tempName]], [rope[destName]]];
index ← index + 1;
IF tail = NIL
THEN list ← tail ← LIST[entry]
ELSE {tail.rest ← LIST[entry]; tail ← tail.rest};
[] ← SymTab.Store[tab, key, entry];
};
{
ENABLE UNWIND => Cleanup[];
filter: Filter ← [filterA: source, filterB: public, filterC: defining];
-- This is the most restrictive filter we can use to just get the Includes clauses.
DFUtilities.ParseFromStream[
inStream, EachItem, filter
! DFUtilities.SyntaxError => {
IO.PutF[log, "\n**** Syntax Error: %g", [rope[reason]]];
CONTINUE;
}];
};
Cleanup[];
EXITS skipIt => {};
};
tab ← SymTab.Create[151, FALSE];
EachDF[topDF];
};
MakeExplicit: PROC [name: ROPE, date: Date] RETURNS [realName: ROPE ← NIL, realDate: Date] = {
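-- Resolves name (and an optional desired create date) to a full FName and explicit create time via FS.FileInfo, retrying without the !version if the first lookup fails; realName remains NIL if the file cannot be found.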
realDate.format ← explicit;
[fullFName: realName, created: realDate.gmt] ←
FS.FileInfo[name, date.gmt
! FS.Error => IF error.group # bug THEN CONTINUE];
IF realName = NIL THEN
[fullFName: realName, created: realDate.gmt] ←
FS.FileInfo[RemoveVersion[name], date.gmt
! FS.Error => IF error.group # bug THEN CONTINUE];
};
RemoveVersion: PROC [name: ROPE] RETURNS [ROPE] = {
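-- Strips a trailing !version, if any; illustrative (hypothetical name): RemoveVersion["Foo.df!3"] yields "Foo.df".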
pos: INT ← Rope.Length[name];
WHILE (pos ← pos - 1) > 0 DO
SELECT Rope.Fetch[name, pos] FROM
'! => RETURN [Rope.Flatten[name, 0, pos]];
'>, '], '. => EXIT;
ENDCASE;
ENDLOOP;
RETURN [name];
};
ShortName: PROC [name: ROPE] RETURNS [ROPE] = {
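-- Returns the simple file name, dropping any directory prefix and !version; illustrative (hypothetical name): ShortName["[Indigo]<Cedar>Top>Foo.df!2"] yields "Foo.df".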
pos: INT ← Rope.Length[name];
bang: INT ← Rope.Length[name];
WHILE (pos ← pos - 1) > 0 DO
SELECT Rope.Fetch[name, pos] FROM
'! => bang ← pos;
'>, '] => RETURN [Rope.Flatten[name, pos+1, bang-pos-1]];
ENDCASE;
ENDLOOP;
RETURN [Rope.Flatten[name, 0, bang]];
};
RemoveRoot: PROC [name: ROPE] RETURNS [ROPE] = {
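-- Strips the version and everything through the first '>', i.e. the host and top-level directory; illustrative (hypothetical name): RemoveRoot["[Indigo]<Cedar5.0>Top>Foo.df"] yields "Top>Foo.df".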
pos: INT ← Rope.SkipTo[name ← RemoveVersion[name], 0, ">"];
IF pos = Rope.Length[name] THEN RETURN [name];
RETURN [Rope.Flatten[name, pos+1]];
};
GetPrefix: PROC [fileName: ROPE] RETURNS [ROPE] = {
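-- Returns the directory prefix of fileName up to and including the last ']' or '>', or NIL if there is no such prefix.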
pos: INT ← Rope.Length[fileName];
WHILE pos > 0 DO
pos ← pos - 1;
SELECT Rope.Fetch[fileName, pos] FROM
'], '> => EXIT;
ENDCASE;
ENDLOOP;
IF pos = 0 THEN RETURN [NIL];
RETURN [Rope.Flatten[fileName, 0, pos+1]];
};
FindDFName: PROC [table: SymTab.Ref, name: ROPE, date: Date]
RETURNS [destName: ROPE, destDate: Date, tempName: ROPE ← NIL] = {
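-- If name (sans version) is one of the DF files being unreleased, returns its destination name, its synthesized create time, and its temp name; otherwise returns the stripped name and the original date with tempName = NIL.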
destName ← RemoveVersion[name];
destDate ← date;
WITH SymTab.Fetch[table, destName].val SELECT FROM
entry: DFTableEntry => {
destName ← entry.destName;
destDate.format ← explicit;
destDate.gmt ← entry.time;
tempName ← entry.tempName;
};
ENDCASE;
};
FindNonDF: PROC [table: SymTab.Ref, name: ROPE, date: Date]
RETURNS [realName: ROPE ← NIL, realDate: Date ← []] = {
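-- Resolves name/date via MakeExplicit, except that DF files in our table yield realName = NIL (they are moved separately by MoveDFTemp).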
stripped: ROPE ← RemoveVersion[name];
WITH SymTab.Fetch[table, stripped].val SELECT FROM
entry: DFTableEntry => {
-- In this case, the file is a DF file in our table, so we return NIL to indicate that it should NOT be moved!
};
ENDCASE => {
[realName, realDate] ← MakeExplicit[name, date];
};
};
MoveFiles: PROC [table: SymTab.Ref, list: DFTableEntryList, log: STREAM, unDir: ROPE, debugging: BOOL] = {
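-- Two passes over the DF files: first rewrite each DF's contents into a temp file (copying non-DF files as they are encountered), then copy each temp DF file to its destination.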
totalCount: INT ← 0;
FOR each: DFTableEntryList ← list, each.rest WHILE each # NIL DO
totalCount ← MoveDFContents[each.first.sourceName, table, log, unDir, totalCount, debugging];
ENDLOOP;
FOR each: DFTableEntryList ← list, each.rest WHILE each # NIL DO
totalCount ← MoveDFTemp[each.first, log, totalCount, debugging];
ENDLOOP;
};
MoveDFContents: PROC [dfName: ROPE, table: SymTab.Ref, log: STREAM, unDir: ROPE, count: INT, debugging: BOOL]
RETURNS [INT ← 0] = {
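-- Parses dfName, rewriting directory, file, imports, and include items to point at the new release directory, copying non-DF files as needed, and writing the edited DF to entry.tempName; returns the running count of files checked.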
entry: DFTableEntry ← NIL;
inStream,outStream: STREAM ← NIL;
currentDirItem: REF DirectoryItem;
forcedDirItem: REF DirectoryItem;
currentSourcePrefix: ROPE ← NIL;
listHead,listTail: LORA ← NIL;
lastDirItem: LORA ← NIL;
systemPrefix: ROPE = DefaultRemoteNames.Get[].current;
systemPrefixLen: INT = Rope.Length[systemPrefix];
movingThisDir: BOOL ← FALSE;
AddToList: PROC [item: REF] = {
-- This procedure adds a new item to the list we keep. We splice out directory entries that have no files associated with them. This enables us to change our minds about where files will be stored as we encounter the files.
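-- Illustrative: if two DirectoryItems arrive with no intervening FileItem, the first is overwritten by the second, so only directories that actually govern files survive in the output.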
new: LORA ← LIST[item];
IF listTail = NIL THEN listHead ← new ELSE listTail.rest ← new;
listTail ← new;
WITH item SELECT FROM
dirItem: REF DirectoryItem => {
IF lastDirItem # NIL THEN {
-- Splice out directory items that have no files.
lastDirItem.first ← item;
lastDirItem.rest ← NIL;
listTail ← lastDirItem;
}
ELSE lastDirItem ← new;
};
file: REF FileItem => {
lastDirItem ← NIL
};
imports: REF ImportsItem => {
lastDirItem ← NIL
};
include: REF IncludeItem => {
lastDirItem ← NIL
};
ENDCASE;
IF lastDirItem = NIL THEN FlushList[];
};
FlushList: PROC = {
-- Flush all items in the list out to the stream.
FOR each: LORA ← listHead, each.rest WHILE each # NIL DO
IF each # NIL THEN DFUtilities.WriteItemToStream[outStream, each.first];
ENDLOOP;
-- Take apart the list piece by piece to avoid bogus retention!
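-- (Presumably so that a stale reference to any one cell cannot keep the whole chain alive.)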
listTail ← NIL;
WHILE listHead # NIL DO
lag: LORA ← listHead;
listHead ← lag.rest;
lag.rest ← NIL;
ENDLOOP;
};
EachItem: DFUtilities.ProcessItemProc = {
WITH item SELECT FROM
dirItem: REF DirectoryItem => {
currentSourcePrefix ← dirItem.path1;
dirItem.path2 ← NIL;
dirItem.path2IsCameFrom ← FALSE;
IF unDir # NIL THEN {
dirItem.path2 ← Rope.Concat[unDir, RemoveRoot[currentSourcePrefix]];
};
currentDirItem ← dirItem;
IF copyAll
THEN movingThisDir ← TRUE
ELSE movingThisDir ← Rope.Run[currentSourcePrefix, 0, systemPrefix, 0, FALSE] # systemPrefixLen;
IF movingThisDir THEN {
-- The contents of this directory must be moved to the new release directory, since the source is not from the current release directory.
dirItem.path1 ← dirItem.path2;
};
};
file: REF FileItem => {
date: Date ← file.date;
sourceName: ROPE ← Rope.Concat[currentSourcePrefix, file.name];
noVersion: ROPE ← file.name ← RemoveVersion[file.name];
destExists: BOOL ← FALSE;
realName,tempName: ROPE ← NIL;
[realName, date] ← FindNonDF[table, sourceName, file.date];
IF forcedDirItem # NIL THEN {
AddToList[currentDirItem];
forcedDirItem ← NIL;
};
IF realName = NIL THEN {
destName: ROPE ← NIL;
newPrefix: ROPE ← NIL;
-- The file requested is either a DF file (which does NOT get moved yet) or it does not exist, in which case we put a message to the log.
[destName, date, tempName] ← FindDFName[table, sourceName, date];
IF NOT Rope.Match["*.df", destName, FALSE] THEN {
IO.PutF[log, "\n**** File not found: %g", [rope[sourceName]]];
GO TO skip1
};
IO.PutF[log, "\nFuture copy %g (%g)\n to %g",
[rope[sourceName]], [rope[tempName]], [rope[destName]]];
file.date ← date;
newPrefix ← GetPrefix[destName];
IF newPrefix # NIL THEN {
-- !! At this point we need to put out a new directory entry if the destination of the DF file is NOT the same as where the current directory indicates.
forcedDirItem ← NEW[DirectoryItem ← [
path1: newPrefix,
path2: newPrefix,
path2IsCameFrom: FALSE,
exported: TRUE,
readOnly: FALSE]];
AddToList[forcedDirItem];
};
GO TO skip1;
};
-- At this point, realName is the real long name of the source file, and date is the real explicit date of the source file. We only move the file if it is not coming from the current release. This lets us gracefully get rid of very elderly directories.
file.date ← date;
IF movingThisDir THEN {
newName: ROPE ← Rope.Concat[unDir, RemoveRoot[realName]];
FileCopy[realName, date, newName, log, debugging];
};
MessageWindow.Append[
IO.PutFR[" %g files checked.", [integer[count ← count + 1]]], TRUE];
EXITS skip1 => {};
};
imports: REF ImportsItem => {
destName, tempName: ROPE;
destDate: Date;
[destName, destDate, tempName] ← FindDFName[table, imports.path1, imports.date];
IF tempName = NIL THEN {
-- This is an imported file NOT being unreleased.
[destName, destDate] ← MakeExplicit[imports.path1, imports.date];
IF destName = NIL
THEN IO.PutF[log, "\n**** File not found: %g", [rope[imports.path1]]]
ELSE {imports.path1 ← destName; imports.date ← destDate};
}
ELSE {
-- This is an imported file that is being unreleased.
imports.path2 ← NIL;
imports.path1 ← destName;
imports.date ← [format: notEqual];
};
};
include: REF IncludeItem => {
destName, tempName: ROPE;
destDate: Date;
[destName, destDate, tempName] ← FindDFName[table, include.path1, include.date];
IF tempName = NIL THEN {
-- This is an included file NOT being unreleased.
[destName, destDate] ← MakeExplicit[include.path1, include.date];
IF destName = NIL
THEN IO.PutF[log, "\n**** File not found: %g", [rope[include.path1]]]
ELSE {include.path1 ← destName; include.date ← destDate};
}
ELSE {
-- This is an included file that is being unreleased.
include.path2IsCameFrom ← FALSE;
include.path2 ← destName;
include.path1 ← destName;
include.date ← [format: notEqual];
};
};
ENDCASE;
AddToList[item]
};
WITH SymTab.Fetch[table, dfName].val SELECT FROM
e: DFTableEntry => entry ← e;
ENDCASE => {
IO.PutF[log, "\n**** DF file not in table: %g", [rope[dfName]]];
GO TO quit
};
inStream ← FS.StreamOpen[dfName
! FS.Error => IF error.group # bug THEN {
IO.PutF[log, "\n**** DF file not found: %g", [rope[error.explanation]]];
GO TO quit
};
];
outStream ← FS.StreamOpen[entry.tempName, create
! FS.Error => IF error.group # bug THEN {
IO.PutF[log, "\n**** temp DF file not opened: %g", [rope[error.explanation]]];
IO.Close[inStream];
GO TO quit
};
];
IO.PutF[log, "\n\nMoving contents of %g (%g)", [rope[dfName]], [rope[entry.tempName]]];
DFUtilities.ParseFromStream[inStream, EachItem, [comments: TRUE]
! DFUtilities.SyntaxError => {
IO.PutF[log, "\n**** Syntax Error: %g", [rope[reason]]];
CONTINUE;
}];
FlushList[];
IO.Close[inStream];
FS.SetByteCountAndCreatedTime[FS.OpenFileFromStream[outStream], -1, entry.time];
IO.Close[outStream];
RETURN [count];
EXITS quit => {};
};
MoveDFTemp: PROC [entry: DFTableEntry, log: STREAM, count: INT, debugging: BOOL] RETURNS [INT] = {
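-- Copies the edited temp DF file to its final destination (unless debugging) and reports progress in the message window.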
IO.PutF[log, "\nCopy %g (%g)\n to %g",
[rope[entry.sourceName]], [rope[entry.tempName]], [rope[entry.destName]]];
IF debugging
THEN IO.PutRope[log, "\n (not copied, debugging)"]
ELSE FS.Copy[entry.tempName, entry.destName
! FS.Error => IF error.group # bug THEN {
IO.PutF[log, "\n**** Copy failed: %g", [rope[error.explanation]]];
CONTINUE;
};
];
MessageWindow.Append[
IO.PutFR[" %g files moved.", [integer[count ← count + 1]]], TRUE];
RETURN [count];
};
FileCopy: PROC [sourceName: ROPE, date: Date, destName: ROPE, log: STREAM, debugging: BOOL] = {
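-- Copies sourceName to destName unless the destination already exists, staging through a temp file (or the cached copy, when present) rather than polluting the local file cache.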
tempName: ROPE ← "UnReleaseTool.Temp$";
gmt: BasicTime.GMT;
realDestName: ROPE ← NIL;
[fullFName: realDestName, created: gmt] ←
FS.FileInfo[destName, date.gmt
! FS.Error => IF error.group # bug THEN CONTINUE];
IO.PutF[log, "\nCopy %g\n to %g", [rope[sourceName]], [rope[destName]]];
IF realDestName # NIL THEN {
IO.PutRope[log, "\n (not copied, already exists)"];
RETURN;
};
IF date.gmt # BasicTime.nullGMT AND ReleaseToolVerify.IsInFileCache[sourceName, date]
THEN {
-- No need to copy to a temp file, since we already have this file in our cache. Therefore, we just make the tempName the real source name.
tempName ← sourceName;
IO.PutRope[log, " (from cache) "];
}
ELSE {
-- It pays to copy this file to a temp file first, since that means we don't have to mess up our current file cache with entries that are better flushed. This would not have to happen if FS did the right thing with remote to remote copies!
IF NOT debugging THEN
FS.Copy[sourceName, tempName
! FS.Error => IF error.group # bug THEN {
IO.PutF[log, "\n**** Copy failed: %g", [rope[error.explanation]]];
GO TO bugOut;
};
];
};
IF debugging THEN {
-- No copy, just reporting.
IO.PutRope[log, "\n (not copied, debugging)"];
}
ELSE {
-- Really do the copy.
FS.Copy[tempName, destName
! FS.Error => IF error.group # bug THEN {
IO.PutF[log, "\n**** Copy failed: %g", [rope[error.explanation]]];
GO TO bugOut;
}];
};
EXITS bugOut => {};
};
Commander.Register[
"TestUnRelease",
UnReleaseToolCommand,
"tests the file moving phase of the UnReleaseTool without moving files. The command line has, in order (all optional): <unDir> <dfFile> <mapPrefix> <releaseHost> <releaseDirectory>",
LIST[$debug, $move]];
Commander.Register[
"UnRelease",
UnReleaseToolCommand,
"moves the files in a release to the prerelease directory. We assume that the version maps are valid, and that the files have been verified (although no state is retained between verification and moving). The command line has, in order (all optional): <unDir> <dfFile> <mapPrefix> <releaseHost> <releaseDirectory>",
LIST[$move]];
END.