DFUtilitiesParseFileImpl.mesa
Copyright © 1986, 1987, 1990, 1991 by Xerox Corporation. All rights reserved.
Created by Bertrand Serlet, August 28, 1986 4:31:08 pm PDT
Bertrand Serlet February 27, 1987 8:51:28 pm PST
Last tweaked by Mike Spreitzer on May 2, 1990 8:24 am PDT
Michael Plass, January 31, 1990 10:57:22 am PST
Willie-s, September 27, 1991 2:32 pm PDT
DIRECTORY BasicTime, DFUtilities, FS, IO, Rope, RopeHash;
DFUtilitiesParseFileImpl: CEDAR MONITOR
IMPORTS DFUtilities, FS, IO, Rope, RopeHash
EXPORTS DFUtilities
~ BEGIN OPEN DFUtilities, Rope;
-- A direct-mapped cache of parsed DF files: one slot per hash bucket of the
-- full file name (see ParseDFFile), plus statistics counters reported by GetCacheStats.
CacheArray: TYPE = RECORD [
nDFs, nItems, nProbes, nHits, nMisses: CARD,
elts: SEQUENCE length: NATURAL OF ParsedDF];
-- The single module-wide cache; NIL until SetCacheSize has run.
-- Accessed only from ENTRY procedures, so the monitor serializes all use.
cache: REF CacheArray ¬ NIL;
ParsedDF: TYPE = REF ParsedDFRep;
-- The parsed contents of one DF file: its identity (full name plus create date)
-- and the items delivered by ParseFromStream.
ParsedDFRep: TYPE = RECORD [
dfFullName: ROPE,
created: BasicTime.GMT,
length: NAT, -- number of item slots actually in use (items may be over-allocated)
items: SEQUENCE size: NAT OF REF ANY
];
-- Raised by ParseFromFile when the stream parse fails; position is the stream
-- index at which the underlying SyntaxError was detected.
FileSyntaxError: PUBLIC ERROR [reason: ROPE, position: INT] = CODE;
SetCacheSize: ENTRY PROC [size: NATURAL] = {
-- (Re)creates the parsed-DF cache with size hash slots; every statistic is reset
-- to zero and every slot starts empty. A size of 0 disables caching (ParseDFFile
-- returns NIL when cache.length=0). Any previously cached contents are discarded.
ENABLE UNWIND => NULL;
cache ¬ NEW [CacheArray[size]];
cache.nDFs ¬ 0;
cache.nItems ¬ 0;
cache.nProbes ¬ 0;
cache.nHits ¬ 0;
cache.nMisses ¬ 0;
FOR slot: NATURAL IN [0 .. size) DO cache[slot] ¬ NIL ENDLOOP;
};
GetCacheStats: ENTRY PROC RETURNS [dfs, items, probes, hits, misses: CARD] = {
-- Reports the cache statistics; all zeros when no cache has been created yet.
ENABLE UNWIND => NULL;
dfs ¬ items ¬ probes ¬ hits ¬ misses ¬ 0;
IF cache # NIL THEN {
dfs ¬ cache.nDFs;
items ¬ cache.nItems;
probes ¬ cache.nProbes;
hits ¬ cache.nHits;
misses ¬ cache.nMisses};
RETURN};
-- Returns the parsed representation of df (at the wanted create time), consulting
-- and updating the cache. Returns NIL when caching is disabled (cache NIL or empty),
-- in which case the caller must parse for itself. May raise errors from FS.FileInfo /
-- FS.StreamOpen, or FileSyntaxError if the DF file is malformed; ENABLE UNWIND
-- releases the monitor in those cases.
-- Fix: the stray prose line "Let us start the real work!" was a lost Tioga comment
-- sitting bare in the code stream (a syntax error in plain-text Mesa); it is now a
-- proper -- comment. All other code is unchanged.
ParseDFFile: ENTRY PROC [df: ROPE, wantedCreatedTime: BasicTime.GMT] RETURNS [parsedDF: ParsedDF] = {
ENABLE UNWIND => NULL;
dfFullName: ROPE;
created: BasicTime.GMT;
hash: CARDINAL;
IF cache=NIL OR cache.length=0 THEN RETURN [NIL]; -- caching disabled
cache.nProbes ¬ cache.nProbes+1;
[fullFName: dfFullName, created: created] ¬ FS.FileInfo[name: df, wantedCreatedTime: wantedCreatedTime];
hash ¬ RopeHash.FromRope[dfFullName, FALSE] MOD cache.length; -- FALSE to match the case-insensitive Rope.Equal below
parsedDF ¬ cache[hash];
IF parsedDF#NIL AND Rope.Equal[parsedDF.dfFullName, dfFullName, FALSE] AND parsedDF.created = created THEN {
cache.nHits ¬ cache.nHits+1;
RETURN};
cache.nMisses ¬ cache.nMisses+1;
-- Let us start the real work!
BEGIN
dfStream: IO.STREAM ¬ FS.StreamOpen[fileName: dfFullName];
ProcessItem: ProcessItemProc = {
-- Appends item to parsedDF.items, doubling the sequence when it fills up.
nextLen: NATURAL ~ parsedDF.length+1;
IF nextLen = parsedDF.size THEN {
new: ParsedDF ~ NEW [ParsedDFRep[parsedDF.size*2]];
FOR i: NATURAL IN [0 .. parsedDF.length) DO new[i] ¬ parsedDF[i] ENDLOOP;
FOR i: NATURAL IN [parsedDF.length .. new.size) DO new[i] ¬ NIL ENDLOOP;
new.length ¬ parsedDF.length;
parsedDF ¬ new;
};
parsedDF[parsedDF.length] ¬ item;
parsedDF.length ¬ nextLen};
parsedDF ¬ NEW [ParsedDFRep[60]]; -- initial capacity; grows by doubling above
parsedDF.length ¬ 0;
ParseFromStream[dfStream, ProcessItem, [comments: TRUE]
! SyntaxError => {
position: INT = IO.GetIndex[dfStream];
IO.Close[dfStream];
ERROR FileSyntaxError[reason, position];
}];
IO.Close[dfStream];
parsedDF.dfFullName ¬ dfFullName;
parsedDF.created ¬ created;
-- Install in the cache, keeping the statistics consistent with what is evicted.
IF cache[hash] = NIL
THEN cache.nDFs ¬ cache.nDFs + 1
ELSE cache.nItems ¬ cache.nItems - cache[hash].length;
cache.nItems ¬ cache.nItems + parsedDF.length;
cache[hash] ¬ parsedDF;
END;
RETURN};
-- Parses the DF file df and hands each item passing filter to proc, stopping early
-- when proc returns TRUE. Uses the monitor's cache of parsed files when possible;
-- when ParseDFFile returns NIL (caching disabled) it parses directly from a stream.
-- Raises FileSyntaxError (with the stream position) if the DF file is malformed.
ParseFromFile: PUBLIC PROC [df: ROPE, proc: ProcessItemProc, filter: Filter ¬ [], wantedCreatedTime: BasicTime.GMT ¬ BasicTime.nullGMT] = {
previousWhiteSpace: REF ANY ¬ NIL; -- pending white-space item, delivered just before the next passing item
previousDirectoryPassed: BOOL ¬ FALSE; -- whether the most recent DirectoryItem passed filterB/filterC
parsedDF: ParsedDF ~ ParseDFFile[df, wantedCreatedTime];
SortUsingList[filter.list]; -- sorted so SearchUsingList can be applied below
IF parsedDF=NIL THEN {
-- Caching disabled: parse straight from the file, mapping SyntaxError to FileSyntaxError.
dfStream: IO.STREAM ~ FS.StreamOpen[fileName: df, wantedCreatedTime: wantedCreatedTime];
ParseFromStream[dfStream, proc, filter
! SyntaxError => {
position: INT = IO.GetIndex[dfStream];
IO.Close[dfStream];
ERROR FileSyntaxError[reason, position];
}];
IO.Close[dfStream];
RETURN};
-- Replay the cached items, applying filter here (the cache was parsed unfiltered).
FOR i: NAT IN [0 .. parsedDF.length) DO
item: REF ANY ¬ parsedDF[i];
WITH item SELECT FROM
directory: REF DirectoryItem   => {
-- A directory is passed on only if it satisfies both filterB (public/private)
-- and filterC (imported/defining); its verdict also gates following FileItems.
directoryFilterB: DFUtilities.FilterB = IF directory.exported THEN $public ELSE $private;
directoryFilterC: DFUtilities.FilterC = IF directory.readOnly THEN $imported ELSE $defining;
previousDirectoryPassed ¬ (filter.filterB = $all OR filter.filterB = directoryFilterB) AND (filter.filterC = $all OR filter.filterC = directoryFilterC);
IF NOT previousDirectoryPassed THEN LOOP;
};
file: REF FileItem      => {
-- A file passes only if its enclosing directory passed and its (version-stripped)
-- name satisfies filterA and the using list.
PassesNameFilter: PROC [file: ROPE] RETURNS [BOOL] = {
SELECT TRUE FROM
filter.filterA = $all => {};
ClassifyFileExtension[file] = filter.filterA => {};
ENDCASE => RETURN[FALSE];
RETURN[SearchUsingList[file, filter.list].found]
};
IF NOT previousDirectoryPassed OR NOT PassesNameFilter[RemoveVersionNumber[file.name]] THEN LOOP;
};
import: REF ImportsItem    => {
list: REF UsingList ¬ import.list;
-- Decides whether the imports item (with list as its possibly-narrowed using
-- list) survives filterB/filterC.
ConsiderImports: PROC RETURNS [BOOL] = {
IF filter.filterC = $defining THEN RETURN [FALSE];
SELECT filter.filterB FROM
$private => IF import.exported THEN RETURN [FALSE];
$public => IF ~import.exported THEN RETURN [FALSE];
ENDCASE;
RETURN[~(import.form = $list AND list = NIL)]
};
IF filter.list=NIL OR import.list=NIL
THEN IF ConsiderImports[] THEN {} ELSE LOOP
ELSE {
-- Both lists present: intersect the import's using list with filter.list,
-- then rebuild a fresh ImportsItem rather than mutate the cached one.
last: NAT ¬ 0;
list ¬ NEW [UsingList[import.list.nEntries]];
FOR i: NAT IN [0 .. import.list.nEntries) DO
IF NOT SearchUsingList[import.list[i].name, filter.list].found THEN LOOP;
list[last] ¬ import.list[i];
last ¬ last + 1;
ENDLOOP;
list.nEntries ¬ last;
SortUsingList[list];
IF ConsiderImports[]
THEN item ¬ NEW [ImportsItem ¬ [path1: import.path1, date: import.date, path2: import.path2, exported: import.exported, form: import.form, list: list]]
ELSE LOOP;
};
};
include: REF IncludeItem    => {};
comment: REF CommentItem   => IF NOT filter.comments THEN LOOP;
whiteSpace: REF WhiteSpaceItem  => {IF filter.comments THEN previousWhiteSpace ¬ item; LOOP};
ENDCASE         => ERROR;
-- Deliver any white space held back since the last passing item, then the item itself.
IF previousWhiteSpace#NIL
THEN {IF proc[previousWhiteSpace] THEN RETURN; previousWhiteSpace ¬ NIL};
IF proc[item] THEN RETURN;
ENDLOOP;
};
-- Module initialization: create the cache with 1019 (a prime) hash slots.
SetCacheSize[01019];
-- Fix: the machine-dependent sizing below was dead text sitting bare in the code
-- stream — SystemVersion appears in neither DIRECTORY nor IMPORTS, so it cannot
-- compile, and executing it would redundantly re-create the cache. Preserved as a
-- comment for historical reference.
-- SetCacheSize[SELECT SystemVersion.machineType FROM
-- dorado => 01019,
-- altoI, altoII, altoIIXM, dolphin, dandelion, dicentra, daybreak, daisy, kiku => 0,
-- ENDCASE => ERROR];
END.