M3ScanCommands.mesa
Copyright © 1991, 1992 by Xerox Corporation. All rights reserved.
Spreitzer, May 8, 1992 3:22 pm PDT
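-- M3WC: a wc-style word count command for Modula-3 source files. A "word" is a
-- Modula-3 token as delivered by M3Scan; characters and lines are tallied in a
-- second, raw pass over each file.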
DIRECTORY Commander, CommanderOps, IO, M3Scan, PFS, RefText, Rope;
M3ScanCommands: CEDAR PROGRAM
IMPORTS Commander, CommanderOps, IO, M3Scan, PFS, RefText
= BEGIN
ROPE: TYPE ~ Rope.ROPE;
Sums: TYPE ~ RECORD [chars1, chars2, words, lines: INT ¬ 0];
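-- chars1: characters accounted for by the scanner (skipped characters plus token bytes).
-- chars2: raw character count from the second pass over the stream.
-- The two should agree; any discrepancy is flagged in the output.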
WcCmd: PROC [cmd: Commander.Handle] RETURNS [result: REF ANY ¬ NIL, msg: ROPE ¬ NIL] --Commander.CommandProc-- ~ {
  argv: CommanderOps.ArgumentVector ~ CommanderOps.Parse[cmd];
  buffer, token: REF TEXT;
  totals: Sums ¬ [];
  IF argv.argc < 2 THEN RETURN [$Failure, "Usage: M3WC filename ..."];
  buffer ¬ RefText.ObtainScratch[300];
  FOR i: NAT IN (0 .. argv.argc) DO
    filename: ROPE ~ argv[i];
    filepath: PFS.PATH ~ PFS.PathFromRope[filename];
    in: IO.STREAM ~ PFS.StreamOpen[filepath];
    charsSkipped: INT ¬ 0;
    tokenKind: M3Scan.TokenKind;
    fs: Sums ¬ [];
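    -- First pass: tokenize the file; every non-EOF token counts as one word,
    -- and charsSkipped plus token.length accounts for each character the scanner consumed.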
    DO
      [tokenKind: tokenKind, token: token, charsSkipped: charsSkipped] ¬ M3Scan.GetM3Token[in, buffer, FALSE];
      fs.chars1 ¬ fs.chars1 + charsSkipped + token.length;
      SELECT tokenKind FROM
        tokenEOF => EXIT;
        ENDCASE => fs.words ¬ fs.words+1;
      ENDLOOP;
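    -- Second pass: rewind to the start of the stream and count raw characters,
    -- treating each CR or LF as a line break.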
    in.SetIndex[0];
    WHILE NOT in.EndOf[] DO
      fs.chars2 ¬ fs.chars2+1;
      SELECT in.GetChar[] FROM
        '\r, '\l => fs.lines ¬ fs.lines + 1;
        ENDCASE => NULL;
      ENDLOOP;
    totals ¬ [totals.chars1+fs.chars1, totals.chars2+fs.chars2, totals.words+fs.words, totals.lines+fs.lines];
    in.Close[];
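    -- Per-file report in wc order: lines, words, characters; when the scanner's
    -- character count disagrees with the raw count, the raw count follows a '|'.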
    cmd.out.PutF["%4g\t%5g\t%6g", [integer[fs.lines]], [integer[fs.words]], [integer[fs.chars1]] ];
    IF fs.chars1 # fs.chars2 THEN cmd.out.PutF1["|%g", [integer[fs.chars2]] ];
    cmd.out.PutF1["\t%g\n", [rope[filename]] ];
    ENDLOOP;
  RefText.ReleaseScratch[buffer];
  buffer ¬ NIL;
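  -- When more than one file was given, print a Total line in the same format.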
  IF argv.argc > 2 THEN {
    cmd.out.PutF["%4g\t%5g\t%6g", [integer[totals.lines]], [integer[totals.words]], [integer[totals.chars1]] ];
    IF totals.chars1 # totals.chars2 THEN cmd.out.PutF1["|%g", [integer[totals.chars2]] ];
    cmd.out.PutRope["\tTotal\n"];
    };
  RETURN};
Commander.Register["M3WC", WcCmd, "filename ... --- word count, using Modula-3 syntax for tokens"];
END.