-- ISScan: interface to the Interscript scanner/parser.
DIRECTORY
  Basics, ISBinding, ISToken, ISNode,
  IO USING [STREAM],
  Rope;

ISScan: CEDAR DEFINITIONS = BEGIN

-- Reasons a parse may fail; reported via the ParseFailure signal below.
ParseFailureReason: TYPE = {
  unsupportedConstruct, unexpectedDelimiter, hitEndOfScript,
  outOfPlaceConstruct, bindingAbsent, bindingWrongType, spare };
-- Raised by the scanner/parser; carries the failing scan and the reason.
ParseFailure: SIGNAL [s: ScanHandle, r: ParseFailureReason];

-- Lifecycle of a scan: created (null), running, finished, or aborted.
ScanState: TYPE = {null, inProgress, complete, failed};

-- A ScanStream is either a raw character stream or a pre-tokenized stream.
ScanStreamFlavor: TYPE = {characters, tokens};

bufferBytes: CARDINAL = 4;
Buffer: TYPE = PACKED ARRAY [0..bufferBytes) OF Basics.Byte;

-- The input stream.  Characters are collected into tokens (characters flavor)
-- or tokens are picked off one by one (tokens flavor) -- see ISScanImpl.
-- When a stream runs out, the stream in `pushed` becomes current, giving an
-- "include"/splice facility.
ScanStream: TYPE = RECORD [
  name: ATOM _ NIL,
  pushed: REF ScanStream _ NIL,     -- stream to resume when this one is exhausted
  token: ISToken.TVHandle _ NIL,    -- most recently scanned token
  body: SELECT type: ScanStreamFlavor FROM
    characters => [
      cstream: IO.STREAM _ NIL,
      inString: BOOLEAN _ FALSE],   -- NOTE(review): presumably true while inside a quoted string -- confirm in ISScanImpl
    tokens => [
      tokenList: ISToken.TVHandle _ NIL],
    ENDCASE _ NULL];
CharactersScanner: TYPE = REF characters ScanStream;
TokensScanner: TYPE = REF tokens ScanStream;

-- The state of one scan: current state, binding stack, and the input stream.
ScanObject: TYPE = RECORD [
  scanState: ScanState _ null,
  bindingStack: ISBinding.StackHandle,
  zoneIsScans, stackIsScans: BOOLEAN _ FALSE,
  family: ATOM _ NIL,
  stream: REF ScanStream ];
ScanHandle: TYPE = REF ScanObject;

-- Creates a scan over rope `base` starting at `index`.
CreateScan: PROCEDURE [
  base: Rope.ROPE, index: INT,
  bindingStack: ISBinding.StackHandle _ NIL,
  family: ATOM _ NIL ] RETURNS [ScanHandle];

-- Splices a new input stream in front of the current one (see ScanStream.pushed).
PushScan: PROCEDURE [ScanHandle, Rope.ROPE, ATOM];
-- Discards the current stream, resuming the pushed one.
PopScan: PROCEDURE [ScanHandle];

InitializeScan: PROCEDURE [ScanHandle];
InitializeScanThroughCurly: PROCEDURE [ScanHandle];

-- Gets a new token into scanHandle.token.  Call whenever a token is
-- used up by the parser.
AdvanceScan: PROCEDURE [ScanHandle];

-- Returns a node parsed from scan.  Assumes scan has been positioned past
-- the initial leftCurly token.  Does much of the real work of the parser.
CollectNode: PROCEDURE [scan: ScanHandle, parent: ISNode.Handle _ NIL,
  bound: BOOL _ FALSE] RETURNS [ISToken.NodeTVHandle];

END.

-- ISScan.mesa
-- Copyright (c) 1985 by Xerox Corporation.  All rights reserved.
-- edit by Ayers, 23-May-85 18:44:11
-- Rick Beach, August 1, 1985 3:08:52 pm PDT
-- MKaplan, September 13, 1985 2:33:29 pm PDT
-- The input stream.
Can be a character stream, in which case characters are collected into tokens by ISScanImpl.TokenFromCharactersStream[], or a stream of tokens that are picked off, one by one, by ISScanImpl.TokenFromTokensStream[]. When a stream runs out, the current stream becomes the one in the pushed field, from which further tokens come. This gives an "include" facility: when you want to splice in a stream, set the input stream to a new one containing the splice, with its pushed field being the unspliced input stream.

-- Earlier alternatives, retained for reference:
-- allocateWith: ISToken.ISNodeDotHandle,
-- allocateWith: ISNode.Handle,

AdvanceScan gets a new token into scanHandle.token. It should be called whenever a token is used up by the parser.

CollectNode returns a node parsed from scan. It assumes that scan has been positioned past the initial leftCurly token. It does much of the real work of the parser.