DIRECTORY Ascii USING [ SP ], IO USING [ BreakProc, card, CharClass, Close, GetCard, GetCedarTokenRope, GetTokenRope, int, PutF, PutRope, RIS, rope, RopeFromROS, ROS, SkipWhitespace, STREAM, TokenKind ], KipperSupport USING [ CreateKipperer, CreateUnKipperer, Kipperer ], OneCasabaParser, OneCasabaParserData USING [ KipperParserTableBodyRef, UnKipperParserTableBodyRef ], OneCasabaParserPrivate USING [ Action, ActionCell, ActionCellBody, ActionTable, ActionTableBody, ParserTableBody, SymbolCell, SymbolCellBody, SymbolTable, SymbolTableBody, TokenCase, TokenCell, TokenCellBody, TokenTable, TokenTableBody ], Rope USING [ Equal, Fetch, Length, ROPE, Substr ]; OneCasabaParserImpl: CEDAR PROGRAM IMPORTS IO, KipperSupport, OneCasabaParserData, Rope EXPORTS OneCasabaParser ~ { OPEN OneCasabaParser, OneCasabaParserPrivate; ROPE: TYPE ~ Rope.ROPE; RecordKipperedParserTableOnStream: PUBLIC PROC [ s: IO.STREAM, genInfo: OneCasabaParser.GenInfoProc, getSourceToken: OneCasabaParser.LexicalAnalysisProc _ NIL ] ~ { lexProc: OneCasabaParser.LexicalAnalysisProc _ IF ( getSourceToken # NIL ) THEN getSourceToken ELSE IO.GetCedarTokenRope; table: ParserTable _ BuildParserTable[genInfo]; kipperer: KipperSupport.Kipperer _ KipperSupport.CreateKipperer[s]; OneCasabaParserData.KipperParserTableBodyRef[kipperer, table]; s.Close[]; }; RecordReadableParserTableOnStream: PUBLIC PROC [ s: IO.STREAM, genInfo: OneCasabaParser.GenInfoProc, getSourceToken: OneCasabaParser.LexicalAnalysisProc _ NIL ] ~ { lexProc: OneCasabaParser.LexicalAnalysisProc _ IF ( getSourceToken # NIL ) THEN getSourceToken ELSE IO.GetCedarTokenRope; LocalRecordSymbol: SymbolProc ~ { s.PutF["sy %g ", IO.rope[name]] }; LocalRecordUniqueToken: UniqueTokenProc ~ { s.PutF["ut %g %g ", IO.rope[name], IO.rope[spelling]] }; LocalRecordGenericToken: GenericTokenProc ~ { s.PutF["gt %g %g ", IO.rope[name], IO.rope[class]] }; LocalRecordShift: ShiftProc ~ { s.PutF["sh %g <%g> %g ", IO.card[state], IO.rope[terminalSeq], IO.card[newState]] 
}; LocalRecordReduction: ReductionProc ~ { s.PutF["rd %g <%g> %g %g %g ", IO.card[state], IO.rope[terminalSeq], IO.rope[leftSide], IO.card[ruleNumber], IO.card[ruleSize]] }; LocalRecordAcceptance: AcceptanceProc ~ { s.PutF["ac %g <%g> ", IO.card[state], IO.rope[terminalSeq]] }; LocalRecordStartState: StartStateProc ~ { s.PutF["start %g ", IO.card[state]] }; genInfo[LocalRecordSymbol, LocalRecordUniqueToken, LocalRecordGenericToken, LocalRecordShift, LocalRecordReduction, LocalRecordAcceptance, LocalRecordStartState]; s.PutRope[" end "]; }; ParserTable: TYPE ~ REF ParserTableBody; ParserTableBody: PUBLIC TYPE ~ OneCasabaParserPrivate.ParserTableBody; BuildParserTableFromKipperedStream: PUBLIC PROC [ s: IO.STREAM, getSourceToken: LexicalAnalysisProc _ NIL ] RETURNS [ table: ParserTable ] ~ { lexProc: OneCasabaParser.LexicalAnalysisProc _ IF ( getSourceToken # NIL ) THEN getSourceToken ELSE IO.GetCedarTokenRope; table _ OneCasabaParserData.UnKipperParserTableBodyRef[ KipperSupport.CreateUnKipperer[s]]; }; BuildParserTableFromReadableStream: PUBLIC PROC [ s: IO.STREAM, getSourceToken: LexicalAnalysisProc _ NIL ] RETURNS [ table: ParserTable ] ~ { lexProc: OneCasabaParser.LexicalAnalysisProc _ IF ( getSourceToken # NIL ) THEN getSourceToken ELSE IO.GetCedarTokenRope; GenInfo: GenInfoProc ~ { Break: IO.BreakProc ~ { class: IO.CharClass _ IF ( char = Ascii.SP ) THEN sepr ELSE other; RETURN[class]; }; DO GetTermSeq: PROC RETURNS [ text: ROPE ] ~ { [] _ s.SkipWhitespace[]; text _ s.GetTokenRope[Break].token; text _ text.Substr[1, text.Length[] - 2]; }; SkipAndGetCard: PROC RETURNS [ c: CARD16 ] ~ { [] _ s.SkipWhitespace[]; c _ s.GetCard[]; }; firstToken: ROPE _ s.GetTokenRope[Break].token; SELECT TRUE FROM firstToken.Equal["sy"] => { recordSymbol[s.GetTokenRope[Break].token]; }; firstToken.Equal["ut"] => { name: ROPE ~ s.GetTokenRope[Break].token; spelling: ROPE ~ s.GetTokenRope[Break].token; recordUniqueToken[name, spelling] }; firstToken.Equal["gt"] => { name: ROPE ~ 
s.GetTokenRope[Break].token; class: ROPE ~ s.GetTokenRope[Break].token; recordGenericToken[name, class]; }; firstToken.Equal["sh"] => { state: CARD16 ~ SkipAndGetCard[]; terms: ROPE ~ GetTermSeq[]; newState: CARD16 _ SkipAndGetCard[]; recordShift[state, terms, newState]; }; firstToken.Equal["rd"] => { state: CARD16 ~ SkipAndGetCard[]; terms: ROPE ~ GetTermSeq[]; leftSide: ROPE ~ s.GetTokenRope[Break].token; ruleNumber: CARD16 ~ SkipAndGetCard[]; ruleSize: CARD16 ~ SkipAndGetCard[]; recordReduction[state, terms, leftSide, ruleNumber, ruleSize]; }; firstToken.Equal["ac"] => { state: CARD16 ~ SkipAndGetCard[]; terms: ROPE ~ GetTermSeq[]; recordAcceptance[state, terms]; }; firstToken.Equal["start"] => { recordStartState[SkipAndGetCard[]]; }; firstToken.Equal["end"] => { EXIT }; ENDCASE => { ERROR }; ENDLOOP; }; table _ BuildParserTable[GenInfo]; }; BuildParserTable: PROC [ genInfo: GenInfoProc ] RETURNS [ table: ParserTable ] ~ { LocalRecordSymbol: SymbolProc ~ { RecordSymbol[table, name] }; LocalRecordUniqueToken: UniqueTokenProc ~ { RecordUniqueToken[table, name, spelling] }; LocalRecordGenericToken: GenericTokenProc ~ { RecordGenericToken[table, name, class] }; LocalRecordShift: ShiftProc ~ { RecordShift[table, state, terminalSeq, newState] }; LocalRecordReduction: ReductionProc ~ { RecordReduction[table, state, terminalSeq, leftSide, ruleNumber, ruleSize] }; LocalRecordAcceptance: AcceptanceProc ~ { RecordAcceptance[table, state, terminalSeq] }; LocalRecordStartState: StartStateProc ~ { RecordStartState[table, state] }; table _ CreateParserTable[]; genInfo[LocalRecordSymbol, LocalRecordUniqueToken, LocalRecordGenericToken, LocalRecordShift, LocalRecordReduction, LocalRecordAcceptance, LocalRecordStartState]; AnalyzeActions[table]; }; CreateParserTable: PROC RETURNS [ table: ParserTable ] ~ { table _ NEW[ParserTableBody]; RecordSymbol[table, ""]; -- the empty seq RecordTokenInfo[table, "", tokenEOF, "", generic]; }; AnalyzeActions: PROC [ table: ParserTable ] ~ { 
actionTable: ActionTable; next: ActionCell; nShiftReduceConflicts: INT32 _ 0; nReduceReduceConflicts: INT32 _ 0; NoteShiftReduce: PROC [ terminal: CARD16, ruleNumber: CARD16 ] ~ { nShiftReduceConflicts _ nShiftReduceConflicts.SUCC }; NoteShiftAccept: PROC [ terminal: CARD16 ] ~ { nShiftReduceConflicts _ nShiftReduceConflicts.SUCC }; NoteReduceAccept: PROC [ terminal: CARD16, ruleNumber: CARD16 ] ~ { nReduceReduceConflicts _ nReduceReduceConflicts.SUCC }; NoteReduceReduce: PROC [ terminal: CARD16, ruleNumber1, ruleNumber2: CARD16 ] ~ { nReduceReduceConflicts _ nReduceReduceConflicts.SUCC }; IF table.actionTable # NIL THEN ERROR; actionTable _ table.actionTable _ NEW[ActionTableBody[table.nActions]]; FOR I: CARD16 IN [0..actionTable.nSlots) DO actionTable.actions[I] _ NIL ENDLOOP; FOR action: ActionCell _ table.unAnalyzedActions, next WHILE action # NIL DO hash: CARD16 _ HashOneAction[action.state, action.terminal]; hashCode: CARD16 _ hash MOD actionTable.nSlots; next _ action.next; action.next _ actionTable.actions[hashCode]; actionTable.actions[hashCode] _ action; ENDLOOP; table.unAnalyzedActions _ NIL; FOR I: CARD16 IN [0..actionTable.nSlots) DO previousTo1: ActionCell _ NIL; action1: ActionCell_ NIL; action1Next: ActionCell _ NIL; FOR action1 _ actionTable.actions[I], action1Next WHILE ( action1 # NIL ) DO previousTo2: ActionCell _ action1; action2: ActionCell _ NIL; action2Next: ActionCell _ NIL; action1Next _ action1.next; FOR action2 _ action1.next, action2Next WHILE ( action2 # NIL ) DO action2Next _ action2.next; IF action1.state = action2.state AND action1.terminal = action2.terminal THEN { -- we have a conflict SELECT action1.action FROM shift => { SELECT action2.action FROM shift => { ERROR; }; reduce => { NoteShiftReduce[action1.terminal, action2.ruleNumber]; GOTO remove2; }; accept => { NoteShiftAccept[action1.terminal]; GOTO remove2; }; ENDCASE => { ERROR; }; }; reduce => { SELECT action2.action FROM shift => { NoteShiftReduce[action1.terminal, 
action1.ruleNumber]; GOTO remove1; }; reduce => { NoteReduceReduce[action1.terminal, action1.ruleNumber, action2.ruleNumber]; IF action1.ruleNumber < action2.ruleNumber THEN GOTO remove2 ELSE GOTO remove1; }; accept => { NoteReduceAccept[action1.terminal, action1.ruleNumber]; GOTO remove1; }; ENDCASE => { ERROR; }; }; accept => { SELECT action2.action FROM shift => { NoteShiftAccept[action1.terminal]; GOTO remove2; }; reduce => { NoteReduceAccept[action1.terminal, action2.ruleNumber]; GOTO remove2; }; accept => { ERROR; }; ENDCASE => { ERROR; }; }; ENDCASE => ERROR; EXITS remove1 => { IF previousTo2 = action1 THEN previousTo2 _ previousTo1; action1 _ action1Next; action1Next _ action1.next; IF previousTo1 # NIL THEN previousTo1.next _ action1 ELSE actionTable.actions[I] _ action1; }; remove2 => { action2 _ action2Next; action2Next _ IF action2 # NIL THEN action2.next ELSE NIL; previousTo2.next _ action2; }; }; previousTo2 _ action2; ENDLOOP; previousTo1 _ action1; ENDLOOP; ENDLOOP; IF nReduceReduceConflicts # 0 OR nShiftReduceConflicts # 0 THEN { reportStream: IO.STREAM _ GetReportStream[]; reportStream.PutF["\n\nWARNING\n"]; IF nReduceReduceConflicts # 0 THEN reportStream.PutF["\t%g reduce/reduce conflicts removed\n", IO.int[nReduceReduceConflicts]]; IF nShiftReduceConflicts # 0 THEN reportStream.PutF["\t%g shift/reduce conflicts removed\n\n", IO.int[nShiftReduceConflicts]]; }; }; RecordSymbol: PROC [ table: ParserTable, name: ROPE ] ~ { IF table.symbolTable # NIL THEN ERROR; -- must record all symbols before any actions table.unAnalyzedSymbols _ NEW[SymbolCellBody_[name, table.nSymbols, table.unAnalyzedSymbols]]; table.nSymbols _ table.nSymbols.SUCC; }; RecordUniqueToken: PROC [ table: ParserTable, name: ROPE, spelling: ROPE ] ~ { spellingStream: IO.STREAM _ IO.RIS[spelling]; kind: IO.TokenKind; kind _ IF spelling.Equal["|"] THEN tokenSINGLE ELSE spellingStream.GetCedarTokenRope[].tokenKind; spellingStream.Close[]; IF ( ( kind # tokenID ) AND ( NOT 
HashKind[kind].includeRope ) ) THEN ERROR; RecordTokenInfo[table, name, kind, spelling, unique]; }; RecordGenericToken: PROC [table: ParserTable, name: ROPE, class: ROPE] ~ { kind: IO.TokenKind _ DecodeTokenKind[class]; IF ( ( kind # tokenID ) AND ( HashKind[kind].includeRope ) ) THEN ERROR; RecordTokenInfo[table, name, kind, "", generic]; }; RecordShift: PROC [ table: ParserTable, state: State, terminalSeq: ROPE, newState: State ] ~ { IF ( table.symbolTable = NIL ) THEN AnalyzeSymbols[table]; table.unAnalyzedActions _ NEW[ActionCellBody _ [ state: state, terminal: LookUpSymbol[table.symbolTable, terminalSeq], action: shift, nextState: newState, ruleNumber: 0, leftSide: 0, ruleSize: 0, next: table.unAnalyzedActions ]]; table.nActions _ table.nActions.SUCC; }; RecordReduction: PROC [ table: ParserTable, state: State, terminalSeq: ROPE, leftSide: ROPE, ruleNumber: CARD16, ruleSize: CARD16 ] ~ { IF ( table.symbolTable = NIL ) THEN AnalyzeSymbols[table]; table.unAnalyzedActions _ NEW[ActionCellBody _ [ state: state, terminal: LookUpSymbol[table.symbolTable, terminalSeq], action: reduce, nextState: 0, ruleNumber: ruleNumber, leftSide: LookUpSymbol[table.symbolTable, leftSide], ruleSize: ruleSize, next: table.unAnalyzedActions ]]; table.nActions _ table.nActions.SUCC; }; RecordAcceptance: PROC [ table: ParserTable, state: State, terminalSeq: ROPE ] ~ { IF ( table.symbolTable = NIL ) THEN AnalyzeSymbols[table]; table.unAnalyzedActions _ NEW[ActionCellBody _ [ state: state, terminal: LookUpSymbol[table.symbolTable, terminalSeq], action: accept, nextState: 0, ruleNumber: 0, leftSide: 0, ruleSize: 0, next: table.unAnalyzedActions ]]; table.nActions _ table.nActions.SUCC; }; RecordStartState: PROC [ table: ParserTable, state: State ] ~ { table.startState _ state; }; RecordTokenInfo: PROC [ table: ParserTable, name: ROPE, kind: IO.TokenKind, spelling: ROPE, case: TokenCase ] ~ { IF ( table.tokenTable # NIL ) THEN ERROR; -- must record all tokens before any actions 
table.unAnalyzedTokens _ NEW[TokenCellBody _ [name, 0, kind, spelling, case, table.unAnalyzedTokens]]; table.nTokens _ table.nTokens.SUCC; }; HashOneAction: PROC [ state: CARD16, terminal: CARD16 ] RETURNS [ hash: CARD16 ] ~ INLINE { hash _ (state * terminal) MOD 256 }; HashKind: PROC [ kind: IO.TokenKind ] RETURNS [ hash: CARD16, includeRope: BOOL ] ~ { SELECT kind FROM tokenERROR => RETURN[0, FALSE]; tokenID => RETURN[1, TRUE]; tokenDECIMAL => RETURN[2, FALSE]; tokenOCTAL => RETURN[3, FALSE]; tokenHEX => RETURN[4, FALSE]; tokenREAL => RETURN[5, FALSE]; tokenROPE => RETURN[6, FALSE]; tokenCHAR => RETURN[7, FALSE]; tokenATOM => RETURN[8, FALSE]; tokenSINGLE => RETURN[9, TRUE]; tokenDOUBLE => RETURN[10, TRUE]; tokenCOMMENT => RETURN[11, FALSE]; tokenEOF => RETURN[12, FALSE]; ENDCASE => ERROR; }; DecodeTokenKind: PROC [ class: ROPE ] RETURNS [ kind: IO.TokenKind ] ~ { SELECT TRUE FROM class.Equal["tokenERROR"] => RETURN[tokenERROR]; class.Equal["tokenID"] => RETURN[tokenID]; class.Equal["tokenDECIMAL"] => RETURN[tokenDECIMAL]; class.Equal["tokenOCTAL"] => RETURN[tokenOCTAL]; class.Equal["tokenHEX"] => RETURN[tokenHEX]; class.Equal["tokenREAL"] => RETURN[tokenREAL]; class.Equal["tokenROPE"] => RETURN[tokenROPE]; class.Equal["tokenCHAR"] => RETURN[tokenCHAR]; class.Equal["tokenATOM"] => RETURN[tokenATOM]; class.Equal["tokenSINGLE"] => RETURN[tokenSINGLE]; class.Equal["tokenDOUBLE"] => RETURN[tokenDOUBLE]; class.Equal["tokenCOMMENT"] => RETURN[tokenCOMMENT]; class.Equal["tokenEOF"] => RETURN[tokenEOF]; ENDCASE => ERROR; }; AnalyzeSymbols: PROC [ table: ParserTable ] ~ { symbolTable: SymbolTable; next: SymbolCell; IF table.symbolTable # NIL THEN ERROR; symbolTable _ table.symbolTable _ NEW[SymbolTableBody[table.nSymbols]]; FOR I: CARD16 IN [0..symbolTable.nSlots) DO symbolTable.symbols[I] _ NIL ENDLOOP; FOR symbol: SymbolCell _ table.unAnalyzedSymbols, next WHILE symbol # NIL DO hash: CARD16 _ HashOneSymbol[symbol.name]; hashCode: CARD16 _ hash MOD symbolTable.nSlots; 
next _ symbol.next; symbol.next _ symbolTable.symbols[hashCode]; symbolTable.symbols[hashCode] _ symbol; ENDLOOP; table.unAnalyzedSymbols _ NIL; AnalyzeTokens[table]; }; LookUpSymbol: PROC [ symbolTable: SymbolTable, name: ROPE ] RETURNS [ code: CARD16 ] ~ { hash: CARD16 _ HashOneSymbol[name]; hashCode: CARD16 _ hash MOD symbolTable.nSlots; FOR symbol: SymbolCell _ symbolTable.symbols[hashCode], symbol.next WHILE symbol # NIL DO IF symbol.name.Equal[name] THEN RETURN[symbol.code]; ENDLOOP; ERROR; }; HashOneSymbol: PROC [ rope: ROPE ] RETURNS [ hash: CARD16 ] ~ { hash _ 0; FOR I: INT32 IN [0..rope.Length[]) DO hash _ hash + LOOPHOLE[rope.Fetch[I], CARD16]; ENDLOOP; }; AnalyzeTokens: PROC [ table: ParserTable ] ~ { tokenTable: TokenTable; next: TokenCell; IF table.tokenTable # NIL THEN ERROR; tokenTable _ table.tokenTable _ NEW[TokenTableBody[table.nTokens]]; FOR token: TokenCell _ table.unAnalyzedTokens, next WHILE token # NIL DO spellingHash: CARD16 _ TokenHash[token.kind, token.spelling].hash; spellingHashCode: CARD16 _ spellingHash MOD tokenTable.nSlots; next _ token.next; token.symbolCode _ LookUpSymbol[table.symbolTable, token.name]; token.next _ tokenTable.tokens[spellingHashCode]; tokenTable.tokens[spellingHashCode] _ token; IF token.kind = tokenID AND token.case = generic THEN { IF tokenTable.idToken # NIL THEN ERROR; tokenTable.idToken _ token; }; ENDLOOP; table.unAnalyzedTokens _ NIL; }; LookUpCedarToken: PROC [ tokenTable: TokenTable, kind: IO.TokenKind, text: ROPE ] RETURNS [ code: CARD16, info: ROPE, case: TokenCase ] ~ { includeTextInHash: BOOL; hash: CARD16; hashCode: CARD16; [hash, includeTextInHash] _ TokenHash[kind, text]; hashCode _ hash MOD tokenTable.nSlots; IF ( includeTextInHash ) THEN FOR token: TokenCell _ tokenTable.tokens[hashCode], token.next WHILE token # NIL DO IF ( ( token.kind = kind ) AND ( token.spelling.Equal[text] ) ) THEN RETURN[token.symbolCode, NIL, token.case]; ENDLOOP ELSE FOR token: TokenCell _ tokenTable.tokens[hashCode], 
token.next WHILE token # NIL DO IF ( token.kind = kind ) THEN RETURN[token.symbolCode, text, token.case]; ENDLOOP; IF ( ( kind = tokenID ) AND ( tokenTable.idToken # NIL ) ) THEN RETURN[tokenTable.idToken.symbolCode, text, tokenTable.idToken.case]; ERROR; }; TokenHash: PROC [ kind: IO.TokenKind, text: ROPE ] RETURNS [ hash: CARD16, includeTextInHash: BOOL ] ~ { [hash, includeTextInHash] _ HashKind[kind]; IF ( ( includeTextInHash ) AND ( text # NIL ) ) THEN FOR I: INT32 IN [0..text.Length[]) DO hash _ hash + LOOPHOLE[text.Fetch[I], CARD16]; ENDLOOP; }; LookUpAction: PROC [ actionTable: ActionTable, state: State, terminalCode: CARD16 ] RETURNS [ cell: ActionCell _ NIL ] ~ { hash: CARD16 _ HashOneAction[state, terminalCode]; hashCode: CARD16 _ hash MOD actionTable.nSlots; FOR action: ActionCell _ actionTable.actions[hashCode], action.next WHILE ( action # NIL ) DO IF ( ( action.state = state ) AND ( action.terminal = terminalCode ) ) THEN { cell _ action; RETURN }; ENDLOOP; }; StackCell: TYPE ~ REF StackCellBody; StackCellBody: TYPE ~ RECORD [ state: State, firstPosition, length: INT32, next: StackCell ]; ReductionCell: TYPE ~ REF ReductionCellBody; ReductionCellBody: TYPE ~ RECORD [ case: ReductionCellCase, firstCharPosition: INT32, rule: CARD16, length: INT32, code: CARD16, text: ROPE, tokenCase: TokenCase, tokenKind: IO.TokenKind, previous: ReductionCell, -- always a shift stack: StackCell, -- as it appears just after applying this shift next: ReductionCell ]; ReductionCellCase: TYPE ~ { reduction, shift, accept }; SimpleTryTokenCase: TYPE ~ { reduction, shift, accept, fail }; SourceTokenCell: TYPE ~ REF SourceTokenCellBody; SourceTokenCellBody: TYPE ~ RECORD [ kind: IO.TokenKind, text: ROPE, position: INT32, next: SourceTokenCell ]; TrialTokenCell: TYPE ~ REF TrialTokenCellBody; TrialTokenCellBody: TYPE ~ RECORD [ kind: IO.TokenKind, text: ROPE, next: TrialTokenCell ]; MaxTrialInserts: INT32 _ 4; MaxSkip: INT32 _ 4; MinSuccesses: INT32 _ 4; DesiredSuccesses: INT32 
_ 12; FoundAFix: ERROR ~ CODE; GetReportStream: PUBLIC SIGNAL RETURNS [ s: IO.STREAM ] ~ CODE; UnRecoverableSyntaxError: PUBLIC ERROR ~ CODE; Parse: PUBLIC PROC [ table: ParserTable, getSourceToken: GetSourceTokenProc, showReduce: ShowReduceProc _ NIL, showGenericShift: ShowGenericShiftProc _ NIL, showNonGenericShift: ShowNonGenericShiftProc _ NIL ] RETURNS [ accepted: BOOL ] ~ { oldStack: StackCell _ NEW[StackCellBody _ [table.startState, 0, 0, NIL]]; currentStack: StackCell _ oldStack; firstRed: ReductionCell _ NIL; lastShift: ReductionCell _ NIL; -- always a shift lastRed: ReductionCell _ NIL; SimpleTryToken: PROC [ tokenKind: IO.TokenKind, tokenText: ROPE, position: INT32 ] RETURNS [ SimpleTryTokenCase ] ~ { terminalSeqCode: CARD16; terminalSeqInfo: ROPE; tokenCase: TokenCase; action: ActionCell; [terminalSeqCode, terminalSeqInfo, tokenCase] _ LookUpCedarToken[table.tokenTable, tokenKind, tokenText]; action _ LookUpAction[table.actionTable, currentStack.state, terminalSeqCode]; IF action = NIL THEN RETURN[fail]; SELECT action.action FROM shift => { redCell: ReductionCell; currentStack _ NEW[StackCellBody _ [action.nextState, position, Rope.Length[tokenText], currentStack]]; redCell _ NEW[ReductionCellBody _ [shift, position, , , terminalSeqCode, tokenText, tokenCase, tokenKind, lastShift, currentStack, NIL]]; IF lastRed # NIL THEN lastRed.next _ redCell ELSE firstRed _ redCell; lastRed _ redCell; lastShift _ redCell; RETURN[shift]; }; reduce => { shift: ActionCell; firstPosition: INT32 _ position; limitPosition: INT32 _ IF action.ruleSize = 0 THEN position ELSE currentStack.firstPosition + currentStack.length; redCell: ReductionCell; FOR I: CARD16 IN [0..action.ruleSize) DO firstPosition _ currentStack.firstPosition; currentStack _ currentStack.next; ENDLOOP; redCell _ NEW[ReductionCellBody _ [reduction, firstPosition, action.ruleNumber, limitPosition-firstPosition, , , , , , , NIL]]; IF lastRed # NIL THEN lastRed.next _ redCell ELSE firstRed _ redCell; lastRed 
_ redCell; shift _ LookUpAction[table.actionTable, currentStack.state, action.leftSide]; IF shift.action # shift THEN ERROR; currentStack _ NEW[StackCellBody _ [shift.nextState, firstPosition, limitPosition-firstPosition, currentStack]]; RETURN[reduction] }; accept => { redCell: ReductionCell _ NEW[ReductionCellBody _ [accept, , , , , , , , , , NIL]]; IF lastRed # NIL THEN lastRed.next _ redCell ELSE firstRed _ redCell; lastRed _ redCell; RETURN[accept]; }; ENDCASE => ERROR; }; FullTryToken: PROC [ tokenKind: IO.TokenKind, tokenText: ROPE, position: INT32 ] RETURNS [ SimpleTryTokenCase ] ~ { DO SELECT SimpleTryToken[tokenKind, tokenText, position] FROM reduction => LOOP; shift => RETURN[shift]; accept => RETURN[accept]; fail => { ResetToShift[]; RETURN[fail]; }; ENDCASE => ERROR; ENDLOOP; }; ResetStack: PROC ~ { currentStack _ oldStack; firstRed _ lastShift _ lastRed _ NIL; }; Play1TokenWorth: PROC RETURNS [ accepted: BOOL ] ~ { DO SELECT firstRed.case FROM reduction => { IF showReduce # NIL THEN showReduce[firstRed.rule, firstRed.firstCharPosition, firstRed.length]; firstRed _ firstRed.next; IF firstRed = NIL THEN lastShift _ lastRed _ NIL; }; shift => { IF firstRed.tokenCase = generic THEN { IF showGenericShift # NIL THEN showGenericShift[firstRed.code, firstRed.tokenKind, firstRed.text, firstRed.firstCharPosition]; } ELSE { IF showNonGenericShift # NIL THEN showNonGenericShift[firstRed.text, firstRed.firstCharPosition]; }; firstRed.previous _ NIL; oldStack _ firstRed.stack; firstRed _ firstRed.next; IF firstRed = NIL THEN lastShift _ lastRed _ NIL; RETURN[FALSE]; }; accept => { firstRed _ firstRed.next; IF firstRed = NIL THEN lastShift _ lastRed _ NIL; RETURN[TRUE]; }; ENDCASE => ERROR; ENDLOOP; }; ResetToShift: PROC ~ { IF lastShift # NIL THEN { lastRed _ lastShift; lastRed.next _ NIL; currentStack _ lastRed.stack; } ELSE { lastRed _ NIL; currentStack _ oldStack; }; }; Reset1TokenWorth: PROC ~ { lastRed _ lastShift; lastShift _ lastRed.previous; 
lastRed.previous _ NIL; ResetToShift[]; }; ResetNTokensWorth: PROC [n: CARD16] ~ { FOR I: CARD16 IN [0..n) DO Reset1TokenWorth[] ENDLOOP; }; firstToken: SourceTokenCell _ NIL; currentToken: SourceTokenCell _ NIL; lastToken: SourceTokenCell _ NIL; GetToken: PROC RETURNS [ IO.TokenKind, ROPE, INT32 ] ~ { t: SourceTokenCell; IF currentToken = NIL THEN { currentToken _ NEW[SourceTokenCellBody]; [currentToken.kind, currentToken.text, currentToken.position] _ getSourceToken[]; IF firstToken # NIL THEN lastToken.next _ currentToken ELSE firstToken _ currentToken; lastToken _ currentToken; }; t _ currentToken; currentToken _ currentToken.next; RETURN[t.kind, t.text, t.position]; }; ResetTokens: PROC ~ { currentToken _ firstToken; }; DeleteOneToken: PROC ~ { IF currentToken = firstToken THEN currentToken _ firstToken.next; firstToken _ firstToken.next; }; currentInsertions: TrialTokenCell _ NIL; bestInsertions: TrialTokenCell _ NIL; bestDeletions: CARD16 _ 0; bestScore: INT32 _ 0; BackUpOneTrialInsertion: PROC ~ { currentInsertions _ currentInsertions.next; }; AddOneTrialInsertion: PROC [ kind: IO.TokenKind, text: ROPE ] ~ { cell: TrialTokenCell _ NEW[TrialTokenCellBody _ [kind, text, currentInsertions]]; currentInsertions _ cell; }; RecordTrialScore: PROC [ nDeletions: INT32, score: INT32 ] ~ { IF score > bestScore THEN { bestInsertions _ currentInsertions; bestDeletions _ nDeletions; bestScore _ score; }; }; RecordFoundFix: PROC [ nDeletions: INT32 ] ~ { bestInsertions _ currentInsertions; bestDeletions _ nDeletions; bestScore _ DesiredSuccesses; }; ResetTrials: PROC ~ { currentInsertions _ bestInsertions _ NIL; bestDeletions _ bestScore _ 0; }; PlayTrialInsertions: PROC [ for: PROC [ IO.TokenKind, ROPE ] ] ~ { SubPlayTrialInsertions: PROC [ cell: TrialTokenCell ] ~ { IF cell.next # NIL THEN SubPlayTrialInsertions[cell.next]; for[cell.kind, cell.text]; }; IF bestInsertions # NIL THEN SubPlayTrialInsertions[bestInsertions]; }; ExploreForBestRecovery: PROC [ nInsertsSoFar: 
INT32, tryNDeletions: INT32 ] ~ { FOR I: INT32 IN [0..tryNDeletions) DO [] _ GetToken[] ENDLOOP; FOR I: INT32 IN [0..DesiredSuccesses) DO tKind: IO.TokenKind; tText: ROPE; tPosition: INT32; [tKind, tText, tPosition] _ GetToken[]; SELECT FullTryToken[tKind, tText, tPosition] FROM shift => IF I.SUCC # DesiredSuccesses THEN LOOP ELSE -- we parsed ahead as far as we wanted, go with this {RecordFoundFix[tryNDeletions]; ResetNTokensWorth[I.SUCC]; FoundAFix[]}; accept => {RecordFoundFix[tryNDeletions]; ResetNTokensWorth[I]; FoundAFix[]}; fail => { IF I >= MinSuccesses THEN RecordTrialScore[tryNDeletions, I]; ResetNTokensWorth[I]; EXIT; }; ENDCASE => ERROR; ENDLOOP; -- success loop ResetTokens[]; IF tryNDeletions > 0 THEN { SeeOneTokenToInsert: PROC [ kind: IO.TokenKind, text: ROPE ] ~ { AddOneTrialInsertion[kind, text]; SELECT FullTryToken[kind, text, 0] FROM shift => { ExploreForBestRecovery[nInsertsSoFar.SUCC, tryNDeletions-1]; Reset1TokenWorth[]; }; fail => NULL; accept => ResetToShift[]; ENDCASE => ERROR; BackUpOneTrialInsertion[]; }; GenPlausableInsertedTokens[table, currentStack.state, SeeOneTokenToInsert]; }; }; successfulTrys: CARD16 _ 0; DO kind: IO.TokenKind; text: ROPE; position: INT32; [kind, text, position] _ GetToken[]; SELECT FullTryToken[kind, text, position] FROM shift => { IF successfulTrys = 0 THEN successfulTrys _ 1 ELSE { IF Play1TokenWorth[] THEN ERROR; -- can't produce an acceptance DeleteOneToken[]; -- from the saved input tokens }; }; accept => { IF Play1TokenWorth[] THEN { IF successfulTrys = 1 THEN ERROR ELSE RETURN[TRUE]; }; IF successfulTrys # 1 THEN ERROR; IF Play1TokenWorth[] THEN RETURN[TRUE]; ERROR; -- one of them should have produced an acceptance }; fail => { successfulTrys _ 0; ResetStack[]; ResetTokens[]; ResetTrials[]; FOR diag: INT32 IN [0..MaxSkip] DO ExploreForBestRecovery[0, diag ! 
FoundAFix => { EXIT } ]; ENDLOOP; { reportStream: IO.STREAM _ GetReportStream[]; msgStream: IO.STREAM _ IO.ROS[]; nInsertions: CARD16 _ 0; ReportOneInsertion: PROC [ kind: IO.TokenKind, text: ROPE ] ~ { IO.PutF[msgStream, " %g", IO.rope[text]]; }; TryOneTrialInsertion: PROC [ kind: IO.TokenKind, text: ROPE ] ~ { nInsertions _ nInsertions.SUCC; SELECT FullTryToken[kind, text, 0] FROM shift, accept => RETURN; fail => ERROR; ENDCASE; }; ResetStack[]; ResetTokens[]; IO.PutF[msgStream, "\n\nSyntaxError at %g\n", IF firstToken=NIL THEN IO.rope[""] ELSE IO.int[firstToken.position]]; IF bestDeletions # 0 THEN IO.PutF[msgStream, "\tdeleting"]; FOR I: CARD16 IN [0..bestDeletions) DO text: ROPE; [, text, ] _ GetToken[]; IO.PutF[msgStream, " %g", IO.rope[text]]; ENDLOOP; ResetTokens[]; IO.PutF[msgStream, "\tinserting"]; PlayTrialInsertions[ReportOneInsertion]; IO.PutF[msgStream, "\n\n"]; reportStream.PutF["%g", IO.rope[IO.RopeFromROS[msgStream]]]; IF bestScore < MinSuccesses THEN UnRecoverableSyntaxError[]; FOR I: CARD16 IN [0..bestDeletions) DO DeleteOneToken[] ENDLOOP; PlayTrialInsertions[TryOneTrialInsertion]; FOR I: CARD16 IN [0..nInsertions) DO IF Play1TokenWorth[] THEN { IF I = (nInsertions - 1) THEN RETURN[TRUE] ELSE ERROR; }; ENDLOOP; successfulTrys _ 0; }; }; ENDCASE => ERROR; ENDLOOP; }; GenPlausableInsertedTokens: PROC [ table: ParserTable, state: State, for: PROC [ IO.TokenKind, ROPE ] ] ~ { FOR cellX: CARD16 IN [0..table.tokenTable.nSlots) DO FOR trialTokenCell: TokenCell _ table.tokenTable.tokens[cellX], trialTokenCell.next WHILE trialTokenCell # NIL DO actionCell: ActionCell _ LookUpAction[table.actionTable, state, trialTokenCell.symbolCode]; IF actionCell # NIL THEN { text: ROPE _ IF trialTokenCell.case = unique THEN trialTokenCell.spelling ELSE SELECT trialTokenCell.kind FROM tokenID => "id", tokenDECIMAL, tokenOCTAL, tokenHEX, tokenREAL => "num", tokenROPE => "rope", tokenCHAR => "char", tokenATOM => "atom", tokenEOF => "eof", ENDCASE => ERROR; 
for[trialTokenCell.kind, text]; }; ENDLOOP; ENDLOOP; }; }. šOneCasabaParserImpl.Mesa (September 18, 1984 9:00:29 am PDT) Copyright Ó 1987, 1988, 1989 by Xerox Corporation. All rights reserved. Sturgis, June 3, 1986 3:07:05 pm PDT Bill Jackson (bj) September 13, 1989 10:22:29 pm PDT first the procedures for creating parser info on streams now the types and procedures for building an internal parsetable from parser descriptions on a stream now remove any ambiguities resolve shift/reduce by shift resolve reduce/reduce to the lowest production number now report any conflicts that have been artificially removed using GetReportStream for a slightly bastardized purpose the definitions file says it will only be called during parse, and I don't want to change the definitions file the following hack is because GetCedarToken fails on | of course, the scanner will still fail if it ever sees a | following routines are intended for the output of the GetCedarToken routine in IO.mesa finally the code for actually performing a parse if reduce case if shift if accept nothing all cases this uses a scheme obtained orally from Ed Satterthwaite on Nov 7, 1985. I am begining to suspect I have not got the search strategy right, I am breadth first on insertions, but not on deletions? state stack and saved reductions for later play to caller old stack is as the stack appears just before applying firstRed current stack is as the stack appears just after appling the last reduction chained from firstRed these reduction cells form a linear list from firstRed to LastRed last shift points into this list. results in a shift, a single reduction, an acceptance, or a failure repeats calls on SimpleTryToken, so that all reductions are done, up to and including a shift, acceptance, or failure. plays successive reductions up to and including a shift or acceptance now we deal with delivery of tokens. 
this mechanism allows back up and replay of tokens without any assumptions about the ultimate source of tokens syntactic error recovery involves trying the deletion of some incomming tokens and the insertion of other plausable tokens. The following data records current best such attempt as well as the currently explored attempt. first see if we can get away by trying an appropriate number of deletions without any insertions we couldn't parse as many input tokens as we wanted but we got enough to record we didn't parse ahead as far as we wanted, so reset now try each plausable inserted token, but with one fewer deletions this might generate FoundAFix, which will be caught by our ultimate caller following is the main loop that drives the parser two source tokens in a row have worked, so show first to caller first forget what we have seen now conduct an exploration for a replacement found a replacement that results in acceptance or good match we could try to replay the current stack, but it is easier to program using the code for "go with the best". 
did not find a replacement that resulted in acceptance or a good match
œ˜HKšœœ.˜BKšœœœ˜>K˜K˜?K˜1K˜,šœœœ˜7Kšœœœœ˜'K˜Kšœ˜—Kšœ˜—Kšœœ˜Kšœ˜—K˜KšœOœ™VK˜šžœœ!œœœ œœ˜‹Kšœœ˜Kšœœ˜ Kšœ œ˜K˜2Kšœœ˜&šœ˜š˜šœ<œ œ˜SKš œœ"œœœ˜oKš˜——š˜šœ<œ œ˜SKšœœœ%˜IKšœ˜———Kš œœœœœ?˜…Kšœ˜Kšœ˜—K˜šž œœ œœœ œœ˜hK˜,šœœ œ˜4š œžœœœ˜%Kšœœœ˜.Kšœ˜——Kšœ˜—K˜š ž œœ9œœœ˜zKšœœ&˜2Kšœ œœ˜/šœAœ œ˜]Kšœœ&œœ˜fKšœ˜—Kšœ˜—K˜Kšœ0™0K˜Kšœ œœ˜$šœœœ˜K˜ Kšœœ˜K˜K˜K˜—Kšœœœ˜,šœœœ˜"K˜Kšœœ˜šœ™Kšœœ˜ Kšœœ˜—šœ™Kšœœ˜ Kšœœ˜ K˜Kšœ œ ˜KšœŸ˜*KšœŸ/˜A—šœ ™ Kšœ™—šœ ™ K˜—K˜K˜—Kšœœ ˜7Kšœœ&˜>K˜Kšœœœ˜0šœœœ˜$Kšœœ ˜Kšœœ˜ Kšœ œ˜K˜K˜K˜—Kšœœœ˜.šœœœ˜#Kšœœ ˜Kšœœ˜ K˜K˜K˜—Kšžœœ˜Kšžœœ˜Kšž œœ˜Kšžœœ˜K˜Kšž œœœ˜Kšžœœœœœœœ˜?Kšžœœœœ˜.K˜šžœœœXœ+œ1œœ œ˜ðKšœH™HKšœB™BKšœ7™7K˜šœ9™9Kšœ?™?Kšœa™aK˜—Kšœœ*œ˜IK˜#K˜KšœA™AKšœ!™!K˜Kšœœ˜KšœœŸ˜1Kšœœ˜˜K™C—š žœœœœ œœ˜uKšœœ˜Kšœœ˜K˜K˜K˜K˜iK˜NKšœ œœœ˜"šœ˜šœ ˜ K˜KšœœU˜gKšœ œvœ˜‰Kšœ œœœ˜EK˜K˜Kšœ˜Kšœ˜—šœ ˜ K˜Kšœœ ˜ šœœœ˜-Kšœ ˜ Kšœ2˜6—K˜K˜š œžœœœ˜(K˜+K˜!Kšœ˜—K˜Kšœ œlœ˜Kšœ œœœ˜EK˜K˜MKšœœœ˜#Kšœœ^˜pKšœ ˜Kšœ˜K˜—šœ ˜ Kšœœ0œ˜RKšœ œœœ˜EK˜Kšœ ˜Kšœ˜—Kšœœ˜—Kšœ˜—˜K™v—š ž œœœœ œœ˜sš˜šœ0˜:Kšœ œ˜Kšœ œ˜Kšœ œ ˜šœ ˜ K˜Kšœ˜ Kšœ˜—Kšœœ˜—Kšœ˜—Kšœ˜—K˜šž œœ˜Kšœ˜Kšœ!œ˜%Kšœ˜—˜K™E—šžœœœ œ˜4š˜šœ˜šœ˜šœ˜KšœH˜L—K˜šœ ˜Kšœœ˜ —Kšœ˜—šœ ˜ šœ˜šœ˜šœ˜Kšœ`˜d—Kšœ˜—šœœ˜šœ˜Kšœ@˜D—Kšœ˜——Kšœœ˜K˜K˜šœ ˜Kšœœ˜ —Kšœœ˜Kšœ˜—šœ ˜ K˜šœ ˜Kšœœ˜ —Kšœœ˜ Kšœ˜—Kšœœ˜—Kšœ˜—Kšœ˜—K˜šž œœ˜šœ ˜šœ˜Kšœ˜Kšœœ˜Kšœ˜Kšœ˜—šœ˜Kšœ œ˜Kšœ˜Kšœ˜——Kšœ˜K˜—šžœœ˜K˜K˜Kšœœ˜K˜Kšœ˜—K˜šžœœœ˜'Kš œžœœœœœ˜6Kšœ˜—K˜Kšœ”™”K˜Kšœœ˜"Kšœ œ˜$Kšœœ˜!K˜š žœœœœ œœ˜8K˜šœœœ˜Kšœœ˜(K˜Qšœ˜Kšœ˜"Kšœ˜—K˜Kšœ˜—K˜K˜!Kšœ˜#Kšœ˜—K˜šž œœ˜K˜K˜—K˜šžœœ˜Kšœœ ˜AK˜Kšœ˜—K˜KšœÝ™ÝK˜Kšœ$œ˜(Kšœ!œ˜%Kšœœ˜Kšœ œ˜K˜šžœœ˜!K˜+K˜—K˜šžœœ œœ˜AKšœœ7˜QK˜Kšœ˜—K˜šžœœœ œ˜>šœœ˜K˜#K˜K˜Kšœ˜—Kšœ˜—K˜šžœœœ˜.K˜#K˜K˜Kšœ˜—K˜šž œœ˜Kšœ%œ˜)K˜Kšœ˜—K˜š žœœœœ œ˜Bšžœœ˜9Kšœ œœ#˜:K˜Kšœ˜—Kšœœœ(˜DKšœ˜—K˜šžœœœœ˜OKšœ`™`K˜Kš œžœœœœœ˜>š œžœœœ˜(Kšœœœ œ˜3K˜'šœ'˜1Kšœ œœœ˜/šœœŸ4˜