def is_function(defMap):
    """
    Given a mapping from pairs (role, term) |-> (shorthand, lineNum) checks
    to make sure that for any two
        (role, term)   |-> (shorthand, lineNum)
        (role', term') |-> (shorthand', lineNum')
    term = term' iff shorthand = shorthand'
    In other words, the mapping from shorthand to term defines a function.

    Raises pslErrors.TranslationError when one shorthand is bound to two
    distinct terms; returns None otherwise.
    """
    definedShorthand = {}
    for role, term in defMap:
        shorthand, lineNumber = defMap[(role, term)]
        if shorthand in definedShorthand:
            term2, lineNumber2 = definedShorthand[shorthand]
            # Compare the complete token sequences. The previous zip()-based
            # check stopped at the shorter sequence, so a term whose
            # tokenization was a strict prefix of the other's was silently
            # accepted as "equal".
            if tokenize(term) != tokenize(term2):
                raise pslErrors.TranslationError(' '.join([
                    pslErrors.error,
                    pslErrors.color_line_number(lineNumber) + ",",
                    pslErrors.color_line_number(lineNumber2),
                    "Multiple definitions of",
                    pslErrors.color_token(shorthand) + ":",
                    pslErrors.color_token(term) + ",",
                    pslErrors.color_token(term2)
                ]))
        else:
            definedShorthand[shorthand] = (term, lineNumber)
def role_variables_correct(defMap, parseTree):
    """
    Given a mapping from pairs (role, term) |-> (shorthand, lineNum) checks
    to make sure that the variables in term are allowed to show up in terms
    associated with role.

    Raises pslErrors.TranslationError listing every violation found; returns
    None when all definitions are well-scoped.
    """
    protocol = parseTree.get_protocol()
    roleMap = protocol.variables_per_role()
    declaredVars = protocol.declared_variables()
    errors = []
    # statement_vars() does not depend on the loop variables, so compute it
    # once up front instead of once per definition pair (it was previously
    # recomputed inside the loop; the unused checkedRoles scaffolding has
    # been removed).
    allStmtVars = protocol.statement_vars(float('inf'))
    for role, term in defMap.keys():
        inVars = set(roleMap[role])
        # Variables used by every role other than the one being checked.
        otherStmtVars = {
            roleName: allStmtVars[roleName]
            for roleName in allStmtVars if roleName != role
        }
        #Because we've already passed the terms to Maude to be parsed, the
        #variables in the term are annotated with their sort. We need to
        #ignore that sort when checking if a variable is in someone else's
        #variables.
        for var in tokenize(term):
            varName = var.split(':')[0]
            if varName not in declaredVars:
                continue
            for roleName in otherStmtVars:
                if varName in otherStmtVars[roleName] and varName not in inVars:
                    errorMsg = ' '.join([
                        pslErrors.error,
                        pslErrors.color_line_number(
                            defMap[(role, term)][LINE_NUM]), "Variable",
                        pslErrors.color_token(varName.strip()),
                        "appears in the protocol terms of roles",
                        pslErrors.color_token(roleName), "and",
                        pslErrors.color_token(role) + ".",
                        "Variables must be disjoint between roles, with the possible exception of In(put) variables."
                    ])
                    # Report each distinct violation only once.
                    if errorMsg not in errors:
                        errors.append(errorMsg)
    if errors:
        raise pslErrors.TranslationError('\n'.join(errors))
def parse_code(sectionStmts):
    """
    Construct the PSL parse tree from the lexed section statements.

    Each of the four required sections is parsed by its corresponding
    pslTree node class; a missing section raises pslErrors.SyntaxError.
    """
    root = pslTree.Root()
    requiredSections = [('Theory', pslTree.Theory),
                        ('Protocol', pslTree.Protocol),
                        ('Intruder', pslTree.Intruder),
                        ('Attacks', pslTree.Attacks)]
    for sectionName, nodeClass in requiredSections:
        try:
            root.children.append(
                nodeClass.parse(sectionStmts[sectionName], root))
        except KeyError:
            raise pslErrors.SyntaxError(' '.join([
                pslErrors.errorNoLine, "Missing Section:",
                pslErrors.color_token(sectionName)
            ]))
    return root
def sorts_independent_shorthand(independentShorthand, knownShorthand,
                                syntaxFileName):
    """
    Given a mapping from (role, term) to (shorthand, lineNumber), and a
    mapping of all shorthand appearing in each "term" to their respective
    sorts, returns a mapping of all shorthand in independentShorthand to
    their respective sorts.
    """
    # Build a throwaway functional module declaring each known shorthand as a
    # constant of its sort, then ask Maude to parse each independent term so
    # that its sort is printed back.
    maudeCmds = (["fmod SHORTHAND is", "protecting PROTOCOL-EXAMPLE-SYMBOLS ."] +
                 [' '.join(['op', shorthand, ':', '->', sort, '.'])
                  for (shorthand, sort) in knownShorthand.iteritems()] +
                 ["endfm"])
    maudeCmds += [' '.join(['parse', termRolePair[DEF_KEY_TERM], '.'])
                  for termRolePair in independentShorthand.keys()] + ['q']
    maudeExecution = subprocess.Popen([MAUDE_COMMAND, NO_PRELUDE, '-no-banner',
                                       '-no-advise', '-no-wrap', PRELUDE,
                                       NPA_SYNTAX, syntaxFileName],
                                      stdout=subprocess.PIPE,
                                      stdin=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
    stdout, stderr = maudeExecution.communicate('\n'.join(maudeCmds))
    #Print any maude errors
    if stderr:
        stderr = stderr.strip().split('\n')
        errors = []
        for line in stderr:
            #Warning format:
            #Warning: <standard input>, line i: error message
            line = line.split(':')
            termIndex = int(line[1].split()[-1])
            # Offset maps Maude's reported input-line number back to the
            # index of the failing parse command — TODO confirm the +2.
            startParsingIndex = maudeCmds.index('endfm')+2
            role, problemTerm = independentShorthand.keys()[termIndex - startParsingIndex]
            # NOTE(review): "no parse for term" warnings are skipped, and the
            # SyntaxError below is raised even when `errors` ends up empty —
            # confirm both behaviors are intended.
            if "no parse for term" not in ''.join(line):
                errors.append(' '.join([pslErrors.error,
                                        pslErrors.color_line_number(independentShorthand[(role, problemTerm)][DEF_LINE_NUM]),
                                        "Term: ", pslErrors.color_token(problemTerm),
                                        ':'.join(line[2:])]))
        raise pslErrors.SyntaxError('\n'.join(errors))
    stdout = stdout.split('\n')
    stdout = [line.replace('Maude>', '').strip() for line in stdout]
    #Maude output after removing Maude>:
    #Sort: term
    # Relies on Python 2 dicts iterating in the same order here as in the
    # .keys() traversal that generated the parse commands above.
    return {independentShorthand[termRolePair][DEF_SHORTHAND]:line.split(':')[0]
            for termRolePair, line in zip(independentShorthand, stdout)}
def role_variables_correct(defMap, parseTree): """ Given a mapping from pairs (role, term) |-> (shorthand, lineNum) checks to make sure that the variables in term are allowed to show up in terms associated with role. """ #Working on modifying the disjoint_vars code from pslTree.py to work here. protocol = parseTree.get_protocol() roleMap = protocol.variables_per_role() declaredVars = protocol.declared_variables() roleTermPairs = defMap.keys() errors = [] checkedRoles = [] for role, term in roleTermPairs: #checkedRoles.append(role) inVars = set(roleMap[role]) otherStmtVars = protocol.statement_vars(float('inf')) otherStmtVars = { roleName: otherStmtVars[roleName] for roleName in otherStmtVars if roleName != role } #Because we've already passed the terms to Maude to be parsed, the variables in the term are annotated with their sort. We need to ignore that sort #when checking if a variable is in someone else's variables. for var in [ token for token in tokenize(term) if token.split(':')[0] in declaredVars ]: for roleName in otherStmtVars: varName = var.split(':')[0] if varName in otherStmtVars[roleName] and varName not in inVars: errorMsg = ' '.join([ pslErrors.error, pslErrors.color_line_number(defMap[(role, term)][LINE_NUM]), "Variable", pslErrors.color_token(varName.strip()), "appears in the protocol terms of roles", pslErrors.color_token(roleName), "and", pslErrors.color_token(role) + ".", "Variables must be disjoint between roles, with the possible exception of In(put) variables." ]) if errorMsg not in errors: errors.append(errorMsg) if errors: raise pslErrors.TranslationError('\n'.join(errors))
def gen_intermediate(parseTree, theoryFileName):
    """
    Generates the maude code needed to translate a PSL specification into a
    trio of Maude-NPA module. Returns the code as a list of lines.
    """
    code = ['load psl.maude',
            ' '.join(['load', theoryFileName]),
            'mod INTERMEDIATE is',
            'protecting TRANSLATION-TO-MAUDE-NPA .',
            'protecting PROTOCOL-EXAMPLE-SYMBOLS .']
    # Collect every (role, term) |-> (shorthand, lineNum) definition pair,
    # rejecting a (role, term) that was given more than one shorthand.
    defPairs = []
    for defNode in parseTree.get_protocol().get_defs():
        defPairs.extend(defNode.def_pairs())
    defMap = {}
    for defPair in defPairs:
        if (defPair.role(), defPair.term()) in defMap:
            otherDef = defMap[(defPair.role(), defPair.term())]
            raise pslErrors.TranslationError(' '.join([pslErrors.error,
                                                      pslErrors.color_line_number(defPair.lineNum) + ",",
                                                      otherDef[DEF_LINE_NUM], "Term",
                                                      pslErrors.color_token(defPair.term()),
                                                      "has multiple shorthands: ",
                                                      pslErrors.color_token(defPair.shorthand()) + ",",
                                                      pslErrors.color_token(otherDef[DEF_SHORTHAND])]))
        else:
            defMap[(defPair.role(), defPair.term())] = (defPair.shorthand(), defPair.lineNum)
    # Declare each shorthand as a constant of its computed sort so the
    # intermediate module can parse terms that use it.
    shorthandSortMap = compute_sorts(defMap, theoryFileName, parseTree)
    code.extend([' '.join(['op', shorthand, ':', '->', sort, '.'])
                 for shorthand, sort in shorthandSortMap.items()])
    code.append('endm')
    # Everything below builds the single `rew` command whose normal form is
    # the translated specification.
    code.append('rew')
    code.extend(['Specification', '{'])
    protocol = parseTree.get_protocol()
    code.extend(stmt for stmt in protocol.translate() if stmt)
    intruder = parseTree.get_intruder()
    code.extend([stmt for stmt in intruder.translate() if stmt])
    attacks = parseTree.get_attacks()
    code.extend([stmt for stmt in attacks.translate() if stmt])
    code.append('}')
    #Empty StrandData for protocols
    code.append('[mt]')
    #Empty strand set for intruders
    code.append('[empty]')
    if defMap:
        defs = ', '.join([' '.join([''.join(['(', shorthandLineNum[DEF_SHORTHAND],
                                             ', ', str(shorthandLineNum[DEF_LINE_NUM]), ')']),
                                    ':=', roleTermPair[DEF_KEY_TERM]])
                          for roleTermPair, shorthandLineNum in defMap.items()])
    else:
        # Sentinel understood by the Maude side when no Def section exists.
        defs = '$noDefs'
    code.append(' '.join(['[', '$makeIdem($checkWellFormed(', defs, '))', ']']))
    code.append('.')
    return code
def sorts_independent_shorthand(independentShorthand, knownShorthand,
                                syntaxFileName):
    """
    Given a mapping from (role, term) to (shorthand, lineNumber), and a
    mapping of all shorthand appearing in each "term" to their respective
    sorts, returns a mapping of all shorthand in independentShorthand to
    their respective sorts.
    """
    # Declare each known shorthand as a constant of its sort in a one-off
    # module, then ask Maude to parse every independent term so its sort is
    # echoed back on stdout.
    maudeCmds = (
        ["fmod SHORTHAND is", "protecting PROTOCOL-EXAMPLE-SYMBOLS ."] + [
            ' '.join(['op', shorthand, ':', '->', sort, '.'])
            for (shorthand, sort) in knownShorthand.iteritems()
        ] + ["endfm"])
    maudeCmds += [
        ' '.join(['parse', termRolePair[DEF_KEY_TERM], '.'])
        for termRolePair in independentShorthand.keys()
    ] + ['q']
    maudeExecution = subprocess.Popen([
        MAUDE_COMMAND, NO_PRELUDE, '-no-banner', '-no-advise', '-no-wrap',
        PRELUDE, NPA_SYNTAX, syntaxFileName
    ],
                                      stdout=subprocess.PIPE,
                                      stdin=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
    stdout, stderr = maudeExecution.communicate('\n'.join(maudeCmds))
    #Print any maude errors
    if stderr:
        stderr = stderr.strip().split('\n')
        errors = []
        for line in stderr:
            #Warning format:
            #Warning: <standard input>, line i: error message
            line = line.split(':')
            termIndex = int(line[1].split()[-1])
            # Maps Maude's reported input-line number back to the index of
            # the failing parse command — TODO confirm the +2 offset.
            startParsingIndex = maudeCmds.index('endfm') + 2
            role, problemTerm = independentShorthand.keys()[termIndex -
                                                            startParsingIndex]
            # NOTE(review): "no parse for term" warnings are skipped, and the
            # SyntaxError below fires even when `errors` ends up empty —
            # confirm both behaviors are intended.
            if "no parse for term" not in ''.join(line):
                errors.append(' '.join([
                    pslErrors.error,
                    pslErrors.color_line_number(
                        independentShorthand[(role,
                                              problemTerm)][DEF_LINE_NUM]),
                    "Term: ",
                    pslErrors.color_token(problemTerm), ':'.join(line[2:])
                ]))
        raise pslErrors.SyntaxError('\n'.join(errors))
    stdout = stdout.split('\n')
    stdout = [line.replace('Maude>', '').strip() for line in stdout]
    #Maude output after removing Maude>:
    #Sort: term
    # Relies on Python 2 dicts iterating here in the same order as the
    # .keys() traversal that generated the parse commands above.
    return {
        independentShorthand[termRolePair][DEF_SHORTHAND]: line.split(':')[0]
        for termRolePair, line in zip(independentShorthand, stdout)
    }
def is_function(defMap):
    """
    Checks that the shorthand |-> term relation induced by defMap is
    single-valued: for any two entries
        (role, term)   |-> (shorthand, lineNum)
        (role', term') |-> (shorthand', lineNum')
    we must have term = term' iff shorthand = shorthand'. In other words,
    the mapping from shorthand to term defines a function.

    Raises pslErrors.TranslationError on the first conflicting shorthand.
    """
    seen = {}
    for (role, term), (shorthand, lineNumber) in defMap.items():
        if shorthand not in seen:
            # First time we meet this shorthand: remember its term.
            seen[shorthand] = (term, lineNumber)
            continue
        priorTerm, priorLineNumber = seen[shorthand]
        # Token-by-token comparison of the two definitions (pairs beyond the
        # shorter tokenization are not compared).
        pairs = zip(tokenize(term), tokenize(priorTerm))
        if any(first != second for first, second in pairs):
            raise pslErrors.TranslationError(' '.join([
                pslErrors.error,
                pslErrors.color_line_number(lineNumber) + ",",
                pslErrors.color_line_number(priorLineNumber),
                "Multiple definitions of",
                pslErrors.color_token(shorthand) + ":",
                pslErrors.color_token(term) + ",",
                pslErrors.color_token(priorTerm)
            ]))
def parse_code(sectionStmts):
    """
    Builds the parse tree of a PSL specification from the lexed statements.

    sectionStmts maps section names ('Theory', 'Protocol', 'Intruder',
    'Attacks') to their statement lists; a missing section raises
    pslErrors.SyntaxError.
    """
    root = pslTree.Root()
    try:
        root.children.append(pslTree.Theory.parse(sectionStmts['Theory'],
                                                  root))
    except KeyError:
        raise pslErrors.SyntaxError(' '.join([pslErrors.errorNoLine,
                                              "Missing Section:",
                                              pslErrors.color_token("Theory")]))
    try:
        root.children.append(pslTree.Protocol.parse(sectionStmts['Protocol'],
                                                    root))
    except KeyError:
        raise pslErrors.SyntaxError(' '.join([pslErrors.errorNoLine,
                                              "Missing Section:",
                                              pslErrors.color_token("Protocol")]))
    try:
        root.children.append(pslTree.Intruder.parse(sectionStmts['Intruder'],
                                                    root))
    except KeyError:
        raise pslErrors.SyntaxError(' '.join([pslErrors.errorNoLine,
                                              "Missing Section:",
                                              pslErrors.color_token("Intruder")]))
    try:
        root.children.append(pslTree.Attacks.parse((sectionStmts['Attacks']),
                                                   root))
    except KeyError:
        raise pslErrors.SyntaxError(' '.join([pslErrors.errorNoLine,
                                              "Missing Section:",
                                              pslErrors.color_token("Attacks")]))
    return root
def process_error(error, parseTree):
    """
    Given a partially evaluated PSL specification, extracts the offending
    error term, and extracts from the error term the information need for a
    usable error message. Then raises a TranslationError containing said
    usable error message.
    """
    try:
        errorTermStart = error.index("$$$")
    except ValueError as e:
        # No $$$ marker at all: re-raise with the full text so the raw Maude
        # output is available for diagnosis.
        raise ValueError(error)
    errorType, errorTerm = error[errorTermStart:].split('(', 1)
    # NOTE(review): unused; presumably superseded by compute_end_of_term.
    numParens = 1
    endOfTerm = compute_end_of_term(errorType, errorTerm)
    errorTerm = errorTerm[:endOfTerm]
    if errorType == "$$$infiniteIdem":
        invalidMapping, lineNumbers = errorTerm.rsplit(',', 1)
        lineNumbers = [number.strip() for number in lineNumbers.split(':')]
        raise pslErrors.TranslationError(' '.join([pslErrors.error,
                                                   pslErrors.color_line_number(','.join(lineNumbers)),
                                                   "The substitution: ", invalidMapping,
                                                   "cannot be made idempotent."]))
    elif errorType.strip() == "$$$malformedDefs":
        errorDefs = []
        for error in errorTerm.split("$$;;;;$$"):
            pair, lineNumber = error.split("$$,$$")
            lineNumber = lineNumber.replace(")", '')
            errorDefs.append(' '.join([pslErrors.error,
                                       pslErrors.color_line_number(lineNumber.strip()),
                                       #Stripping off the () around the definition
                                       "Malformed Definition:",
                                       pslErrors.color_token(pair[1:-1])]))
        raise pslErrors.TranslationError('\n'.join(errorDefs))
    elif errorType.strip() == "$$$malformedTerm":
        errorTerm, lineNumber = errorTerm.rsplit(',', 1)
        raise pslErrors.TranslationError(' '.join([pslErrors.error,
                                                   pslErrors.color_line_number(lineNumber.strip()),
                                                   "Malformed term:", errorTerm]))
    elif errorType.strip() == "$$$notAFunction":
        errorMappings = errorTerm.split("$$$;;;$$$")
        errorMsg = []
        for error in errorMappings:
            var, results = [string.strip() for string in error.split('|->')]
            problemTerms = []
            lineNumbers = []
            # Each result is rendered by Maude as '${ term ; lineNum }$'.
            for result in [r.strip() for r in results.split('}$') if r.strip()]:
                result = result.replace('${', '')
                startLineNum = result.rindex(';')+1
                lineNumber = result[startLineNum:].strip()
                result = result[:startLineNum-1].strip()
                problemTerms.append(result)
                lineNumbers.append(lineNumber)
            errorMsg.append(''.join([pslErrors.errorNoLine, " Substitution does not",
                                     " define a function. Variable: ", pslErrors.color_token(var),
                                     " maps to the terms:\n\t",
                                     '\n\t'.join([' Line: '.join([
                                         pslErrors.color_token(term),
                                         pslErrors.color_line_number(lineNum)])
                                         for term, lineNum in zip(problemTerms, lineNumbers)])]))
        raise pslErrors.TranslationError('\n'.join(errorMsg))
    elif errorType.strip() == "$$$invalidSorting":
        var, termLineNum = [s.strip() for s in errorTerm.split('|->')]
        var, variableSort = var.split(':')
        termLineNum = termLineNum.replace('${', '').replace('}$', '')
        lineNumberIndex = termLineNum.rindex(';')+1
        lineNum = termLineNum[lineNumberIndex:].strip()
        term = termLineNum[:lineNumberIndex-1].strip()
        raise pslErrors.TranslationError(' '.join([pslErrors.error,
                                                   pslErrors.color_line_number(lineNum),
                                                   "Variable", pslErrors.color_token(var),
                                                   "has sort", pslErrors.color_token(variableSort),
                                                   "but term", pslErrors.color_token(term),
                                                   "does not."]))
def lex_code(pslFile):
    """
    Given an iterable of lines of PSL code, returns a dictionary mapping
    section names to lists of statements.

    Raises pslErrors.LexingError if comment tokens are unbalanced.
    """
    sectionStmts = {heading: [] for heading in pslTree.SECTION_HEADINGS}
    numberedLines = [(tokenize(line), num)
                     for line, num in number_lines(pslFile)]
    statement = pslTree.Statement()
    #List of pairs of comment token with line number.
    startComment = []
    errorMsgs = []
    for i in range(len(numberedLines)):
        line, num = numberedLines[i]
        for j in range(len(line)):
            token = line[j].strip()
            # One- and two-token lookahead, spilling over into the following
            # physical line(s) when the current line runs out of tokens.
            try:
                nextToken = line[j + 1]
            except IndexError:
                try:
                    nextToken = numberedLines[i + 1][0][0]
                except IndexError:
                    nextToken = nextNextToken = ''
                else:
                    try:
                        nextNextToken = numberedLines[i + 1][0][1]
                    except IndexError:
                        try:
                            nextNextToken = numberedLines[i + 2][0][0]
                        except IndexError:
                            nextNextToken = ''
            else:
                try:
                    nextNextToken = line[j + 2]
                except IndexError:
                    try:
                        nextNextToken = numberedLines[i + 1][0][0]
                    except IndexError:
                        nextNextToken = ''
            if token == r'/*':
                startComment.append((token, num))
            elif token == r'*/':
                try:
                    if startComment[-1][0] == r'/*':
                        startComment = startComment[:-1]
                        continue
                    else:
                        raise IndexError()
                except IndexError:
                    # Unmatched end-of-comment token. The original code did
                    # `raise errorMsgs.append(...)`, which raised the None
                    # returned by append (a TypeError) instead of recording
                    # the problem; just record it and keep lexing so all
                    # errors are reported together below.
                    errorMsgs.append(' '.join([
                        pslErrors.error,
                        pslErrors.color_line_number(num),
                        "Unexpected end comment token:",
                        pslErrors.color_token(token)
                    ]))
            if startComment or re.match(singleLineComment, token):
                continue
            elif not sectionStmts['Start'] and re.match(startSpec, token):
                stmt = pslTree.Statement([token], [num])
                sectionStmts["Start"].append(stmt)
                continue
            elif is_start_of_section(token, nextToken):
                section = token
            elif token in END_LINE and not protocol_step(nextToken,
                                                         nextNextToken):
                # End of a statement: close it out and start a fresh one.
                statement.append(token, num)
                sectionStmts[section].append(statement)
                statement = pslTree.Statement()
            else:
                statement.append(token, num)
    for token, num in startComment:
        errorMsgs.append(' '.join([
            pslErrors.error,
            pslErrors.color_line_number(num),
            "Dangling comment token:",
            pslErrors.color_token(token)
        ]))
    if errorMsgs:
        raise pslErrors.LexingError('\n'.join(errorMsgs))
    #Removes some unnecessary end of comment statements that are still floating around.
    for key in sectionStmts:
        for stmt in sectionStmts[key]:
            stmt.tokens = [token for token in stmt.tokens if token != '*/']
    return sectionStmts
def process_error(error, parseTree):
    """
    Given a partially evaluated PSL specification, extracts the offending
    error term, and extracts from the error term the information needed for
    a usable error message. Then raises a TranslationError containing said
    usable error message.
    """
    try:
        errorTermStart = error.index("$$$")
    except ValueError:
        # No $$$ marker: previously this bubbled up as a bare
        # "substring not found". Re-raise with the full Maude output so the
        # failure is diagnosable (consistent with the file's other
        # definition of process_error).
        raise ValueError(error)
    errorType, errorTerm = error[errorTermStart:].split('(', 1)
    # (The unused `numParens = 1` local has been dropped.)
    endOfTerm = compute_end_of_term(errorType, errorTerm)
    errorTerm = errorTerm[:endOfTerm]
    if errorType == "$$$infiniteIdem":
        invalidMapping, lineNumbers = errorTerm.rsplit(',', 1)
        lineNumbers = [number.strip() for number in lineNumbers.split(':')]
        raise pslErrors.TranslationError(' '.join([
            pslErrors.error,
            pslErrors.color_line_number(','.join(lineNumbers)),
            "The substitution: ", invalidMapping,
            "cannot be made idempotent."
        ]))
    elif errorType.strip() == "$$$malformedDefs":
        errorDefs = []
        for error in errorTerm.split("$$;;;;$$"):
            pair, lineNumber = error.split("$$,$$")
            lineNumber = lineNumber.replace(")", '')
            errorDefs.append(' '.join([
                pslErrors.error,
                pslErrors.color_line_number(lineNumber.strip()),
                #Stripping off the () around the definition
                "Malformed Definition:",
                pslErrors.color_token(pair[1:-1])
            ]))
        raise pslErrors.TranslationError('\n'.join(errorDefs))
    elif errorType.strip() == "$$$malformedTerm":
        errorTerm, lineNumber = errorTerm.rsplit(',', 1)
        raise pslErrors.TranslationError(' '.join([
            pslErrors.error,
            pslErrors.color_line_number(lineNumber.strip()),
            "Malformed term:", errorTerm
        ]))
    elif errorType.strip() == "$$$notAFunction":
        errorMappings = errorTerm.split("$$$;;;$$$")
        errorMsg = []
        for error in errorMappings:
            var, results = [string.strip() for string in error.split('|->')]
            problemTerms = []
            lineNumbers = []
            # Each result is rendered by Maude as '${ term ; lineNum }$'.
            for result in [
                    r.strip() for r in results.split('}$') if r.strip()
            ]:
                result = result.replace('${', '')
                startLineNum = result.rindex(';') + 1
                lineNumber = result[startLineNum:].strip()
                result = result[:startLineNum - 1].strip()
                problemTerms.append(result)
                lineNumbers.append(lineNumber)
            errorMsg.append(''.join([
                pslErrors.errorNoLine, " Substitution does not",
                " define a function. Variable: ",
                pslErrors.color_token(var), " maps to the terms:\n\t",
                '\n\t'.join([
                    ' Line: '.join([
                        pslErrors.color_token(term),
                        pslErrors.color_line_number(lineNum)
                    ]) for term, lineNum in zip(problemTerms, lineNumbers)
                ])
            ]))
        raise pslErrors.TranslationError('\n'.join(errorMsg))
    elif errorType.strip() == "$$$invalidSorting":
        var, termLineNum = [s.strip() for s in errorTerm.split('|->')]
        var, variableSort = var.split(':')
        termLineNum = termLineNum.replace('${', '').replace('}$', '')
        lineNumberIndex = termLineNum.rindex(';') + 1
        lineNum = termLineNum[lineNumberIndex:].strip()
        term = termLineNum[:lineNumberIndex - 1].strip()
        raise pslErrors.TranslationError(' '.join([
            pslErrors.error,
            pslErrors.color_line_number(lineNum), "Variable",
            pslErrors.color_token(var), "has sort",
            pslErrors.color_token(variableSort), "but term",
            pslErrors.color_token(term), "does not."
        ]))
def gen_intermediate(parseTree, theoryFileName):
    """
    Generates the maude code needed to translate a PSL specification into a
    trio of Maude-NPA module. Returns the code as a list of lines.
    """
    code = [
        'load psl.maude', ' '.join(['load', theoryFileName]),
        'mod INTERMEDIATE is', 'protecting TRANSLATION-TO-MAUDE-NPA .',
        'protecting PROTOCOL-EXAMPLE-SYMBOLS .'
    ]
    # Collect every (role, term) |-> (shorthand, lineNum) definition pair,
    # rejecting a (role, term) that was given more than one shorthand.
    defPairs = []
    for defNode in parseTree.get_protocol().get_defs():
        defPairs.extend(defNode.def_pairs())
    defMap = {}
    for defPair in defPairs:
        if (defPair.role(), defPair.term()) in defMap:
            otherDef = defMap[(defPair.role(), defPair.term())]
            raise pslErrors.TranslationError(' '.join([
                pslErrors.error,
                pslErrors.color_line_number(defPair.lineNum) + ",",
                otherDef[DEF_LINE_NUM], "Term",
                pslErrors.color_token(defPair.term()),
                "has multiple shorthands: ",
                pslErrors.color_token(defPair.shorthand()) + ",",
                pslErrors.color_token(otherDef[DEF_SHORTHAND])
            ]))
        else:
            defMap[(defPair.role(), defPair.term())] = (defPair.shorthand(),
                                                        defPair.lineNum)
    # Declare each shorthand as a constant of its computed sort so the
    # intermediate module can parse terms that use it.
    shorthandSortMap = compute_sorts(defMap, theoryFileName, parseTree)
    code.extend([
        ' '.join(['op', shorthand, ':', '->', sort, '.'])
        for shorthand, sort in shorthandSortMap.items()
    ])
    code.append('endm')
    # Everything below builds the single `rew` command whose normal form is
    # the translated specification.
    code.append('rew')
    code.extend(['Specification', '{'])
    protocol = parseTree.get_protocol()
    code.extend(stmt for stmt in protocol.translate() if stmt)
    intruder = parseTree.get_intruder()
    code.extend([stmt for stmt in intruder.translate() if stmt])
    attacks = parseTree.get_attacks()
    code.extend([stmt for stmt in attacks.translate() if stmt])
    code.append('}')
    #Empty StrandData for protocols
    code.append('[mt]')
    #Empty strand set for intruders
    code.append('[empty]')
    if defMap:
        defs = ', '.join([
            ' '.join([
                ''.join([
                    '(', shorthandLineNum[DEF_SHORTHAND], ', ',
                    str(shorthandLineNum[DEF_LINE_NUM]), ')'
                ]), ':=', roleTermPair[DEF_KEY_TERM]
            ]) for roleTermPair, shorthandLineNum in defMap.items()
        ])
    else:
        # Sentinel understood by the Maude side when no Def section exists.
        defs = '$noDefs'
    code.append(' '.join(['[', '$makeIdem($checkWellFormed(', defs, '))',
                          ']']))
    code.append('.')
    return code
def lex_code(pslFile):
    """
    Given an iterable of lines of PSL code, returns a dictionary mapping
    section names to lists of statements.

    Raises pslErrors.LexingError if comment tokens are unbalanced.
    """
    sectionStmts = {heading: [] for heading in pslTree.SECTION_HEADINGS}
    numberedLines = [(tokenize(line), num)
                     for line, num in number_lines(pslFile)]
    statement = pslTree.Statement()
    #List of pairs of comment token with line number.
    startComment = []
    errorMsgs = []
    for i in range(len(numberedLines)):
        line, num = numberedLines[i]
        for j in range(len(line)):
            token = line[j].strip()
            # One- and two-token lookahead, spilling over into the following
            # physical line(s) when the current line runs out of tokens.
            try:
                nextToken = line[j + 1]
            except IndexError:
                try:
                    nextToken = numberedLines[i + 1][0][0]
                except IndexError:
                    nextToken = nextNextToken = ''
                else:
                    try:
                        nextNextToken = numberedLines[i + 1][0][1]
                    except IndexError:
                        try:
                            nextNextToken = numberedLines[i + 2][0][0]
                        except IndexError:
                            nextNextToken = ''
            else:
                try:
                    nextNextToken = line[j + 2]
                except IndexError:
                    try:
                        nextNextToken = numberedLines[i + 1][0][0]
                    except IndexError:
                        nextNextToken = ''
            if token == r'/*':
                startComment.append((token, num))
            elif token == r'*/':
                try:
                    if startComment[-1][0] == r'/*':
                        startComment = startComment[:-1]
                        continue
                    else:
                        raise IndexError()
                except IndexError:
                    # NOTE(review): this raises the None returned by
                    # errorMsgs.append (a TypeError at runtime) rather than
                    # just recording the message — looks like a bug; confirm
                    # the intent was to append and keep lexing.
                    raise errorMsgs.append(' '.join([
                        pslErrors.error,
                        pslErrors.color_line_number(num),
                        "Unexpected end comment token:",
                        pslErrors.color_token(token)
                    ]))
            if startComment or re.match(singleLineComment, token):
                continue
            elif not sectionStmts['Start'] and re.match(startSpec, token):
                stmt = pslTree.Statement([token], [num])
                sectionStmts["Start"].append(stmt)
                continue
            elif is_start_of_section(token, nextToken):
                section = token
            elif token in END_LINE and not protocol_step(
                    nextToken, nextNextToken):
                # End of a statement: close it out and start a fresh one.
                statement.append(token, num)
                sectionStmts[section].append(statement)
                statement = pslTree.Statement()
            else:
                statement.append(token, num)
    for token, num in startComment:
        errorMsgs.append(' '.join([
            pslErrors.error,
            pslErrors.color_line_number(num), "Dangling comment token:",
            pslErrors.color_token(token)
        ]))
    if errorMsgs:
        raise pslErrors.LexingError('\n'.join(errorMsgs))
    #Removes some unnecessary end of comment statements that are still floating around.
    for key in sectionStmts:
        for stmt in sectionStmts[key]:
            stmt.tokens = [token for token in stmt.tokens if token != '*/']
    return sectionStmts