Example no. 1
def parse(input_: Union[str, FileStream], default_base: Optional[str]=None) -> Optional[Schema]:
    """
    Parse the text in infile and return the resulting schema
    :param input_: text or input stream to parse
    :param default_base_: base URI for relative URI's in schema
    :return: ShExJ Schema object.  None if error.
    """

    # Step 1: Tokenize the input stream
    error_listener = ParseErrorListener()
    if not isinstance(input_, FileStream):
        input_ = InputStream(input_)
    lexer = ShExDocLexer(input_)
    lexer.addErrorListener(error_listener)
    tokens = CommonTokenStream(lexer)
    tokens.fill()
    if error_listener.n_errors:         # Lexer prints errors directly
        return None

    # Step 2: Generate the parse tree
    parser = ShExDocParser(tokens)
    parser.addErrorListener(error_listener)
    parse_tree = parser.shExDoc()
    if error_listener.n_errors:
        print('\n'.join(error_listener.errors), file=sys.stderr)
        return None

    # Step 3: Transform the parse tree into a ShExJ schema
    parser = ShexDocParser(default_base=default_base)  # visitor class, distinct from the ANTLR ShExDocParser above
    parser.visit(parse_tree)

    return parser.context.schema
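The ParseErrorListener used in both steps above is not shown. A minimal sketch of what it could look like, assuming only the antlr4 runtime's ErrorListener base class (the real class in the source project may differ):

from antlr4.error.ErrorListener import ErrorListener

class ParseErrorListener(ErrorListener):
    # Hypothetical stand-in: collects messages so callers can test n_errors
    # and print the accumulated errors, as parse() does above.
    def __init__(self):
        self.errors = []

    @property
    def n_errors(self):
        return len(self.errors)

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        self.errors.append("line %d:%d %s" % (line, column, msg))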
Example no. 2
    def create_token_stream(self, line):
        istream = InputStream(line)
        lexer = MetrinkFrontendLexer(istream)
        stream = CommonTokenStream(lexer)

        stream.fill()

        return stream
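Because fill() eagerly pulls every token (through EOF) into the buffer, the returned stream can be inspected directly. A hypothetical usage sketch, assuming a frontend instance of the enclosing class and an invented query string:

stream = frontend.create_token_stream("cpu.load > 0.9")  # query text is invented
for tok in stream.tokens:  # buffer populated by fill(), EOF included
    print(tok.type, tok.text)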
Example no. 3
    def split(self, path: str):
        input_stream = InputStream(path)
        lexer = XPathLexer(input_stream)

        # Assigned to the instance below, this stays an unbound function, so it
        # must not take a `self` parameter: rethrow lexer errors instead of recovering.
        def recover(e):
            raise e

        lexer.recover = recover
        lexer.removeErrorListeners()
        lexer.addErrorListener(ErrorListener())  # XPathErrorListener does no more
        tokenStream = CommonTokenStream(lexer)
        try:
            tokenStream.fill()
        except LexerNoViableAltException as e:
            pos = lexer.column
            msg = "Invalid tokens or characters at index " + str(pos) + " in path '" + path + "'"
            raise Exception(msg, e)

        tokens = tokenStream.getTokens()
        elements = list()
        n = len(tokens)
        i = 0
        while i < n:
            el = tokens[i]
            next_token = None
            if el.type in [XPathLexer.ROOT, XPathLexer.ANYWHERE]:
                anywhere = el.type == XPathLexer.ANYWHERE
                i += 1
                next_token = tokens[i]
                invert = next_token.type == XPathLexer.BANG
                if invert:
                    i += 1
                    next_token = tokens[i]
                pathElement = self.getXPathElement(next_token, anywhere)
                pathElement.invert = invert
                elements.append(pathElement)
                i += 1

            elif el.type in [XPathLexer.TOKEN_REF, XPathLexer.RULE_REF, XPathLexer.WILDCARD]:
                elements.append(self.getXPathElement(el, False))
                i += 1

            elif el.type == Token.EOF:
                break

            else:
                raise Exception("Unknown path element " + str(el))

        return elements
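In the antlr4 runtime, split is normally reached through XPath.findAll, which compiles a path and evaluates it against a parse tree. A hypothetical usage sketch, assuming tree and parser come from some generated grammar with a funcDecl rule and an ID token:

from antlr4.xpath.XPath import XPath

for node in XPath.findAll(tree, "//funcDecl/ID", parser):
    print(node.getText())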
Example no. 4
def parse(
    text: str,
    start: str,
    strict=False,
    transform: Optional[Union[str, Callable]] = None,
    error_listener: Optional[ErrorListener] = None,
) -> ParseTree:

    lexer = LexerGo(antlr4.InputStream(text))
    lexer.removeErrorListeners()
    lexer.addErrorListener(LexerErrorListener())

    stream = CommonTokenStream(lexer)
    parser = ParserGo(stream)
    parser.buildParseTrees = True  # default

    # Install parser error listeners *before* invoking the start rule;
    # otherwise they never see any syntax errors.
    if strict:
        error_listener = StrictErrorListener()

    if error_listener is not None and error_listener is not True:
        parser.removeErrorListeners()
        if error_listener:
            parser.addErrorListener(error_listener)

    tree = parser.sourceFile()

    printer = ParserGoListener()
    walker = ParseTreeWalker()
    walker.walk(printer, tree)
    visitor = ParserGoVisitor()  # unused here

    print(Trees.toStringTree(tree, None, parser))

    return tree
Example no. 5
def parseFromStream(events, stream, source):
    lexer = DeviationLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = DeviationParser(token_stream)
    tree = parser.unit()
    visitor = Visitor(events, source)
    return visitor.visitUnit(tree)
Example no. 6
def get_tree(file):
    inp = FileStream(file)
    lexer = MineScriptLexer(inp)
    stream = CommonTokenStream(lexer)
    parser = MineScriptParser(stream)
    tree = parser.prog()
    return tree
Example no. 7
def parse_query(text, optimize=True):
    from .gen.ContactQLLexer import ContactQLLexer
    from .gen.ContactQLParser import ContactQLParser

    stream = InputStream(text)
    lexer = ContactQLLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = ContactQLParser(tokens)
    parser._errHandler = BailErrorStrategy()

    try:
        tree = parser.parse()
    except ParseCancellationException as ex:
        message = None
        if ex.args and isinstance(ex.args[0], NoViableAltException):
            token = ex.args[0].offendingToken
            if token is not None and token.type != ContactQLParser.EOF:
                message = "Search query contains an error at: %s" % token.text

        if message is None:
            message = "Search query contains an error"

        raise SearchException(message)

    visitor = ContactQLVisitor()

    query = ContactQuery(visitor.visit(tree))
    return query.optimized() if optimize else query
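A hypothetical usage sketch; the query text is invented, and its syntax is an assumption about ContactQL:

try:
    query = parse_query('name = "Bob"')  # query text is invented
    print(query)
except SearchException as e:
    print("invalid query: %s" % e)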
Example no. 8
def make_parser(data):
    # type: (str) -> RelayParser
    """Construct a RelayParser a given data stream."""
    input_stream = InputStream(data)
    lexer = RelayLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    return RelayParser(token_stream)
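A hypothetical usage sketch; both the program text and the prog start-rule name are assumptions about the Relay grammar, not confirmed by the snippet:

parser = make_parser("v0.0.4\ndef @main() { 0 }")  # program text is illustrative
tree = parser.prog()  # 'prog' is an assumed start rule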
Example no. 9
    def compile_file(self, path, incl_search_paths=None):
        """
        Parse & compile a single file and append it to RDLCompiler's root
        namespace.
        
        If any exceptions (:class:`~systemrdl.RDLCompileError` or other)
        occur during compilation, then the RDLCompiler object should be discarded.
        
        Parameters
        ----------
        path:str
            Path to an RDL source file
        
        incl_search_paths:list
            List of additional paths to search to resolve includes.
            If unset, defaults to an empty list.
            
            Relative include paths are resolved in the following order:
            
            1. Search each path specified in ``incl_search_paths``.
            2. Path relative to the source file performing the include.
            
        Raises
        ------
        :class:`~systemrdl.RDLCompileError`
            If any fatal compile error is encountered.
        """

        if incl_search_paths is None:
            incl_search_paths = []

        fpp = preprocessor.FilePreprocessor(self.env, path, incl_search_paths)
        preprocessed_text, seg_map = fpp.preprocess()
        input_stream = preprocessor.PreprocessedInputStream(
            preprocessed_text, seg_map)

        lexer = SystemRDLLexer(input_stream)
        lexer.removeErrorListeners()
        lexer.addErrorListener(messages.RDLAntlrErrorListener(self.msg))

        token_stream = CommonTokenStream(lexer)

        parser = SystemRDLParser(token_stream)
        parser.removeErrorListeners()
        parser.addErrorListener(messages.RDLAntlrErrorListener(self.msg))

        # Run Antlr parser on input
        parsed_tree = parser.root()
        if self.msg.error_count:
            self.msg.fatal("Parse aborted due to previous errors")

        # Traverse parse tree with RootVisitor
        self.visitor.visit(parsed_tree)

        # Reset default property assignments from namespace.
        # They should not be shared between files since that would be confusing.
        self.namespace.default_property_ns_stack = [{}]

        if self.msg.error_count:
            self.msg.fatal("Compile aborted due to previous errors")
Example no. 10
def main():
    program = "a = set[0 5] \n"
    program += "b = set[2 8 2] \n"

    program += "c = a.belongs(2) \n"
    program += "d = a.elementSum() \n"
    program += "e = a.elementAvg() \n"
    program += "f = a.length() \n"
    program += "g = a.intersection(b) \n"
    program += "h = a.union(b) \n"
    program += "i = a.difference(b) \n"
    program += "j = a.complement(b) \n"

    input_stream = InputStream(program)
    lexer = TpConjuntosLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = TpConjuntosParser(stream)

    tree = parser.program()

    listener = RealListener()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)

    print(listener.variables)
Example no. 11
    def test_specification_ex2(self):
        with open('./../examples/ex2.monitor', 'r') as m:
            for line in m:
                data = json.loads(line)
                print("Topics: %s" % data['topics'])
                self.topics = data['topics']
                print("Specification: %s" % data['specification'])
                lexer = SpecificationLexer(InputStream(data['specification']))
                parser = SpecificationParser(CommonTokenStream(lexer))
                ast = AST().visit(parser.specification())
                self.formula = ast.accept(Rewriter())
                print("Formula: %s" % self.formula)
        monitor = Monitor(formula=self.formula, topics=self.topics)
        with open('./../examples/ex2.trace', 'r') as trace:
            for event in trace:
                event = json.loads(event)
                monitor.step(timestamp=event.pop('timestamp', None),
                             state=event)
                if len(monitor.boolean_verdicts) != 0:
                    print("Boolean verdicts for time-point %d:" % monitor.now)
                    for boolean_verdict in sorted(monitor.boolean_verdicts):
                        print("(%d,%d):%s" %
                              (boolean_verdict[0][0], boolean_verdict[0][1],
                               boolean_verdict[1]))
                if len(monitor.equivalence_verdicts) != 0:
                    print("Equivalence verdicts for time-point %d:" %
                          monitor.now)
                    for equivalence_verdict in sorted(
                            monitor.equivalence_verdicts):
                        print("(%d,%d) = (%d,%d)" %
                              (equivalence_verdict[0][0][0],
                               equivalence_verdict[0][0][1],
                               equivalence_verdict[1][0][0],
                               equivalence_verdict[1][0][1]))
        self.assertEqual(BExp.FalseConstant(), monitor.previous[-1])
Example no. 12
def main(argv):
    """
    CLI arg is the path to CSPICE.java.
    """

    input_stream = FileStream(argv[1])
    lexer = DeclarationsLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = DeclarationsParser(stream)
    functions = parser.cspice().result

    template_dir = os.path.join('src', 'build', 'templates')
    out = os.path.join('build', 'generated', 'sources', 'automation', 'main')
    java_out = os.path.join(out, 'java')
    proto_out = os.path.join(out, 'proto')
    for o in [out, java_out, proto_out]:
        if not os.path.exists(o):
            os.makedirs(o)

    functions = [func for func in functions if valid_function(func)]

    generate_proto(functions, proto_out, template_dir)
    generate_endpoints(functions, ['ParSPICE.java', 'SpiceService.java'],
                       java_out, template_dir)

    for func in functions:
        if func.classification == parse_tree.Classification.NORMAL:
            try:
                generate_java(func, ['Call.java', 'Batch.java'], java_out,
                              template_dir)
            except ValueError:
                print('not yet working: %s' % func.name)
Example no. 13
    def check_codegen(self, input, expect):
        if isinstance(input, str):
            lexer = BKITLexer(InputStream(input))
            tokens = CommonTokenStream(lexer)
            parser = BKITParser(tokens)
            tree = parser.program()
            input = ASTGeneration().visit(tree)

        StaticChecker(input).check()
        path = 'test/codegen_out/'
        if not os.path.exists(path):
            os.makedirs(path)
        CodeGenerator().gen(input, path)
        # os.environ.pop('_JAVA_OPTIONS', None)
        jasmin_cmd = [
            'java', 'jasmin.Main', '-d', path, '-g', path + 'MCClass.j'
        ]
        p = subprocess.run(jasmin_cmd, capture_output=True, text=True)
        if p.returncode != 0:
            self.fail("Jasmin Assembler Error:\n" + p.stderr)
        cmd = ['java', '-Xverify:all', '-cp', path + ':lib', 'MCClass']
        p = subprocess.run(cmd, timeout=2, capture_output=True, text=True)
        if p.returncode != 0:
            self.fail("Java Runtime Error:\n" + p.stderr)
        output = p.stdout
        self.assertEqual(output, expect)
Example no. 14
    def execute(self, sql):
        # class Strategy(BailErrorStrategy):
        #     def recover(self, recognizer, e):
        #         recognizer._errHandler.reportError(recognizer, e)
        #         super().recover(recognizer, e)

        class StringErrorListener(ErrorListener):
            def syntaxError(self, recognizer, offending_symbol, line, column,
                            msg, e):
                raise ParseCancellationException("line " + str(line) + ":" +
                                                 str(column) + " " + msg)

        self.visitor.time_cost()
        input_stream = InputStream(sql)
        lexer = SQLLexer(input_stream)
        lexer.removeErrorListeners()
        lexer.addErrorListener(StringErrorListener())
        tokens = CommonTokenStream(lexer)
        parser = SQLParser(tokens)
        parser.removeErrorListeners()
        parser.addErrorListener(StringErrorListener())
        # parser._errHandler = Strategy()
        try:
            tree = parser.program()
        except ParseCancellationException as e:
            return [
                QueryResult(None, None, str(e), cost=self.visitor.time_cost())
            ]
        try:
            return self.visitor.visit(tree)
        except Error as e:
            return [QueryResult(message=str(e), cost=self.visitor.time_cost())]
Example no. 15
    def generate_stateless_alu(self, alu_name, potential_operands):
        # Parse the stateless ALU source file into a parse tree
        input_stream = FileStream(self.stateless_alu_filename_)
        lexer = aluLexer(input_stream)
        stream = CommonTokenStream(lexer)
        parser = aluParser(stream)
        tree = parser.alu()

        sketch_stateless_alu_visitor = \
            SketchStatelessAluVisitor(
                self.stateless_alu_filename_, self.sketch_name_ + '_' +
                alu_name, potential_operands, self.generate_mux,
                self.constant_arr_size_)
        sketch_stateless_alu_visitor.visit(tree)
        self.add_holes(sketch_stateless_alu_visitor.global_holes)
        self.stateless_alu_hole_arguments_ = [
            x for x in sorted(
                sketch_stateless_alu_visitor.stateless_alu_args
            )]

        self.num_stateless_muxes_ = \
            len(sketch_stateless_alu_visitor.packet_fields)

        return (sketch_stateless_alu_visitor.helper_function_strings +
                sketch_stateless_alu_visitor.main_function)
Example no. 16
    def get_default_netlist(self, cn_id: str,
                            name_gen: NameGenerator) -> MINTDevice:
        if self.type is not PrimitiveType.NETLIST:
            raise Exception(
                "Cannot execute this method for this kind of primitive")

        default_mint_file = parameters.LIB_DIR.joinpath(
            self._default_netlist).resolve()

        if not path.exists(default_mint_file):
            raise Exception("Default netlist file does not exist")

        finput = FileStream(default_mint_file)
        lexer = mintLexer(finput)
        stream = CommonTokenStream(lexer)
        parser = mintParser(stream)
        tree = parser.netlist()

        walker = ParseTreeWalker()
        listener = MINTCompiler()
        walker.walk(listener, tree)

        device = listener.current_device

        name_gen.rename_netlist(cn_id, device)
        # Return the default netlist
        return device
Example no. 17
    def _parse(self, grammar, encoding, lib_dir):
        work_list = {grammar}
        root = None

        while work_list:
            grammar = work_list.pop()

            antlr_parser = self.antlr_parser_cls(
                CommonTokenStream(
                    self.antlr_lexer_cls(FileStream(grammar,
                                                    encoding=encoding))))
            current_root = antlr_parser.grammarSpec()
            # assert antlr_parser._syntaxErrors == 0, 'Parse error in ANTLR grammar.'

            # Save the 'outermost' grammar.
            if not root:
                root = current_root
            else:
                # Unite the rules of the imported grammar with the host grammar's rules.
                for rule in current_root.rules().ruleSpec():
                    root.rules().addChild(rule)

            work_list |= self._collect_imports(current_root, dirname(grammar),
                                               lib_dir)

        return root
Example no. 18
    def parse_java9_file(self, input_stream: InputStream):
        lexer = Java9Lexer(input_stream)
        stream = CommonTokenStream(lexer)
        parser = Java9Parser(stream)
        tree = parser.compilationUnit()
        listener = Java9ListenerExtended()
        return self.walk(listener, tree)
Example no. 19
def main():
    input_stream = InputStream(sys.stdin.read())
    lexer = Example2Lexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = Example2Parser(stream)
    parser.r()
    print("Finished")
Example no. 20
    def parse_kotlin_file(self, input_stream: InputStream):
        lexer = KotlinLexer(input_stream)
        stream = CommonTokenStream(lexer)
        parser = KotlinParser(stream)
        tree = parser.kotlinFile()
        listener = KotlinParserListenerExtended()
        return self.walk(listener, tree)
Example no. 21
    def execute(self, code_text):
        lexer = LanguageLexer(InputStream(code_text))
        stream = CommonTokenStream(lexer)
        parser = LanguageParser(stream)
        tree = parser.program()
        executor = StatementExecutor(SymbolTable(), RuntimeStack())
        executor.visitProgram(tree)
Example no. 22
def main(argv):
    logfmt = ('[%(levelname)s]\t%(name)s:%(threadName)-10s' +
              '(%(asctime)s.%(msecs)d) ' +
              '%(filename)s:%(lineno)d:%(message)s')
    datefmt = '%H:%M:%S'
    logging.basicConfig(level=logging.INFO, format=logfmt, datefmt=datefmt)

    #input = FileStream(argv[1])
    gname = argv[1]
    inputstream = FileStream(gname, encoding='utf-8')
    lexer = SparqlLexer(inputstream)
    stream = CommonTokenStream(lexer)
    parser = SparqlParser(stream)
    #tree = parser.StartRule()
    tree = parser.query()
    fmind = Fmind(gname)
    fnode = fmind.make_right(u"root")
    toFmind(fnode, tree)
    fmind.unfold_all()
    fmind.dump_to_file("l2.mm")

    #tree = parser.prologue()

    morpher = MorpherContext2()
    listener = MySparqlParserListener(morpher)
    walker = ParseTreeWalker()
    walker.walk(listener, tree)

    logging.info("Output:%s", sys.argv[1])
    print "# ", sys.argv[1]
    print morpher.get_result()
Example no. 23
def parse_code(code, class_parser, class_lexer):
    """
    Parses a code and returns a tree.

    @param      code                code to parse
    @param      class_parser        parser
    @param      class_lexer         lexer
    @return                         parsed code

    .. exref::
        :title: Check the syntax of a PIG script

        ::

            code = '''
            A = LOAD 'filename.txt' USING PigStorage('\t');
            STORE A INTO 'samefile.txt' ;
            '''

            clparser,cllexer = get_parser_lexer("Pig")
            parser = parse_code(code, clparser, cllexer)
            tree = parser.parse()
            st = get_tree_string(tree, parser, None)
            print(st)
    """
    if isinstance(code, str):
        # raw source text: wrap it in an InputStream
        code = InputStream(code)

    lexer = class_lexer(code)
    stream = CommonTokenStream(lexer)
    parser = class_parser(stream)
    return parser
Example no. 24
    def parse_criteria(self, s: str) -> Criteria:
        parser = search_grammarParser(
            CommonTokenStream(search_grammarLexer(InputStream(s))))

        parser._listeners = [SearchPatternParseListener()]

        return self._build(self._visitor.visit(parser.start()))
Example no. 25
def getTree(file):
    input_stream = FileStream(file)
    lexer = NetAddDslLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = NetAddDslParser(stream)
    tree = parser.root()
    return tree
Example no. 26
    def getPythonCode(self):
        preprocessor = PML_Preprocessor(self.rootDir)
        preprocessor.process(self.pmlFilePath)

        inputStream = InputStream(preprocessor.getStream())
        lexer = pmlLexer(inputStream)
        stream = CommonTokenStream(lexer)
        parser = pmlParser(stream)

        parser.removeErrorListeners()
        exceptionListener = ParserExceptionListener()
        parser.addErrorListener(exceptionListener)

        try:
            tree = parser.styles()
        except ParserException as e:
            line, col, msg = e.errParams()
            localFile, localLine = preprocessor.trackDownLineNr(line)
            raise Exception(
                "Error in file {file} on line {line}, col {col}: {msg}".format(
                    file=localFile, line=localLine, col=col, msg=msg))

        translator = PythonListener()
        walker = ParseTreeWalker()
        walker.walk(translator, tree)

        return translator.getCode()
Example no. 27
def configurationFromStream(stream, source):
    lexer = DeviationLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = DeviationParser(token_stream)
    tree = parser.configuration()
    visitor = Visitor(None, source)
    return visitor.visitConfiguration(tree)
Example no. 28
def main():

    program = "a = 37 \n"
    program += "b = a+3 \n"

    program += "j = set[5 10 1] \n"
    program += "c = set[10 15 1] \n"
    program += "c \n"
    program += "d = c.belongs(19)"
    program += "e = c.sum"
    program += "f = c.prom"
    program += "g = c.long"
    program += "h = c.comp"
    #program +=  "i = j union c"
    #program +=  "m = c inter j"
    program += "k = c diff j"

    input_stream = InputStream(program)
    lexer = ConjuntosLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = ConjuntosParser(stream)

    tree = parser.program()

    listener = RealListener()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)

    print(listener.variables)
Example no. 29
def _stringToAST(s):

    input_stream = antlr4.InputStream(s)

    error_listener = _SimpleErrorListener()

    #
    # Lexer
    #
    lexer = LangLexer.LangLexer(input_stream, output=None)
    lexer.removeErrorListeners()
    lexer.addErrorListener(error_listener)

    tokens = CommonTokenStream(lexer)

    #
    # Parser
    #
    parser = LangParser.LangParser(tokens, output=None)
    parser.removeErrorListeners()
    parser.addErrorListener(error_listener)

    parsed_tree = parser.program()

    return parser, parsed_tree
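A hypothetical usage sketch: since both the parser and the tree are returned, the caller can pretty-print the result with the runtime's Trees helper (the source text is invented):

from antlr4.tree.Trees import Trees

parser, tree = _stringToAST("x = 1")  # source text is invented
print(Trees.toStringTree(tree, None, parser))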
Example no. 30
def main(argv):
    # don't know if this works for all OS
    input_stream = FileStream(argv[1])
    lexer = pmlLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = pmlParser(stream)

    parser.removeErrorListeners()
    exceptionListener = ParserExceptionListener()
    parser.addErrorListener(exceptionListener)

    # error management
    hadSyntaxErrors = False
    try:
        tree = parser.styles()
    except Exception as e:
        errorText = str(e)
        hadSyntaxErrors = True

    if not hadSyntaxErrors:
        translator = PythonListener()
        walker = ParseTreeWalker()
        walker.walk(translator, tree)
        sys.stdout.write(translator.getCode())
    else:
        sys.stdout.write(errorText)
Example no. 31
def comp(source, file=False):
	"""Parse the Switch source code and walk it, then return the python
	code"""

	output = bytearray("", "utf-8")

	namespace = {
		"->": "print_no_nl",
		":": "SwitchMap",
		"...": "SwitchList",
	}

	if file:
		lexer = switchLexer(FileStream(source))
	else:
		lexer = switchLexer(InputStream(source))

	stream = CommonTokenStream(lexer)
	parser = switchParser(stream)

	lexer.removeErrorListeners()
	lexer.addErrorListener(ExceptionListener())

	parser.removeErrorListeners()
	parser.addErrorListener(ExceptionListener())

	parse_tree = parser.switch_file()
	printer = SwitchPrintListener(output, namespace)
	walker = MyWalker()
	walker.walk(printer, parse_tree)

	return output
Example no. 32
def run_parser(quil):
    # type: (str) -> List[AbstractInstruction]
    """
    Run the ANTLR parser.

    :param str quil: a single or multiline Quil program
    :return: list of instructions that were parsed
    """
    # Step 1: Run the Lexer
    input_stream = InputStream(quil)
    lexer = QuilLexer(input_stream)
    stream = CommonTokenStream(lexer)

    # Step 2: Run the Parser
    parser = QuilParser(stream)
    parser.removeErrorListeners()
    parser.addErrorListener(CustomErrorListener())
    tree = parser.quil()

    # Step 3: Run the Listener
    pyquil_listener = PyQuilListener()
    walker = ParseTreeWalker()
    walker.walk(pyquil_listener, tree)

    return pyquil_listener.result
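A hypothetical usage sketch with a small two-instruction program, per the docstring's "single or multiline Quil program" contract:

for instruction in run_parser("H 0\nCNOT 0 1\n"):
    print(instruction)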
Example no. 33
    def execute(self, input_source):
        parser = JavaParser(CommonTokenStream(JavaLexer(FileStream(input_source, encoding="utf-8"))))
        walker = ParseTreeWalker()
        walker.walk(self.listener, parser.compilationUnit())
        # print(self.listener.called_methods)
        # print(self.listener.methods)
        # print(self.listener.calsses)
        print(self.listener.calledMethodToMethod)
        for key in self.listener.calledMethodToMethod:
            print(key)
        
        for value in self.listener.calledMethodToMethod.values():
            print(value)

        with open("a.csv", 'w', newline='') as f:  # newline='' avoids blank rows on Windows
            fieldnames = ['called method', 'method']
            writer = csv.DictWriter(f, fieldnames=fieldnames,
                                    delimiter=",", quotechar='"')
            writer.writeheader()

            for calledMethod in self.listener.calledMethodToMethod.keys():
                writer.writerow({'called method': calledMethod, 'method': self.listener.calledMethodToMethod[calledMethod] })
                print(calledMethod)
                print(self.listener.calledMethodToMethod[calledMethod])
Example no. 34
class Loader:
    def __init__(self, inputstream=None):
        self.input = inputstream
        self.lexer = Lexer(self.input)
        self.filter = IndentTokenFilter(self.lexer)
        self.cts = CommonTokenStream(self.filter)
        self.parser = Parser(self.cts)

    def set_input(self, inputstream):
        self.input = inputstream
        self.lexer.inputStream = self.input
        self.reset()

    def reset(self):
        self.lexer.reset()
        self.filter.reset()
        self.cts.reset()
        self.parser.reset()

    def load(self):
        tree = self.parser.r()
        return self.tree2dict(tree)

    def load_file(self, filename):
        try:
            self.set_input(FileStream(filename))
        except UnicodeDecodeError:
            self.set_input(FileStream(filename, encoding="Latin-1"))
        return self.load()

    @staticmethod
    def tree2dict(tree):
        def obj2dict(obj_node):
            obj_dict = OrderedDict()
            for p in obj_node.pair():
                key = p.ID().symbol.text
                value = get_value(p.value())
                obj_dict[key] = value
            return obj_dict

        def get_value(val):
            if val.obj():
                return obj2dict(val.obj())
            elif val.VALUE():
                return val.VALUE().symbol.text
            elif val.listItem():
                return [get_value(item.value()) for item in val.listItem()]
        return obj2dict(tree.obj())
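A hypothetical usage sketch (the file name is invented):

loader = Loader()
data = loader.load_file("example.conf")  # parsed into nested OrderedDicts
for key, value in data.items():
    print(key, value)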
Example no. 35
class Loader:
    def __init__(self, inputstream=None):
        self.input = inputstream
        self.lexer = Lexer(self.input)
        self.cts = CommonTokenStream(self.lexer)
        self.parser = Parser(self.cts)

    def set_input(self, inputstream):
        self.input = inputstream
        self.lexer.inputStream = self.input
        self.reset()

    def reset(self):
        self.lexer.reset()
        self.cts.reset()
        self.parser.reset()

    def load(self):
        tree = self.parser.r()
        return self.tree2dict(tree)

    def load_file(self, filename):
        try:
            self.set_input(FileStream(filename))
        except UnicodeDecodeError:
            self.set_input(FileStream(filename, encoding="Latin-1"))
        return self.load()

    @staticmethod
    def tree2dict(tree):
        data = {}
        metadata = OrderedDict()
        for metadatum in tree.pair():
            key = metadatum.ID().symbol.text
            value = metadatum.VALUE().symbol.text
            if key not in metadata:
                # first occurrence; just assign directly
                metadata[key] = value
            else:
                # we already have value(s) under this key,
                # so append to the list of existing values
                if type(metadata[key]) is not list:
                    # this is the second value, so convert the
                    # existing lone value into a list of itself
                    metadata[key] = [metadata[key]]
                # append new value to list
                metadata[key].append(value)
        data['metadata'] = metadata
        data['rows'] = [[f.symbol.text for f in row.FIELD()] for row in tree.row()]
        return data

    @staticmethod
    def flattened_dict(d, record_name='rows'):
        result = OrderedDict(d['metadata'])
        result[record_name] = d['rows']
        return result

    @staticmethod
    def with_field_names(d, field_names, record_name='rows'):
        result = d.copy()
        result[record_name] = []
        for r in d[record_name]:
            result[record_name].append( OrderedDict(zip(field_names, r)) )
        return result
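A hypothetical usage sketch of the static helpers (file and field names are invented):

loader = Loader()
d = loader.load_file("table.dat")                   # file name is invented
named = Loader.with_field_names(d, ["id", "name"])  # field names are invented
flat = Loader.flattened_dict(named)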
Example no. 36
    def __init__(self, inputstream=None):
        self.input = inputstream
        self.lexer = Lexer(self.input)
        self.cts = CommonTokenStream(self.lexer)
        self.parser = Parser(self.cts)