Example #1
def SynErr_idx(string):
    """Return the index of a SyntaxError, or None."""

    try:
        ast.parse(string)
    except SyntaxError as e:
        return e.offset - 1 # we want zero-indexed
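A short usage sketch (this assumes import ast is in scope, which the snippet relies on but does not show):

import ast  # needed by SynErr_idx

print(SynErr_idx("x = 1"))   # None -- the source parses cleanly
print(SynErr_idx("x = ("))   # a zero-based column index; the exact value depends on the Python version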
Example #2
def transpile(input, prefix='.', outdir=None, namespace='python', verbosity=0):
    transpiler = Transpiler(namespace=namespace, verbosity=verbosity)

    for file_or_dir in input:
        if os.path.isfile(file_or_dir):
            if verbosity:
                print("Compiling %s ..." % file_or_dir)

            with open(file_or_dir) as source:
                ast_module = ast.parse(source.read(), mode='exec')
                transpiler.transpile(file_or_dir, ast_module, prefix)
        elif os.path.isdir(file_or_dir):
            for root, dirs, files in os.walk(file_or_dir, followlinks=True):
                for filename in files:
                    if os.path.splitext(filename)[1] == '.py':
                        source_file = os.path.join(root, filename)
                        if verbosity:
                            print("Compiling %s ..." % source_file)
                        with open(source_file) as source:
                            ast_module = ast.parse(source.read(), mode='exec')
                            transpiler.transpile(source_file, ast_module, prefix)
        else:
            print("Unknown source file: %s" % file_or_dir, file=sys.stderr)

    transpiler.write(outdir)
Example #3
def is_valid_python(source):
    try:
        ast.parse(source, mode='eval')
    except SyntaxError:
        return False
    else:
        return True
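Because the snippet parses with mode='eval', it accepts single expressions only. A brief sketch, assuming import ast and the function above are in scope:

import ast  # needed by is_valid_python

print(is_valid_python("1 + 2"))   # True  -- a single expression
print(is_valid_python("x = 1"))   # False -- statements are rejected in mode='eval'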
Example #4
def parse_config(files, commands):
    inner_globals = dict(SAFE_FUNCTIONS)
    inner_globals["__builtins__"] = {} # Disable any builtin function
    inner_globals["named_items"] = {} # Store the provided named items
    
    for file in files:
        text = file.read()
        root = ast.parse(text, filename=file.name)
        execute_code(root, file.name, inner_globals)
    
    for text in commands:
        root = ast.parse(text, filename="<execute>")
        execute_code(root, '--execute', inner_globals)
    
    # Make sure we have the necessary variables in the inner_globals dictionary
    for varname in REQUIRED:
        if varname not in inner_globals:
            raise ParseException("Missing the {!r} variable".format(varname))
    
    comparer = inner_globals["comparer"]
    groups = inner_globals["groups"]
    items = inner_globals["named_items"]
    
    if "total" in inner_globals:
        total = inner_globals["total"]
    else:
        total = len(groups)
    
    return Config(comparer, items, groups, total)
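The helpers used above (SAFE_FUNCTIONS, execute_code, REQUIRED, Config) are not shown. The core idea of running config code against a restricted globals dict can be sketched on its own, with hypothetical names:

import ast

# whitelist only the callables the config code is allowed to use
inner_globals = {"__builtins__": {}, "abs": abs}

tree = ast.parse("result = abs(-3)", filename="<config>")
exec(compile(tree, "<config>", "exec"), inner_globals)
print(inner_globals["result"])   # 3 -- assignments land in the supplied globals dict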
Example #5
def ParseCodeToTree(code):
  """Parse the given code to a lib2to3 pytree.

  Arguments:
    code: a string with the code to parse.

  Raises:
    SyntaxError if the code is invalid syntax.
    parse.ParseError if some other parsing failure.

  Returns:
    The root node of the parsed tree.
  """
  # This function is tiny, but the incantation for invoking the parser correctly
  # is sufficiently magical to be worth abstracting away.
  try:
    # Try to parse using a Python 3 grammar, which is more permissive (print and
    # exec are not keywords).
    parser_driver = driver.Driver(_GRAMMAR_FOR_PY3, convert=pytree.convert)
    tree = parser_driver.parse_string(code, debug=False)
  except parse.ParseError:
    # Now try to parse using a Python 2 grammar; If this fails, then
    # there's something else wrong with the code.
    try:
      parser_driver = driver.Driver(_GRAMMAR_FOR_PY2, convert=pytree.convert)
      tree = parser_driver.parse_string(code, debug=False)
    except parse.ParseError:
      # Raise a syntax error if the code is invalid python syntax.
      try:
        ast.parse(code)
      except SyntaxError as e:
        raise e
      else:
        raise
  return _WrapEndMarker(tree)
Example #6
 def post(self, filename):
     cserver = self.get_server()
     isFolder = self.get_argument('isFolder', default=None)
     if isFolder:
         self.write(cserver.ensure_dir(filename))
     else:
         contents = self.get_argument('contents', default='')
         force = int(self.get_argument('force', default=0))
         if filename.endswith('.py') or cserver.is_macro(filename):
             if not contents.endswith('\n'):
                 text = contents + '\n' # to make ast.parse happy
             else:
                 text = contents
             try:
                 ast.parse(text, filename=filename, mode='exec') # parse it looking for syntax errors
             except Exception as err:
                 cserver.send_pub_msg(str(err), 'file_errors')
                 self.send_error(400)
                 return
             if not force:
                 ret = cserver.file_forces_reload(filename)
                 if ret:
                     self.send_error(409)  # user will be prompted to overwrite file and reload project
                     return
         self.write(str(cserver.write_file(filename, contents)))
Example #7
    def walk(filename, visitor):
        """Generate an AST for the given filename and walk over it using
        the given visitor instance.
        """

        filename = os.path.abspath(filename)

        try:
            tree = ast.parse(open(filename, 'r').read())
        except SyntaxError:
            if sys.version_info[0] < 3:
                e = sys.exc_info()[1]
                log.warn('SyntaxError while parsing file %s: %s' %
                         (filename, str(e)))
                return
            # We're probably in Python 3 and looking at a file intended for
            # Python 2.  Otherwise there's an unintended SyntaxError in the
            # file, so there are bigger problems anyways
            try:
                import lib2to3.refactor

                tool = StringRefactoringTool(
                    lib2to3.refactor.get_fixers_from_package('lib2to3.fixes'))
                tool.refactor_file(filename, write=True)
                tree = ast.parse(tool.refactored[filename])
            except ImportError:
                # Without 2to3 we can't do much more.
                # TODO: Issue a warning?
                return

        visitor.visit(tree)
Example #8
        def run(data):
            if not hasattr(data, 'can_reproduce_example_from_repr'):
                data.can_reproduce_example_from_repr = True
            with local_settings(self.settings):
                with BuildContext(data, is_final=is_final):
                    with deterministic_PRNG():
                        args, kwargs = data.draw(self.search_strategy)
                    if expected_failure is not None:
                        text_repr[0] = arg_string(test, args, kwargs)

                    if print_example:
                        example = '%s(%s)' % (
                            test.__name__, arg_string(test, args, kwargs))
                        try:
                            ast.parse(example)
                        except SyntaxError:
                            data.can_reproduce_example_from_repr = False
                        report('Falsifying example: %s' % (example,))
                    elif current_verbosity() >= Verbosity.verbose:
                        report(
                            lambda: 'Trying example: %s(%s)' % (
                                test.__name__, arg_string(test, args, kwargs)))

                    with deterministic_PRNG():
                        return test(*args, **kwargs)
Example #9
def infer_ast(src):
    """Attempts to infer an abstract syntax tree from the provided value.
    
    - Python ast.AST instances are passed through.
    - Strings are parsed. A SyntaxError is raised if invalid.
    - Functions are sent through cypy.fn_get_source to get a source
      string, then parsed. If the source can't be found, an exception is
      raised by fn_get_source.
      
    .. WARNING:: Functions defined on the iPython command line do not have 
                 their source saved. A bug has been filed: 
                 
                 http://github.com/ipython/ipython/issues/issue/120
    """
    if isinstance(src, _ast.AST):
        return src
    elif isinstance(src, basestring):
        return _ast.parse(src)
    else:
        # if a function instance is passed in, its source is found
        # and parsed. note that finding source can be precarious for
        # functions defined on the command line. If you get an error
        # you'll have to use strings instead of regular function
        # definitions
        src = cypy.fn_get_source(src)
        return _ast.parse(src)
Example #10
    def test_function(self):
        """
        Function - must exist in the module defined by the owner.  Numeric
                    functions have a blank owner.
        """
        # First we want to check the functions that have owners, attempt to
        # import the owners and check if the function is a member of the owner.
        idx = (self.data.owner != '') | (self.data.functiontype == 'NumexprFunction')
        for data in self.data[idx][['owner', 'function']].itertuples():
            # TODO - Validate the numeric functions.
            if data.owner == '__builtin__':
                try:
                    ast.parse(data.function)
                    idx[data.Index - 1] = True
                except SyntaxError:
                    idx[data.Index - 1] = False
            elif data.owner != '':
                try:
                    module = importlib.import_module(data.owner)
                    idx[data.Index - 1] = hasattr(module, data.function)
                except ImportError:
                    idx[data.Index - 1] = False

        idx = numpy.logical_not(idx)
        self.assertEqual(len(self.data[idx]), 0, msg='Invalid function:\n%s' %
                                                     self.data[idx][['id', 'function', 'owner']])
Example #11
def scripts2rst(path, f):
    """ creates rst summary documentation for files in scripts folder
    which is not a package so cannot be imported """
       
    # get list of script files
    try:
        files = [name for name in os.listdir(path)
                    if not name.startswith("_")
                    and name.endswith(".py")]
    except:
        return
        
    f.write("**Scripts**\n\n")
    
    # loop over script files
    for name in files:
        sfile = os.path.join(path, name)
        try:
            try:
                source = ast.parse(open(sfile, "r", encoding="utf8").read())
            except:
                #py2 fails if encoding in string
                source = ast.parse(open(sfile, "r").read())
        except Exception as e:
            log.warning("Problem parsing %s\n%s" % (name, e))
            continue  # skip files whose source could not be parsed at all
        f.write(name+"\n")
        doc = i.cleandoc(ast.get_docstring(source)) or "."
        doc = py2decode(doc) #py2
        f.write(" "+doc.splitlines()[0]+"\n")
    f.write("\n")
Example #12
  def __init__(self, ast_node=None, source=None, path=None, filename=None):
    if filename:
      with open(filename, 'r') as file_obj:
        source = file_obj.read()
      ast_node = ast.parse(source)
      self._filename = filename
      self._path = path

    elif path:
      source = inspect.getsource(Import(path=path)._import)
      ast_node = ast.parse(source)
      self._filename = inspect.getsourcefile(Import(path=path)._import)
      self._path = path

    else:
      self._path = None
      self._filename = None

    if ast_node and not isinstance(ast_node, ast.Module):
      raise TypeError('Expected an ast.Module object')

    super(Module, self).__init__(ast_node=ast_node, source=source)

    map( # set metadata properties on the object
      lambda x: setattr(self, x, self.get_var('__' + x + '__')),
      METADATA
    )
    self._type = 'Module'
Example #13
    def test_free_vars(self):
        stmt = ast.parse("foo", "", "single")
        with self.assertRaises(ValueError):
            freevars(stmt, [])

        suite = ast.parse("foo; bar();", "exec")
        with self.assertRaises(ValueError):
            freevars(suite, [])

        def freevars_(source, env=[]):
            return freevars(ast.parse(source, "", "eval"), env)

        self.assertEqual(freevars_("1"), [])
        self.assertEqual(freevars_("..."), [])
        self.assertEqual(freevars_("a"), ["a"])
        self.assertEqual(freevars_("a", ["a"]), [])
        self.assertEqual(freevars_("f(1)"), ["f"])
        self.assertEqual(freevars_("f(x)"), ["f", "x"])
        self.assertEqual(freevars_("f(x)", ["f"]), ["x"])
        self.assertEqual(freevars_("a + 1"), ["a"])
        self.assertEqual(freevars_("a + b"), ["a", "b"])
        self.assertEqual(freevars_("a + b", ["a", "b"]), [])
        self.assertEqual(freevars_("a[b]"), ["a", "b"])
        self.assertEqual(freevars_("a[b]", ["a", "b"]), [])
        self.assertEqual(freevars_("f(x, *a)", ["f"]), ["x", "a"])
        self.assertEqual(freevars_("f(x, *a, y=1)", ["f"]), ["x", "a"])
        self.assertEqual(freevars_("f(x, *a, y=1, **k)", ["f"]),
                         ["x", "a", "k"])
        if sys.version_info >= (3, 5):
            self.assertEqual(freevars_("f(*a, *b, k=c, **d, **e)", ["f"]),
                             ["a", "b", "c", "d", "e"])

        self.assertEqual(freevars_("True"), [])
        self.assertEqual(freevars_("'True'"), [])
        self.assertEqual(freevars_("None"), [])
        self.assertEqual(freevars_("b'None'"), [])

        self.assertEqual(freevars_("a < b"), ["a", "b"])
        self.assertEqual(freevars_("a < b <= c"), ["a", "b", "c"])
        self.assertEqual(freevars_("1 < a <= 3"), ["a"])

        self.assertEqual(freevars_("{}"), [])
        self.assertEqual(freevars_("[]"), [])
        self.assertEqual(freevars_("()"), [])
        self.assertEqual(freevars_("[a, 1]"), ["a"])
        self.assertEqual(freevars_("{a: b}"), ["a", "b"])
        self.assertEqual(freevars_("{a, b}"), ["a", "b"])
        self.assertEqual(freevars_("0 if abs(a) < 0.1 else b", ["abs"]),
                         ["a", "b"])
        self.assertEqual(freevars_("lambda a: b + 1"), ["b"])
        self.assertEqual(freevars_("lambda a: b + 1", ["b"]), [])
        self.assertEqual(freevars_("lambda a: a + 1"), [])
        self.assertEqual(freevars_("(lambda a: a + 1)(a)"), ["a"])
        self.assertEqual(freevars_("lambda a, *arg: arg + (a,)"), [])
        self.assertEqual(freevars_("lambda a, *arg, **kwargs: arg + (a,)"), [])

        self.assertEqual(freevars_("[a for a in b]"), ["b"])
        self.assertEqual(freevars_("[1 + a for c in b if c]"), ["a", "b"])
        self.assertEqual(freevars_("{a for _ in [] if b}"), ["a", "b"])
        self.assertEqual(freevars_("{a for _ in [] if b}", ["a", "b"]), [])
Example #14
def veloce_generate_js(filepath, requirejs=False, root_path=None, output=None, deep=None):
    dirname = os.path.abspath(os.path.dirname(filepath))
    if not root_path:
        root_path = dirname
    basename = os.path.basename(filepath)
    output_name = os.path.join(dirname, basename + '.js')
    if not output:
        print('Generating {}'.format(output_name))
    # generate js
    with open(os.path.join(dirname, basename)) as f:
        input = f.read()
    tree = parse(input)
    python_core = Veloce()
    python_core.visit(tree)
    script = python_core.writer.value()
    if requirejs:
        out = 'define(function(require) {\n'
        out += script
        if isinstance(python_core.__all__, str):
            out += '\nreturn {};\n'.format(python_core.__all__)
        elif python_core.__all__:
            public = '{{{}}}'.format(', '.join(map(lambda x: '{}: {}'.format(x[0], x[1]), zip(python_core.__all__, python_core.__all__))))
            out += '\nreturn {};\n'.format(public)
        else:
            raise Exception('__all__ is not defined!')
        out += '\n})\n'
        script = out
    if deep:
        for dependency in python_core.dependencies:
            if dependency.startswith('.'):
                generate_js(os.path.join(dirname, dependency + '.py'), requirejs, root_path, output, deep)
            else:
                generate_js(os.path.join(root_path, dependency[1:] + '.py'), requirejs, root_path, output, deep)
    output.write(script)
Example #15
 def update(self, name, script, template):
     self.name = name
     try:
         ast.parse(script)
         self.script = script
     except SyntaxError, e:
         raise UserException(e)
Example #16
def correct_parentheses(s):
    try:
        t = re.sub('([)\]])', r'\1,', s)
        ast.parse(t)
        return True
    except Exception:
        return False
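A usage sketch, assuming import ast, import re, and the function above are in scope. The substitution turns every ')' or ']' into '),' or '],', so balanced input still parses as a tuple while unbalanced input does not:

import ast, re  # needed by correct_parentheses

print(correct_parentheses("f(1)(2)"))   # True  -- "f(1),(2)," is still valid Python
print(correct_parentheses("f(1))"))     # False -- the extra ')' cannot be parsed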
Example #17
def _analyzeGens(top, absnames):
    genlist = []
    for g in top:
        if isinstance(g, _UserCode):
            tree = g
        elif isinstance(g, (_AlwaysComb, _AlwaysSeq, _Always)):
            f = g.func
            s = inspect.getsource(f)
            s = _dedent(s)
            tree = ast.parse(s)
            #print ast.dump(tree)
            tree.sourcefile  = inspect.getsourcefile(f)
            tree.lineoffset = inspect.getsourcelines(f)[1]-1
            tree.symdict = f.func_globals.copy()
            tree.callstack = []
            # handle free variables
            tree.nonlocaldict = {}
            if f.func_code.co_freevars:
                for n, c in zip(f.func_code.co_freevars, f.func_closure):
                    obj = _cell_deref(c)
                    if isinstance(g, _AlwaysComb):
                        if not ( isinstance(obj, (int, long, EnumType,_Signal)) or \
                                 _isMem(obj) or _isTupleOfInts(obj)
                               ):
                            info =  "File %s, line %s: " % (tree.sourcefile, tree.lineoffset)
                            print type(obj)
                            raise ConversionError(_error.UnsupportedType, n, info)
                    tree.symdict[n] = obj
                    # currently, only intbv as automatic nonlocals (until Python 3.0)
                    if isinstance(obj, intbv):
                        tree.nonlocaldict[n] = obj
            tree.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _FirstPassVisitor(tree)
            v.visit(tree)
            if isinstance(g, _AlwaysComb):
                v = _AnalyzeAlwaysCombVisitor(tree, g.senslist)
            elif isinstance(g, _AlwaysSeq):
                v = _AnalyzeAlwaysSeqVisitor(tree, g.senslist, g.reset, g.sigregs, g.varregs)
            else:
                v = _AnalyzeAlwaysDecoVisitor(tree, g.senslist)
            v.visit(tree)
        else: # @instance
            f = g.gen.gi_frame
            s = inspect.getsource(f)
            s = _dedent(s)
            tree = ast.parse(s)
            # print ast.dump(tree)
            tree.sourcefile = inspect.getsourcefile(f)
            tree.lineoffset = inspect.getsourcelines(f)[1]-1
            tree.symdict = f.f_globals.copy()
            tree.symdict.update(f.f_locals)
            tree.nonlocaldict = {}
            tree.callstack = []
            tree.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _FirstPassVisitor(tree)
            v.visit(tree)
            v = _AnalyzeBlockVisitor(tree)
            v.visit(tree)
        genlist.append(tree)
    return genlist
Example #18
 def prettyPrintNode(self, p):
     '''The driver for beautification: beautify a single node.'''
     # c = self.c
     if not should_beautify(p):
         # @nobeautify is in effect.
         return
     if not p.b:
         # Pretty printing might add text!
         return
     if not p.b.strip():
         # Do this *after* we are sure @beautify is in effect.
         self.replace_body(p, '')
         return
     t1 = time.time()
     # Replace Leonine syntax with special comments.
     comment_string, s0 = comment_leo_lines(p)
     try:
         s1 = g.toEncodedString(s0)
         node1 = ast.parse(s1, filename='before', mode='exec')
     except IndentationError:
         self.skip_message('IndentationError', p)
         return
     except SyntaxError:
         self.skip_message('SyntaxError', p)
         return
     except Exception:
         g.es_exception()
         self.skip_message('Exception', p)
         return
     t2 = time.time()
     readlines = g.ReadLinesClass(s0).next
     tokens = list(tokenize.generate_tokens(readlines))
     t3 = time.time()
     s2 = self.run(tokens)
     t4 = time.time()
     try:
         s2_e = g.toEncodedString(s2)
         node2 = ast.parse(s2_e, filename='before', mode='exec')
         ok = compare_ast(node1, node2)
     except Exception:
         g.es_exception()
         g.trace('Error in %s...\n%s' % (p.h, s2_e))
         self.skip_message('BeautifierError', p)
         return
     if not ok:
         self.skip_message('BeautifierError', p)
         return
     t5 = time.time()
     # Restore the tags after the compare
     s3 = uncomment_leo_lines(comment_string, p, s2)
     self.replace_body(p, s3)
     # Update the stats
     self.n_input_tokens += len(tokens)
     self.n_output_tokens += len(self.code_list)
     self.n_strings += len(s3)
     self.parse_time += (t2 - t1)
     self.tokenize_time += (t3 - t2)
     self.beautify_time += (t4 - t3)
     self.check_time += (t5 - t4)
     self.total_time += (t5 - t1)
Example #19
    def visit_Call(self, node):

        name = node.func.id
        args = node.args[0]

        if name in self.variables:
            if isinstance(args, UnaryOp):
                # we have s(+1)
                if (isinstance(args.op, UAdd)):
                    args = args.operand
                    date = args.n
                elif (isinstance(args.op, USub)):
                    args = args.operand
                    date = -args.n
                else:
                    raise Exception("Unrecognized subscript.")
            else:
                date = args.n
            if self.shift =='S':
                return ast.parse('{}'.format(name)).body[0].value
            else:
                new_date = date+self.shift
                if new_date != 0:
                    return ast.parse('{}({})'.format(name,new_date)).body[0].value
                else:
                    return ast.parse('{}'.format(name)).body[0].value
        else:

            # , keywords=node.keywords,  kwargs=node.kwargs)
            return Call(func=node.func, args=[self.visit(e) for e in node.args], keywords=[])
Example #20
    def __init__(self, network, name, source, target, parameters):
        Connector.__init__(self, network, name, source, target, parameters)

        # lets load up the weight ModularConnectorFunction's
        self.weight_functions = {}
        self.delay_functions = {}
        self.simulator_time_step = self.sim.get_time_step()
        # lets determine the list of variables in weight expressions
        v = ExpVisitor()
        v.visit(ast.parse(self.parameters.weight_expression))
        self.weight_function_names = v.names
        # lets determine the list of variables in delay expressions
        v = ExpVisitor()
        v.visit(ast.parse(self.parameters.delay_expression))
        self.delay_function_names = v.names

        for k in self.weight_function_names:
            self.weight_functions[k] = load_component(self.parameters.weight_functions[k].component)(
                self.source, self.target, self.parameters.weight_functions[k].params
            )
            assert isinstance(self.weight_functions[k], ModularConnectorFunction)

        for k in self.delay_function_names:
            self.delay_functions[k] = load_component(self.parameters.delay_functions[k].component)(
                self.source, self.target, self.parameters.delay_functions[k].params
            )
Example #21
    def test_forbidden_names(self):
        tree = ast.parse("__abc__")
        with self.assertRaises(exceptions.SyntaxError) as cm:
            self.validator(tree)
        self.assertEquals(1, cm.exception.lineno)
        self.assertEquals(1, cm.exception.offset)
        self.assertEquals("double_underscore_name", cm.exception.reason)

        tree = ast.parse("__")
        with self.assertRaises(exceptions.SyntaxError) as cm:
            self.validator(tree)
        self.assertEquals(1, cm.exception.lineno)
        self.assertEquals(1, cm.exception.offset)
        self.assertEquals("double_underscore_name", cm.exception.reason)

        tree = ast.parse("___xtra_underscores___")
        with self.assertRaises(exceptions.SyntaxError) as cm:
            self.validator(tree)
        self.assertEquals(1, cm.exception.lineno)
        self.assertEquals(1, cm.exception.offset)
        self.assertEquals("double_underscore_name", cm.exception.reason)

        tree = ast.parse("__abc__.__attr__")
        with self.assertRaises(exceptions.SyntaxError) as cm:
            self.validator(tree)
        self.assertEquals(1, cm.exception.lineno)
        self.assertEquals(1, cm.exception.offset)
        self.assertEquals("double_underscore_name", cm.exception.reason)
Example #22
    def test_generate_files(self):
        """Test generate_files returns a tuple."""
        self.prototype.set_jinja_env(self.api_version)
        details = self.prototype.generate_files()
        self.assertIsInstance(details, list)
        # namedtuples in tuple
        for file_details in details:
            self.assertIsInstance(file_details, tuple)
            self.assertIsInstance(file_details.filename, basestring)
            self.assertIsInstance(file_details.filecontent, basestring)

            name, contents = file_details
            if name.endswith(".py"):
                # We have a "coding utf-8" line in there, we need to encode
                contents = contents.encode("utf-8")
                ast.parse(contents)
                if pep8:
                    checker = pep8.Checker(
                        name,
                        contents.splitlines(True))
                    res = checker.check_all()
                    self.assertFalse(
                        res,
                        "Python file {0} has pep8 errors:\n"
                        "{1}\n{2}".format(name, checker.report.messages,
                                          repr(contents))
                    )

            elif name.endswith(".xml"):
                # TODO validate valid odoo xml
                lxml.etree.fromstring(contents)
Example #23
 def runcode(self, the_code, source, filename='<input>'):
     # code taken from InteractiveInterpreter.runsource in code.py
     try:
         tree = ast.parse(source)
         try:
             expr = ast.parse(source, mode='eval')
         except:
             expr = None
         #todo get this to work for multiple expr's, not just 1:
         if expr and len(tree.body) == 1:
             # _ = expr_value
             tree.body[0] = ast_wrap_in_assn('_', tree.body[0])
             # print _
             underscore = _ast.Name(id="_", ctx=_ast.Load())
             print_node = ast_print_node([_ast.Str(s=' '*50), underscore])
             tree.body.append(print_node)
             # play_whatever
                 #todo doesn't work for generators yet
             play_whatever_node = ast_call_node('music.play_whatever', '_', show_notes=SHOW_NOTES)
             tree.body.append(play_whatever_node)
         #print ast.dump(tree)
         code_obj = compile(tree, '<input>', 'exec')
         exec code_obj in self.locals
     except SystemExit:
         raise
     except:
         self.showtraceback()
     else:
         if code.softspace(sys.stdout, 0):
             print
Example #24
    def warn_about_none_ast(self, node, module_path, lineno):
        """
        Returns an AST issuing a warning if the value of node is `None`.
        This is used to warn the user when asserting a function that asserts
        internally already.
        See issue #3191 for more details.
        """

        # Using parse because it is different between py2 and py3.
        AST_NONE = ast.parse("None").body[0].value
        val_is_none = ast.Compare(node, [ast.Is()], [AST_NONE])
        send_warning = ast.parse(
            """
from _pytest.warning_types import PytestWarning
from warnings import warn_explicit
warn_explicit(
    PytestWarning('asserting the value None, please use "assert is None"'),
    category=None,
    filename={filename!r},
    lineno={lineno},
)
            """.format(
                filename=module_path.strpath, lineno=lineno
            )
        ).body
        return ast.If(val_is_none, send_warning, [])
Example #25
    def test_get_docstring(self):
        node = ast.parse('def foo():\n  """line one\n  line two"""')
        self.assertEqual(ast.get_docstring(node.body[0]),
                         'line one\nline two')

        node = ast.parse('async def foo():\n  """spam\n  ham"""')
        self.assertEqual(ast.get_docstring(node.body[0]), 'spam\nham')
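For comparison, ast.get_docstring works the same way on the module node itself; a quick self-contained sketch:

import ast

mod = ast.parse('"""module docstring"""\nx = 1\n')
print(ast.get_docstring(mod))                  # 'module docstring'
print(ast.get_docstring(ast.parse("x = 1")))   # None -- no leading string literal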
Example #26
    def OnApply(self, event):
        """Event handler for Apply button"""

        # See if we have valid python
        try:
            ast.parse(self.macros)
        except:
            # Grab the traceback and print it for the user
            s = StringIO()
            e = exc_info()
            # usr_tb will more than likely be none because ast throws
            #   SyntaxErrors as occurring outside of the current
            #   execution frame
            usr_tb = get_user_codeframe(e[2]) or None
            print_exception(e[0], e[1], usr_tb, None, s)
            post_command_event(self.parent, self.MacroErrorMsg,
                               err=s.getvalue())
            success = False
        else:
            self.result_ctrl.SetValue('')
            post_command_event(self.parent, self.MacroReplaceMsg,
                               macros=self.macros)
            post_command_event(self.parent, self.MacroExecuteMsg)
            success = True

        event.Skip()
        return success
Example #27
	def get_changes(self):
		if not self.text_before: return
		tree_before = ast.parse(self.text_before)
		tree = ast.parse(self.text)
		if ast.dump(tree)==ast.dump(tree_before): print('status','no changes to the script')
		else: 
			print('status','executing changes to %s'%self.file)
			# identify changed nodes in the tree and execute
			# note that this feature reruns any changed child of the script parent
			#! track line numbers and report them to the user?
			tree_before,tree = [[self.CodeChunk(i,index=ii) for ii,i in 
				enumerate(ast.iter_child_nodes(ast.parse(t)))]
				for t in [self.text_before,self.text]]
			intersect = set.intersection(set(tree),set(tree_before))
			novel = list(set.difference(set(tree),intersect))
			novel_linenos = set([i.this.lineno for i in novel])
			class CodeSurgery(ast.NodeTransformer):
				def visit(self, node):
					if hasattr(node,'lineno') and node.lineno not in novel_linenos: 
						return ast.parse('last_lineno = %d'%node.lineno).body[0]
					else: return ast.NodeTransformer.generic_visit(self,node)
			code_ready = ast.fix_missing_locations(CodeSurgery().visit(ast.parse(self.text)))
			# run the remainder
			out = self.namespace
			#! exec to eval for python <2.7.15
			eval(compile(code_ready,filename='<ast>',mode='exec'),out,out)
Example #28
def python_code(tokenIterator, envName, endToken):
    """
    Given an iterator of tokens, and the name of current environment, iterates through to the end of
    the environment, and returns the text in the environment as python code. This function only
    works for environments that are supposed to contain Python code.
    """
    count = 0
    pythonLine = ''
    pythonCode = []
    #parseNodes = []
    startLine = 0
    for token, lineNum in tokenIterator:
        if not startLine:
            #The first token is the endline at the end of \begin{envName}, so the environment starts on the same line as the first token in tokenIterator
            startLine = lineNum
        if endToken == token:
            pythonCode = [line for line in pythonCode if line.strip()]
            #We use this to figure out what the top level indent is, and strip that away so that Python can parse the code properly.
            topLevelIndent = 1 if pythonCode[0][0] == '\t' else compute_indent(pythonCode[0])
            try:
                ast.parse(''.join(line[topLevelIndent:] for line in  pythonCode))
            except SyntaxError, e:
                raise transExceptions.TranslationError(' '.join([parseTree.color("Error:", parseTree.bcolors.RED), 'Error in Python code found in', parseTree.color(envName,parseTree.bcolors.YELLOW),  
                    'environment. Environment start line:', parseTree.color(str(startLine), parseTree.bcolors.GREEN),  'Python error:\n\n', str(e)]))
            else:
                return (startLine, pythonCode)
        else:
            if token.strip(' \t') == '\n':
                pythonCode.append(pythonLine + token)
                pythonLine = ''
            else:
                pythonLine += token
Example #29
def compare_strings(a,b):
    t1 = ast.parse(a)
    t2 = ast.parse(b)
    comp = Compare()
    val = comp.compare(t1,t2)
    d = comp.d
    return val
Example #30
def pythonium_generate_js(filepath, requirejs=False, root_path=None, output=None, deep=None):
    dirname = os.path.abspath(os.path.dirname(filepath))
    if not root_path:
        root_path = dirname
    basename = os.path.basename(filepath)
    output_name = os.path.join(dirname, basename + '.js')
    if not output:
        print('Generating {}'.format(output_name))
    # generate js
    with open(os.path.join(dirname, basename)) as f:
        input = f.read()
    tree = parse(input)
    pythonium = Pythonium()
    pythonium.visit(tree)
    script = pythonium.writer.value()
    if requirejs:
        out = 'define(function(require) {\n'
        out += script
        all = map(lambda x: "'{}': {}".format(x, x), pythonium.__all__)
        all = '{{{}}}'.format(', '.join(all))
        out += 'return {}'.format(all)
        out += '\n})\n'
        script = out
    if deep:
        for dependency in pythonium.dependencies:
            if dependency.startswith('.'):
                generate_js(os.path.join(dirname, dependency + '.py'), requirejs, root_path, output, deep)
            else:
                generate_js(os.path.join(root_path, dependency[1:] + '.py'), requirejs, root_path, output, deep)
    output.write(script)
Example #31
def find_version(*parts):
    finder = VersionFinder()
    finder.visit(ast.parse(read(*parts)))
    return finder.version
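VersionFinder and read are not shown in this snippet. A minimal sketch of what such a visitor might look like (hypothetical, not the project's actual class):

import ast

class VersionFinder(ast.NodeVisitor):
    """Collect the literal assigned to __version__ at module level."""

    def __init__(self):
        self.version = None

    def visit_Assign(self, node):
        for target in node.targets:
            if isinstance(target, ast.Name) and target.id == "__version__":
                self.version = ast.literal_eval(node.value)
        self.generic_visit(node)

finder = VersionFinder()
finder.visit(ast.parse("__version__ = '1.2.3'\n"))
print(finder.version)   # 1.2.3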
Example #32
def parse_code(student_code):
    """
    parses code and returns a new CaitNode that is the root of the student
    """
    return CaitNode(ast.parse(student_code))
Example #33
def temp_code(input_file):
  f = open(input_file,"r")
  x = v()
  t = ast.parse(f.read())
  x.visit(t)
  return x.listL
Example #34
    def check_django_versions(self, git_repo):
        """
        A repo is either a library or an application.

        If it's a library, then it should have a setup.py or a setup.cfg, and it should have a
        tox.ini file that runs its tests with at least django 1.8 and django 1.11, if
        its setup.py lists Django as a dependency.

        If it's an application, then it should have a requirements/base.txt
        (or at least a requirements.txt) and that should pin
        either Django==1.8 or Django==1.11, if it specifies Django as a dependency.
        """
        working_dir = Path(git_repo.working_tree_dir)

        setup_py = working_dir / 'setup.py'
        setup_cfg = working_dir / 'setup.cfg'
        requirements_base_txt = working_dir / 'requirements/base.txt'
        requirements_txt = working_dir / 'requirements.txt'
        manage_py = working_dir / 'manage.py'
        tox_ini = working_dir / 'tox.ini'

        is_django_application = manage_py.exists()

        print(is_django_application)
        print(setup_py.exists())

        if requirements_base_txt.exists():
            requirements_file = requirements_base_txt
        else:
            requirements_file = requirements_txt

        print(requirements_file.exists())

        if not is_django_application and setup_py.exists():
            parsed_setup_py = ast.parse(setup_py.bytes(), 'setup.py')

            if uses_pbr(parsed_setup_py):
                has_django = requirements_txt_has_django(requirements_file)
            else:
                has_django = setup_py_has_django(parsed_setup_py)

            if not has_django:
                return

            tested_versions = tox_tested_django_versions(tox_ini)
            assert LIBRARY_REQUIRED_DJANGO_VERSIONS in tested_versions

        elif requirements_file.exists():
            django_specifier = None

            for req in parsed_requirements_txt(requirements_file):
                if requirement_is_django(req):
                    if django_specifier is None:
                        django_specifier = req.specifier
                    else:
                        django_specifier &= req.specifier

            if django_specifier is None:
                return

            largest_accepted_version = max(version
                                           for version in DJANGO_VERSIONS
                                           if version in django_specifier)

            for supported_version in APPLICATION_ALLOWED_DJANGO_VERSIONS:
                if largest_accepted_version in supported_version:
                    return
            msg = (
                "No allowed version range contained the largest allowed django "
                +
                f"version {largest_accepted_version} in the version specifier {django_specifier}"
            )
            assert False, msg
        else:
            assert False, "Couldn't determine if repo is an application or a library"
Example #35
    def _log_problem(self, error_code, message):
        self.problems.append(Problem(self._cursor.node, error_code, message))


class Checker:
    name = "flake8-bionic-dnode-match"
    version = "0.1"

    def __init__(self, tree):
        self._tree = tree

    def run(self):
        visitor = MatchVisitor()
        visitor.visit(self._tree)
        for problem in visitor.problems:
            yield problem.to_flake8_tuple()


if __name__ == "__main__":
    import sys
    from pathlib import Path

    filenames = sys.argv[1:]

    for filename in filenames:
        code = Path(filename).read_text()
        tree = ast.parse(code)
        for message in Checker(tree).run():
            print(message)  # noqa: T001
Example #36
def second(file_name):
    print("Second star: {}".format(sum(eval(compile((translate_back if (translate_back := ast.NodeTransformer()) and not setattr(translate_back, 'visit_Add', lambda _: ast.Mult()) and not setattr(translate_back, 'visit_Mult', lambda _: ast.Add()) else None).visit(ast.parse(io.open(file_name).read().translate({43: 42, 42: 43, 10: 44}), mode='eval')), '', 'eval')))))
Example #37
def run(
    script_path: str,
    method_name: str = "main",
    args: Optional[Tuple] = None,
    kwargs: Optional[Dict] = None,
    project: Any = None,
    _include_frame: bool = False,
) -> Any:
    """Loads a project script and runs a method in it.

    script_path: path of script to load
    method_name: name of method to run
    args: method args
    kwargs: method kwargs
    project: (deprecated)

    Returns: return value from called method
    """
    if args is None:
        args = tuple()
    if kwargs is None:
        kwargs = {}

    script, project = _get_path(script_path)

    # temporarily add project objects to the main namespace, so the script can import them
    if project is not None:
        project._add_to_main_namespace()

    # modify sys.path to ensure script can be imported
    root_path = Path(".").resolve().root
    sys.path.insert(0, root_path)

    try:
        module = _import_from_path(script)

        name = module.__name__

        if not hasattr(module, method_name):
            raise AttributeError(f"Module '{name}' has no method '{method_name}'")
        try:
            module_path = Path(module.__file__).relative_to(Path(".").absolute())
        except ValueError:
            module_path = Path(module.__file__)
        print(
            f"\nRunning '{color('bright blue')}{module_path}{color}::"
            f"{color('bright cyan')}{method_name}{color}'..."
        )
        func = getattr(module, method_name)
        if not _include_frame:
            return func(*args, **kwargs)

        # this voodoo preserves the call frame of the function after it has finished executing.
        # we do this so that `brownie run -i` is able to drop into the console with the same
        # namespace as the function that it just ran.

        # first, we extract the source code from the function and parse it to an AST
        source = inspect.getsource(func)
        func_ast = ast.parse(source)

        # next, we insert some new logic into the beginning of the function. this imports
        # the sys module and assigns the current frame to a global var `__brownie_frame`
        injected_source = "import sys\nglobal __brownie_frame\n__brownie_frame = sys._getframe()"
        injected_ast = ast.parse(injected_source)
        func_ast.body[0].body = injected_ast.body + func_ast.body[0].body  # type: ignore

        # now we compile the AST into a code object, using the module's `__dict__` as our globals
        # so that we have access to all the required imports and other objects
        f_locals: Dict = module.__dict__.copy()
        del f_locals[method_name]
        func_code = compile(func_ast, "", "exec")
        exec(func_code, f_locals)

        # finally, we execute our new function from inside the copied globals dict. the frame
        # is added to the dict as `__global_frame` per our injected code, and we return it for
        # use within the console. so simple!
        return_value = f_locals[method_name](*args, **kwargs)
        return return_value, f_locals["__brownie_frame"]

    finally:
        # cleanup namespace and sys.path
        sys.path.remove(root_path)
        if project is not None:
            project._remove_from_main_namespace()
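The frame-capture trick in the middle of this function can be shown in isolation. A minimal self-contained sketch of the same idea, outside Brownie:

import ast
import inspect

def demo(x):
    return x * 2

# parse the function's source and prepend statements that stash the running frame
func_ast = ast.parse(inspect.getsource(demo))
injected = ast.parse(
    "import sys\nglobal __captured_frame\n__captured_frame = sys._getframe()"
)
func_ast.body[0].body = injected.body + func_ast.body[0].body

namespace = {}
exec(compile(ast.fix_missing_locations(func_ast), "<demo>", "exec"), namespace)
result = namespace["demo"](21)
frame = namespace["__captured_frame"]
print(result, frame.f_locals["x"])   # 42 21 -- the finished call's locals remain reachable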
Example #38
    def _uses_loop(function):
        loop_statements = ast.For, ast.While, ast.AsyncFor, ast.ListComp

        nodes = ast.walk(ast.parse(inspect.getsource(function)))
        return any(isinstance(node, loop_statements) for node in nodes)
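A usage sketch, assuming import ast, import inspect, and _uses_loop are reachable in the current scope (the functions must live in a file so inspect.getsource works):

import ast, inspect  # needed by _uses_loop

def with_comprehension():
    return [x for x in range(3)]

def plain():
    return 42

print(_uses_loop(with_comprehension))   # True  -- ast.ListComp counts as a loop here
print(_uses_loop(plain))                # False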
Example #39
def check(code, filename="<unknown>", options=None):
    if options is None:
        options = Flake8Options
    checker.memoize.mem = {}
    Flake8Checker.parse_options(options)
    return list(Flake8Checker(ast.parse(code), filename).run())
Example #40
    def from_code(code: str,
                  field: str = "",
                  inner=True,
                  simplify=True,
                  conds_align=False):
        """
        Get a Flowchart instance from a str of Python code.

        Args:

            code:  str,  Python code to draw flowchart
            field: str,  path to field (function) you want to draw flowchart
            inner: bool, True: parse the body of field; False: parse the field as a whole
            simplify: bool, for If & Loop statements: simplify the one-line-body or not.
            conds_align: bool, for consecutive If statements: conditionNode alignment support (Issue#14) or not

        Returns:
            A Flowchart instance parsed from given code.

        `inner=True` means parse `field.body`, otherwise parse [field]. E.g.

        ```
        def a():
            print('a')
        ```

        inner=True  => `st (function a) -> subroutine (print) -> end`
        inner=False => `op=>operation: def a(): print('a')`

        The field is the path to the target of flowchartilizing.
        It should be the *path* to a `def` code block in code. E.g.

        ```
        def foo():
            pass

        class Bar():
            def fuzz(self):
                pass
            def buzz(self, f):
                def g(self):
                    f(self)
                return g(self)

        Bar().buzz(foo)
        ```

        Available path:

        - "" (means the whole code)
        - "foo"
        - "Bar.fuzz"
        - "Bar.buzz"
        - "Bar.buzz.g"
        """
        code_ast = ast.parse(code)

        field_ast = Flowchart.find_field_from_ast(code_ast, field)

        assert hasattr(field_ast, "body")
        assert field_ast.body, f"{field}: nothing to parse. Check given code and field please."

        f = field_ast.body if inner else [field_ast]
        p = parse(f, simplify=simplify, conds_align=conds_align)
        return Flowchart(p.head)
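A usage sketch following the docstring. It assumes the pyflowchart-style Flowchart class above and a flowchart() rendering method, which is not shown in the snippet:

code = """
def foo():
    print('a')
"""

fc = Flowchart.from_code(code, field="foo", inner=True)
print(fc.flowchart())   # flowchart.js source: st (function foo) -> subroutine (print) -> end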
Example #41
def process(filename, exclude_dirs=['test','site-packages']):
    """Process a VFS filename for Brython."""
    print("Generating {}".format(filename))
    nb = 0
    nb_err = 0
    main_root = os.path.dirname(filename)
    VFS = {}
    for stdlib_dir in ("libs", "Lib"):
        lib_path = os.path.join(main_root, stdlib_dir)
        for root, _dir, files in os.walk(lib_path):
            flag = False
            root_elts = root.split(os.sep)
            for exclude in exclude_dirs:
                if exclude in root_elts:
                    flag = True
                    continue
            if flag:
                continue  # skip these modules
            if '__pycache__' in _dir:
                _dir.remove("__pycache__")
            nb += 1

            if stdlib_dir == "Lib":
                if root == lib_path:
                    package = []
                else:
                    package = root[len(lib_path) + 1:].split(os.sep)

            for _file in files:
                ext = os.path.splitext(_file)[1]
                if ext not in ('.js', '.py'):
                    continue
                if re.match(r'^module\d+\..*$', _file):
                    continue
                if not git.in_index(_file):
                    print(_file, "not in git index")
                    continue
                nb += 1

                file_name = os.path.join(root, _file)
                with open(file_name, encoding='utf-8') as f:
                    data = f.read()

                if ext == '.py':
                    data = python_minifier.minify(data, preserve_lines=True)
                    path_elts = package[:]
                    if os.path.basename(filename) != "__init__.py":
                        path_elts.append(os.path.basename(file_name)[:-3])
                    fqname = ".".join(path_elts)
                    with open(os.path.join(root, file_name), encoding="utf-8") as f:
                        tree = ast.parse(f.read())
                        visitor = Visitor(lib_path, package)
                        visitor.visit(tree)
                        imports = sorted(list(visitor.imports))

                vfs_path = os.path.join(root, _file).replace(main_root, '')
                vfs_path = vfs_path.replace("\\", "/")

                if vfs_path.startswith('/libs/crypto_js/rollups/'):
                    if _file not in ('md5.js', 'sha1.js', 'sha3.js',
                                     'sha224.js', 'sha256.js', 'sha384.js',
                                     'sha512.js'):
                        continue

                mod_name = vfs_path[len(stdlib_dir) + 2:].replace('/', '.')
                mod_name, ext = os.path.splitext(mod_name)
                is_package = mod_name.endswith('__init__')
                if ext == ".py":
                    if is_package:
                       mod_name = mod_name[:-9]
                       VFS[mod_name] = [ext, data, imports, 1]
                    else:
                        VFS[mod_name] = [ext, data, imports]
                else:
                   VFS[mod_name] = [ext, data]
                print("adding {}".format(mod_name))

    print('{} files, {} errors'.format(nb, nb_err))
    with open(filename, "w") as out:
        out.write('__BRYTHON__.use_VFS = true;\n')
        out.write('__BRYTHON__.VFS={}\n\n'.format(json.dumps(VFS)))
Example #42
def safe_eval(expr, locals={}, include_exceptions=False):
    '''
    this is intended for allowing things like:
    with_items: a_list_variable
    where Jinja2 would return a string
    but we do not want to allow it to call functions (outside of Jinja2, where
    the env is constrained)

    Based on:
    http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
    '''

    # this is the whitelist of AST nodes we are going to
    # allow in the evaluation. Any node type other than
    # those listed here will raise an exception in our custom
    # visitor class defined below.
    SAFE_NODES = set((
        ast.Expression,
        ast.Compare,
        ast.Str,
        ast.List,
        ast.Tuple,
        ast.Dict,
        ast.Call,
        ast.Load,
        ast.BinOp,
        ast.UnaryOp,
        ast.Num,
        ast.Name,
        ast.Add,
        ast.Sub,
        ast.Mult,
        ast.Div,
    ))

    # AST node types were expanded after 2.6
    if not sys.version.startswith('2.6'):
        SAFE_NODES = SAFE_NODES.union(set((ast.Set, )))

    # builtin functions that are not safe to call
    INVALID_CALLS = (
        'classmethod',
        'compile',
        'delattr',
        'eval',
        'execfile',
        'file',
        'filter',
        'help',
        'input',
        'object',
        'open',
        'raw_input',
        'reduce',
        'reload',
        'repr',
        'setattr',
        'staticmethod',
        'super',
        'type',
    )

    class CleansingNodeVisitor(ast.NodeVisitor):
        def generic_visit(self, node):
            if type(node) not in SAFE_NODES:
                #raise Exception("invalid expression (%s) type=%s" % (expr, type(node)))
                raise Exception("invalid expression (%s)" % expr)
            super(CleansingNodeVisitor, self).generic_visit(node)

        def visit_Call(self, call):
            if call.func.id in INVALID_CALLS:
                raise Exception("invalid function: %s" % call.func.id)

    if not isinstance(expr, basestring):
        # already templated to a datastructure, perhaps?
        if include_exceptions:
            return (expr, None)
        return expr

    try:
        parsed_tree = ast.parse(expr, mode='eval')
        cnv = CleansingNodeVisitor()
        cnv.visit(parsed_tree)
        compiled = compile(parsed_tree, expr, 'eval')
        result = eval(compiled, {}, locals)

        if include_exceptions:
            return (result, None)
        else:
            return result
    except SyntaxError, e:
        # special handling for syntax errors, we just return
        # the expression string back as-is
        if include_exceptions:
            return (expr, None)
        return expr
Example #43
def setup(loader):
    module = ast.parse(SRC)
    module.__preinit__('threading', '', None, internal=True)
    loader.setupStandrad('threading', module)
Example #44
def _replace_magics(
    source: Sequence[str],
    magic_substitutions: List[MagicHandler],
    command: str,
    *,
    skip_bad_cells: bool,
) -> str:
    """
    Replace IPython line magics with valid python code.

    Parameters
    ----------
    source
        Source from notebook cell.
    magic_substitutions
        List to store all the ipython magics substitutions

    Returns
    -------
    str
        Line from cell, with line magics replaced with python code
    """
    try:
        ast.parse("".join(source))
    except SyntaxError:
        pass
    else:
        # Source has no IPython magic, return it directly
        return "".join(source)

    cell_magic_finder = CellMagicFinder()
    body = TransformerManager().transform_cell("".join(source))
    try:
        tree = ast.parse(body)
    except SyntaxError:
        if skip_bad_cells:
            handler = MagicHandler("".join(source), command, magic_type=None)
            magic_substitutions.append(handler)
            return handler.replacement
        return "".join(source)
    cell_magic_finder.visit(tree)

    # if first line is cell magic, process it separately
    if cell_magic_finder.header is not None:
        assert cell_magic_finder.body is not None
        header = _process_source(
            cell_magic_finder.header,
            command,
            magic_substitutions,
            skip_bad_cells=skip_bad_cells,
        )
        cell = _process_source(
            cell_magic_finder.body,
            command,
            magic_substitutions,
            skip_bad_cells=skip_bad_cells,
        )
        return "\n".join([header, cell])

    return _process_source(
        "".join(source), command, magic_substitutions, skip_bad_cells=skip_bad_cells
    )
Example #45
def h_visit(code):
    '''Compile the code into an AST tree and then pass it to
    :func:`~radon.metrics.h_visit_ast`.
    '''
    return h_visit_ast(ast.parse(code))
Example #46
def recursive_finder(name, data, py_module_names, py_module_cache, zf):
    """
    Using ModuleDepFinder, make sure we have all of the module_utils files that
    the module and its module_utils files need.
    """
    # Parse the module and find the imports of ansible.module_utils
    tree = ast.parse(data)
    finder = ModuleDepFinder()
    finder.visit(tree)

    #
    # Determine which of the imports we've found are modules (vs. class, function,
    # or variable names) for packages
    #

    normalized_modules = set()
    # Loop through the imports that we've found to normalize them
    # Exclude paths that match with paths we've already processed
    # (Have to exclude them a second time once the paths are processed)

    module_utils_paths = [
        p for p in module_utils_loader._get_paths(subdirs=False)
        if os.path.isdir(p)
    ]
    module_utils_paths.append(_MODULE_UTILS_PATH)
    for py_module_name in finder.submodules.difference(py_module_names):
        module_info = None

        if py_module_name[0] == 'six':
            # Special case the python six library because it messes up the
            # import process in an incompatible way
            module_info = imp.find_module('six', module_utils_paths)
            py_module_name = ('six', )
            idx = 0
        elif py_module_name[0] == '_six':
            # Special case the python six library because it messes up the
            # import process in an incompatible way
            module_info = imp.find_module(
                '_six', [os.path.join(p, 'six') for p in module_utils_paths])
            py_module_name = ('six', '_six')
            idx = 0
        else:
            # Check whether either the last or the second to last identifier is
            # a module name
            for idx in (1, 2):
                if len(py_module_name) < idx:
                    break
                try:
                    module_info = imp.find_module(py_module_name[-idx], [
                        os.path.join(p, *py_module_name[:-idx])
                        for p in module_utils_paths
                    ])
                    break
                except ImportError:
                    continue

        # Could not find the module.  Construct a helpful error message.
        if module_info is None:
            msg = [
                'Could not find imported module support code for %s.  Looked for'
                % (name, )
            ]
            if idx == 2:
                msg.append('either %s.py or %s.py' %
                           (py_module_name[-1], py_module_name[-2]))
            else:
                msg.append(py_module_name[-1])
            raise AnsibleError(' '.join(msg))

        # Found a byte compiled file rather than source.  We cannot send byte
        # compiled over the wire as the python version might be different.
        # imp.find_module seems to prefer to return source packages so we just
        # error out if imp.find_module returns byte compiled files (This is
        # fragile as it depends on undocumented imp.find_module behaviour)
        if module_info[2][2] not in (imp.PY_SOURCE, imp.PKG_DIRECTORY):
            msg = [
                'Could not find python source for imported module support code for %s.  Looked for'
                % name
            ]
            if idx == 2:
                msg.append('either %s.py or %s.py' %
                           (py_module_name[-1], py_module_name[-2]))
            else:
                msg.append(py_module_name[-1])
            raise AnsibleError(' '.join(msg))

        if idx == 2:
            # We've determined that the last portion was an identifier and
            # thus, not part of the module name
            py_module_name = py_module_name[:-1]

        # If not already processed then we've got work to do
        # If not in the cache, then read the file into the cache
        # We already have a file handle for the module open so it makes
        # sense to read it now
        if py_module_name not in py_module_cache:
            if module_info[2][2] == imp.PKG_DIRECTORY:
                # Read the __init__.py instead of the module file as this is
                # a python package
                normalized_name = py_module_name + ('__init__', )
                if normalized_name not in py_module_names:
                    normalized_path = os.path.join(module_info[1],
                                                   '__init__.py')
                    normalized_data = _slurp(normalized_path)
                    py_module_cache[normalized_name] = (normalized_data,
                                                        normalized_path)
                    normalized_modules.add(normalized_name)
            else:
                normalized_name = py_module_name
                if normalized_name not in py_module_names:
                    normalized_path = module_info[1]
                    normalized_data = module_info[0].read()
                    module_info[0].close()
                    py_module_cache[normalized_name] = (normalized_data,
                                                        normalized_path)
                    normalized_modules.add(normalized_name)

            # Make sure that all the packages that this module is a part of
            # are also added
            for i in range(1, len(py_module_name)):
                py_pkg_name = py_module_name[:-i] + ('__init__', )
                if py_pkg_name not in py_module_names:
                    pkg_dir_info = imp.find_module(py_pkg_name[-1], [
                        os.path.join(p, *py_pkg_name[:-1])
                        for p in module_utils_paths
                    ])
                    normalized_modules.add(py_pkg_name)
                    py_module_cache[py_pkg_name] = (_slurp(pkg_dir_info[1]),
                                                    pkg_dir_info[1])

    # FIXME: Currently the AnsiBallZ wrapper monkeypatches module args into a global
    # variable in basic.py.  If a module doesn't import basic.py, then the AnsiBallZ wrapper will
    # traceback when it tries to monkeypatch.  So, for now, we have to unconditionally include
    # basic.py.
    #
    # In the future we need to change the wrapper to monkeypatch the args into a global variable in
    # their own, separate python module.  That way we won't require basic.py.  Modules which don't
    # want basic.py can import that instead.  AnsibleModule will need to change to import the vars
    # from the separate python module and mirror the args into its global variable for backwards
    # compatibility.
    if ('basic', ) not in py_module_names:
        pkg_dir_info = imp.find_module('basic', module_utils_paths)
        normalized_modules.add(('basic', ))
        py_module_cache[('basic', )] = (_slurp(pkg_dir_info[1]),
                                        pkg_dir_info[1])
    # End of AnsiballZ hack

    #
    # iterate through all of the ansible.module_utils* imports that we haven't
    # already checked for new imports
    #

    # set of modules that we haven't added to the zipfile
    unprocessed_py_module_names = normalized_modules.difference(
        py_module_names)

    for py_module_name in unprocessed_py_module_names:
        py_module_path = os.path.join(*py_module_name)
        py_module_file_name = '%s.py' % py_module_path

        zf.writestr(os.path.join("ansible/module_utils", py_module_file_name),
                    py_module_cache[py_module_name][0])
        display.vvvvv("Using module_utils file %s" %
                      py_module_cache[py_module_name][1])

    # Add the names of the files we're scheduling to examine in the loop to
    # py_module_names so that we don't re-examine them in the next pass
    # through recursive_finder()
    py_module_names.update(unprocessed_py_module_names)

    for py_module_file in unprocessed_py_module_names:
        recursive_finder(py_module_file, py_module_cache[py_module_file][0],
                         py_module_names, py_module_cache, zf)
        # Save memory; the file won't have to be read again for this ansible module.
        del py_module_cache[py_module_file]
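The `ModuleDepFinder` visitor used at the top of this function is defined elsewhere in Ansible and is not shown here. As a rough, illustrative sketch only (the class name, attribute layout, and the example import below are assumptions, not Ansible's actual implementation), such a visitor could collect `ansible.module_utils` imports like this:

import ast

class SimpleModuleDepFinder(ast.NodeVisitor):
    # Illustrative stand-in: collect the dotted names imported from
    # ansible.module_utils as tuples, e.g. ('basic',) or ('basic', 'AnsibleModule').
    def __init__(self):
        self.submodules = set()

    def visit_Import(self, node):
        for alias in node.names:
            parts = tuple(alias.name.split('.'))
            if parts[:2] == ('ansible', 'module_utils'):
                self.submodules.add(parts[2:])
        self.generic_visit(node)

    def visit_ImportFrom(self, node):
        if node.module and node.module.startswith('ansible.module_utils'):
            prefix = tuple(node.module.split('.'))[2:]
            for alias in node.names:
                self.submodules.add(prefix + (alias.name, ))
        self.generic_visit(node)

finder = SimpleModuleDepFinder()
finder.visit(ast.parse("from ansible.module_utils.basic import AnsibleModule"))
# finder.submodules == {('basic', 'AnsibleModule')}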
Beispiel #47
0
def parse_stmt(stmt):
    return ast.parse(stmt).body[0]
Beispiel #48
0
def read_version():
    """Read version from httpretty/version.py without loading any files"""
    finder = VersionFinder()
    finder.visit(ast.parse(local_file('httpretty', '__init__.py')))
    return finder.version
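`VersionFinder` and `local_file` are helpers defined elsewhere in that project. A minimal sketch of what such a visitor might look like, assuming Python 3.8+ and that the module assigns a plain string literal to `version` or `__version__` (the names and details below are assumptions):

import ast

class SimpleVersionFinder(ast.NodeVisitor):
    # Illustrative stand-in: remember the last string literal assigned to
    # `version` or `__version__` anywhere in the module.
    def __init__(self):
        self.version = None

    def visit_Assign(self, node):
        for target in node.targets:
            if isinstance(target, ast.Name) and target.id in ('version', '__version__'):
                if isinstance(node.value, ast.Constant) and isinstance(node.value.value, str):
                    self.version = node.value.value
        self.generic_visit(node)

finder = SimpleVersionFinder()
finder.visit(ast.parse("__version__ = '1.2.3'"))
assert finder.version == '1.2.3'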
Beispiel #49
0
def core_module_docstrings(include_core=True,
                           include_user=False,
                           config=None,
                           format="md"):
    """
    Get docstrings for all core modules and user ones if requested
    returns a dict of {<module_name>: <docstring>}
    """
    paths = {}
    docstrings = {}
    if include_core:
        for file in os.listdir(modules_directory()):
            if file.endswith(".py"):
                name = file[:-3]
                if name != "__init__":
                    paths[name] = (os.path.join(modules_directory(),
                                                file), "core")

    if include_user:
        # include user modules
        for include_path in sorted(config["include_paths"]):
            include_path = os.path.abspath(include_path) + "/"
            if not os.path.isdir(include_path):
                continue
            for file in sorted(os.listdir(include_path)):
                if not file.endswith(".py"):
                    continue
                name = file[:-3]
                paths[name] = (os.path.join(include_path, file), "user")
    for name in paths:
        path, module_type = paths[name]
        with open(path) as f:
            try:
                module = ast.parse(f.read())
            except SyntaxError:
                # there is a syntax error so ignore module
                continue
            raw_docstring = ast.get_docstring(module)

            # prevent issue when no docstring exists
            if raw_docstring is None:
                continue

            # remove any sample outputs
            parts = re.split("^SAMPLE OUTPUT$", raw_docstring, flags=re.M)
            docstring = parts[0]

            if format == "md":
                docstring = [
                    d for d in _from_docstring_md(
                        str(docstring).strip().split("\n"))
                ]
            elif format == "rst":
                docstring = [
                    d for d in _from_docstring_rst(
                        str(docstring).strip().split("\n"))
                ]
            else:
                raise Exception("only `md` and `rst` formats are supported")

            docstrings[name] = docstring + ["\n"]
    return docstrings
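A brief usage sketch (it assumes core modules exist in `modules_directory()`; the result is the dict described in the docstring):

docstrings = core_module_docstrings(include_core=True, format="rst")
for module_name, lines in sorted(docstrings.items()):
    print(module_name, len(lines), "documentation lines")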
Beispiel #50
0
def parse_expr(expr):
    return ast.parse(expr).body[0].value
Beispiel #51
0
def parse(self, string, **kwargs):
    ex = ast.parse(string, mode='eval')
    return self.handle(ex.body, **kwargs)
Beispiel #52
0
def make_single_statement(stmts):
    template = 'if 1: pass'
    t = ast.parse(template).body[0]
    t.body = stmts
    return t
Beispiel #53
0
def freevars_(source, env=None):
    return freevars(ast.parse(source, "", "eval"), env or [])
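`freevars` above comes from the surrounding project and is not shown. A rough standard-library-only approximation of the free-variable lookup (it ignores names bound inside comprehensions and lambdas, so it is illustrative only):

import ast

def simple_freevars(source, env=()):
    # Names that an eval-mode expression loads and that are not listed in env.
    tree = ast.parse(source, "", "eval")
    loaded = {
        node.id
        for node in ast.walk(tree)
        if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load)
    }
    return sorted(loaded - set(env))

assert simple_freevars("a + b * 2", env=["b"]) == ["a"]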
Beispiel #54
0
    def _create_assign_lambda(s, o, lamb):
        assert isinstance(
            o, Signal
        ), "You can only assign(//=) a lambda function to a Wire/InPort/OutPort."

        srcs, line = inspect.getsourcelines(lamb)
        assert len(
            srcs
        ) == 1, "We can only handle single-line lambda connect right now."

        src = compiled_re.sub(r'\2', srcs[0]).lstrip(' ')
        root = ast.parse(src)
        assert isinstance(root, ast.Module) and len(
            root.body) == 1, "Invalid lambda (contact pymtl3 developer)"

        root = root.body[0]
        assert isinstance(root, ast.AugAssign) and isinstance(
            root.op, ast.FloorDiv)

        lhs, rhs = root.target, root.value
        # We expect the lambda to have no arguments:
        # {'args': [], 'vararg': None, 'kwonlyargs': [], 'kw_defaults': [], 'kwarg': None, 'defaults': []}
        assert isinstance( rhs, ast.Lambda ) and not rhs.args.args and rhs.args.vararg is None, \
          "The lambda shouldn't contain any argument."

        rhs = rhs.body

        # Compose a new and valid function based on the lambda's lhs and rhs
        # Note that we don't need to add the source code of the closure
        # variable assignments to linecache. To get matching line numbers in
        # error messages, we set the line numbers of the update block.

        blk_name = "_lambda__{}".format(repr(o).replace(".", "_"))
        lambda_upblk = ast.FunctionDef(
            name=blk_name,
            args=ast.arguments(args=[],
                               vararg=None,
                               kwonlyargs=[],
                               kw_defaults=[],
                               kwarg=None,
                               defaults=[]),
            body=[
                ast.Assign(targets=[lhs], value=rhs, lineno=2, col_offset=6)
            ],
            decorator_list=[],
            returns=None,
            lineno=1,
            col_offset=4,
        )
        lambda_upblk_module = ast.Module(body=[lambda_upblk])

        # Manually wrap the lambda upblk with a closure function that adds the
        # desired variables to the closure of `_lambda__*`
        # We construct AST for the following function to add free variables in the
        # closure of the lambda function to the closure of the generated lambda
        # update block.
        #
        # def closure( lambda_closure ):
        #   <FreeVarName1> = lambda_closure[<Idx1>].cell_contents
        #   <FreeVarName2> = lambda_closure[<Idx2>].cell_contents
        #   ...
        #   <FreeVarNameN> = lambda_closure[<IdxN>].cell_contents
        #   def _lambda__<lambda_blk_name>():
        #     # the assignment statement appears here
        #   return _lambda__<lambda_blk_name>

        new_root = ast.Module(body=[
            ast.FunctionDef(
                name="closure",
                args=ast.arguments(args=[
                    ast.arg(arg="lambda_closure",
                            annotation=None,
                            lineno=1,
                            col_offset=12)
                ],
                                   vararg=None,
                                   kwonlyargs=[],
                                   kw_defaults=[],
                                   kwarg=None,
                                   defaults=[]),
                body=[
                    ast.Assign(
                        targets=[
                            ast.Name(id=var,
                                     ctx=ast.Store(),
                                     lineno=1 + idx,
                                     col_offset=2)
                        ],
                        value=ast.Attribute(
                            value=ast.Subscript(
                                value=ast.Name(
                                    id='lambda_closure',
                                    ctx=ast.Load(),
                                    lineno=1 + idx,
                                    col_offset=5 + len(var),
                                ),
                                slice=ast.Index(value=ast.Num(
                                    n=idx,
                                    lineno=1 + idx,
                                    col_offset=19 + len(var),
                                ), ),
                                ctx=ast.Load(),
                                lineno=1 + idx,
                                col_offset=5 + len(var),
                            ),
                            attr='cell_contents',
                            ctx=ast.Load(),
                            lineno=1 + idx,
                            col_offset=5 + len(var),
                        ),
                        lineno=1 + idx,
                        col_offset=2,
                    ) for idx, var in enumerate(lamb.__code__.co_freevars)
                ] + [lambda_upblk] + [
                    ast.Return(
                        value=ast.Name(
                            id=blk_name,
                            ctx=ast.Load(),
                            lineno=4 + len(lamb.__code__.co_freevars),
                            col_offset=9,
                        ),
                        lineno=4 + len(lamb.__code__.co_freevars),
                        col_offset=2,
                    )
                ],
                decorator_list=[],
                returns=None,
                lineno=1,
                col_offset=0,
            )
        ])

        # In Python 3 we need to supply a dict as locals so that we can
        # retrieve the newly compiled `closure` function from it.
        # Then `closure(lamb.__closure__)` returns the lambda update block with
        # the correct free variables in its closure.

        dict_local = {}
        exec(compile(new_root, blk_name, "exec"), lamb.__globals__, dict_local)
        blk = dict_local['closure'](lamb.__closure__)

        # Add the source code to linecache for the compiled function

        new_src = "def {}():\n {}\n".format(blk_name, src.replace("//=", "="))
        linecache.cache[blk_name] = (len(new_src), None, new_src.splitlines(),
                                     blk_name)

        ComponentLevel1.update(s, blk)

        # The caching here performs no actual caching, because the block name
        # intentionally contains the signal name to avoid conflicts. With //=
        # conflicts are more likely than with a normal update block:
        # if param == 1:  s.out //= s.in_ + 1
        # else:           s.out //= s.out + 100
        # Here the two blocks would implicitly have the same name but
        # different contents depending on param.
        # So the cache call here just reuses the existing interface to
        # register the AST/src of the generated block for elaborate or passes
        # to use.
        s._cache_func_meta(blk,
                           is_update_ff=False,
                           given=("".join(srcs), lambda_upblk_module, line,
                                  inspect.getsourcefile(lamb)))
        return blk
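The example above builds every AST node by hand, including explicit lineno/col_offset values. As a much smaller, hedged illustration of the same compile-an-AST-into-a-function pattern (not pymtl3's actual code), one can parse a template function, splice a parsed expression into it, and let ast.fix_missing_locations fill in any missing positions:

import ast

# Parse a complete template function, then splice a parsed expression into
# its return statement.
module = ast.parse("def generated(x):\n    return None\n")
func_def = module.body[0]
func_def.body[0].value = ast.parse("x + 1", mode="eval").body
ast.fix_missing_locations(module)

namespace = {}
exec(compile(module, "<generated>", "exec"), namespace)
assert namespace["generated"](41) == 42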
Beispiel #55
0
    def run(self):
        assert (isinstance(self.ast_root, ast.Module)
                and self.ast_root._fields == ("body", )
                and len(self.ast_root.body) == 1
                and isinstance(self.ast_root.body[0], ast.FunctionDef))
        main_func_node = self.ast_root.body[0]

        assert hasattr(self.definition, "_gtscript_")
        # self.resolved_externals = self.resolve_external_symbols(
        #     self.definition._gtscript_["nonlocals"],
        #     self.definition._gtscript_["imported"],
        #     self.external_context,
        # )
        self.resolved_externals = self.definition._gtscript_["externals"]
        api_signature, fields_decls, parameter_decls = self.extract_arg_descriptors(
        )

        # Inline constant values
        for name, value in self.resolved_externals.items():
            if hasattr(value, "_gtscript_"):
                assert callable(value)
                func_node = ast.parse(gt_meta.get_ast(value)).body[0]
                local_context = self.resolve_external_symbols(
                    value._gtscript_["nonlocals"],
                    value._gtscript_["imported"],
                    self.external_context,
                    exhaustive=False,
                )
                ValueInliner.apply(func_node, context=local_context)
                value._gtscript_["ast"] = func_node
                value._gtscript_["local_context"] = local_context

        local_context = self.resolve_external_symbols(
            self.definition._gtscript_["nonlocals"],
            self.definition._gtscript_["imported"],
            self.external_context,
            exhaustive=False,
        )
        ValueInliner.apply(main_func_node, context=local_context)

        # Inline function calls
        CallInliner.apply(main_func_node, context=local_context)

        # Generate definition IR
        domain = gt_ir.Domain.LatLonGrid()
        computations = IRMaker(
            fields=fields_decls,
            parameters=parameter_decls,
            local_symbols={},  # Not used
            externals=local_context,
            domain=domain,
            extra_temp_decls={},  # Not used
        )(self.ast_root)

        self.definition_ir = gt_ir.StencilDefinition(
            name=self.main_name,
            domain=domain,
            api_signature=api_signature,
            api_fields=[
                fields_decls[item.name] for item in api_signature
                if item.name in fields_decls
            ],
            parameters=[
                parameter_decls[item.name] for item in api_signature
                if item.name in parameter_decls
            ],
            computations=computations,
            externals=self.resolved_externals,
        )

        return self.definition_ir
Beispiel #56
0
def validate_(source):
    return validate_exp(ast.parse(source, mode="eval"))
Beispiel #57
0
def ast_node(expr: str) -> ast.AST:
    """Helper function to parse a string denoting an expression into an AST node"""
    # ast.parse returns "Module(body=[Node])"
    return ast.parse(expr).body[0]
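For instance, for an expression string the returned node is the ast.Expr statement wrapper, whose .value attribute holds the actual expression node:

node = ast_node("a + b")
assert isinstance(node, ast.Expr)
assert isinstance(node.value, ast.BinOp)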
Beispiel #58
0
def version():
    """Return version string."""
    with io.open('autopep8.py') as input_file:
        for line in input_file:
            if line.startswith('__version__'):
                return ast.parse(line).body[0].value.s
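The trailing `.value.s` relies on the old ast.Str-style attribute; on newer Pythons the parsed literal is an ast.Constant. A hedged variant (same file name and layout assumed) could hand the assigned node to ast.literal_eval instead, which accepts both node styles:

import ast
import io

def version():
    # Return the version string; ast.literal_eval works for both Str and
    # Constant nodes, so this does not depend on the deprecated .s attribute.
    with io.open('autopep8.py') as input_file:
        for line in input_file:
            if line.startswith('__version__'):
                return ast.literal_eval(ast.parse(line).body[0].value)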
Beispiel #59
0
def dump(source, mode):
    """Dump source after parsing with mode"""
    a = ast.parse(source, mode=mode)
    return ast.dump(a, annotate_fields=True, include_attributes=False)
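A quick usage sketch; the exact node names in the output depend on the Python version (for example Num on older versions versus Constant on 3.8+):

print(dump("x + 1", "eval"))
# On Python 3.9+, roughly:
# Expression(body=BinOp(left=Name(id='x', ctx=Load()), op=Add(), right=Constant(value=1)))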
Beispiel #60
0
 def __init__(self, source):
     self.names = list()
     self.imports = list()
     self.visit(ast.parse(source=source))