Esempio n. 1
0
 def __init__(self, filename,sliced=False):
     """Build (or reuse) a compiled VM extension module for *filename*.

     filename -- input file handed to the decompiler.
     sliced   -- when True, restrict decompilation to ports 0x02/0x03 and
                 build a separately-named module.

     NOTE(review): Python 2 code (print statement); do not run under py3.
     """
     # Name the extension module after the input file's basename
     # (.pyd -- presumably Windows-only; confirm on other platforms).
     vm_name = 'cvm_' + os.path.splitext(os.path.basename(filename))[0]
     if sliced:
         vm_name += '_sliced'
     vm_name += '.pyd'
     print 'Constructing VM for "' + filename + '" as ' + vm_name
     if sliced:
         self.vm_desc = vm_desc = decompiler.process_file(filename,interestingPorts=[0x02,0x03])
     else:
         self.vm_desc = vm_desc = decompiler.process_file(filename)
     sourcetime = os.path.getmtime(filename)

     # Work relative to this module's directory so generated sources and
     # the built module land next to the toolchain code.
     with changedir(file = __file__):
         compiler.savefiles(vm_desc.declarations, vm_desc.statements)
         # Rebuild only when the input file or any toolchain source file
         # is newer than the existing compiled module.
         rebuild = True
         if os.path.exists(vm_name):
             filetime = os.path.getmtime(vm_name)
             sources = [inspect.getsourcefile(CompiledVMConstructor),
                        inspect.getsourcefile(compiler),
                        inspect.getsourcefile(decompiler),
                        inspect.getsourcefile(decompiler.asm),
                        'compiled_vm.c']
             sourcetime = max([os.path.getmtime(s) for s in sources] + [sourcetime])
             if sourcetime < filetime:
                 #print 'Skipping rebuild'
                 rebuild = False
         if rebuild: compiler.build(vm_name)
         self.vmwrapper = compiler.vmwrapper(vm_name, vm_desc)
Esempio n. 2
0
    def _monitor_item(self, obj):
        """Register the source file that defines *obj* for monitoring.

        The file is resolved per object kind (Configuration -> its config
        file; components/menu handlers -> their class's source; plain
        classes -> their own source) and recorded in ``self.monitoring``
        together with a weak reference to the instance.
        """
        # don't monitor objects initialized in the interpreter
        if obj.__module__ == '__main__':
            return

        mod = obj.__module__
        cls = obj.__class__.__name__

        # Monitor the actual configuration file rather than the
        # module the Configuration class is defined in
        if isinstance(obj, Configuration):
            filename = obj.filename

        elif isinstance(obj, (Component, IMenuItemHandler)):
            filename = inspect.getsourcefile(obj.__class__)

        elif isinstance(obj, type):
            filename = inspect.getsourcefile(obj)

        else:
            # Fix: previously `filename` was left unbound here, so any
            # unrecognized object raised NameError below.  Skip it instead.
            return

        monitoring = self.monitoring.setdefault(filename, [])

        monitoring.append({
            'class': cls,
            'instance': weakref.ref(obj),
            'module': mod,
            })

        return
Esempio n. 3
0
def pmodule(c):
    """Class decorator that registers *c* as a processing module.

    The class is indexed by its lowercased name.  Re-registration from
    the same source file (e.g. a module reload) is tolerated; a clash
    between different files raises NameError.
    """
    global _pmodule_lookup

    name = c._name = c.__name__.lower()

    assert type(name) is str

    registered = _pmodule_lookup.setdefault(name, c)

    if registered is not c:
        # Another class already owns this name.  Allow it only when both
        # definitions come from the same file.
        if (inspect.getsourcefile(registered) == inspect.getsourcefile(c)
                or inspect.getfile(registered) == inspect.getfile(c)):
            return c

        raise NameError("Processing Module '%s' doubly defined in files %s and %s."
                        % (name, inspect.getfile(registered), inspect.getfile(c)))

    # First registration: wire up logging, parameters and the run preset.
    c.log = logging.getLogger(c._name)

    parameters.processPModule(c)

    parameters.registerPreset("r." + name, PModulePreset(name),
                              description = "Runs processing module '%s'." % c.__name__)

    logging.getLogger("Manager").debug("Processing module '%s' registered." % c.__name__)

    c._is_pmodule = True

    return c
Esempio n. 4
0
    def _runmapper(self, ftasks, fexamples, fout,
                   streaming_jar="/usr/lib/hadoop/contrib/streaming/" \
                   "hadoop-0.18.3-2cloudera0.3.0-streaming.jar"):
        """Runs the dumbomapper with input `ftasks` and
        `fexamples`.

        Returns the exit code of the hadoop-streaming subprocess.
        """
        import dumbomapper
        import auxtrainer

        # Ship the mapper and its helper modules to the hadoop job.
        fmapper = inspect.getsourcefile(dumbomapper)
        fauxtrainer = inspect.getsourcefile(auxtrainer)
        futil = inspect.getsourcefile(util)

        param = {"ftasks": ftasks, "fexamples": fexamples, "fout": fout,
                 "streaming_jar": streaming_jar, "futil": futil,
                 "fmapper": fmapper, "fauxtrainer": fauxtrainer}

        cmd = """hadoop jar %(streaming_jar)s \
        -input %(ftasks)s \
        -output %(fout)s \
        -mapper dumbomapper.py \
        -file %(fmapper)s \
        -file %(fauxtrainer)s \
        -file %(futil)s \
        -cacheFile %(fexamples)s#examples.npy \
        -jobconf mapred.reduce.tasks=0 \
        -jobconf mapred.input.format.class=org.apache.hadoop.mapred.lib.NLineInputFormat \
        -jobconf mapred.line.input.format.linespermap=1
        """ % param

        cmd = shlex.split(cmd)
        # Fix: the null device was previously opened READ-only ("r"), so the
        # child process could not write its stdout/stderr to it, and the fd
        # leaked on exception.  Open os.devnull for writing via a context
        # manager instead.
        with open(os.devnull, "w") as devnull:
            return subprocess.call(cmd, stdout=devnull, stderr=devnull)
Esempio n. 5
0
    def get_class(self):
        """Return a JSON payload with the (highlighted) source of the
        ancestor class named in the request's query string.

        XXX: if there is a monkey patch I won't know about it.
        """
        myclass = self.context.__class__
        ancestors = {}
        get_ancestors(myclass, ancestors)

        try:
            mysupclass_name = self.request['QUERY_STRING'].split('/')[0]
        except Exception:
            # No usable query string -- nothing to show.
            # (Narrowed from a bare except, which also swallowed
            # SystemExit/KeyboardInterrupt.)
            return ''

        # Robustness fix: an unknown class name previously raised a bare
        # KeyError out of this method; treat it like a missing query string.
        mysupclass = ancestors.get(mysupclass_name)
        if mysupclass is None:
            return ''

        try:
            # The first line doubles as a probe: getsourcefile raises
            # TypeError for built-ins, steering us to the fallback below.
            code = '### Reading' + inspect.getsourcefile(mysupclass)
            code = inspect.getsource(mysupclass)
            source = highlight(code, PythonLexer(), HtmlFormatter())

        except TypeError:
            source = '<pre>' + inspect.getsource(mysupclass) + '</pre>'
        except NameError:
            source = inspect.getsource(mysupclass)
        except Exception:
            source = ""

        status = 'Reading ' + inspect.getsourcefile(mysupclass)
        result = {'status': status, 'bottom': source}
        return json.dumps(result, ensure_ascii=True, indent=4)
Esempio n. 6
0
def reload_plugin_module(module):
    """Reload a plugin module previously loaded via ``load_plugin``.

    Anything that is not a module with a resolvable source file is
    returned unchanged.
    """
    from inspect import getsourcefile, ismodule
    if not ismodule(module):
        return module
    source = getsourcefile(module)
    if not source:
        return module
    return load_plugin(source, module.__name__)
Esempio n. 7
0
    def check(self):
        """Verify the published snapshot: all expected files exist, the
        Release/Sources contents match, and the GPG signatures verify."""
        super(PublishSnapshot16Test, self).check()

        expected_files = [
            'public/dists/maverick/InRelease',
            'public/dists/maverick/Release',
            'public/dists/maverick/Release.gpg',
            'public/dists/maverick/main/binary-i386/Packages',
            'public/dists/maverick/main/binary-i386/Packages.gz',
            'public/dists/maverick/main/binary-i386/Packages.bz2',
            'public/dists/maverick/main/binary-amd64/Packages',
            'public/dists/maverick/main/binary-amd64/Packages.gz',
            'public/dists/maverick/main/binary-amd64/Packages.bz2',
            'public/dists/maverick/main/source/Sources',
            'public/dists/maverick/main/source/Sources.gz',
            'public/dists/maverick/main/source/Sources.bz2',
            'public/pool/main/g/gnuplot/gnuplot-doc_4.6.1-1~maverick2_all.deb',
            'public/pool/main/g/gnuplot/gnuplot_4.6.1-1~maverick2.debian.tar.gz',
            'public/pool/main/g/gnuplot/gnuplot_4.6.1-1~maverick2.dsc',
            'public/pool/main/g/gnuplot/gnuplot_4.6.1.orig.tar.gz',
        ]
        for path in expected_files:
            self.check_exists(path)

        # verify contents except of sums
        sort_lines = lambda s: "\n".join(sorted(s.split("\n")))
        self.check_file_contents('public/dists/maverick/Release', 'release', match_prepare=strip_processor)
        self.check_file_contents('public/dists/maverick/main/source/Sources', 'sources', match_prepare=sort_lines)

        # verify signatures
        keyring = os.path.join(os.path.dirname(inspect.getsourcefile(BaseTest)), "files", "aptly.pub")
        aptly_home = os.path.join(os.environ["HOME"], ".aptly")
        self.run_cmd(["gpg", "--keyring", keyring,
                      "--verify", os.path.join(aptly_home, 'public/dists/maverick/InRelease')])
        self.run_cmd(["gpg", "--keyring", keyring,
                      "--verify", os.path.join(aptly_home, 'public/dists/maverick/Release.gpg'),
                      os.path.join(aptly_home, 'public/dists/maverick/Release')])
Esempio n. 8
0
    def run(self):
        """Render the named task's Run commands as a ``prompt`` directive."""
        task = getattr(tasks, 'task_%s' % (self.arguments[0],))

        commands = task()
        lines = ['.. prompt:: bash $', '']

        for command in commands:
            # Only plain Run commands can be shown as shell prompts.
            if type(command) != Run:
                raise self.error("task: %s not supported"
                                 % (type(command).__name__,))
            lines.append('   %s' % (command.command,))

        # Record the directive's dependencies (the task's defining file,
        # the tasks module and this file) so automatic regeneration
        # happens when any of them changes.
        dependencies = self.state.document.settings.record_dependencies
        for dep in (getsourcefile(task), getsourcefile(tasks), __file__):
            dependencies.add(dep)

        node = nodes.Element()
        self.state.nested_parse(StringList(lines), self.content_offset, node)
        return node.children
Esempio n. 9
0
    def check(self):
        """Verify the stripped-source snapshot: expected files exist,
        contents match, and the GPG signatures verify."""
        super(PublishSnapshot17Test, self).check()

        expected_files = [
            'public/dists/maverick/InRelease',
            'public/dists/maverick/Release',
            'public/dists/maverick/Release.gpg',
            'public/dists/maverick/main/binary-i386/Packages',
            'public/dists/maverick/main/binary-i386/Packages.gz',
            'public/dists/maverick/main/binary-i386/Packages.bz2',
            'public/dists/maverick/main/source/Sources',
            'public/dists/maverick/main/source/Sources.gz',
            'public/dists/maverick/main/source/Sources.bz2',
            'public/pool/main/p/pyspi/pyspi_0.6.1-1.3.dsc',
            'public/pool/main/p/pyspi/pyspi_0.6.1-1.3.diff.gz',
            'public/pool/main/p/pyspi/pyspi_0.6.1.orig.tar.gz',
            'public/pool/main/p/pyspi/pyspi-0.6.1-1.3.stripped.dsc',
            'public/pool/main/b/boost-defaults/libboost-program-options-dev_1.49.0.1_i386.deb',
        ]
        for path in expected_files:
            self.check_exists(path)

        # verify contents except of sums
        sort_lines = lambda s: "\n".join(sorted(s.split("\n")))
        self.check_file_contents('public/dists/maverick/Release', 'release', match_prepare=strip_processor)
        self.check_file_contents('public/dists/maverick/main/source/Sources', 'sources', match_prepare=sort_lines)
        self.check_file_contents('public/dists/maverick/main/binary-i386/Packages', 'binary', match_prepare=sort_lines)

        # verify signatures
        keyring = os.path.join(os.path.dirname(inspect.getsourcefile(BaseTest)), "files", "aptly.pub")
        aptly_home = os.path.join(os.environ["HOME"], ".aptly")
        self.run_cmd(["gpg", "--keyring", keyring,
                      "--verify", os.path.join(aptly_home, 'public/dists/maverick/InRelease')])
        self.run_cmd(["gpg", "--keyring", keyring,
                      "--verify", os.path.join(aptly_home, 'public/dists/maverick/Release.gpg'),
                      os.path.join(aptly_home, 'public/dists/maverick/Release')])
Esempio n. 10
0
def isCorrectPath(req):
    """Check that this plug-in bundle is installed in Plex's bundle
    directory; on mismatch, answer *req* with a 404 page describing the
    correct location.

    NOTE: Python 2 syntax (``except Exception, e``); targets the Plex
    plug-in runtime.
    """
    try:
        # Path of the running code, truncated at the '<NAME>.bundle'
        # component -- i.e. the directory the bundle actually lives in.
        installedPlugInPath = os.path.normpath(
            abspath(getsourcefile(lambda: 0)).split(str(NAME) + '.bundle', 1)[0])
        # Where Plex expects plug-in bundles to be installed.
        targetPath = os.path.normpath(Core.storage.join_path(
            Core.app_support_path, Core.config.bundles_dir_name))
        if installedPlugInPath != targetPath:
            Log.Debug('************************************************')
            Log.Debug('Wrong installation path detected!!!!')
            Log.Debug('')
            Log.Debug('Currently installed in:')
            Log.Debug(installedPlugInPath)
            Log.Debug('Correct path is:')
            Log.Debug(Core.storage.join_path(Core.app_support_path,
                                             Core.config.bundles_dir_name, NAME + '.bundle'))
            Log.Debug('************************************************')
            # Re-derive the path, trimmed at '/Contents', for the
            # user-facing message.
            installedPlugInPath, skipStr = abspath(
                getsourcefile(lambda: 0)).split('/Contents', 1)
            msg = '<h1>Wrong installation path detected</h1>'
            msg = msg + '<p>It seems like you installed ' + \
                NAME + ' into the wrong folder</p>'
            msg = msg + '<p>You installed ' + NAME + ' here:<p>'
            msg = msg + installedPlugInPath
            msg = msg + '<p>but the correct folder is:<p>'
            msg = msg + Core.storage.join_path(
                Core.app_support_path, Core.config.bundles_dir_name, NAME + '.bundle')
            req.clear()
            req.set_status(404)
            req.finish(msg)
        else:
            Log.Info('Verified a correct install path as: ' + targetPath)
    except Exception, e:
        Log.Exception('Exception in isCorrectPath was %s' % (str(e)))
Esempio n. 11
0
def _analyzeGens(top, absnames):
    """Parse and analyze every generator/always instance in *top*.

    Returns a list of ASTs (one per item), each annotated with the
    metadata (source file, line offset, symbol table, name, ...) that
    the later analysis/conversion visitors rely on.

    NOTE: Python 2 code (print statement, func_globals/func_code).
    """
    genlist = []
    for g in top:
        if isinstance(g, _UserCode):
            # User-supplied code objects pass through untouched.
            tree = g
        elif isinstance(g, (_AlwaysComb, _AlwaysSeq, _Always)):
            f = g.func
            s = inspect.getsource(f)
            s = _dedent(s)
            tree = ast.parse(s)
            #print ast.dump(tree)
            # lineoffset maps AST line numbers back to the real file.
            tree.sourcefile  = inspect.getsourcefile(f)
            tree.lineoffset = inspect.getsourcelines(f)[1]-1
            tree.symdict = f.func_globals.copy()
            tree.callstack = []
            # handle free variables
            tree.nonlocaldict = {}
            if f.func_code.co_freevars:
                for n, c in zip(f.func_code.co_freevars, f.func_closure):
                    obj = _cell_deref(c)
                    if isinstance(g, _AlwaysComb):
                        # Combinational blocks may only close over ints,
                        # enums, signals, memories or tuples of ints.
                        if not ( isinstance(obj, (int, long, EnumType,_Signal)) or \
                                 _isMem(obj) or _isTupleOfInts(obj)
                               ):
                            info =  "File %s, line %s: " % (tree.sourcefile, tree.lineoffset)
                            print type(obj)
                            raise ConversionError(_error.UnsupportedType, n, info)
                    tree.symdict[n] = obj
                    # currently, only intbv as automatic nonlocals (until Python 3.0)
                    if isinstance(obj, intbv):
                        tree.nonlocaldict[n] = obj
            tree.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _FirstPassVisitor(tree)
            v.visit(tree)
            # Pick the analysis visitor matching the decorator kind.
            if isinstance(g, _AlwaysComb):
                v = _AnalyzeAlwaysCombVisitor(tree, g.senslist)
            elif isinstance(g, _AlwaysSeq):
                v = _AnalyzeAlwaysSeqVisitor(tree, g.senslist, g.reset, g.sigregs, g.varregs)
            else:
                v = _AnalyzeAlwaysDecoVisitor(tree, g.senslist)
            v.visit(tree)
        else: # @instance
            # Generators are introspected through their (paused) frame.
            f = g.gen.gi_frame
            s = inspect.getsource(f)
            s = _dedent(s)
            tree = ast.parse(s)
            # print ast.dump(tree)
            tree.sourcefile = inspect.getsourcefile(f)
            tree.lineoffset = inspect.getsourcelines(f)[1]-1
            tree.symdict = f.f_globals.copy()
            tree.symdict.update(f.f_locals)
            tree.nonlocaldict = {}
            tree.callstack = []
            tree.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _FirstPassVisitor(tree)
            v.visit(tree)
            v = _AnalyzeBlockVisitor(tree)
            v.visit(tree)
        genlist.append(tree)
    return genlist
Esempio n. 12
0
    def getSchemaForContentType(self):
        """Describe every field of the context's schema, keyed by field
        id in sorted order.

        Each entry records the field and widget class names, where they
        are defined (source file and line), plus the widget's label,
        condition and visibility.
        """
        result = OrderedDict()
        schema = self.context.Schema()

        for field_id in sorted(schema.keys()):
            field = schema[field_id]
            widget = field.widget

            field_cls = field.__class__
            widget_cls = widget.__class__

            result[field_id] = {'field' : field_cls.__name__,
                                'field_py_file' : inspect.getsourcefile(field_cls),
                                'field_py_lineno' : inspect.getsourcelines(field_cls)[1],
                                'widget' : widget_cls.__name__,
                                'widget_py_file' : inspect.getsourcefile(widget_cls),
                                'widget_py_lineno' : inspect.getsourcelines(widget_cls)[1],
                                'label' : widget.label,
                                'condition' : widget.getCondition(),
                                'visibility' : widget.visible,}

        return result
Esempio n. 13
0
def linkcode_resolve(domain, info):
    """
    Determine the URL corresponding to Python object

    Returns a GitHub blob URL (with an #Lx-Ly fragment when the source
    lines can be located) for objects defined inside scipy; None
    otherwise.
    """
    if domain != 'py':
        return None

    modname = info['module']
    fullname = info['fullname']

    submod = sys.modules.get(modname)
    if submod is None:
        return None

    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except Exception:
            # Narrowed from a bare except (which also caught SystemExit /
            # KeyboardInterrupt); any resolution failure means "no link".
            return None

    try:
        fn = inspect.getsourcefile(obj)
    except Exception:
        fn = None
    if not fn:
        # Fall back to the file of the object's defining module.
        try:
            fn = inspect.getsourcefile(sys.modules[obj.__module__])
        except Exception:
            fn = None
    if not fn:
        return None

    try:
        source, lineno = inspect.getsourcelines(obj)
    except Exception:
        lineno = None

    if lineno:
        linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1)
    else:
        linespec = ""

    startdir = os.path.abspath(os.path.join(dirname(scipy.__file__), '..'))
    fn = relpath(fn, start=startdir).replace(os.path.sep, '/')

    if fn.startswith('scipy/'):
        # Link the exact commit for dev0+hash builds, master for other
        # dev versions, and the release tag otherwise.
        m = re.match(r'^.*dev0\+([a-f0-9]+)$', scipy.__version__)
        if m:
            return "https://github.com/scipy/scipy/blob/%s/%s%s" % (
                m.group(1), fn, linespec)
        elif 'dev' in scipy.__version__:
            return "https://github.com/scipy/scipy/blob/master/%s%s" % (
                fn, linespec)
        else:
            return "https://github.com/scipy/scipy/blob/v%s/%s%s" % (
                scipy.__version__, fn, linespec)
    else:
        return None
Esempio n. 14
0
    def test_a110_one(self):
        """Drive ``run one`` interactively through a pty: start the shell
        with the sample handler, issue run()/crawl() commands, then quit
        and check the scheduler shuts down."""
        pid, fd = os.forkpty()
        #cmd = [sys.executable]
        cmd = ['coverage', 'run']
        cmd += [
            inspect.getsourcefile(run),
            'one',
            '-i',
            inspect.getsourcefile(data_sample_handler)
        ]

        if pid == 0:
            # child
            os.execvp(cmd[0], cmd)
        else:
            # parent
            def wait_text(timeout=1):
                # Drain pty output until nothing arrives within `timeout`
                # seconds, echoing it for the test log.
                import select
                text = []
                while True:
                    rl, wl, xl = select.select([fd], [], [], timeout)
                    if not rl:
                        break
                    try:
                        t = os.read(fd, 1024)
                    except OSError:
                        break
                    if not t:
                        break
                    t = utils.text(t)
                    text.append(t)
                    print(t, end='')
                return ''.join(text)

            text = wait_text()
            self.assertIn('new task data_sample_handler:on_start', text)
            self.assertIn('pyspider shell', text)

            os.write(fd, utils.utf8('run()\n'))
            text = wait_text()
            self.assertIn('task done data_sample_handler:on_start', text)

            os.write(fd, utils.utf8('crawl("%s/pyspider/test.html")\n' % self.httpbin))
            text = wait_text()
            self.assertIn('/robots.txt', text)

            # Larger timeout: this crawl fetches and renders more links.
            os.write(fd, utils.utf8('crawl("%s/links/10/0")\n' % self.httpbin))
            text = wait_text(2)
            self.assertIn('"title": "Links"', text)

            os.write(fd, utils.utf8('crawl("%s/404")\n' % self.httpbin))
            text = wait_text()
            self.assertIn('task retry', text)

            os.write(fd, b'quit_pyspider()\n')
            text = wait_text()
            self.assertIn('scheduler exiting...', text)
            os.close(fd)
            os.kill(pid, signal.SIGINT)
Esempio n. 15
0
 def test_getsourcefile(self):
     """inspect.getsourcefile resolves functions to their module's file,
     and returns None for code compiled with a bogus filename -- unless
     linecache has cached source under that filename."""
     self.assertEqual(normcase(inspect.getsourcefile(mod.spam)), modfile)
     self.assertEqual(normcase(inspect.getsourcefile(git.abuse)), modfile)
     # A code object whose filename does not exist has no source file...
     fn = "_non_existing_filename_used_for_sourcefile_test.py"
     co = compile("None", fn, "exec")
     self.assertEqual(inspect.getsourcefile(co), None)
     # ...unless the linecache already holds source for that filename.
     linecache.cache[co.co_filename] = (1, None, "None", co.co_filename)
     self.assertEqual(normcase(inspect.getsourcefile(co)), fn)
Esempio n. 16
0
 def test_20_create_error(self):
     """Uploading to invalid remote names must raise OperationFailed."""
     import easywebdav
     with self.assertRaises(easywebdav.OperationFailed):
         self.webdav.upload(inspect.getsourcefile(data_sample_handler),
                            'bad_file_name')
     with self.assertRaises(easywebdav.OperationFailed):
         self.webdav.upload(inspect.getsourcefile(data_sample_handler),
                            'bad.file.name')
 def ok_to_add_function(self, member, member_name, parent):
     """Return True when function *member* should be documented under
     *parent*: it must be defined in the same source file, and *parent*
     must be a module (otherwise the skip is logged in red).

     NOTE: Python 2 code (print statement).
     """
     if inspect.getsourcefile(member) != inspect.getsourcefile(parent):
         return False
     if not inspect.ismodule(parent):
         msg = "def {}(...): IGNORED because {} is not a module.".format(member.__name__, parent.__name__)
         print t.red(msg)
         return False
     return True
    def ok_to_add_klass(self, member, parent):
        """Accept class *member* only when its name does not start with
        the target's name and it is defined in *parent*'s source file."""
        if member.__name__.startswith(self.target.__name__):  # TODO: why?
            return False

        # Same-file check doubles as the accept/reject decision.
        return inspect.getsourcefile(member) == inspect.getsourcefile(parent)
Esempio n. 19
0
 def get_file(self):
     """Return the source file of the wrapped object.

     Functions resolve to their own file, everything else to the file of
     its class; built-ins (which have no Python source) yield "".
     """
     target = self.obj if inspect.isfunction(self.obj) else self.obj.__class__
     try:
         return inspect.getsourcefile(target)
     except TypeError:
         # Built-in module, class, or function -- no source file.
         return ""
Esempio n. 20
0
def linkcode_resolve(domain, info):
    """Determine the URL corresponding to Python object

    This code is stolen with thanks from the scipy team.

    Returns a GitHub tree URL (with a #L fragment when the source line is
    known) for objects inside the gwpy package, else None.
    """
    if domain != 'py':
        return None

    modname = info['module']
    fullname = info['fullname']

    submod = sys.modules.get(modname)
    if submod is None:
        return None

    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except Exception:
            # Narrowed from a bare except, which also swallowed
            # SystemExit/KeyboardInterrupt.
            return None
    # try and sneak past a decorator
    try:
        obj = obj.im_func.func_closure[0].cell_contents
    except (AttributeError, TypeError):
        pass

    try:
        fn = inspect.getsourcefile(obj)
    except Exception:
        fn = None
    if not fn:
        # Fall back to the file of the object's defining module.
        try:
            fn = inspect.getsourcefile(sys.modules[obj.__module__])
        except Exception:
            fn = None
    if not fn:
        return None

    try:
        source, lineno = inspect.findsource(obj)
    except Exception:
        lineno = None

    # findsource returns a 0-based index, so `lineno` may legitimately be
    # 0 for an object starting on the file's first line; test against
    # None (the old truthiness test dropped the anchor in that case).
    if lineno is not None:
        linespec = "#L%d" % (lineno + 1)
    else:
        linespec = ""

    fn = os.path.relpath(fn, start=os.path.dirname(gwpy.__file__))
    if fn.startswith(os.path.pardir):
        # Not inside the gwpy package tree.
        return None

    tag = gwpy_version.git_tag or gwpy_version.git_branch
    return ("http://github.com/gwpy/gwpy/tree/%s/gwpy/%s%s"
            % (tag, fn, linespec))
Esempio n. 21
0
def _wrapforsplop(optype, wrapped, style, docpy):
    """Wrap a callable class or function as an SPL operator of *optype*.

    Attaches the ``_splpy_*`` metadata attributes consumed elsewhere;
    classes are subclassed to hook the ec enter/exit lifecycle, plain
    functions are annotated in place and returned unchanged.
    """
    if inspect.isclass(wrapped):
        if not callable(wrapped):
            raise TypeError('Class must be callable')

        _valid_identifier(wrapped.__name__)

        # Subclass the wrapped class so each instance registers itself
        # with the ec context and runs the enter/exit hooks.
        class _op_class(wrapped):

            __doc__ = wrapped.__doc__

            @functools.wraps(wrapped.__init__)
            def __init__(self,*args,**kwargs):
                super(_op_class, self).__init__(*args,**kwargs)
                if ec._is_supported():
                    ec._save_opc(self)
                ec._callable_enter(self)

            def _splpy_shutdown(self):
                ec._callable_exit_clean(self)

        _op_class.__wrapped__ = wrapped
        # _op_class.__doc__ = wrapped.__doc__
        _op_class._splpy_optype = optype
        _op_class._splpy_callable = 'class'
        # Style/fixed-count analysis only applies to callable instances.
        if hasattr(wrapped, '__call__'):
            _op_class._splpy_style = _define_style(wrapped, wrapped.__call__, style)
            _op_class._splpy_fixed_count = _define_fixed(_op_class, _op_class.__call__)
        _op_class._splpy_file = inspect.getsourcefile(wrapped)
        _op_class._splpy_docpy = docpy
        return _op_class
    if not inspect.isfunction(wrapped):
        raise TypeError('A function or callable class is required')

    _valid_identifier(wrapped.__name__)

    #fnstyle =

    #if fnstyle == 'tuple':
    #    @functools.wraps(wrapped)
    #    def _op_fn(*args):
    #        return wrapped(args)
    #else:
    #    @functools.wraps(wrapped)
    #    def _op_fn(*args, **kwargs):
    #       return wrapped(*args, **kwargs)
    _op_fn = wrapped

    # Functions are annotated in place rather than wrapped.
    _op_fn._splpy_optype = optype
    _op_fn._splpy_callable = 'function'
    _op_fn._splpy_style = _define_style(_op_fn, _op_fn, style)
    _op_fn._splpy_fixed_count = _define_fixed(_op_fn, _op_fn)
    _op_fn._splpy_file = inspect.getsourcefile(wrapped)
    _op_fn._splpy_docpy = docpy
    return _op_fn
Esempio n. 22
0
    def check(self):
        """Verify the published snapshot: expected files, Release
        contents, GPG signatures, and every size/checksum listed in the
        Release index."""
        super(PublishSnapshot1Test, self).check()

        expected_files = [
            'public/dists/maverick/InRelease',
            'public/dists/maverick/Release',
            'public/dists/maverick/Release.gpg',
            'public/dists/maverick/main/binary-i386/Packages',
            'public/dists/maverick/main/binary-i386/Packages.gz',
            'public/dists/maverick/main/binary-i386/Packages.bz2',
            'public/dists/maverick/main/binary-amd64/Packages',
            'public/dists/maverick/main/binary-amd64/Packages.gz',
            'public/dists/maverick/main/binary-amd64/Packages.bz2',
            'public/pool/main/g/gnuplot/gnuplot-doc_4.6.1-1~maverick2_all.deb',
        ]
        for path in expected_files:
            self.check_exists(path)

        # verify contents except of sums
        self.check_file_contents('public/dists/maverick/Release', 'release', match_prepare=strip_processor)

        # verify signatures
        keyring = os.path.join(os.path.dirname(inspect.getsourcefile(BaseTest)), "files", "aptly.pub")
        aptly_home = os.path.join(os.environ["HOME"], ".aptly")
        self.run_cmd(["gpg", "--keyring", keyring,
                      "--verify", os.path.join(aptly_home, 'public/dists/maverick/InRelease')])
        self.run_cmd(["gpg", "--keyring", keyring,
                      "--verify", os.path.join(aptly_home, 'public/dists/maverick/Release.gpg'),
                      os.path.join(aptly_home, 'public/dists/maverick/Release')])

        # verify sums
        pathsSeen = set()
        for entry in self.read_file('public/dists/maverick/Release').split("\n"):
            # Checksum lines in a Release file are indented by one space.
            if not entry.startswith(" "):
                continue
            fileHash, fileSize, path = entry.split()
            pathsSeen.add(path)

            fileSize = int(fileSize)

            st = os.stat(os.path.join(os.environ["HOME"], ".aptly", 'public/dists/maverick/', path))
            if fileSize != st.st_size:
                raise Exception("file size doesn't match for %s: %d != %d" % (path, fileSize, st.st_size))

            # The digest is identified by its hex length: MD5 (32),
            # SHA-1 (40), otherwise SHA-256.
            if len(fileHash) == 32:
                h = hashlib.md5()
            elif len(fileHash) == 40:
                h = hashlib.sha1()
            else:
                h = hashlib.sha256()
            h.update(self.read_file(os.path.join('public/dists/maverick', path)))

            if h.hexdigest() != fileHash:
                raise Exception("file hash doesn't match for %s: %s != %s" % (path, fileHash, h.hexdigest()))

        expectedPaths = set(['main/binary-amd64/Packages', 'main/binary-i386/Packages', 'main/binary-i386/Packages.gz',
                             'main/binary-amd64/Packages.gz', 'main/binary-amd64/Packages.bz2', 'main/binary-i386/Packages.bz2'])
        if pathsSeen != expectedPaths:
            raise Exception("path seen wrong: %r" % (pathsSeen, ))
    def ok_to_add_method(self, member, parent):
        """Return True when *member* is defined in the same file as
        *parent* and its definition falls within *parent*'s line span."""
        if inspect.getsourcefile(member) != inspect.getsourcefile(parent):
            return False

        # Use line inspection to work out whether the method is defined on
        # this klass.  Possibly not the best way, but I can't think of
        # another atm.
        member_lines, member_start = inspect.getsourcelines(member)
        parent_lines, parent_start = inspect.getsourcelines(parent)
        return parent_start <= member_start <= parent_start + len(parent_lines)
Esempio n. 24
0
    def prepare(self):
        """Stage the 'changes' fixture tree (plus the pyspi diff) in a
        temp directory and append it to the aptly command under test."""
        super(IncludeRepo5Test, self).prepare()

        self.tempSrcDir = tempfile.mkdtemp()

        # Fixtures live next to the BaseTest source file.
        fixture_dir = os.path.dirname(inspect.getsourcefile(BaseTest))
        dest_dir = os.path.join(self.tempSrcDir, "01")

        shutil.copytree(os.path.join(fixture_dir, "changes"), dest_dir)
        shutil.copy(os.path.join(fixture_dir, "files", "pyspi_0.6.1-1.3.diff.gz"),
                    os.path.join(dest_dir, "pyspi_0.6.1-1.3.diff.gz"))

        self.runCmd += self.tempSrcDir
 def ok_to_add_klass(self, member, parent):
     """Accept class *member* when it is defined in *parent*'s source
     file; classes from other files whose module mentions the parent are
     recorded via add_new_import_path.  Built-ins (TypeError) are
     rejected."""
     # TODO: why?
     if any(member.__name__.startswith(source.__name__) for source in self.sources):
         return False
     try:
         if inspect.getsourcefile(member) != inspect.getsourcefile(parent):
             # Defined elsewhere: remember the import path when the
             # module name references the parent, then reject.
             if parent.__name__ in member.__module__:
                 self.add_new_import_path(member, parent)
             return False
     except TypeError:
         # Built-in member or parent -- no source file to compare.
         return False
     return True
Esempio n. 26
0
def _analyzeGens(top, absnames):
    """Parse and analyze every generator/always instance in *top* using
    the legacy ``compiler`` module.

    Returns a list of annotated ASTs, one per item, carrying the
    metadata (source file, line offset, symbol table, name) that the
    analysis visitors need.

    NOTE: Python 2 only (``compiler`` module, func_globals/func_code).
    """
    genlist = []
    for g in top:
        if isinstance(g, _UserCode):
            # User-supplied code objects pass through untouched.
            ast = g
        elif isinstance(g, (_AlwaysComb, _Always)):
            f = g.func
            s = inspect.getsource(f)
            # remove decorators
            s = re.sub(r"@.*", "", s)
            s = s.lstrip()
            ast = compiler.parse(s)
            # print ast
            # lineoffset maps AST line numbers back to the real file.
            ast.sourcefile = inspect.getsourcefile(f)
            ast.lineoffset = inspect.getsourcelines(f)[1]-1
            ast.symdict = f.func_globals.copy()
            ast.callstack = []
            # handle free variables
            if f.func_code.co_freevars:
                for n, c in zip(f.func_code.co_freevars, f.func_closure):
                    obj = _cell_deref(c)
                    if isinstance(g, _AlwaysComb):
                        # print type(obj)
                        # Combinational blocks may only close over ints,
                        # signals, memories or tuples of ints.
                        assert isinstance(obj, (int, long, Signal)) or \
                               _isMem(obj) or _isTupleOfInts(obj)
                    ast.symdict[n] = obj
            ast.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _NotSupportedVisitor(ast)
            compiler.walk(ast, v)
            # Pick the analysis visitor matching the decorator kind.
            if isinstance(g, _AlwaysComb):
                v = _AnalyzeAlwaysCombVisitor(ast, g.senslist)
            else:
                v = _AnalyzeAlwaysDecoVisitor(ast, g.senslist)
            compiler.walk(ast, v)
        else: # @instance
            # Generators are introspected through their (paused) frame.
            f = g.gen.gi_frame
            s = inspect.getsource(f)
            # remove decorators
            s = re.sub(r"@.*", "", s)
            s = s.lstrip()
            ast = compiler.parse(s)
            # print ast
            ast.sourcefile = inspect.getsourcefile(f)
            ast.lineoffset = inspect.getsourcelines(f)[1]-1
            ast.symdict = f.f_globals.copy()
            ast.symdict.update(f.f_locals)
            ast.callstack = []
            ast.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _NotSupportedVisitor(ast)
            compiler.walk(ast, v)
            v = _AnalyzeBlockVisitor(ast)
            compiler.walk(ast, v)
        genlist.append(ast)
    return genlist
    def ok_to_add_method(self, member, parent):
        """Return True when *member* is defined in the same file as
        *parent* and its definition falls within *parent*'s line span."""
        if inspect.getsourcefile(member) != inspect.getsourcefile(parent):
            return False

        # Use line inspection to work out whether the method is defined on
        # this klass.  PROBABLY NOT THE BEST WAY
        lines, start_line = inspect.getsourcelines(member)
        # Fix: the original compared against `klass_line_start` and
        # `klass_line_end`, names that are defined nowhere and raised
        # NameError at runtime.  Derive the span from *parent* instead,
        # matching the sibling implementation of this method.
        parent_lines, parent_start_line = inspect.getsourcelines(parent)
        if start_line < parent_start_line or start_line > parent_start_line + len(parent_lines):
            return False

        return True
def linkcode_resolve(domain, info):
    """
    Determine the URL corresponding to Python object

    Code from scipy:
        http://nullege.com/codes/show/src@s@c@scipy-HEAD@doc@[email protected]

    Returns None when the object cannot be resolved to a source location.
    """
    if domain != 'py':
        return None

    modname = info['module']
    fullname = info['fullname']

    submod = sys.modules.get(modname)
    if submod is None:
        return None

    # Walk the dotted attribute path down from the module.
    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate.
            return None

    try:
        fn = inspect.getsourcefile(obj)
    except Exception:
        fn = None
    if not fn:
        # Fall back to the source file of the defining module.
        try:
            fn = inspect.getsourcefile(sys.modules[obj.__module__])
        except Exception:
            fn = None
    if not fn:
        return None

    try:
        source, lineno = inspect.findsource(obj)
    except Exception:
        lineno = None

    # findsource() returns a 0-based index; GitHub anchors are 1-based.
    if lineno:
        linespec = "#L%d" % (lineno + 1)
    else:
        linespec = ""

    fn = relpath(fn, start=dirname(FlowCytometryTools.__file__))

    uri_to_doc = r'//github.com/eyurtsev/FlowCytometryTools/blob/{hexsha}/FlowCytometryTools/{file_name}{line_number}'

    return uri_to_doc.format(hexsha=commit_hash, file_name=fn,
            line_number=linespec)
Esempio n. 29
0
File: add.py Progetto: antonio/aptly
    def prepare(self):
        """Create a nested temp source tree and copy the pyspi fixture
        files into it before the aptly `add` command runs."""
        super(AddRepo5Test, self).prepare()

        self.tempSrcDir = tempfile.mkdtemp()
        # 0o755: the original `0755` literal is Python-2-only syntax and a
        # SyntaxError on Python 3; the value is unchanged.
        os.makedirs(os.path.join(self.tempSrcDir, "02", "03"), 0o755)

        # Fixture files live in a "files" directory next to BaseTest's source.
        fixture_dir = os.path.join(
            os.path.dirname(inspect.getsourcefile(BaseTest)), "files")
        shutil.copy(os.path.join(fixture_dir, "pyspi_0.6.1-1.3.dsc"),
            os.path.join(self.tempSrcDir, "02", "03"))
        shutil.copy(os.path.join(fixture_dir, "pyspi_0.6.1.orig.tar.gz"),
            os.path.join(self.tempSrcDir, "02", "03"))

        self.runCmd += self.tempSrcDir
Esempio n. 30
0
    def test_a100_all(self):
        """End-to-end smoke test: launch the full pyspider stack under
        coverage, trigger a run via the web UI, and wait for >= 5 successes.

        The spawned process group is always killed in the `finally` block;
        the previous `except: raise` wrapper was redundant (a bare re-raise
        adds nothing over plain try/finally) and has been removed.
        """
        import subprocess

        # cmd = [sys.executable]
        cmd = ["coverage", "run"]
        p = subprocess.Popen(
            cmd
            + [
                inspect.getsourcefile(run),
                "--taskdb",
                "sqlite+taskdb:///data/tests/all_test_task.db",
                "--resultdb",
                "sqlite+resultdb:///data/tests/all_test_result.db",
                "--projectdb",
                "local+projectdb://" + inspect.getsourcefile(data_sample_handler),
                "all",
            ],
            close_fds=True,
            preexec_fn=os.setsid,
        )

        try:
            # Poll until the web UI answers; each ConnectionError consumes
            # one retry.
            limit = 30
            while limit >= 0:
                time.sleep(3)
                # click run
                try:
                    requests.post("http://localhost:5000/run", data={"project": "data_sample_handler"})
                except requests.exceptions.ConnectionError:
                    limit -= 1
                    continue
                break

            # Wait (bounded) for at least 5 successful fetches in the
            # 5-minute counter window.
            limit = 30
            data = requests.get("http://localhost:5000/counter")
            self.assertEqual(data.status_code, 200)
            while data.json().get("data_sample_handler", {}).get("5m", {}).get("success", 0) < 5:
                time.sleep(1)
                data = requests.get("http://localhost:5000/counter")
                limit -= 1
                if limit <= 0:
                    break

            self.assertGreater(limit, 0)
            rv = requests.get("http://localhost:5000/results?project=data_sample_handler")
            self.assertIn("<th>url</th>", rv.text)
            self.assertIn("class=url", rv.text)
        finally:
            # Always tear down the whole process group started above.
            time.sleep(1)
            os.killpg(p.pid, signal.SIGTERM)
            p.wait()
Esempio n. 31
0
 def dependencies(self):
     """Return the parent's dependencies plus the source file of every
     registered permission set (deduplicated, as a list)."""
     dependencies = set(super(PermissionsDataSource, self).dependencies)
     # dict.values() (not the Python-2-only itervalues()) keeps this
     # working on both Python 2 and Python 3 with identical results.
     for permission_set in self.permission_sets.values():
         dependencies.add(inspect.getsourcefile(permission_set))
     return list(dependencies)
Esempio n. 32
0
def check_rest(module, names, dots=True):
    """
    Check reStructuredText formatting of docstrings

    Returns: [(name, success_flag, output), ...]
    """

    # `unicode` only exists on Python 2; probe for it via NameError.
    try:
        skip_types = (dict, str, unicode, float, int)
    except NameError:
        # python 3
        skip_types = (dict, str, float, int)

    results = []

    if module.__name__[6:] not in OTHER_MODULE_DOCS:
        results += [(module.__name__,) +
                    validate_rst_syntax(inspect.getdoc(module),
                                        module.__name__, dots=dots)]

    for name in names:
        full_name = module.__name__ + '.' + name
        obj = getattr(module, name, None)

        if obj is None:
            results.append((full_name, False, "%s has no docstring" % (full_name,)))
            continue
        elif isinstance(obj, skip_types):
            continue

        if inspect.ismodule(obj):
            # getdoc() may return None for an undocumented module; fall back
            # to '' so the re.search() below cannot raise TypeError.
            text = inspect.getdoc(obj) or ''
        else:
            try:
                text = str(get_doc_object(obj))
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit propagate; parser failures are still reported.
                import traceback
                results.append((full_name, False,
                                "Error in docstring format!\n" +
                                traceback.format_exc()))
                continue

        # Reject non-printable control characters (typically an unescaped
        # backslash sequence in a non-raw docstring).
        m = re.search("([\x00-\x09\x0b-\x1f])", text)
        if m:
            msg = ("Docstring contains a non-printable character %r! "
                   "Maybe forgot r\"\"\"?" % (m.group(1),))
            results.append((full_name, False, msg))
            continue

        try:
            src_file = short_path(inspect.getsourcefile(obj))
        except TypeError:
            # Builtins and C extensions have no source file.
            src_file = None

        if src_file:
            file_full_name = src_file + ':' + full_name
        else:
            file_full_name = full_name

        results.append((full_name,) + validate_rst_syntax(text, file_full_name, dots=dots))

    return results
Esempio n. 33
0
def getsourcefile(object):  # pylint: disable=redefined-builtin
  """TFDecorator-aware replacement for inspect.getsourcefile."""
  undecorated_target = tf_decorator.unwrap(object)[1]
  return _inspect.getsourcefile(undecorated_target)
Esempio n. 34
0
 def test_dump(self):
     # An empty input produces just the header/footer frame.
     expected_empty = header + "\n" + footer
     self.assertEqual(expected_empty, dump([]))
     # Dumping the fixture source emits the serialized enum between them.
     expected_with_enum = header + "\n" + serialize_enum(ENUM) + "\n" + footer
     self.assertEqual(expected_with_enum,
                      dump([getsourcefile(TestFunctions)]))
Esempio n. 35
0
#! /usr/bin/env python3

import os
import subprocess
import sys
import platform
import argparse
import inspect
import re
from glob import glob

# See stackoverflow.com/questions/2632199: __file__ nor sys.argv[0]
# are guaranteed to always work, this one should though.
BASEPATH = os.path.dirname(os.path.abspath(
    inspect.getsourcefile(lambda: None)))


def base_path(*p):
    """Join *p* onto BASEPATH and return an absolute, forward-slash path."""
    joined = os.path.join(BASEPATH, *p)
    return os.path.abspath(joined).replace("\\", "/")


# Tests require at least CPython 3.3. If your default python3 executable
# is of lower version, you can point MICROPY_CPYTHON3 environment var
# to the correct executable.
if os.name == "nt":
    CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python")
    MICROPYTHON = os.getenv("MICROPY_MICROPYTHON",
                            base_path("../ports/windows/micropython.exe"))
else:
    CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python3")
    MICROPYTHON = os.getenv("MICROPY_MICROPYTHON",
Esempio n. 36
0
        log.info(error)
    #Inspect for Python
    if not path or not os.path.exists(path):
        log.info(
            'This is not a known Mel command, inspecting Python libs for : %s'
            % value)
        try:
            log.debug('value :  %s' % value)
            # NOTE(review): the next four log.debug calls pass a second
            # positional argument but the message has no %s placeholder, so
            # the logging module will not interpolate it -- confirm whether
            # '%s' formatting was intended here.
            log.debug('value isString : ', isinstance(value, str))
            log.debug('value callable: ', callable(value))
            log.debug('value is module : ', inspect.ismodule(value))
            log.debug('value is method : ', inspect.ismethod(value))
            if isinstance(value, str):
                #if not callable(value):
                # NOTE(review): eval() resolves the name string to a live
                # object -- dangerous if `value` can be untrusted input.
                value = eval(value)
            path = inspect.getsourcefile(value)
            if path:
                #sourceType='python'
                log.info('path : %s' % path)
        # Python-2-only syntax (`except X, e` and StandardError).
        except StandardError, error:
            log.exception(error)

    #Open the file with the default editor
    #FIXME: If Python and you're a dev then the .py file may be set to open in the default
    #Python runtime/editor and won't open as expected. Need to look at this.
    if path and os.path.exists(path):
        log.debug('NormPath : %s' % os.path.normpath(path))
        # os.startfile is Windows-only.
        os.startfile(os.path.normpath(path))
        return True
    else:
        log.warning('No valid path or functions found matches selection')
Esempio n. 37
0
def load_manifest():
    """Build and return the aggregate plugin manifest.

    Layering order (later sources extend, not replace, earlier ones):
    builtin adapters, then opentimelineio_contrib (if importable), then
    setuptools entry-point plugins, then any manifests listed in the
    OTIO_PLUGIN_MANIFEST_PATH environment variable.
    """
    # build the manifest of adapters, starting with builtin adapters
    result = manifest_from_file(
        os.path.join(
            os.path.dirname(os.path.dirname(inspect.getsourcefile(core))),
            "adapters", "builtin_adapters.plugin_manifest.json"))

    # layer contrib plugins after built in ones
    try:
        import opentimelineio_contrib as otio_c

        contrib_manifest = manifest_from_file(
            os.path.join(os.path.dirname(inspect.getsourcefile(otio_c)),
                         "adapters", "contrib_adapters.plugin_manifest.json"))
        result.adapters.extend(contrib_manifest.adapters)
        result.media_linkers.extend(contrib_manifest.media_linkers)
    except ImportError:
        # Contrib package is optional; silently continue without it.
        pass

    # Discover setuptools-based plugins
    if pkg_resources:
        for plugin in pkg_resources.iter_entry_points(
                "opentimelineio.plugins"):
            plugin_name = plugin.name
            try:
                plugin_entry_point = plugin.load()
                try:
                    plugin_manifest = plugin_entry_point.plugin_manifest()
                except AttributeError:
                    # No plugin_manifest() hook: fall back to a bundled
                    # plugin_manifest.json resource, re-raising if the
                    # package provides neither.
                    if not pkg_resources.resource_exists(
                            plugin.module_name, 'plugin_manifest.json'):
                        raise
                    manifest_stream = pkg_resources.resource_stream(
                        plugin.module_name, 'plugin_manifest.json')
                    plugin_manifest = core.deserialize_json_from_string(
                        manifest_stream.read().decode('utf-8'))
                    manifest_stream.close()

            except Exception:
                # A broken third-party plugin must not break manifest
                # loading for everyone else; log and move on.
                logging.exception(
                    "could not load plugin: {}".format(plugin_name))
                continue

            result.adapters.extend(plugin_manifest.adapters)
            result.media_linkers.extend(plugin_manifest.media_linkers)
    else:
        # XXX: Should we print some kind of warning that pkg_resources isn't
        #        available?
        pass

    # read local adapter manifests, if they exist
    _local_manifest_path = os.environ.get("OTIO_PLUGIN_MANIFEST_PATH", None)
    if _local_manifest_path is not None:
        # NOTE(review): ':' as separator is POSIX-style; on Windows
        # os.pathsep would be ';' -- confirm intended platforms.
        for json_path in _local_manifest_path.split(":"):
            if not os.path.exists(json_path):
                # XXX: In case error reporting is requested
                # print(
                #     "Warning: OpenTimelineIO cannot access path '{}' from "
                #     "$OTIO_PLUGIN_MANIFEST_PATH".format(json_path)
                # )
                continue

            LOCAL_MANIFEST = manifest_from_file(json_path)
            result.adapters.extend(LOCAL_MANIFEST.adapters)
            result.media_linkers.extend(LOCAL_MANIFEST.media_linkers)

    return result
Esempio n. 38
0
 def methodInfo(m):
     """Return a human-readable "at file:line:\\n source" string for *m*."""
     import inspect
     src_lines, first_line = inspect.getsourcelines(m)
     location = inspect.getsourcefile(m)
     return "\nat %s:%s:\n %s" % (location, first_line,
                                  "\n".join(src_lines))
Esempio n. 39
0
def linkcode_resolve(domain, info):
    """
    Determine the URL corresponding to Python object
    """
    if domain != 'py':
        return None

    module_name = info['module']
    qualified_name = info['fullname']

    module = sys.modules.get(module_name)
    if module is None:
        return None

    # Walk the dotted attribute path down from the module object.
    target = module
    for attr_name in qualified_name.split('.'):
        try:
            target = getattr(target, attr_name)
        except Exception:
            return None

    # strip decorators, which would resolve to the source of the decorator
    # possibly an upstream bug in getsourcefile, bpo-1764286
    unwrap = getattr(inspect, 'unwrap', None)
    if unwrap is not None:
        target = unwrap(target)

    source_path = None
    lineno = None

    # Make a poor effort at linking C extension types
    if isinstance(target, type) and target.__module__ == 'numpy':
        source_path = _get_c_source_file(target)

    if source_path is None:
        try:
            source_path = inspect.getsourcefile(target)
        except Exception:
            source_path = None
        if not source_path:
            return None

        # Ignore re-exports as their source files are not within the numpy repo
        defining_module = inspect.getmodule(target)
        if defining_module is not None and not defining_module.__name__.startswith("numpy"):
            return None

        try:
            source, lineno = inspect.getsourcelines(target)
        except Exception:
            lineno = None

        source_path = relpath(source_path, start=dirname(numpy.__file__))

    if lineno:
        linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1)
    else:
        linespec = ""

    if 'dev' in numpy.__version__:
        return "https://github.com/numpy/numpy/blob/main/numpy/%s%s" % (
            source_path, linespec)
    return "https://github.com/numpy/numpy/blob/v%s/numpy/%s%s" % (
        numpy.__version__, source_path, linespec)
Esempio n. 40
0
def _format_contract(contract: icontract._Contract) -> str:
    """Format the contract as reST.

    Renders the contract's condition (function name or re-parsed lambda
    source) plus, when inferable, its error type and description into a
    single reST string.
    """
    # pylint: disable=too-many-branches
    decorator_inspection = None  # type: Optional[icontract._represent.DecoratorInspection]

    ##
    # Parse condition
    ##

    if not icontract._represent._is_lambda(a_function=contract.condition):
        condition_text = ':py:func:`{}`'.format(contract.condition.__name__)
    else:
        # We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with
        # lambdas.

        # Find the line corresponding to the condition lambda
        lines, condition_lineno = inspect.findsource(contract.condition)
        filename = inspect.getsourcefile(contract.condition)
        if filename is None:
            filename = "<filename unavailable>"

        decorator_inspection = icontract._represent.inspect_decorator(
            lines=lines, lineno=condition_lineno, filename=filename)

        lambda_inspection = icontract._represent.find_lambda_condition(
            decorator_inspection=decorator_inspection)
        assert lambda_inspection is not None, \
            "Expected non-None lambda inspection with the condition: {}".format(contract.condition)

        condition_text = _condition_as_text(
            lambda_inspection=lambda_inspection)

    ##
    # Parse error
    ##

    error_type = None  # type: Optional[str]

    # Error message is set only for an error of a contract given as a lambda that takes no arguments and returns
    # a result of a call on a string literal (*e.g.*, ``error=ValueError("some message")``.
    error_msg = None  # type: Optional[str]

    if contract.error is not None:
        if isinstance(contract.error, type):
            error_type = contract.error.__qualname__
        elif inspect.isfunction(
                contract.error) and icontract._represent._is_lambda(
                    a_function=contract.error):
            # Reuse the decorator inspection from the condition branch if it
            # was already computed; otherwise inspect the error lambda now.
            if decorator_inspection is None:
                lines, condition_lineno = inspect.findsource(contract.error)
                filename = inspect.getsourcefile(contract.error)
                if filename is None:
                    filename = "<filename unavailable>"

                decorator_inspection = icontract._represent.inspect_decorator(
                    lines=lines, lineno=condition_lineno, filename=filename)

            error_type, error_msg = _error_type_and_message(
                decorator_inspection=decorator_inspection)
        else:
            # Error type could not be inferred
            pass

    ##
    # Format
    ##

    description = None  # type: Optional[str]
    if contract.description:
        description = contract.description
    elif error_msg is not None:
        description = error_msg
    else:
        # Description could not be inferred.
        pass

    # Join description and error type with punctuation-aware glue.
    doc = None  # type: Optional[str]
    if description and error_type:
        if description.strip()[-1] in [".", "!", "?"]:
            doc = "{} Raise :py:class:`{}`".format(description, error_type)
        elif description.strip()[-1] in [",", ";"]:
            doc = "{} raise :py:class:`{}`".format(description, error_type)
        else:
            doc = "{}; raise :py:class:`{}`".format(description, error_type)

    elif not description and error_type:
        doc = "Raise :py:class:`{}`".format(error_type)

    elif description and not error_type:
        doc = description

    else:
        # No extra documentation can be generated since the error type could not be inferred and
        # no contract description was given.
        doc = None

    if doc is not None:
        return "{} ({})".format(condition_text, doc)

    return condition_text
Esempio n. 41
0
 def _GetSourceInfo(cls, src_cls):
     """Gets a source info string ("key@file:line") given a source class."""
     params_key = cls._ModelParamsClassKey(src_cls)
     src_file = inspect.getsourcefile(src_cls)
     src_line = inspect.getsourcelines(src_cls)[-1]
     return '%s@%s:%d' % (params_key, src_file, src_line)
Esempio n. 42
0
    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)

        # Cache the fodder module's full source text for the tests to use.
        fodder_path = inspect.getsourcefile(self.fodderModule)
        with open(fodder_path) as fp:
            self.source = fp.read()
def run_function_in_container(*,
        name: str, function,
        container: str,
        keyword_args: dict,
        input_file_keys: list,
        input_file_extensions: dict,
        output_file_keys: list,
        output_file_extensions: dict,
        additional_files: list=None,
        local_modules: list=None
    ) -> Any:
    """Run *function* inside *container* (docker, or singularity when
    HITHER_USE_SINGULARITY=TRUE) and return its deserialized result.

    The function's source directory is copied into a temp dir, wrapped in a
    small run.py/run.sh harness, bind-mounted into the container together
    with input/output files, executed, and the JSON-encoded return value is
    read back. Output files are copied to their requested destinations.

    Raises Exception if the function's source cannot be located, if
    KACHERY_STORAGE_DIR is unset, or if the container exits non-zero.
    """
    # Mutable-default-argument fix: None stands in for "no extras".
    if additional_files is None:
        additional_files = []
    if local_modules is None:
        local_modules = []
    # generate source code
    with TemporaryDirectory(remove=True, prefix='tmp_hither_run_in_container_' + name) as temp_path:
        try:
            function_source_fname = os.path.abspath(inspect.getsourcefile(function))
        except Exception:
            # Was `raise('...')` -- raising a str is a TypeError on Python 3
            # and would mask the intended message.
            raise Exception('Unable to get source file for function {}. Cannot run in a container.'.format(name))

        function_source_dirname = os.path.dirname(function_source_fname)
        function_source_basename = os.path.basename(function_source_fname)
        function_source_basename_noext = os.path.splitext(function_source_basename)[0]
        code = _read_python_code_of_directory(
            function_source_dirname,
            additional_files=additional_files,
            exclude_init=True
        )
        # Re-export the target function from the copied package root.
        code['files'].append(dict(
            name='__init__.py',
            content='from .{} import {}'.format(
                function_source_basename_noext, name)
        ))
        hither_dir = os.path.dirname(os.path.realpath(__file__))
        # Resolve relative local-module paths against the function's dir.
        local_module_paths = []
        for lm in local_modules:
            if os.path.isabs(lm):
                local_module_paths.append(lm)
            else:
                local_module_paths.append(os.path.join(function_source_dirname, lm))
        code['dirs'].append(dict(
            name='_local_modules',
            content=dict(
                files=[],
                dirs=[
                    dict(
                        name=os.path.basename(local_module_path),
                        content=_read_python_code_of_directory(os.path.join(function_source_dirname, local_module_path), exclude_init=False)
                    )
                    for local_module_path in local_module_paths + [hither_dir]
                ]
            )
        ))

        _write_python_code_to_directory(os.path.join(temp_path, 'function_src'), code)

        # Rewrite file-path kwargs so they point at the in-container mount
        # locations, recording the host->container binds as we go.
        keyword_args_adjusted = deepcopy(keyword_args)
        binds = dict()
        for iname in input_file_keys:
            if iname in keyword_args.keys():
                fname_outside = keyword_args[iname]
                if not _is_hash_url(fname_outside):
                    fname_inside = '/inputs/{}{}'.format(iname, input_file_extensions[iname])
                    keyword_args_adjusted[iname] = fname_inside
                    binds[fname_outside] = fname_inside
        outputs_tmp = os.path.join(temp_path, 'outputs')
        os.mkdir(outputs_tmp)
        binds[outputs_tmp] = '/outputs'
        outputs_to_copy = dict()
        for oname in output_file_keys:
            if oname in keyword_args.keys():
                fname_outside = keyword_args[oname]
                fname_inside = '/outputs/{}{}'.format(oname, output_file_extensions[oname])
                fname_temp = '{}/{}{}'.format(outputs_tmp, oname, output_file_extensions[oname])
                keyword_args_adjusted[oname] = fname_inside
                outputs_to_copy[fname_temp] = fname_outside

        run_py_script = """
            #!/usr/bin/env python

            from function_src import {function_name}
            import sys
            import json

            def main():
                _configure_kachery()
                kwargs = json.loads('{keyword_args_json}')
                retval = {function_name}(**kwargs)
                with open('/run_in_container/retval.json', 'w') as f:
                    json.dump(dict(retval=retval), f)
            
            def _configure_kachery():
                try:
                    import kachery as ka
                except:
                    return
                kachery_config = json.loads('{kachery_config_json}')
                ka.set_config(**kachery_config)

            if __name__ == "__main__":
                try:
                    main()
                except:
                    sys.stdout.flush()
                    sys.stderr.flush()
                    raise
        """.format(
            keyword_args_json=json.dumps(keyword_args_adjusted),
            kachery_config_json=json.dumps(ka.get_config()),
            function_name=name
        )

        # For unindenting
        ShellScript(run_py_script).write(os.path.join(temp_path, 'run.py'))

        env_vars_inside_container = dict(
            KACHERY_STORAGE_DIR='/kachery-storage',
            PYTHONPATH='/run_in_container/function_src/_local_modules',
            HOME='$HOME'
        )

        run_inside_script = """
            #!/bin/bash
            set -e

            {env_vars_inside_container} python3 /run_in_container/run.py
        """.format(
            env_vars_inside_container=' '.join(['{}={}'.format(k, v) for k, v in env_vars_inside_container.items()])
        )

        ShellScript(run_inside_script).write(os.path.join(temp_path, 'run.sh'))

        if not os.getenv('KACHERY_STORAGE_DIR'):
            raise Exception('You must set the environment variable: KACHERY_STORAGE_DIR')

        if os.getenv('HITHER_USE_SINGULARITY', None) == 'TRUE':
            run_outside_script = """
                #!/bin/bash

                singularity exec -e \\
                    -B $KACHERY_STORAGE_DIR:/kachery-storage \\
                    -B {temp_path}:/run_in_container \\
                    --nv \\
                    {binds_str} \\
                    {container} \\
                    bash /run_in_container/run.sh
            """.format(
                binds_str=' '.join(['-B {}:{}'.format(a, b) for a, b in binds.items()]),
                container=container,
                temp_path=temp_path
            )
        else:
            run_outside_script = """
                #!/bin/bash

                docker run -it \\
                    --gpus all \\
                    -v /etc/passwd:/etc/passwd -u `id -u`:`id -g` \\
                    -v $KACHERY_STORAGE_DIR:/kachery-storage \\
                    -v {temp_path}:/run_in_container \\
                    -v /tmp:/tmp \\
                    -v $HOME:$HOME \\
                    {binds_str} \\
                    {container} \\
                    bash /run_in_container/run.sh
            """.format(
                binds_str=' '.join(['-v {}:{}'.format(a, b) for a, b in binds.items()]),
                container=_docker_form_of_container_string(container),
                temp_path=temp_path
            )

        ss = ShellScript(run_outside_script, keep_temp_files=False)
        ss.start()
        retcode = ss.wait()

        if retcode != 0:
            raise Exception('Non-zero exit code ({}) running {} in container {}'.format(retcode, name, container))

        with open(os.path.join(temp_path, 'retval.json')) as f:
            obj = json.load(f)
        retval = obj['retval']

        # Move produced outputs from the temp mount to their final homes.
        for a, b in outputs_to_copy.items():
            shutil.copyfile(a, b)

        return retval
Esempio n. 44
0
#!/usr/bin/env python3
# Launcher: make the `tote` package importable from the parent directory
# of this script, then run its main entry point.
import sys
from inspect import getsourcefile
from os.path import dirname, abspath, join

# Locate this script via its own frame (works even when __file__ is
# unreliable), then add the parent directory to sys.path.
file = getsourcefile(lambda: 0)
base = abspath(join(dirname(file), '..'))

sys.path.insert(0, base)
import tote
# sys.path.remove(base)

import tote.main as m

# Delegate to the package's real entry point.
m.main()
Esempio n. 45
0
# -*- coding: utf-8 -*-
"""
debugee related thing
"""
import os
import inspect
from collections import OrderedDict

import log
import util
import defines

# Absolute path / directory of this source file, resolved via a lambda's
# frame (see stackoverflow.com/questions/2632199).
file_path = os.path.abspath(inspect.getsourcefile(lambda: 0))
file_dir = os.path.dirname(inspect.getsourcefile(lambda: 0))

# ---------------------------------------------------------------------------
# global - load from .txt file

#
# func addrs
# a dict:
#    {start: (start, end, name),
#    {start: (start, end, name)},
#     ...}
#
# NOTE(review): `global` at module scope is a no-op; it documents intent
# only (functions elsewhere presumably declare `global v_tmp_func_list`).
global v_tmp_func_list
v_tmp_func_list = None

# exclude func address
# global v_tmp_exclude_func_address
v_tmp_exclude_func_address = None
Esempio n. 46
0
def findsource(object):
    """Return the entire source file and starting line number for an object.
    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a list of all the lines
    in the file and the line number indexes a line in that list.  An IOError
    is raised if the source code cannot be retrieved.
    FIXED version with which we monkeypatch the stdlib to work around a bug."""

    file = getsourcefile(object) or getfile(object)
    # If the object is a frame, then trying to get the globals dict from its
    # module won't work. Instead, the frame object itself has the globals
    # dictionary.
    globals_dict = None
    if inspect.isframe(object):
        # XXX: can this ever be false?
        globals_dict = object.f_globals
    else:
        module = getmodule(object, file)
        if module:
            globals_dict = module.__dict__
    # linecache uses globals_dict to honor PEP 302 loaders (zip imports etc).
    lines = linecache.getlines(file, globals_dict)
    if not lines:
        raise IOError('could not get source code')

    if ismodule(object):
        return lines, 0

    if isclass(object):
        name = object.__name__
        pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
        # make some effort to find the best matching class definition:
        # use the one with the least indentation, which is the one
        # that's most probably not inside a function definition.
        candidates = []
        for i in range(len(lines)):
            match = pat.match(lines[i])
            if match:
                # if it's at toplevel, it's already the best one
                if lines[i][0] == 'c':
                    return lines, i
                # else add whitespace to candidate list
                candidates.append((match.group(1), i))
        if candidates:
            # this will sort by whitespace, and by line number,
            # less whitespace first
            candidates.sort()
            return lines, candidates[0][1]
        else:
            raise IOError('could not find class definition')

    # Unwrap progressively: method -> function -> code; traceback/frame ->
    # code.  NOTE(review): im_func/func_code are Python-2-only attribute
    # names (__func__/__code__ on Python 3) -- confirm target interpreter.
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise IOError('could not find function definition')
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        pmatch = pat.match
        # fperez - fix: sometimes, co_firstlineno can give a number larger than
        # the length of lines, which causes an error.  Safeguard against that.
        lnum = min(object.co_firstlineno, len(lines)) - 1
        # Scan upward for the def/lambda/decorator line that starts the code.
        while lnum > 0:
            if pmatch(lines[lnum]):
                break
            lnum -= 1

        return lines, lnum
    raise IOError('could not find code object')
Esempio n. 47
0
#---------------------------- Try to find/load JIGSAW's API.

# Path of the JIGSAW shared library; an empty Path() means "not located".
JLIBNAME = Path()

if (JLIBNAME == Path()):
    #---------------------------- set-up path for "local" binary

    WIN = "Windows"
    LNX = "Linux"
    MAC = "Darwin"

    #   stackoverflow.com/questions/2632199/
    #       how-do-i-get-the-
    #           path-of-the-current-executed-file-in-python
    # Resolve this file's own location via a lambda's frame, then pick the
    # platform-specific library name under the sibling "_lib" directory.
    FILENAME = \
        inspect.getsourcefile(lambda: +0)

    FILEPATH = \
        (Path(FILENAME).resolve()).parent

    if (platform.system() == WIN):
        JLIBNAME = \
            FILEPATH / "_lib" / "jigsaw.dll"

    elif (platform.system() == LNX):
        JLIBNAME = \
            FILEPATH / "_lib" / "libjigsaw.so"

    elif (platform.system() == MAC):
        JLIBNAME = \
            FILEPATH / "_lib" / "libjigsaw.dylib"
Esempio n. 48
0
import os
import sys
from inspect import getsourcefile

# Minimal command-line handling: "-d" anywhere enables debug mode.
is_debug = False
for arg in sys.argv:
    if arg == '-d':
        is_debug = True

# Project root: this script's absolute path with the last 11 characters
# (the script's filename) stripped.
# NOTE(review): the hard-coded [:-11] assumes an 11-character filename --
# os.path.dirname() would be more robust; confirm the actual script name.
path = os.path.abspath(getsourcefile(lambda: 0))[:-11]
sys.path.insert(0, path + 'src')
sys.path.insert(0, path + 'plugins')
sys.path.insert(0, path + 'templates')
sys.path.insert(0, path + 'apps')
os.chdir(path + 'src')

import botbase

# Start the bot; any unexpected failure is reported to the admin chat.
bot = botbase.BotBase(is_debug)
try:
    bot._start_(1)
except Exception as e:
    bot.sender.send(
        bot.keys.admin_id,
        'Exception in unknown place:\n' + str(type(e)) + '\n' + str(e))
Esempio n. 49
0
 def __init__(self, name):
     """Import the module *name* and record its source directory."""
     module = importlib.import_module(name)
     source_dir = os.path.dirname(inspect.getsourcefile(module))
     self.dirname = os.path.abspath(source_dir)
Esempio n. 50
0
    def test_a110_one(self):
        """Drive `run one -i data_sample_handler` through a pty and assert
        on the interactive shell's output (start task, crawls, retry, quit).
        """
        pid, fd = os.forkpty()
        #cmd = [sys.executable]
        cmd = ['coverage', 'run']
        cmd += [
            inspect.getsourcefile(run), 'one', '-i',
            inspect.getsourcefile(data_sample_handler)
        ]

        if pid == 0:
            # child
            os.execvp(cmd[0], cmd)
        else:
            # parent
            def wait_text(timeout=1):
                # Drain pty output until `timeout` seconds pass with no data.
                import select
                text = []
                while True:
                    rl, wl, xl = select.select([fd], [], [], timeout)
                    if not rl:
                        break
                    try:
                        t = os.read(fd, 1024)
                    except OSError:
                        break
                    if not t:
                        break
                    t = utils.text(t)
                    text.append(t)
                    print(t, end='')
                return ''.join(text)

            text = wait_text(3)
            self.assertIn('new task data_sample_handler:on_start', text)
            self.assertIn('pyspider shell', text)

            os.write(fd, utils.utf8('run()\n'))
            text = wait_text()
            self.assertIn('task done data_sample_handler:on_start', text)

            os.write(
                fd,
                utils.utf8('crawl("%s/pyspider/test.html")\n' % self.httpbin))
            text = wait_text()
            self.assertIn('/robots.txt', text)

            os.write(fd, utils.utf8('crawl("%s/links/10/0")\n' % self.httpbin))
            text = wait_text()
            # The links page sometimes needs a second attempt before it
            # renders; retry once with a different link before asserting.
            if '"title": "Links"' not in text:
                os.write(fd,
                         utils.utf8('crawl("%s/links/10/1")\n' % self.httpbin))
                text = wait_text()
                self.assertIn('"title": "Links"', text)

            os.write(fd, utils.utf8('crawl("%s/404")\n' % self.httpbin))
            text = wait_text()
            self.assertIn('task retry', text)

            # Clean shutdown, then make sure the child really exits.
            os.write(fd, b'quit_pyspider()\n')
            text = wait_text()
            self.assertIn('scheduler exiting...', text)
            os.close(fd)
            os.kill(pid, signal.SIGINT)
Esempio n. 51
0
 def local_path(self, filename):
     """Return *filename* resolved against the caller's source directory
     (overrides the relative-path base class behaviour with an absolute one).
     """
     caller_frame = inspect.currentframe().f_back
     caller_file = inspect.getsourcefile(caller_frame)
     caller_dir = os.path.dirname(os.path.abspath(caller_file))
     return os.path.join(caller_dir, filename)
Esempio n. 52
0
def getSourceFile():
    """Log that this function was called, then print the caller's source file."""
    log("lazilyTest1.a.getSourceFile called")
    # Import deliberately happens AFTER the log call: this snippet exercises
    # lazy importing, so the relative order is part of the observed behavior.
    from inspect import currentframe, getsourcefile
    caller = currentframe().f_back
    print("lazilyTest1.a.getFrameInfo called: " + str(getsourcefile(caller)))
Esempio n. 53
0
def get_qualified_method_name(obj, method_name):
    """Build a qualified test name such as test_mgmt_network.test_public_ip_addresses.

    The module part is derived from the source file in which obj's class is
    defined, with the directory and the extension stripped off.
    """
    source_path = inspect.getsourcefile(type(obj))
    base_name = os.path.basename(source_path)
    module_name = os.path.splitext(base_name)[0]
    return '{0}.{1}'.format(module_name, method_name)
Esempio n. 54
0
import inspect  # required by the getsourcefile(lambda: 0) path lookup below
import matplotlib.pyplot as plt
import numpy as np
from pathlib import Path
import time

from flightsim.axes3ds import Axes3Ds
from flightsim.world import World
from proj1_2.code.occupancy_map import OccupancyMap
from proj1_2.code.graph_search import graph_search

# Choose a test example file. You should write your own example files too!
# filename = 'test_empty.json'
filename = 'test_forest.json'

# Load the test example. getsourcefile(lambda: 0) resolves to this script's
# own path, so the test file is found relative to it, not to the cwd.
file = Path(inspect.getsourcefile(lambda:0)).parent.resolve() / '..' / 'util' / filename
world = World.from_file(file)          # World boundary and obstacles.
resolution = world.world['resolution'] # (x,y,z) resolution of discretization, shape=(3,).
margin = world.world['margin']         # Scalar spherical robot size or safety margin.
start  = world.world['start']          # Start point, shape=(3,)
goal   = world.world['goal']           # Goal point, shape=(3,)

# Run your code and return the path.
start_time = time.time()
path = graph_search(world, resolution, margin, start, goal, astar=False)
end_time = time.time()
print(f'Solved in {end_time-start_time:.2f} seconds')

# Draw the world, start, and goal.
fig = plt.figure()
ax = Axes3Ds(fig)
Esempio n. 55
0
def getResource(file):
    """Return the path of *file* located next to this module's source file."""
    # getsourcefile(lambda: 0) yields the file this lambda is defined in,
    # i.e. this module, regardless of the current working directory.
    here = os.path.abspath(inspect.getsourcefile(lambda: 0))
    return os.path.dirname(here) + "/" + file
Esempio n. 56
0
    from serializers.restrictionserializer import RestrictionSerializer
except:
    from .serializers.recipeserializer import RecipeSerializer
    from .serializers.ingredientserializer import IngredientSerializer
    from .serializers.oxideserializer import OxideSerializer
    from .serializers.otherserializer import OtherSerializer
    from .serializers.restrictionserializer import RestrictionSerializer

import json
from inspect import getsourcefile
import os
from os.path import abspath, dirname
from os import path
import sys
# Absolute path of the 'persistent_data' directory sitting next to this
# source file; getsourcefile(lambda: 0) resolves to this module's own file.
persistent_data_path = path.join(
    dirname(abspath(getsourcefile(lambda: 0))), 'persistent_data'
)  # NOTE(review): os.path.dirname(os.path.abspath(__file__)) would be simpler
# Make modules stored under persistent_data importable below.
sys.path.append(persistent_data_path)

import copy


class Model(CoreData):
    "A partial model for the GUI. The full model consists of this together with the LpRecipeProblem class."

    def __init__(self):

        OxideData.set_default_oxides()
        with open(path.join(persistent_data_path, "JSONOxides.json"),
                  'r') as f:
def _get_source(function):
    """Return the Python source text that will be uploaded for *function*.

    The result always embeds this module's own source plus the main-module
    source. *function* itself is appended either as plain source text or,
    when its source cannot be read (e.g. a REPL-defined function), as a
    base64-serialized code object that is rebuilt on the server.
    """
    source_file = inspect.getsourcefile(function)
    try:
        # Close the handle deterministically instead of leaking it the way a
        # bare open(...).readlines() would.
        with open(source_file) as f:
            source_text = ''.join(f.readlines())
    except Exception:
        # No readable source (interactive/REPL definition, or source_file is
        # None) -- fall back to serializing the function object below.
        source_text = None

    # include our source code...
    ourfile = __file__
    if ourfile.endswith('.pyc'):
        ourfile = ourfile[:-1]
    with open(ourfile) as f:
        source = u''.join(f.readlines())

    main_source = _get_main_source(function)

    source += chr(10) + main_source

    if source_text is None:
        # we're in a REPL environment, we need to serialize the code...
        #TODO: Remove base64 encoding when json double escape issue is fixed
        source += inspect.getsource(_deserialize_func)
        source += chr(10)
        source += '__user_function = _deserialize_func(base64.decodestring(' + repr(
            base64.encodestring(_serialize_func(function)).replace(
                chr(10), '')) + '), globals())'
    else:
        # we can upload the source code itself...
        source += '''
# overwrite publish/service with ones which won't re-publish...
import sys
sys.modules['azureml'] = azureml = type(sys)('azureml')
sys.modules['azureml.services'] = services = type(sys)('services')
azureml.services = services

def publish(func, *args, **kwargs):
    if callable(func):
        return func
    def wrapper(func):
        return func
    return wrapper
services.publish = publish

def service(*args):
    def wrapper(func):
        return func
    return wrapper

def attach(*args, **kwargs):
    def wrapper(func):
        return func
    return wrapper

services.service = service
services.types = types
services.returns = returns
services.attach = attach
services.dataframe_service = attach

'''
        source += source_text
        source += chr(10)
        source += '__user_function = ' + function.__name__

    return source
Esempio n. 58
0
Train base skill embedding modules.
"""

import unittest
import sys
import os
import numpy as np
import tensorflow as tf
import itertools
import shutil
import threading
import multiprocessing
from datetime import datetime

from inspect import getsourcefile
# Directory containing this file; getsourcefile(lambda: 0) resolves to the
# source file the lambda was defined in, i.e. this module.
current_path = os.path.dirname(os.path.abspath(getsourcefile(lambda: 0)))
# Project root is two directory levels above this file.
import_path = os.path.abspath(os.path.join(current_path, "../.."))

if import_path not in sys.path:
    sys.path.append(import_path)

# NOTE(review): these entries are relative to the process cwd, not to this
# file -- the imports below only resolve when run from the expected directory.
sys.path.insert(0, '../environments')
sys.path.insert(0, '../rl_agents/utils')
sys.path.insert(0, '../rl_agents/policy_gradient/composenet')
import objects_env
from make_env import make_env

from estimators import Skill, CompositionModule, PolicyModule, ValueModule
from policy_eval_composenet import PolicyEval
from worker import Worker
import os
from google.appengine.ext import vendor

# True only under the local development server: SERVER_SOFTWARE starts with
# 'Development' there, while production starts with 'Google App Engine/'.
on_appengine = os.environ.get('SERVER_SOFTWARE', '').startswith('Development')
if on_appengine and os.name == 'nt':
    # HACK: the dev-server sandbox special-cases os.name == 'nt'; clearing it
    # steers the imports below onto POSIX code paths when running on Windows.
    os.name = None

    import imp
    import os.path
    import inspect
    from google.appengine.tools.devappserver2.python import sandbox

    # Permit these C extension modules inside the dev-server sandbox.
    sandbox._WHITE_LIST_C_MODULES.extend([
        '_ssl',
        '_socket',
        '_ctypes',
        '_winreg',
    ])
    # Use the system socket: load the real socket.py that lives beside the
    # stdlib os module, bypassing the sandbox's stubbed implementation.
    real_os_src_path = os.path.realpath(inspect.getsourcefile(os))
    psocket = os.path.join(os.path.dirname(real_os_src_path), 'socket.py')
    imp.load_source('socket', psocket)

# Add any libraries installed in the "lib" folder.
vendor.add('lib')

DATA_BACKEND = 'datastore'
PROJECT_ID = 'polygonfm-rating-dangae'
Esempio n. 60
0
class FakeFile(file):
  """File sub-class that enforces the security restrictions of the production
  environment.
  """

  # Only read modes are permitted; any other mode is rejected in __init__.
  ALLOWED_MODES = frozenset(['r', 'rb', 'U', 'rU'])

  # mime.types files consulted by the mimetypes module; the runtime itself
  # needs to be able to read these.
  ALLOWED_FILES = set(os.path.normcase(filename)
                      for filename in mimetypes.knownfiles
                      if os.path.isfile(filename))

  # Regex whitelist; a full match is required (enforced in
  # _IsFileAccessibleNoCache, which checks match.end()).
  ALLOWED_FILES_RE = set([re.compile(r'.*/python27.zip$')])

  # Standard-library directories, recorded under every realpath/abspath
  # combination so any spelling of the path is recognized.
  ALLOWED_DIRS = set([
      os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
      os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
      os.path.normcase(os.path.dirname(os.path.realpath(os.__file__))),
      os.path.normcase(os.path.dirname(os.path.abspath(os.__file__))),
  ])
  # os.__file__ may be a .pyc; getsourcefile(os) finds the .py source, whose
  # directory may differ and must be whitelisted as well.
  os_source_location = inspect.getsourcefile(os)

  if os_source_location is not None:

    ALLOWED_DIRS.update([
        os.path.normcase(os.path.realpath(os.path.dirname(os_source_location))),
        os.path.normcase(os.path.abspath(os.path.dirname(os_source_location))),
        os.path.normcase(os.path.dirname(os.path.realpath(os_source_location))),
        os.path.normcase(os.path.dirname(os.path.abspath(os_source_location))),
    ])

  # site-packages is blocked wholesale; selected packages are re-allowed via
  # the ALLOWED_SITE_PACKAGE_* sets below.
  NOT_ALLOWED_DIRS = set([

      SITE_PACKAGES,
  ])

  # Specific site-packages directories allowed despite NOT_ALLOWED_DIRS
  # (populated at runtime via SetAllowedModule).
  ALLOWED_SITE_PACKAGE_DIRS = set(
      os.path.normcase(os.path.abspath(os.path.join(SITE_PACKAGES, path)))
      for path in [

          ])

  # Individual whitelisted site-packages files: the bundled PyCrypto tree.
  ALLOWED_SITE_PACKAGE_FILES = set(
      os.path.normcase(os.path.abspath(os.path.join(
          os.path.dirname(os.__file__), 'site-packages', path)))
      for path in itertools.chain(*[

          [os.path.join('Crypto')],
          GeneratePythonPaths('Crypto', '__init__'),
          GeneratePythonPaths('Crypto', 'pct_warnings'),
          [os.path.join('Crypto', 'Cipher')],
          GeneratePythonPaths('Crypto', 'Cipher', '__init__'),
          GeneratePythonPaths('Crypto', 'Cipher', 'blockalgo'),
          GeneratePythonPaths('Crypto', 'Cipher', 'AES'),
          GeneratePythonPaths('Crypto', 'Cipher', 'ARC2'),
          GeneratePythonPaths('Crypto', 'Cipher', 'ARC4'),
          GeneratePythonPaths('Crypto', 'Cipher', 'Blowfish'),
          GeneratePythonPaths('Crypto', 'Cipher', 'CAST'),
          GeneratePythonPaths('Crypto', 'Cipher', 'DES'),
          GeneratePythonPaths('Crypto', 'Cipher', 'DES3'),
          GeneratePythonPaths('Crypto', 'Cipher', 'PKCS1_OAEP'),
          GeneratePythonPaths('Crypto', 'Cipher', 'PKCS1_v1_5'),
          GeneratePythonPaths('Crypto', 'Cipher', 'XOR'),
          [os.path.join('Crypto', 'Hash')],
          GeneratePythonPaths('Crypto', 'Hash', '__init__'),
          GeneratePythonPaths('Crypto', 'Hash', 'hashalgo'),
          GeneratePythonPaths('Crypto', 'Hash', 'HMAC'),
          GeneratePythonPaths('Crypto', 'Hash', 'MD2'),
          GeneratePythonPaths('Crypto', 'Hash', 'MD4'),
          GeneratePythonPaths('Crypto', 'Hash', 'MD5'),
          GeneratePythonPaths('Crypto', 'Hash', 'SHA'),
          GeneratePythonPaths('Crypto', 'Hash', 'SHA224'),
          GeneratePythonPaths('Crypto', 'Hash', 'SHA256'),
          GeneratePythonPaths('Crypto', 'Hash', 'SHA384'),
          GeneratePythonPaths('Crypto', 'Hash', 'SHA512'),
          GeneratePythonPaths('Crypto', 'Hash', 'RIPEMD'),
          [os.path.join('Crypto', 'Protocol')],
          GeneratePythonPaths('Crypto', 'Protocol', '__init__'),
          GeneratePythonPaths('Crypto', 'Protocol', 'AllOrNothing'),
          GeneratePythonPaths('Crypto', 'Protocol', 'Chaffing'),
          GeneratePythonPaths('Crypto', 'Protocol', 'KDF'),
          [os.path.join('Crypto', 'PublicKey')],
          GeneratePythonPaths('Crypto', 'PublicKey', '__init__'),
          GeneratePythonPaths('Crypto', 'PublicKey', 'DSA'),
          GeneratePythonPaths('Crypto', 'PublicKey', '_DSA'),
          GeneratePythonPaths('Crypto', 'PublicKey', 'ElGamal'),
          GeneratePythonPaths('Crypto', 'PublicKey', 'RSA'),
          GeneratePythonPaths('Crypto', 'PublicKey', '_RSA'),
          GeneratePythonPaths('Crypto', 'PublicKey', 'pubkey'),
          GeneratePythonPaths('Crypto', 'PublicKey', 'qNEW'),
          GeneratePythonPaths('Crypto', 'PublicKey', '_slowmath'),
          [os.path.join('Crypto', 'Random')],
          GeneratePythonPaths('Crypto', 'Random', '__init__'),
          GeneratePythonPaths('Crypto', 'Random', 'random'),
          GeneratePythonPaths('Crypto', 'Random', '_UserFriendlyRNG'),
          [os.path.join('Crypto', 'Random', 'OSRNG')],
          GeneratePythonPaths('Crypto', 'Random', 'OSRNG', '__init__'),
          GeneratePythonPaths('Crypto', 'Random', 'OSRNG', 'fallback'),
          GeneratePythonPaths('Crypto', 'Random', 'OSRNG', 'nt'),
          GeneratePythonPaths('Crypto', 'Random', 'OSRNG', 'posix'),
          GeneratePythonPaths('Crypto', 'Random', 'OSRNG', 'rng_base'),
          [os.path.join('Crypto', 'Random', 'Fortuna')],
          GeneratePythonPaths('Crypto', 'Random', 'Fortuna', '__init__'),
          GeneratePythonPaths('Crypto', 'Random', 'Fortuna',
                              'FortunaAccumulator'),
          GeneratePythonPaths('Crypto', 'Random', 'Fortuna',
                              'FortunaGenerator'),
          GeneratePythonPaths('Crypto', 'Random', 'Fortuna', 'SHAd256'),
          [os.path.join('Crypto', 'Signature')],
          GeneratePythonPaths('Crypto', 'Signature', '__init__'),
          GeneratePythonPaths('Crypto', 'Signature', 'PKCS1_PSS'),
          GeneratePythonPaths('Crypto', 'Signature', 'PKCS1_v1_5'),
          [os.path.join('Crypto', 'Util')],
          GeneratePythonPaths('Crypto', 'Util', '__init__'),
          GeneratePythonPaths('Crypto', 'Util', 'asn1'),
          GeneratePythonPaths('Crypto', 'Util', 'Counter'),
          GeneratePythonPaths('Crypto', 'Util', 'RFC1751'),
          GeneratePythonPaths('Crypto', 'Util', 'number'),
          GeneratePythonPaths('Crypto', 'Util', '_number_new'),
          GeneratePythonPaths('Crypto', 'Util', 'py3compat'),
          GeneratePythonPaths('Crypto', 'Util', 'python_compat'),
          GeneratePythonPaths('Crypto', 'Util', 'randpool'),
          ]))

  # Keep a handle on the real built-in 'file' type before it is replaced.
  _original_file = file

  # Configured via the Set* static methods below.
  _root_path = None
  _application_paths = None
  _skip_files = None
  _static_file_config_matcher = None

  # Whether paths matching _skip_files may still be read.
  _allow_skipped_files = True

  # Memoized IsFileAccessible results, keyed by normalized absolute path.
  _availability_cache = {}

  @staticmethod
  def SetAllowedPaths(root_path, application_paths):
    """Configures which paths are allowed to be accessed.

    Must be called at least once before any file objects are created in the
    hardened environment.

    Args:
      root_path: Absolute path to the root of the application.
      application_paths: List of additional paths that the application may
                         access, this must include the App Engine runtime but
                         not the Python library directories.
    """
    # Record both the real and the absolute form of every allowed path so a
    # later lookup succeeds regardless of which spelling the caller uses.
    allowed = set()
    for app_path in application_paths:
      allowed.add(os.path.realpath(app_path))
      allowed.add(os.path.abspath(app_path))
    allowed.add(root_path)
    FakeFile._application_paths = allowed

    # The trailing separator makes prefix comparisons against the root
    # unambiguous (e.g. '/app/' does not match '/appdata').
    FakeFile._root_path = os.path.join(root_path, '')

    # Previously cached accessibility verdicts are now stale.
    FakeFile._availability_cache = {}

  @staticmethod
  def SetAllowSkippedFiles(allow_skipped_files):
    """Configures access to files matching FakeFile._skip_files.

    Args:
      allow_skipped_files: Boolean whether to allow access to skipped files
    """
    # Changing the policy invalidates every cached accessibility verdict.
    FakeFile._availability_cache = {}
    FakeFile._allow_skipped_files = allow_skipped_files

  @staticmethod
  def SetAllowedModule(name):
    """Allow the use of a module based on where it is located.

    Meant to be used by use_library() so that it has a link back into the
    trusted part of the interpreter.

    Args:
      name: Name of the module to allow.
    """
    stream, pathname, description = imp.find_module(name)
    pathname = os.path.normcase(os.path.abspath(pathname))
    if stream is not None:
      # A plain module file: whitelist the file itself, in both spellings.
      stream.close()
      for spelling in (pathname, os.path.realpath(pathname)):
        FakeFile.ALLOWED_FILES.add(spelling)
    else:
      # No stream means imp found a package directory.
      assert description[2] == imp.PKG_DIRECTORY
      if pathname.startswith(SITE_PACKAGES):
        target = FakeFile.ALLOWED_SITE_PACKAGE_DIRS
      else:
        target = FakeFile.ALLOWED_DIRS
      target.add(pathname)
      target.add(os.path.realpath(pathname))

  @staticmethod
  def SetSkippedFiles(skip_files):
    """Sets which files in the application directory are to be ignored.

    Must be called at least once before any file objects are created in the
    hardened environment, and again whenever the configuration is updated.

    Args:
      skip_files: Object with .match() method (e.g. compiled regexp).
    """
    # A new skip list invalidates every cached accessibility verdict.
    FakeFile._availability_cache = {}
    FakeFile._skip_files = skip_files

  @staticmethod
  def SetStaticFileConfigMatcher(static_file_config_matcher):
    """Sets StaticFileConfigMatcher instance for checking if a file is static.

    Must be called at least once before any file objects are created in the
    hardened environment, and again whenever the configuration is updated.

    Args:
      static_file_config_matcher: StaticFileConfigMatcher instance.
    """
    # A new matcher invalidates every cached accessibility verdict.
    FakeFile._availability_cache = {}
    FakeFile._static_file_config_matcher = static_file_config_matcher

  @staticmethod
  def IsFileAccessible(filename, normcase=os.path.normcase,
                       py27_optional=False):
    """Determines if a file's path is accessible.

    SetAllowedPaths(), SetSkippedFiles() and SetStaticFileConfigMatcher() must
    be called before this method or else all file accesses will raise an error.

    Args:
      filename: Path of the file to check (relative or absolute). May be a
        directory, in which case access for files inside that directory will
        be checked.
      normcase: Used for dependency injection.
      py27_optional: Whether the filename being checked matches the name of an
        optional python27 runtime library.

    Returns:
      True if the file is accessible, False otherwise.
    """
    # Normalize once so equivalent spellings of the same path share a single
    # cache slot and a single policy decision.
    cache_key = normcase(os.path.abspath(filename))

    cached = FakeFile._availability_cache.get(cache_key)
    if cached is not None:
      return cached

    # Cache miss: run the full (expensive) policy check and memoize it.
    verdict = FakeFile._IsFileAccessibleNoCache(cache_key,
                                                normcase=normcase,
                                                py27_optional=py27_optional)
    FakeFile._availability_cache[cache_key] = verdict
    return verdict

  @staticmethod
  def _IsFileAccessibleNoCache(logical_filename, normcase=os.path.normcase,
                               py27_optional=False):
    """Determines if a file's path is accessible.

    This is an internal part of the IsFileAccessible implementation.

    Args:
      logical_filename: Absolute path of the file to check.
      normcase: Used for dependency injection.
      py27_optional: Whether the filename being checked matches the name of an
        optional python27 runtime library.

    Returns:
      True if the file is accessible, False otherwise.
    """
    # For a directory, test a hypothetical file inside it so the prefix
    # checks below treat the directory itself as an allowed parent.
    logical_dirfakefile = logical_filename
    is_dir = False
    if os.path.isdir(logical_filename):
      logical_dirfakefile = os.path.join(logical_filename, 'foo')
      is_dir = True

    # Paths inside the application root are additionally subject to the
    # skip-file and static-file deny rules.
    if IsPathInSubdirectories(logical_dirfakefile, [FakeFile._root_path],
                              normcase=normcase):

      relative_filename = logical_dirfakefile[len(FakeFile._root_path):]

      if not FakeFile._allow_skipped_files:
        path = relative_filename
        if is_dir:
          # Strip the fake 'foo' component added above so the directory
          # itself is matched against the skip list.
          path = os.path.dirname(path)
        # Walk up through every ancestor: a skip match anywhere in the
        # chain blocks access.
        while path != os.path.dirname(path):
          if FakeFile._skip_files.match(path):
            logging.warning('Blocking access to skipped file "%s"',
                            logical_filename)
            return False
          path = os.path.dirname(path)

      if FakeFile._static_file_config_matcher.IsStaticFile(relative_filename):
        logging.warning('Blocking access to static file "%s"',
                        logical_filename)
        return False

    if py27_optional:
      # Optional python27 runtime libraries are always readable once the
      # deny rules above have passed.
      return True

    if logical_filename in FakeFile.ALLOWED_FILES:
      return True

    # ALLOWED_FILES_RE patterns must match the ENTIRE path, hence the
    # explicit match.end() check.
    for regex in FakeFile.ALLOWED_FILES_RE:
      match = regex.match(logical_filename)
      if match and match.end() == len(logical_filename):
        return True

    if logical_filename in FakeFile.ALLOWED_SITE_PACKAGE_FILES:
      return True

    if IsPathInSubdirectories(logical_dirfakefile,
                              FakeFile.ALLOWED_SITE_PACKAGE_DIRS,
                              normcase=normcase):
      return True

    # Finally: allowed when under an application or whitelisted directory,
    # unless also under an explicitly blocked one (e.g. raw site-packages).
    allowed_dirs = FakeFile._application_paths | FakeFile.ALLOWED_DIRS
    if (IsPathInSubdirectories(logical_dirfakefile,
                               allowed_dirs,
                               normcase=normcase) and
        not IsPathInSubdirectories(logical_dirfakefile,
                                   FakeFile.NOT_ALLOWED_DIRS,
                                   normcase=normcase)):
      return True

    return False

  def __init__(self, filename, mode='r', bufsize=-1, **kwargs):
    """Initializer. See file built-in documentation."""
    # Writes are never allowed in the sandbox; validate the mode first so the
    # error raised does not depend on whether the path itself is accessible.
    if mode not in FakeFile.ALLOWED_MODES:
      raise IOError('invalid mode: %s' % mode)

    # Enforce the sandbox path policy before touching the real filesystem.
    if not FakeFile.IsFileAccessible(filename):
      raise IOError(errno.EACCES, 'file not accessible', filename)

    super(FakeFile, self).__init__(filename, mode, bufsize, **kwargs)