def test_ancestry(self):
    top = imp.new_module('top')
    top.bot = imp.new_module('top.bot')
    top.bot.end = imp.new_module('top.bot.end')
    sys.modules['top'] = top
    sys.modules['top.bot'] = top.bot
    sys.modules['top.bot.end'] = top.bot.end

    class P:
        pass
    top.bot.P = P
    P.__module__ = 'top.bot'

    csf = ContextSuiteFactory()
    P_ancestors = list(csf.ancestry(P))
    self.assertEqual(P_ancestors, [top.bot, top])
    end_ancestors = list(csf.ancestry(top.bot.end))
    self.assertEqual(end_ancestors, [top.bot, top])
    bot_ancestors = list(csf.ancestry(top.bot))
    self.assertEqual(bot_ancestors, [top])
    top_ancestors = list(csf.ancestry(top))
    self.assertEqual(top_ancestors, [])
def patch(cls, QtCore, QtGui, QtWidgets, PySide2):
    """
    Patches QtCore, QtGui and QtWidgets.

    :param QtCore: The QtCore module.
    :param QtGui: The QtGui module.
    :param QtWidgets: The QtWidgets module.
    :param PySide2: The PySide2 module.
    """
    qt_core_shim = imp.new_module("PySide.QtCore")
    qt_gui_shim = imp.new_module("PySide.QtGui")

    # Move everything from QtGui and QtWidgets onto the QtGui shim since
    # they belonged there in Qt 4.
    cls._move_attributes(qt_gui_shim, QtWidgets, dir(QtWidgets))
    cls._move_attributes(qt_gui_shim, QtGui, dir(QtGui))

    # Some classes from QtGui have been moved to QtCore, so put them back into QtGui.
    cls._move_attributes(qt_gui_shim, QtCore, cls._core_to_qtgui)
    # Move the rest of QtCore into the new core shim.
    cls._move_attributes(qt_core_shim, QtCore, set(dir(QtCore)) - cls._core_to_qtgui)

    cls._patch_QTextCodec(qt_core_shim)
    cls._patch_QCoreApplication(qt_core_shim)
    cls._patch_QApplication(qt_gui_shim)
    cls._patch_QAbstractItemView(qt_gui_shim)
    cls._patch_QStandardItemModel(qt_gui_shim)
    cls._patch_QMessageBox(qt_gui_shim)
    cls._patch_QDesktopServices(qt_gui_shim, qt_core_shim)

    return qt_core_shim, qt_gui_shim
def SetupWinEnvironment(self):
    """Mock windows includes."""
    pywintypes = imp.new_module("pywintypes")
    pywintypes.error = Exception
    sys.modules["pywintypes"] = pywintypes

    winfile = imp.new_module("win32file")
    winfile.GetVolumeNameForVolumeMountPoint = GetVolumeNameForVolumeMountPoint
    winfile.GetVolumePathName = GetVolumePathName
    sys.modules["win32file"] = winfile

    sys.modules["_winreg"] = imp.new_module("_winreg")
    sys.modules["ntsecuritycon"] = imp.new_module("ntsecuritycon")
    sys.modules["win32security"] = imp.new_module("win32security")
    sys.modules["win32api"] = imp.new_module("win32api")
    sys.modules["win32service"] = imp.new_module("win32service")
    sys.modules["win32process"] = imp.new_module("win32process")
    sys.modules["win32serviceutil"] = imp.new_module("win32serviceutil")
    sys.modules["winerror"] = imp.new_module("winerror")

    # Importing process.py pulls in lots of Windows specific stuff.
    # pylint: disable=g-import-not-at-top
    from grr_response_client import windows
    windows.process = None
def load(self, code, context={}):
    """Using an optional ``context``, a dictionary of key-value pairs, and the
    code object obtained from the :meth:`compilettl` method, create a new module
    instance, populating it with ``context`` and some standard references."""
    from tayra.runtime import Namespace
    import tayra.h as tmplh

    # Module instance for the ttl file
    module = imp.new_module(self.modulename)
    # Create helper module
    helper = imp.new_module('template_helper')
    filterfn = lambda k, v: callable(v)
    [helper.__dict__.update(pynamespace(m)) for m in [tmplh] + self['helpers']]
    ctxt = {
        self.igen.machname: self.mach,
        '_compiler': self,
        'this': Namespace(None, module),
        'local': module,
        'parent': None,
        'next': None,
        'h': helper,
        '__file__': self.pyfile,
        '_ttlfile': self.ttlfile,
        '_ttlhash': self.ttlloc.ttlhash,
    }
    ctxt.update(context)
    ctxt['_context'] = ctxt
    module.__dict__.update(ctxt)
    # Execute the code in the module's context
    sys.modules.setdefault(self.modulename, module)
    exec(code, module.__dict__, module.__dict__)
    return module
def load(self):
    # create assignment module
    if self.module_name:
        module = imp.new_module(self.module_name)
    else:
        module = imp.new_module('assignment')

    # add dependencies
    for dependency in self.assignment.dependencies:
        module.__dict__[dependency.__name__] = dependency

    # execute assignment module
    try:
        exec(self.assignment.source_code, module.__dict__)
    except Exception as err:
        self.assignment.reporter.onRuntimeError(err)
        return

    # create unit test module
    ut = imp.new_module('assignment_unittest_module')
    if self.module_name:
        ut.__dict__[self.module_name] = module
    else:
        for name, value in inspect.getmembers(module, inspect.isfunction):
            ut.__dict__[name] = value
    with open(self.unittest_path) as fin:
        exec(fin.read(), ut.__dict__)

    for name, obj in inspect.getmembers(ut, inspect.isclass):
        if issubclass(obj, unittest.TestCase):
            self.cases.append(obj)
def getSpaceFor(self, acl, module):
    """Return the appropriate space, or create a new one if it hasn't been
    assigned to yet.
    """
    # Do the logic to make the session if none, then the module if none.
    # This also reloads the session from the ZODB machinery, if any!
    if self.spaces.has_key(acl.sessid):
        if not self.spaces[acl.sessid].has_key(module):
            self.spaces[acl.sessid][module] = imp.new_module("space")
            space = self.spaces[acl.sessid][module]
            # This is needed to decide which init blocks have been run; may be filled on session reload
            space.INITSPACETRACKER = {}
            # This records which automatically generated ID numbers the session has been using
            space.AUTOGENTABLE = {}
        else:
            space = self.spaces[acl.sessid][module]
    else:
        # If the session doesn't exist, make the session, then the module.
        self.spaces[acl.sessid] = {module: imp.new_module("space")}
        space = self.spaces[acl.sessid][module]
        # This is needed to decide which init blocks have been run; may be filled on session reload
        space.INITSPACETRACKER = {}
        # This records which automatically generated ID numbers the session has been using
        space.AUTOGENTABLE = {}
    return space
def django_exceptions():
    import imp, sys
    if 'django' in sys.modules:
        django = sys.modules['django']
    else:
        django = imp.new_module('django')
    core = imp.new_module('core')
    exceptions = imp.new_module('exceptions')

    class ImproperlyConfigured(Exception):
        pass

    exceptions.__dict__.update({'ImproperlyConfigured': ImproperlyConfigured})
    core.__dict__.update({'exceptions': exceptions})
    django.__dict__.update({'core': core})
    sys.modules.update({
        'django': django,
        'django.core': core,
        'django.core.exceptions': exceptions
    })
    yield
    del_keys(sys.modules, 'django', 'django.core', 'django.core.exceptions')
def django_staticfiles():
    import imp, sys
    if 'django' in sys.modules:
        django = sys.modules['django']
    else:
        django = imp.new_module('django')
    contrib = imp.new_module('contrib')
    staticfiles = imp.new_module('staticfiles')

    @classmethod
    def raiser(*args, **kwargs):
        raise TestException

    staticfiles.__dict__.update({
        'finders': type('find', (object,), {
            'find': raiser,
        })
    })
    contrib.__dict__.update({'staticfiles': staticfiles})
    django.__dict__.update({'contrib': contrib})
    sys.modules.update({
        'django': django,
        'django.contrib': contrib,
        'django.contrib.staticfiles': staticfiles
    })
    yield
    del_keys(sys.modules, 'django', 'django.contrib', 'django.contrib.staticfiles')
def load_module(self, fullname):
    imp.acquire_lock()
    try:
        dprint('loading module {}'.format(fullname))
        if fullname in sys.modules:
            return sys.modules[fullname]
        mod = None
        c = None
        if self.extension == "py":
            mod = imp.new_module(fullname)
            mod.__name__ = fullname
            mod.__file__ = '<memimport>/{}'.format(self.path)
            mod.__loader__ = self
            if self.is_pkg:
                mod.__path__ = [mod.__file__.rsplit('/', 1)[0]]
                mod.__package__ = fullname
            else:
                mod.__package__ = fullname.rsplit('.', 1)[0]
            sys.modules[fullname] = mod
            code = compile(self.contents, mod.__file__, "exec")
            exec code in mod.__dict__
        elif self.extension in ["pyc", "pyo"]:
            mod = imp.new_module(fullname)
            mod.__name__ = fullname
            mod.__file__ = '<memimport>/{}'.format(self.path)
            mod.__loader__ = self
            if self.is_pkg:
                mod.__path__ = [mod.__file__.rsplit('/', 1)[0]]
                mod.__package__ = fullname
            else:
                mod.__package__ = fullname.rsplit('.', 1)[0]
            sys.modules[fullname] = mod
            c = marshal.loads(self.contents[8:])
            exec c in mod.__dict__
        elif self.extension in ("dll", "pyd", "so"):
            initname = "init" + fullname.rsplit(".", 1)[-1]
            path = fullname.replace(".", '/') + "." + self.extension
            dprint('Loading {} from memory'.format(fullname))
            dprint('init:{}, {}.{}'.format(initname, fullname, self.extension))
            mod = _memimporter.import_module(self.contents, initname, fullname, path)
            mod.__name__ = fullname
            mod.__file__ = '<memimport>/{}'.format(self.path)
            mod.__loader__ = self
            mod.__package__ = fullname.rsplit('.', 1)[0]
            sys.modules[fullname] = mod
    except Exception as e:
        if fullname in sys.modules:
            del sys.modules[fullname]
        import traceback
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_tb(exc_traceback)
        dprint('PupyPackageLoader: '
               'Error while loading package {} ({}) : {}'.format(
                   fullname, self.extension, str(e)))
        raise e
    finally:
        imp.release_lock()
    mod = sys.modules[fullname]  # reread the module in case it changed itself
    return mod
def _import(self, full_name, full_path):
    parent = full_name.rsplit('.', 1)[0]
    if parent not in sys.modules:
        sys.modules[parent] = imp.new_module(parent)
    sys.modules[full_name] = imp.new_module(full_name)
    sys.modules[full_name].__file__ = full_path
    with open(full_path, 'r') as mfd:
        exec mfd.read() in sys.modules[full_name].__dict__
def loadPluginsFixed(self, excludeFiles): """load and register plugins with apis. The directories in the list directories are searched in order for all files ending with .py or all directories. These are assumed to be possible plugins for WikidPad. All such files and directories are loaded as modules and if they have the WIKIDPAD_PLUGIN variable, are registered. Files and directories given in exludeFiles are not loaded at all. Also directories are searched in order for plugins. Therefore plugins appearing in earlier directories are not loaded from later ones.""" import imp exclusions = excludeFiles[:] for dirNum, directory in enumerate(self.directories): sys.path.append(os.path.dirname(directory)) if not os.access(mbcsEnc(directory, "replace")[0], os.F_OK): continue files = os.listdir(directory) if dirNum == self.systemDirIdx: packageName = "wikidpadSystemPlugins" else: packageName = "cruelimportExtensionsPackage%i_%i" % \ (id(self), dirNum) package = imp.new_module(packageName) package.__path__ = [directory] sys.modules[packageName] = package for name in files: try: module = None fullname = os.path.join(directory, name) ( moduleName, ext ) = os.path.splitext(name) if name in exclusions: continue if os.path.isfile(fullname): if ext == '.py': with open(fullname, "rb") as f: module = imp.load_module(packageName + "." + moduleName, f, fullname, (".py", "r", imp.PY_SOURCE)) elif ext == '.zip': module = imp.new_module( packageName + "." + moduleName) module.__path__ = [fullname] module.__zippath__ = fullname sys.modules[packageName + "." + moduleName] = module zi = zipimporter(fullname) co = zi.get_code("__init__") exec(co, module.__dict__) if module: setattr(package, moduleName, module) if hasattr(module, "WIKIDPAD_PLUGIN"): self.registerPlugin(module) except: traceback.print_exc() del sys.path[-1]
def load_module(self, fullname): imp.acquire_lock() try: #print "loading module %s"%fullname if fullname in sys.modules: return sys.modules[fullname] mod=None c=None if self.extension=="py": mod = imp.new_module(fullname) mod.__name__ = fullname mod.__file__ = ("<memimport>/%s" % self.path).replace("/",sep) mod.__loader__ = self if self.is_pkg: mod.__path__ = [mod.__file__.rsplit(sep,1)[0]] mod.__package__ = fullname else: mod.__package__ = fullname.rsplit('.', 1)[0] sys.modules[fullname]=mod code = compile(self.contents, mod.__file__, "exec") exec code in mod.__dict__ elif self.extension in ["pyc","pyo"]: mod = imp.new_module(fullname) mod.__name__ = fullname mod.__file__ = ("<memimport>/%s" % self.path).replace("/",sep) mod.__loader__ = self if self.is_pkg: mod.__path__ = [mod.__file__.rsplit(sep,1)[0]] mod.__package__ = fullname else: mod.__package__ = fullname.rsplit('.', 1)[0] sys.modules[fullname]=mod c=marshal.loads(self.contents[8:]) exec c in mod.__dict__ elif self.extension in ("dll","pyd","so"): initname = "init" + fullname.rsplit(".",1)[-1] path=fullname.replace(".","/")+"."+self.extension #print "Loading %s from memory"%fullname #print "init:%s, %s.%s"%(initname,fullname,self.extension) mod = _memimporter.import_module(self.contents, initname, fullname, path) mod.__name__=fullname mod.__file__ = ("<memimport>/%s" % self.path).replace("/",sep) mod.__loader__ = self mod.__package__ = fullname.rsplit('.',1)[0] sys.modules[fullname]=mod except Exception as e: if fullname in sys.modules: del sys.modules[fullname] import traceback exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_tb(exc_traceback) print "PupyPackageLoader: Error while loading package %s (%s) : %s"%(fullname, self.extension, str(e)) raise e finally: imp.release_lock() mod = sys.modules[fullname] # reread the module in case it changed itself return mod
def setUpModule():
    """Install NewClassDefinition into its proper submodule."""
    mod = imp.new_module('mod')
    mod.submod1 = imp.new_module('submod1')
    mod.submod1.submod2 = imp.new_module('submod2')
    sys.modules['mod'] = mod
    sys.modules['mod.submod1'] = mod.submod1
    sys.modules['mod.submod1.submod2'] = mod.submod1.submod2
    mod.submod1.submod2.NewClass = NewClassDefinition
def init_app_package(self, app_name):
    module = imp.new_module(frontik.magic_imp.gen_module_name(app_name))
    sys.modules[module.__name__] = module

    pages_module = imp.new_module(frontik.magic_imp.gen_module_name(app_name, "pages"))
    sys.modules[pages_module.__name__] = pages_module

    module.config = self.importer.imp_app_module(app_name, "config")

    return module
def SetupWinEnvironment(self):
    """Mock windows includes."""
    winreg = imp.new_module("_winreg")
    winreg.error = exceptions.Exception
    sys.modules["_winreg"] = winreg

    ntsecuritycon = imp.new_module("ntsecuritycon")
    sys.modules["ntsecuritycon"] = ntsecuritycon

    pywintypes = imp.new_module("pywintypes")
    pywintypes.error = Exception
    sys.modules["pywintypes"] = pywintypes

    winfile = imp.new_module("win32file")
    winfile.GetVolumeNameForVolumeMountPoint = GetVolumeNameForVolumeMountPoint
    winfile.GetVolumePathName = GetVolumePathName
    sys.modules["win32file"] = winfile

    win32security = imp.new_module("win32security")
    sys.modules["win32security"] = win32security

    win32api = imp.new_module("win32api")
    sys.modules["win32api"] = win32api

    win32service = imp.new_module("win32service")
    sys.modules["win32service"] = win32service

    win32serviceutil = imp.new_module("win32serviceutil")
    sys.modules["win32serviceutil"] = win32serviceutil

    winerror = imp.new_module("winerror")
    sys.modules["winerror"] = winerror
def main(): parser = argparse.ArgumentParser() pymake_args = { 'D': dict(action="store_true", default=False, help='Debug output.'), 'cold': dict(action="store_true", default=False, help="Cold run shell output."), } for k in pymake_args: pymake_args[k]['dest'] = "pymake_" + k arg = '-'*(1 if len(k)==1 else 2) + k parser.add_argument(arg, **pymake_args[k]) subparsers = parser.add_subparsers(dest="subparser") command_mod = imp.new_module('command_mod') dummy_mod = imp.new_module('dummy_mod') sys.modules['command_mod'] = command_mod pkg_path = os.path.dirname(pymake.__file__) api_code = compile("from pymake.api import *", pkg_path+"/api.py", "exec") exec(api_code, dummy_mod.__dict__) exec(api_code, command_mod.__dict__) pymake_file = find_pymake_file() if not pymake_file: print 'No Pymake file found. Exiting...' return with open(pymake_file) as f: code = f.read() user_code = compile(code, pymake_file, "exec") exec(user_code, command_mod.__dict__) functions = [f for f in dir(command_mod) if f not in dir(dummy_mod)] for f in sorted(functions): add_function(subparsers, command_mod, f) if len(sys.argv) < 2: args = parser.parse_args([env.DEFAULT_ACTION]) else: args = parser.parse_args() getLogger().setLevel(level=DEBUG if args.pymake_D else INFO) env.COLD = args.pymake_cold debug("Tasks found: %s", sorted(rules.keys())) debug("Passed command line arguments: %s", args) command, kwargs = pop_pymake_args(args, pymake_args) task = rules[command] task.run(kwargs)
def patch_fake_ryu_client():
    ryu_mod = imp.new_module('ryu')
    ryu_app_mod = imp.new_module('ryu.app')
    ryu_mod.app = ryu_app_mod
    ryu_app_mod.client = fake_ryu_client
    ryu_app_mod.rest_nw_id = fake_rest_nw_id
    return mock.patch.dict('sys.modules',
                           {'ryu': ryu_mod,
                            'ryu.app': ryu_app_mod,
                            'ryu.app.client': fake_ryu_client,
                            'ryu.app.rest_nw_id': fake_rest_nw_id})
def test_imp_new_module(self):
    x = imp.new_module('abc')
    sys.modules['abc'] = x
    x.foo = 'bar'
    import abc
    self.assertEqual(abc.foo, 'bar')

    y = imp.new_module('\r\n')
    sys.modules['xyz'] = y
    y.foo = 'foo'
    import xyz
    self.assertEqual(xyz.foo, 'foo')
def _new_module(in_package, name, is_package=False):
    if not in_package:
        m = imp.new_module(name)
    else:
        m = imp.new_module(in_package.__name__ + '.' + name)
    if is_package:
        m.__package__ = m.__name__  # surprisingly not the parent name
    else:
        m.__package__ = in_package.__name__
    if is_package:
        m.__path__ = [m.__name__.replace('.', '/')]
    return m
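# A hedged usage sketch for _new_module above; 'pkg' and 'child' are
# hypothetical names used purely for illustration, and imp is assumed to be
# imported as in the snippet.
pkg = _new_module(None, 'pkg', is_package=True)   # top-level package module
child = _new_module(pkg, 'child')                 # plain submodule of pkg
assert pkg.__path__ == ['pkg']
assert pkg.__package__ == 'pkg'
assert child.__name__ == 'pkg.child'
assert child.__package__ == 'pkg'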
def test_imp_new_module():
    x = imp.new_module('abc')
    sys.modules['abc'] = x
    x.foo = 'bar'
    import abc
    AreEqual(abc.foo, 'bar')

    y = imp.new_module('\r\n')
    sys.modules['xyz'] = y
    y.foo = 'foo'
    import xyz
    AreEqual(xyz.foo, 'foo')
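# A minimal sketch of the Python 3 replacement for the imp.new_module() calls
# used throughout these snippets: imp is deprecated, and types.ModuleType
# builds the same kind of bare module object. The module name 'abc_sketch' is
# hypothetical, chosen to avoid shadowing the stdlib 'abc' module used in the
# test above.
import sys
import types

mod = types.ModuleType('abc_sketch')
mod.foo = 'bar'
sys.modules['abc_sketch'] = mod

import abc_sketch
assert abc_sketch.foo == 'bar'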
def _import(self, full_name, full_path):
    # create parents as necessary
    parents = full_name.split('.')[2:]  # skip mailpile.plugins
    module = "mailpile.plugins"
    for parent in parents:
        module = '%s.%s' % (module, parent)
        if module not in sys.modules:
            sys.modules[module] = imp.new_module(module)

    # load actual module
    sys.modules[full_name] = imp.new_module(full_name)
    sys.modules[full_name].__file__ = full_path
    with open(full_path, 'r') as mfd:
        exec mfd.read() in sys.modules[full_name].__dict__
def prepare_modules(self): if self.distribution.has_pure_modules(): self.run_command('build_py') if self.distribution.has_ext_modules(): self.run_command('build_ext') if self.distribution.config_module: from Ft.Lib.DistExt.InstallConfig import METADATA_KEYS if self.distribution.config_module not in sys.modules: module = imp.new_module(self.distribution.config_module) sys.modules[self.distribution.config_module] = module else: module = sys.modules[self.distribution.config_module] for name in METADATA_KEYS: value = getattr(self.distribution, 'get_' + name)() setattr(module, name.upper(), value) # Add the build directory to the search path (sys.path). sys.path.insert(0, self.build_lib) # Enable importing of modules in namespace packages. for package in self.distribution.namespace_packages: packages = [package] while '.' in package: package = '.'.join(package.split('.')[:-1]) packages.insert(0, package) for package in packages: path = os.path.join(self.build_lib, *package.split('.')) if package not in sys.modules: module = sys.modules[package] = imp.new_module(package) module.__path__ = [path] else: module = sys.modules[package] try: search_path = module.__path__ except AttributeError: raise DistutilsSetupError("namespace package '%s' is" " not a package" % package) search_path.insert(0, path) # Any packages that are already imported also need their # search path (__path__) adjusted. for name in self.modules: if name in sys.modules: search_path = getattr(sys.modules[name], '__path__', None) if search_path is not None: path = os.path.join(self.build_lib, *name.split('.')) search_path.insert(0, path) return
def blockRecurse(path, modulename):
    if os.path.exists(path):
        if debug:
            print "Dummying " + modulename
        sys.modules[modulename] = imp.new_module(modulename)
        contents = os.listdir(path)
        for entry in contents:
            entryPath = path + "/" + entry
            if entry.endswith(".py") and os.path.isfile(entryPath):
                entryModulename = modulename + "." + entry[:len(entry)-3]
                if debug:
                    print "Dummying " + entryModulename
                sys.modules[entryModulename] = imp.new_module(entryModulename)
            elif os.path.isdir(entryPath):
                if debug:
                    print "Recursing into " + entryPath
                blockRecurse(entryPath, modulename + "." + entry)
def __init__( self ): import imp # Set attribute 'slicer.app' setattr( slicer, 'app', _qSlicerCoreApplicationInstance ) # Listen factory and module manager to update slicer.{modules, moduleNames} when appropriate moduleManager = slicer.app.moduleManager() # If the qSlicerApplication is only minimally initialized, the factoryManager # does *NOT* exist. # This would be the case if, for example, a commandline module wants to # use qSlicerApplication for tcl access but without all the managers. # Note: This is not the default behavior. if hasattr( moduleManager, 'factoryManager' ): factoryManager = moduleManager.factoryManager() factoryManager.connect( 'allModulesRegistered()', self.setSlicerModuleNames ) moduleManager.connect( 'moduleLoaded(qSlicerAbstractCoreModule*)', self.setSlicerModules ) # Add module 'slicer.moduleNames' _moduleNames = imp.new_module( 'moduleNames' ) setattr( slicer, _moduleNames.__name__, _moduleNames ) # Add module 'slicer.modules' _modules = imp.new_module( 'modules' ) setattr( slicer, _modules.__name__, _modules ) # Retrieve current instance of the scene and set 'slicer.mrmlScene' setattr( slicer, 'mrmlScene', slicer.app.mrmlScene() ) # HACK - Since qt.QTimer.singleShot is both a property and a static method, the property # is wrapped in python and prevent the call to the convenient static method having # the same name. To fix the problem, let's overwrite it's value. # Ideally this should be fixed in PythonQt itself. def _singleShot( msec, receiverOrCallable, member=None ): """Calls either a python function or a slot after a given time interval.""" # Add 'moduleManager' as parent to prevent the premature destruction of the timer. # Doing so, we ensure that the QTimer will be deleted before PythonQt is cleanup. # Indeed, the moduleManager is destroyed before the pythonManager. timer = qt.QTimer( slicer.app.moduleManager() ) timer.setSingleShot( True ) if callable( receiverOrCallable ): timer.connect( "timeout()", receiverOrCallable ) else: timer.connect( "timeout()", receiverOrCallable, member ) timer.start( msec ) qt.QTimer.singleShot = staticmethod( _singleShot )
def load_module(self, fullname):
    source = self.get_source(fullname)

    if fullname in sys.modules:
        print 'reusing existing module from previous import of "%s"' % fullname
        mod = sys.modules[fullname]
    else:
        print 'creating a new module object for "%s"' % fullname
        mod = sys.modules.setdefault(fullname, imp.new_module(fullname))

    # Set a few properties required by PEP 302
    mod.__file__ = self._get_filename(fullname)
    mod.__name__ = fullname
    mod.__path__ = self.path_entry
    mod.__loader__ = self
    mod.__package__ = '.'.join(fullname.split('.')[:-1])

    if self.is_package(fullname):
        print 'adding path for package'
        # Set __path__ for packages
        # so we can find the sub-modules.
        mod.__path__ = [self.path_entry]
    else:
        print 'imported as regular module'

    print 'execing source...'
    exec source in mod.__dict__
    print 'done'
    return mod
def load_module(self, name):
    module = imp.new_module(name)
    exec self.current_module_code in module.__dict__
    sys.modules[name] = module
    return module
def load_module(file_path):
    """
    Load a Python source file containing user code.

    @type file_path: string
    @param file_path: file path
    @return: Loaded Python module
    """
    try:
        return cache_modules[file_path]
    except KeyError:
        pass

    module = imp.new_module(WSCRIPT_FILE)
    try:
        code = Utils.readf(file_path, m='rU')
    except (IOError, OSError):
        raise Errors.WafError('Could not read the file %r' % file_path)

    module_dir = os.path.dirname(file_path)
    sys.path.insert(0, module_dir)
    exec(compile(code, file_path, 'exec'), module.__dict__)
    sys.path.remove(module_dir)

    cache_modules[file_path] = module
    return module
def Load_Module(fileName):
    """ Load a module without using load_module from the importer.
        In this way, we can change the name of the module in the built-in.
    """
    ### import zipfile model
    if zipfile.is_zipfile(fileName):
        importer = zipimport.zipimporter(fileName)

        ### change module name
        old_plugin_name = 'plugins'
        new_plugin_name = '%s.%s' % (os.path.basename(os.path.splitext(fileName)[0]), old_plugin_name)

        ### get code of plug-ins
        code = importer.get_code(old_plugin_name)

        # Create the new 'temp' module.
        temp = imp.new_module(new_plugin_name)
        sys.modules[new_plugin_name] = temp

        ### is there a syntax error?
        try:
            exec code in temp.__dict__
        except Exception, info:
            return info

        return sys.modules[new_plugin_name]
def _import_module(module_name, loaded=None):
    """
    Import the module.

    Import the module and track which modules have been loaded
    so we don't load already loaded modules.
    """
    # Pull in built-in and custom plugin directory
    if module_name.startswith("bh_modules."):
        path_name = join("Packages", "BracketHighlighter", normpath(module_name.replace('.', '/')))
    else:
        path_name = join("Packages", normpath(module_name.replace('.', '/')))
    path_name += ".py"
    if loaded is not None and module_name in loaded:
        module = sys.modules[module_name]
    else:
        module = imp.new_module(module_name)
        sys.modules[module_name] = module
        exec(
            compile(
                sublime.load_resource(sublime_format_path(path_name)),
                module_name,
                'exec'
            ),
            sys.modules[module_name].__dict__
        )
    return module
def make_module(name, objects):
    name = name.lower()
    module = imp.new_module(name)
    module._name = name.split('.')[-1]
    module._objects = objects
    module.__dict__.update((o.__name__, o) for o in objects)
    return module
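# A hedged usage sketch for make_module above; the Foo/Bar classes and the
# 'Demo.Widgets' dotted name are hypothetical, for illustration only, and imp
# is assumed to be imported as in the snippet.
class Foo(object):
    pass


class Bar(object):
    pass


widgets = make_module('Demo.Widgets', [Foo, Bar])
assert widgets._name == 'widgets'          # last dotted component, lowercased
assert widgets.Foo is Foo and widgets.Bar is Bar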
def load_module(self, name):
    module = imp.new_module(name)
    # The entries below originally carried base64+zlib-compressed source for the
    # bundled helper modules; the payload strings are elided here.
    ('util.commands', '<base64/zlib payload elided>'),
    ('util.retry', '<base64/zlib payload elided>'),
    ('util.git', '<base64/zlib payload elided>'),
]

### Load the compressed module sources ###
import sys, imp
for name, source in module_sources:
    source = source.decode("base64").decode("zlib")
    mod = imp.new_module(name)
    exec source in mod.__dict__
    sys.modules[name] = mod

### Original script follows ###
#!/usr/bin/python
"""%prog [-p|--props-file] [-r|--rev revision] [-b|--branch branch] [-s|--shared-dir shared_dir] repo [dest]

Tool to do safe operations with git.

revision/branch on commandline will override those in props-file"""

# Import snippet to find tools lib
import os
import site
def override_builtin(name):
    import imp, sys, os
    sys.modules[name] = module = imp.new_module(name)
    path = os.path.join(sys.prefix, 'Lib', name + '.py')
    execfile(path, module.__dict__)
except ImportError:  # pragma: no cover
    import os
    # should work if someone just set the $PYTHONPATH to include OPS
    directory = os.path.dirname(os.path.realpath(__file__))
    prev_dir = os.path.split(directory)[0]
    setupfile = os.path.join(prev_dir, "setup.py")
    if not os.path.exists(setupfile):
        # now we're screwed
        raise ImportError("Unable to identify OPS version. " +
                          "OPS probably not installed correctly.")
    # continue force-setting version based on `setup.py`
    import imp  # may be Py2 only!
    ops_setup = imp.load_source("ops_setup", setupfile)
    version = imp.new_module("openpathsampling.version")
    version.version = ops_setup.preferences['version']
    version.short_version = ops_setup.preferences['version']
    version.git_version = ops_setup.get_git_version()
    version.full_version = ops_setup.preferences['version']
    if not ops_setup.preferences['released']:
        version.full_version += ".dev-" + version.git_version[:7]
    isrelease = str(ops_setup.preferences['released'])

from .analysis.path_histogram import PathDensityHistogram
from .analysis.replica_network import (ReplicaNetwork,
                                       trace_ensembles_for_replica,
                                       trace_replicas_for_ensemble,
                                       condense_repeats,
                                       ReplicaNetworkGraph)
def main(self, argv): def addmpirun(parser): parser.add_argument("--mpirun", default=None, nargs='?', const="mpirun -n 4", help="launcher for MPI, e.g. mpirun -n 4") # In case we are run from the source directory, we don't want to import the # project from there: sys.path.pop(0) parser = ArgumentParser(usage=self.__doc__.lstrip()) parser.add_argument("--verbose", "-v", action="count", default=1, help="more verbosity") parser.add_argument( "--no-build", "-n", action="store_true", default=False, help="do not build the project (use system installed version)") parser.add_argument("--mpisub", action="store_true", default=False, help="run as a mpisub.") parser.add_argument("--mpisub-site-dir", default=None, help="site-dir in mpisub") parser.add_argument("--build-only", "-b", action="store_true", default=False, help="just build, do not run any tests") parser.add_argument("--doctests", action="store_true", default=False, help="Run doctests in module") parser.add_argument( "--refguide-check", action="store_true", default=False, help="Run refguide check (do not run regular tests.)") parser.add_argument( "--coverage", action="store_true", default=False, help=("report coverage of project code. HTML output goes " "under build/coverage")) parser.add_argument( "--gcov", action="store_true", default=False, help=("enable C code coverage via gcov (requires GCC). " "gcov output goes to build/**/*.gc*")) parser.add_argument( "--lcov-html", action="store_true", default=False, help=("produce HTML for C code coverage information " "from a previous run with --gcov. " "HTML output goes to build/lcov/")) parser.add_argument("--mode", "-m", default="fast", help="'fast', 'full', or something that could be " "passed to nosetests -A [default: fast]") parser.add_argument( "--submodule", "-s", default=None, help="Submodule whose tests to run (cluster, constants, ...)") parser.add_argument("--pythonpath", "-p", default=None, help="Paths to prepend to PYTHONPATH") parser.add_argument("--tests", "-t", action='append', help="Specify tests to run") parser.add_argument("--python", action="store_true", help="Start a Python shell with PYTHONPATH set") parser.add_argument("--ipython", "-i", action="store_true", help="Start IPython shell with PYTHONPATH set") parser.add_argument("--shell", action="store_true", help="Start Unix shell with PYTHONPATH set") parser.add_argument("--debug", "-g", action="store_true", help="Debug build") parser.add_argument( "--parallel", "-j", type=int, default=1, help="Number of parallel jobs during build (requires " "Numpy 1.10 or greater).") parser.add_argument( "--show-build-log", action="store_true", help="Show build output rather than using a log file") parser.add_argument("--bench", action="store_true", help="Run benchmark suite instead of test suite") parser.add_argument( "--bench-compare", action="append", metavar="BEFORE", help= ("Compare benchmark results of current HEAD to BEFORE. " "Use an additional --bench-compare=COMMIT to override HEAD with COMMIT. 
" "Note that you need to commit your changes first!")) parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER, help="Arguments to pass to Nose, Python or shell") addmpirun(parser) args = parser.parse_args(argv) if args.mpisub: args.no_build = True # master does the building if args.bench_compare: args.bench = True args.no_build = True # ASV does the building if args.lcov_html: # generate C code coverage output lcov_generate() sys.exit(0) if args.pythonpath: for p in reversed(args.pythonpath.split(os.pathsep)): sys.path.insert(0, p) if args.gcov: gcov_reset_counters() if args.debug and args.bench: print( "*** Benchmarks should not be run against debug version; remove -g flag ***" ) if not args.no_build: site_dir = self.build_project(args) sys.path.insert(0, site_dir) os.environ['PYTHONPATH'] = site_dir if args.mpisub_site_dir: site_dir = args.mpisub_site_dir sys.path.insert(0, site_dir) os.environ['PYTHONPATH'] = site_dir extra_argv = args.args[:] if extra_argv and extra_argv[0] == '--': extra_argv = extra_argv[1:] if args.python: if extra_argv: # Don't use subprocess, since we don't want to include the # current path in PYTHONPATH. sys.argv = extra_argv with open(extra_argv[0], 'r') as f: script = f.read() sys.modules['__main__'] = imp.new_module('__main__') ns = dict(__name__='__main__', __file__=extra_argv[0]) exec_(script, ns) sys.exit(0) else: import code code.interact() sys.exit(0) if args.ipython: import IPython IPython.embed(user_ns={}) sys.exit(0) if args.shell: shell = os.environ.get('SHELL', 'sh') print("Spawning a Unix shell...") if len(extra_argv) == 0: os.execv(shell, [shell]) else: os.execvp(extra_argv[0], extra_argv) sys.exit(1) if args.coverage: dst_dir = os.path.join(self.ROOT_DIR, 'build', 'coverage') fn = os.path.join(dst_dir, 'coverage_html.js') if os.path.isdir(dst_dir) and os.path.isfile(fn): shutil.rmtree(dst_dir) extra_argv += ['--cover-html', '--cover-html-dir=' + dst_dir] if args.refguide_check: cmd = [ os.path.join(self.ROOT_DIR, 'tools', 'refguide_check.py'), '--doctests' ] if args.submodule: cmd += [args.submodule] os.execv(sys.executable, [sys.executable] + cmd) sys.exit(0) if args.mpirun: parser = ArgumentParser() addmpirun(parser) args, additional = parser.parse_known_args() mpirun = args.mpirun.split() os.execvp( mpirun[0], mpirun + [ sys.executable, sys.argv[0], '--mpisub', '--mpisub-site-dir=' + site_dir ] + additional) sys.exit(1) test_dir = os.path.join(self.ROOT_DIR, 'build', 'test') if args.build_only: sys.exit(0) elif args.submodule: modname = self.PROJECT_MODULE + '.' + args.submodule try: __import__(modname) test = sys.modules[modname].test except (ImportError, KeyError, AttributeError) as e: print("Cannot run tests for %s (%s)" % (modname, e)) sys.exit(2) elif args.tests: def fix_test_path(x): # fix up test path p = x.split(':') p[0] = os.path.relpath(os.path.abspath(p[0]), self.ROOT_DIR) p[0] = os.path.join(site_dir, p[0]) return ':'.join(p) tests = [fix_test_path(x) for x in args.tests] def test(*a, **kw): extra_argv = kw.pop('extra_argv', ()) extra_argv = extra_argv + tests[1:] kw['extra_argv'] = extra_argv save = dict(globals()) from numpy.testing import Tester result = Tester(tests[0]).test(*a, **kw) # numpy tester messes up with globals. somehow. 
globals().update(save) return result else: __import__(self.PROJECT_MODULE) print("Using module", self.PROJECT_MODULE, " from", sys.modules[self.PROJECT_MODULE]) test = sys.modules[self.PROJECT_MODULE].test self.comm.barrier() if self.comm.rank == 0: # Run the tests under build/test try: shutil.rmtree(test_dir) except OSError: pass try: os.makedirs(test_dir) except OSError: pass self.comm.barrier() if args.mpisub: oldstdout = sys.stdout oldstderr = sys.stderr newstdout = StringIO() newstderr = StringIO() if self.comm.rank != 0: sys.stdout = newstdout sys.stderr = newstderr cwd = os.getcwd() result = None try: if args.mpisub: assert (os.path.exists(test_dir)) self.comm.barrier() os.chdir(test_dir) result = test(args.mode, verbose=args.verbose if self.comm.rank == 0 else 0, extra_argv=extra_argv + ['--quiet'] if self.comm.rank != 0 else [] + ['--stop'] if args.mpisub else [], doctests=args.doctests, coverage=args.coverage) except: if args.mpisub: self.sleep() oldstderr.write("Fatal Error on Rank %d\n" % self.comm.rank) oldstderr.write(traceback.format_exc()) oldstderr.flush() self.comm.Abort(-1) else: traceback.print_exc() sys.exit(1) finally: os.chdir(cwd) self.comm.barrier() code = 0 if isinstance(result, bool): code = 0 if result else 1 elif result.wasSuccessful(): code = 0 else: code = 1 if args.mpisub: if code != 0: # if any rank has a failure, print the error and abort the world. self.sleep() oldstderr.write("Test Failure due to rank %d\n" % self.comm.rank) oldstderr.write(newstdout.getvalue()) oldstderr.write(newstderr.getvalue()) oldstderr.flush() self.comm.Abort(-1) self.comm.barrier() with Rotator(self.comm): oldstderr.write("------ Test result from rank %d -----\n" % self.comm.rank) oldstderr.write(newstdout.getvalue()) oldstderr.write(newstderr.getvalue()) oldstderr.flush() sys.exit(0) else: sys.exit(code)
            writer(chunk)
    finally:
        if hasattr(response, 'close'):
            response.close()

lib.uwsgi_pypy_hook_loader = uwsgi_pypy_loader
lib.uwsgi_pypy_hook_file_loader = uwsgi_pypy_file_loader
lib.uwsgi_pypy_hook_pythonpath = uwsgi_pypy_pythonpath
lib.uwsgi_pypy_hook_request = uwsgi_pypy_wsgi_handler
lib.uwsgi_pypy_post_fork_hook = uwsgi_pypy_post_fork_hook

"""
Here we define the "uwsgi" virtual module
"""
uwsgi = imp.new_module('uwsgi')
sys.modules['uwsgi'] = uwsgi

uwsgi.version = ffi.string(lib.uwsgi_pypy_version)
uwsgi.hostname = ffi.string(lib.uwsgi.hostname)


def uwsgi_pypy_uwsgi_register_signal(signum, kind, handler):
    cb = ffi.callback('void(int)', handler)
    uwsgi_gc.append(cb)
    if lib.uwsgi_register_signal(signum, ffi.new("char[]", kind), cb, lib.pypy_plugin.modifier1) < 0:
        raise Exception("unable to register signal %d" % signum)
uwsgi.register_signal = uwsgi_pypy_uwsgi_register_signal
KML_TRANSLATABLE_FIELDS = ['name', 'street_address', 'www']
KML_REGEXP = r'application/vnd.google-earth\.kml'

LOCALE_PATHS = (
    os.path.join(BASE_DIR, 'locale'),
)

# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
f = os.path.join(BASE_DIR, "local_settings.py")
if os.path.exists(f):
    import sys
    import imp
    module_name = "%s.local_settings" % ROOT_URLCONF.split('.')[0]
    module = imp.new_module(module_name)
    module.__file__ = f
    sys.modules[module_name] = module
    exec(open(f, "rb").read())

if 'SECRET_KEY' not in locals():
    secret_file = os.path.join(BASE_DIR, '.django_secret')
    try:
        SECRET_KEY = open(secret_file).read().strip()
    except IOError:
        import random
        system_random = random.SystemRandom()
        try:
            SECRET_KEY = ''.join([system_random.choice(
                'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
                for i in range(64)])
            secret = open(secret_file, 'w')
import os
import sys
from xml.etree import ElementTree

try:
    # For python3
    import urllib.error
    import urllib.parse
    import urllib.request
except ImportError:
    # For python2
    import imp
    import urllib2
    import urlparse
    urllib = imp.new_module('urllib')
    urllib.error = urllib2
    urllib.parse = urlparse
    urllib.request = urllib2

DEBUG = False

default_manifest = ".repo/manifest.xml"
custom_local_manifest = ".repo/local_manifests/aosip_manifest.xml"
custom_default_revision = "oreo-mr1"
custom_dependencies = "aosip.dependencies"
org_manifest = "devices"  # leave empty if org is provided in manifest
org_display = "AOSiP-Devices"  # needed for displaying

github_auth = None
#!flask/bin/python
from flask_application import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from imp import new_module
from migrate.versioning import api

v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v + 1))
tmp_module = new_module('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec(old_model, tmp_module.__dict__)
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI,
                                          SQLALCHEMY_MIGRATE_REPO,
                                          tmp_module.meta, db.metadata)
open(migration, 'wt').write(script)
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print 'New migration saved as ' + migration
print 'Current database version: ' + str(v)
"""This is the visocyte.vtk module.""" import sys if sys.version_info <= (3, 4): # imp is deprecated in 3.4 import imp, importlib # import vtkmodules package. vtkmodules_m = importlib.import_module('vtkmodules') # import visocyte.pv-vtk-all all_m = importlib.import_module('visocyte.pv-vtk-all') # create a clone of the `vtkmodules.all` module. vtk_m = imp.new_module(__name__) for key in dir(all_m): if not hasattr(vtk_m, key): setattr(vtk_m, key, getattr(all_m, key)) # make the clone of `vtkmodules.all` act as a package at the same location # as vtkmodules. This ensures that importing modules from within the vtkmodules package # continues to work. vtk_m.__path__ = vtkmodules_m.__path__ # replace old `vtk` module with this new package. sys.modules[__name__] = vtk_m else: import importlib.util # import vtkmodules.all all_spec = importlib.util.find_spec('visocyte.pv-vtk-all') all_m = importlib.util.module_from_spec(all_spec)
import imp
from migrate.versioning import api
from app import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO

# Database migration
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v + 1))
tmp_module = imp.new_module('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec(old_model, tmp_module.__dict__)
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI,
                                          SQLALCHEMY_MIGRATE_REPO,
                                          tmp_module.meta, db.metadata)
open(migration, "wt").write(script)
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('New migration saved as ' + migration)
print('Current database version: ' + str(v))
from .utilities.launch import quick_app
from .extensions.tray import TimedProcessorTray
from .extensions.tray import MemorableTimedProcessorTray

# -- Quick was a convenience sub-module which became a little
# -- too convenient to put things in. Therefore its contents are
# -- now spread around. However, for the sake of backward compatibility
# -- we need to nest its functionality in a placeholder class
from .utilities.request import confirmation as _rerouted_confirm
from .utilities.request import text as _rerouted_getText
from .utilities.request import filepath as _rerouted_getFilepath
from .utilities.request import folderpath as _rerouted_getFolderPath
from .extensions.dividers import HorizontalDivider as _rerouted_horizontalDivider
from .extensions.buttons import CopyToClipboardButton as _rerouted_copyToClipBoardButton

if _py_version == 3:
    quick = types.ModuleType('name')
elif _py_version == 2:
    quick = imp.new_module('qute.quick')

quick.confirm = _rerouted_confirm
quick.getText = _rerouted_getText
quick.getFilepath = _rerouted_getFilepath
quick.getFolderPath = _rerouted_getFolderPath
quick.horizontalDivider = _rerouted_horizontalDivider
quick.copyToClipBoardButton = _rerouted_copyToClipBoardButton
quick.quick_app = quick_app
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import imp
import six
import linecache

class_str = """
#!/usr/bin/python
# -*- coding:utf-8 -*-


class Project(object):

    def __init__(self):
        self.value = 1
"""

mod = imp.new_module('project')
mod.__file__ = '<%s>' % 'project'
code = compile(class_str, "project", "exec")
six.exec_(code, mod.__dict__)
linecache.clearcache()
print mod.__dict__
def load_module(self, fullname): logger.debug("Running load_module for {0}...".format(fullname)) if fullname in sys.modules: mod = sys.modules[fullname] else: if self.kind in ( imp.PY_COMPILED, imp.C_EXTENSION, imp.C_BUILTIN, imp.PY_FROZEN, ): convert = False # elif (self.pathname.startswith(_stdlibprefix) # and 'site-packages' not in self.pathname): # # We assume it's a stdlib package in this case. Is this too brittle? # # Please file a bug report at https://github.com/PythonCharmers/python-future # # if so. # convert = False # in theory, other paths could be configured to be excluded here too elif any( [fullname.startswith(path) for path in self.exclude_paths]): convert = False elif any( [fullname.startswith(path) for path in self.include_paths]): convert = True else: convert = False if not convert: logger.debug("Excluded {0} from translation".format(fullname)) mod = imp.load_module(fullname, *self.found) else: logger.debug("Autoconverting {0} ...".format(fullname)) mod = imp.new_module(fullname) sys.modules[fullname] = mod # required by PEP 302 mod.__file__ = self.pathname mod.__name__ = fullname mod.__loader__ = self # This: # mod.__package__ = '.'.join(fullname.split('.')[:-1]) # seems to result in "SystemError: Parent module '' not loaded, # cannot perform relative import" for a package's __init__.py # file. We use the approach below. Another option to try is the # minimal load_module pattern from the PEP 302 text instead. # Is the test in the next line more or less robust than the # following one? Presumably less ... # ispkg = self.pathname.endswith('__init__.py') if self.kind == imp.PKG_DIRECTORY: mod.__path__ = [os.path.dirname(self.pathname)] mod.__package__ = fullname else: # else, regular module mod.__path__ = [] mod.__package__ = fullname.rpartition(".")[0] try: cachename = imp.cache_from_source(self.pathname) if not os.path.exists(cachename): update_cache = True else: sourcetime = os.stat(self.pathname).st_mtime cachetime = os.stat(cachename).st_mtime update_cache = cachetime < sourcetime # # Force update_cache to work around a problem with it being treated as Py3 code??? # update_cache = True if not update_cache: with open(cachename, "rb") as f: data = f.read() try: code = marshal.loads(data) except Exception: # pyc could be corrupt. Regenerate it update_cache = True if update_cache: if self.found[0]: source = self.found[0].read() elif self.kind == imp.PKG_DIRECTORY: with open(self.pathname) as f: source = f.read() if detect_python2(source, self.pathname): source = self.transform(source) with open("/tmp/futurized_code.py", "w") as f: f.write("### Futurized code (from %s)\n%s" % (self.pathname, source)) code = compile(source, self.pathname, "exec") dirname = os.path.dirname(cachename) try: if not os.path.exists(dirname): os.makedirs(dirname) with open(cachename, "wb") as f: data = marshal.dumps(code) f.write(data) except Exception: # could be write-protected pass exec(code, mod.__dict__) except Exception as e: # must remove module from sys.modules del sys.modules[fullname] raise # keep it simple if self.found[0]: self.found[0].close() return mod
def init(jugfile=None, jugdir=None, on_error='exit', store=None): ''' store,jugspace = init(jugfile={'jugfile'}, jugdir={'jugdata'}, on_error='exit', store=None) Initializes jug (create backend connection, ...). Imports jugfile Parameters ---------- jugfile : str, optional jugfile to import (default: 'jugfile') jugdir : str, optional jugdir to use (could be a path) on_error : str, optional What to do if import fails (default: exit) store : storage object, optional If used, this is returned as ``store`` again. Returns ------- store : storage object jugspace : dictionary ''' import imp from .options import set_jugdir assert on_error in ('exit', 'propagate'), 'jug.init: on_error option is not valid.' if jugfile is None: jugfile = 'jugfile' if store is None: store = set_jugdir(jugdir) sys.path.insert(0, os.path.abspath('.')) # The reason for this implementation is that it is the only that seems to # work with both barrier and pickle()ing of functions inside the jugfile # # Just doing __import__() will not work because if there is a BarrierError # thrown, then functions defined inside the jugfile end up in a confusing # state. # # Alternatively, just execfile()ing will make any functions defined in the # jugfile unpickle()able which makes mapreduce not work # # Therefore, we simulate (partially) __import__ and set sys.modules *even* # if BarrierError is raised. # jugmodname = os.path.basename(jugfile[:-len('.py')]) jugmodule = imp.new_module(jugmodname) jugmodule.__file__ = os.path.abspath(jugfile) jugspace = jugmodule.__dict__ sys.modules[jugmodname] = jugmodule jugfile_contents = open(jugfile).read() try: exec(compile(jugfile_contents, jugfile, 'exec'), jugspace, jugspace) except BarrierError: jugspace['__jug__hasbarrier__'] = True except Exception as e: logging.critical("Could not import file '%s' (error: %s)", jugfile, e) if on_error == 'exit': import traceback print(traceback.format_exc()) sys.exit(1) else: raise # The store may have been changed by the jugfile. store = Task.store return store, jugspace
class DeferredConnection(Connection):
    """ Experimental MySQL DA which implements deferred SQL code
        execution to reduce locking issues """
    meta_type = title = 'Z %s Deferred Database Connection' % database_type

    def factory(self):
        return DeferredDB

# BBB: Allow loading of deferred connections that were created
# before the merge of ZMySQLDDA into ZMySQLDA.
import sys, imp
m = 'Products.ZMySQLDDA'
assert m not in sys.modules, "please remove obsolete ZMySQLDDA product"
sys.modules[m] = imp.new_module(m)
m += '.DA'
sys.modules[m] = m = imp.new_module(m)
m.DeferredConnection = DeferredConnection
del m

__ac_permissions__ = (
    ('Add Z MySQL Database Connections',
     ('manage_addZMySQLConnectionForm', 'manage_addZMySQLConnection')),
)

misc_ = {'conn': ImageFile(
    os.path.join(SHARED_DC_ZRDB_LOCATION, 'www', 'DBAdapterFolder_icon.gif'))}
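# Hedged sketch of the BBB idea above in a generic form: instances pickled
# while a class lived under an old module path keep loading after the class
# moves, as long as sys.modules maps the old dotted path to a module that
# exposes the class. Every name below (old_pkg, new_pkg, Widget) is invented.
import pickle
import sys
import types

class Widget(object):
    pass

# Simulate the old layout: the class originally lived in old_pkg.widgets.
for _name in ('old_pkg', 'old_pkg.widgets'):
    sys.modules[_name] = types.ModuleType(_name)
Widget.__module__ = 'old_pkg.widgets'
sys.modules['old_pkg.widgets'].Widget = Widget
old_blob = pickle.dumps(Widget())          # a pickle made "before the move"

# Simulate the new layout: the class moves, but the old alias is kept around.
for _name in ('new_pkg', 'new_pkg.widgets'):
    sys.modules[_name] = types.ModuleType(_name)
Widget.__module__ = 'new_pkg.widgets'
sys.modules['new_pkg.widgets'].Widget = Widget

# sys.modules['old_pkg.widgets'] still resolves, so the old pickle loads.
assert isinstance(pickle.loads(old_blob), Widget)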
def _run():
    # Run the stepper's compiled code inside a scratch module whose namespace
    # is seeded from the captured scope. Note: `self` comes from the enclosing
    # method, and `exec ... in ...` is Python 2 statement syntax.
    module = imp.new_module('<python_stepper>')
    module.__dict__.update(self._scope)
    exec self._compiledCode in module.__dict__
def import_phantom_module(xml_file): """ Insert a fake Python module to sys.modules, based on a XML file. The XML file is expected to conform to Pydocweb DTD. The fake module will contain dummy objects, which guarantee the following: - Docstrings are correct. - Class inheritance relationships are correct (if present in XML). - Function argspec is *NOT* correct (even if present in XML). Instead, the function signature is prepended to the function docstring. - Class attributes are *NOT* correct; instead, they are dummy objects. Parameters ---------- xml_file : str Name of an XML file to read """ import lxml.etree as etree object_cache = {} tree = etree.parse(xml_file) root = tree.getroot() # Sort items so that # - Base classes come before classes inherited from them # - Modules come before their contents all_nodes = dict([(n.attrib['id'], n) for n in root]) def _get_bases(node, recurse=False): bases = [x.attrib['ref'] for x in node.findall('base')] if recurse: j = 0 while True: try: b = bases[j] except IndexError: break if b in all_nodes: bases.extend(_get_bases(all_nodes[b])) j += 1 return bases type_index = ['module', 'class', 'callable', 'object'] def base_cmp(a, b): x = cmp(type_index.index(a.tag), type_index.index(b.tag)) if x != 0: return x if a.tag == 'class' and b.tag == 'class': a_bases = _get_bases(a, recurse=True) b_bases = _get_bases(b, recurse=True) x = cmp(len(a_bases), len(b_bases)) if x != 0: return x if a.attrib['id'] in b_bases: return -1 if b.attrib['id'] in a_bases: return 1 return cmp(a.attrib['id'].count('.'), b.attrib['id'].count('.')) nodes = root.getchildren() nodes.sort(base_cmp) # Create phantom items for node in nodes: name = node.attrib['id'] doc = (node.text or '').decode('string-escape') + "\n" if doc == "\n": doc = "" # create parent, if missing parent = name while True: parent = '.'.join(parent.split('.')[:-1]) if not parent: break if parent in object_cache: break obj = imp.new_module(parent) object_cache[parent] = obj sys.modules[parent] = obj # create object if node.tag == 'module': obj = imp.new_module(name) obj.__doc__ = doc sys.modules[name] = obj elif node.tag == 'class': bases = [ object_cache[b] for b in _get_bases(node) if b in object_cache ] bases.append(object) init = lambda self: None init.__doc__ = doc obj = type(name, tuple(bases), {'__doc__': doc, '__init__': init}) obj.__name__ = name.split('.')[-1] elif node.tag == 'callable': funcname = node.attrib['id'].split('.')[-1] argspec = node.attrib.get('argspec') if argspec: argspec = re.sub('^[^(]*', '', argspec) doc = "%s%s\n\n%s" % (funcname, argspec, doc) obj = lambda: 0 obj.__argspec_is_invalid_ = True obj.func_name = funcname obj.__name__ = name obj.__doc__ = doc if inspect.isclass(object_cache[parent]): obj.__objclass__ = object_cache[parent] else: class Dummy(object): pass obj = Dummy() obj.__name__ = name obj.__doc__ = doc if inspect.isclass(object_cache[parent]): obj.__get__ = lambda: None object_cache[name] = obj if parent: if inspect.ismodule(object_cache[parent]): obj.__module__ = parent setattr(object_cache[parent], name.split('.')[-1], obj) # Populate items for node in root: obj = object_cache.get(node.attrib['id']) if obj is None: continue for ref in node.findall('ref'): if node.tag == 'class': if ref.attrib['ref'].startswith(node.attrib['id'] + '.'): setattr(obj, ref.attrib['name'], object_cache.get(ref.attrib['ref'])) else: setattr(obj, ref.attrib['name'], object_cache.get(ref.attrib['ref']))
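# Hedged mini-example of the "phantom object" idea above, without any XML
# parsing: a synthetic module plus a class built with type() that carries only
# a docstring. The names 'phantom_pkg' and 'PhantomClass' are invented.
import sys
import types

phantom = types.ModuleType('phantom_pkg')
phantom.__doc__ = "Placeholder package; everything inside it is a dummy."
sys.modules['phantom_pkg'] = phantom

PhantomClass = type('PhantomClass', (object,),
                    {'__doc__': 'Docstring only.', '__init__': lambda self: None})
PhantomClass.__module__ = 'phantom_pkg'
phantom.PhantomClass = PhantomClass

import phantom_pkg                       # resolved from sys.modules
assert phantom_pkg.PhantomClass.__doc__ == 'Docstring only.'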
def test_learn(args): machine = Machine("x86_64") # Compil tests log_info("Remove old files") os.system("make clean") log_info("Compile C files") status = os.system("make") assert status == 0 # Find test names c_files = [] for cur_dir, sub_dir, files in os.walk("."): c_files += [x[:-2] for x in files if x.endswith(".c")] # Ways to invoke to_invoke = { "Miasm": invoke_miasm, } if args.pin_tracer: to_invoke["PIN"] = invoke_pin # Learn + test fail = False for filename in c_files: if filename in unsupported: log_error("Skip %s (unsupported)" % filename) continue with open(filename) as fdesc: cont = Container.from_stream(fdesc) func_name = filename func_addr = cont.symbol_pool[func_name].offset header_filename = "%s.h" % filename for name, cb in to_invoke.iteritems(): log_info("Learning %s over %s with %s" % (func_name, filename, name)) cmdline = cb(filename, func_name, header_filename, cont) print " ".join(cmdline) sibyl = subprocess.Popen(cmdline, env=os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = sibyl.communicate() if sibyl.returncode != 0: log_error("Failed to learn with error:") print stderr fail = True continue log_info("Testing generated class") mod = imp.new_module("testclass") exec stdout in mod.__dict__ classTest = getattr(mod, "TESTS")[0] tl = TestLauncher(filename, machine, ABI_AMD64_SYSTEMV, [classTest], config.jit_engine) possible_funcs = tl.run(func_addr) if tl.possible_funcs and possible_funcs == [filename]: log_success("Generated class recognize the function " \ "'%s'" % func_name) else: log_error("Generated class failed to recognize the function " \ "'%s'" % func_name) fail = True # Clean log_info( "Remove old files" ) os.system("make clean") return fail
def run_python_file(filename, args, package=None, collector=None): """Run a python file as if it were the main program on the command line. `filename` is the path to the file to execute, it need not be a .py file. `args` is the argument array to present as sys.argv, including the first element naming the file being executed. `package` is the name of the enclosing package, if any. """ # Create a module to serve as __main__ old_main_mod = sys.modules['__main__'] main_mod = imp.new_module('__main__') sys.modules['__main__'] = main_mod main_mod.__file__ = filename if package: main_mod.__package__ = package main_mod.__builtins__ = sys.modules['__builtin__'] # Set sys.argv and the first path element properly. old_argv = sys.argv old_path0 = sys.path[0] sys.argv = args if package: sys.path[0] = '' else: sys.path[0] = os.path.abspath(os.path.dirname(filename)) try: # Open the source file. try: source_file = open(filename, 'rU') except IOError: raise NoSource("No file to run: %r" % filename) try: source = source_file.read() finally: source_file.close() # We have the source. `compile` still needs the last line to be clean, # so make sure it is, then compile a code object from it. if source[-1] != '\n': source += '\n' code = compile(source, filename, "exec") # Execute the source file. try: exec code in main_mod.__dict__ except SystemExit: # The user called sys.exit(). Just pass it along to the upper # layers, where it will be handled. raise except: # Something went wrong while executing the user code. # Get the exc_info, and pack them into an exception that we can # throw up to the outer loop. We peel two layers off the traceback # so that the coverage.py code doesn't appear in the final printed # traceback. typ, err, tb = sys.exc_info() raise ExceptionDuringRun(typ, err, tb.tb_next.tb_next) finally: # Restore the old __main__ sys.modules['__main__'] = old_main_mod # Restore the old argv and path sys.argv = old_argv sys.path[0] = old_path0
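# Hedged aside: on current Python the behaviour run_python_file() builds by
# hand (fresh __main__ module, patched sys.argv, compile + exec, restore on
# exit) is close to what the standard library's runpy module provides. A
# minimal sketch; run_as_main and its arguments are illustrative only.
import runpy
import sys

def run_as_main(filename, args):
    """Run `filename` as if it were the main program, with `args` as argv."""
    old_argv = sys.argv
    sys.argv = args                      # args[0] should name the script
    try:
        return runpy.run_path(filename, run_name='__main__')
    finally:
        sys.argv = old_argv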
def configure(name=None, config_file=None, config_object=None, **kargs): ''' Generate a dynamic context module that's pushed / popped on the application context stack. ''' script_name = sys.argv[0] # always add current working directory to the path regardless # was trying to avoid this but too much path munging happening if os.getcwd() not in sys.path: sys.path.insert(1, os.getcwd()) if config_object: if 'project_name' not in config_object: config_object['project_name'] = script_name # create a named tuple that's the combo of default plus input dict root_path = config_object['path'] = config_object.get('path', os.getcwd()) config = create_config_object(config_object) # add root path to config? elif config_file: # possible root path if relative filename specified possible_root_path = os.getcwd() root_path, filename = os.path.split(os.path.join(possible_root_path, config_file)) try: config_module = read_config_module(root_path=root_path, filename=filename) except IOError: log.exception("Config Error:\n" "File Error or File not " "found\n{0}".format(config_file)) sys.exit(1) config = create_config_object(create_config_dict_from_module(config_module)) elif kargs: if 'project_name' not in kargs: kargs['project_name'] = script_name root_path = kargs['path'] = kargs.get('path', os.getcwd()) config = create_config_object(kargs) else: root_path = os.getcwd() log.warning("Warning: Using current path for the config " "file {0}".format(root_path)) if root_path not in sys.path: sys.path.insert(1, root_path) try: config_module = read_config_module(root_path=root_path, filename='project.py') log.warning("Found project.py in default path, " "using for the config file") except IOError: config = create_config_object(default_config) log.warning("No config file, using default pybald configuration") else: config = create_config_object(create_config_dict_from_module(config_module)) # if the discovered root_path is not in sys.path, add it in the least ugly # way possible if root_path not in sys.path: sys.path.insert(1, root_path) if config.debug: from pybald.core.logs import default_debug_log default_debug_log() new_context = imp.new_module("context") if context._proxied() and hasattr(context._proxied(), 'unconfigured'): # if we're at the root, consume any placeholder values placeholder = context._pop() new_context.__dict__.update(placeholder) else: new_context.__dict__['controller_registry'] = [] # always set the runtime config new_context.__dict__['path'] = root_path new_context.__dict__['config'] = config new_context.__dict__['name'] = name new_context.__dict__['__file__'] = None new_context.__dict__['Unconfigured'] = Unconfigured # new_context.__dict__['__path__'] = None # now execute the app context with this config context._push(new_context) exec(compile(context_template, '<string>', 'exec'), new_context.__dict__) return new_context
def main(argv): parser = ArgumentParser(usage=__doc__.lstrip()) parser.add_argument("--verbose", "-v", action="count", default=1, help="more verbosity") parser.add_argument( "--no-build", "-n", action="store_true", default=False, help="do not build the project (use system installed version)") parser.add_argument("--build-only", "-b", action="store_true", default=False, help="just build, do not run any tests") parser.add_argument("--doctests", action="store_true", default=False, help="Run doctests in module") parser.add_argument( "--coverage", action="store_true", default=False, help=("report coverage of project code. HTML output goes " "under build/coverage")) parser.add_argument( "--gcov", action="store_true", default=False, help=("enable C code coverage via gcov (requires GCC). " "gcov output goes to build/**/*.gc*")) parser.add_argument("--lcov-html", action="store_true", default=False, help=("produce HTML for C code coverage information " "from a previous run with --gcov. " "HTML output goes to build/lcov/")) parser.add_argument("--mode", "-m", default="fast", help="'fast', 'full', or something that could be " "passed to nosetests -A [default: fast]") #parser.add_argument("--submodule", "-s", default=None, #help="Submodule whose tests to run (cluster, constants, ...)") parser.add_argument("--pythonpath", "-p", default=None, help="Paths to prepend to PYTHONPATH") parser.add_argument("--tests", "-t", action='append', help="Specify tests to run") parser.add_argument("--python", action="store_true", help="Start a Python shell with PYTHONPATH set") parser.add_argument("--ipython", "-i", action="store_true", help="Start IPython shell with PYTHONPATH set") parser.add_argument("--shell", action="store_true", help="Start Unix shell with PYTHONPATH set") parser.add_argument("--debug", "-g", action="store_true", help="Debug build") parser.add_argument("--show-build-log", action="store_true", help="Show build output rather than using a log file") parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER, help="Arguments to pass to Nose, Python or shell") args = parser.parse_args(argv) if args.lcov_html: # generate C code coverage output lcov_generate() sys.exit(0) if args.pythonpath: for p in reversed(args.pythonpath.split(os.pathsep)): sys.path.insert(0, p) if args.gcov: gcov_reset_counters() if not args.no_build: site_dir = build_project(args) sys.path.insert(0, site_dir) os.environ['PYTHONPATH'] = site_dir extra_argv = args.args[:] if extra_argv and extra_argv[0] == '--': extra_argv = extra_argv[1:] if args.python: if extra_argv: # Don't use subprocess, since we don't want to include the # current path in PYTHONPATH. sys.argv = extra_argv with open(extra_argv[0], 'r') as f: script = f.read() sys.modules['__main__'] = imp.new_module('__main__') ns = dict(__name__='__main__', __file__=extra_argv[0]) exec_(script, ns) sys.exit(0) else: import code code.interact() sys.exit(0) if args.ipython: import IPython IPython.embed(user_ns={}) sys.exit(0) if args.shell: shell = os.environ.get('SHELL', 'sh') print("Spawning a Unix shell...") os.execv(shell, [shell] + extra_argv) sys.exit(1) if args.coverage: dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage') fn = os.path.join(dst_dir, 'coverage_html.js') if os.path.isdir(dst_dir) and os.path.isfile(fn): shutil.rmtree(dst_dir) extra_argv += ['--cover-html', '--cover-html-dir=' + dst_dir] test_dir = os.path.join(ROOT_DIR, 'build', 'test') if args.build_only: sys.exit(0) #elif args.submodule: #modname = PROJECT_MODULE + '.' 
+ args.submodule #try: #__import__(modname) #test = sys.modules[modname].test #except (ImportError, KeyError, AttributeError): #print("Cannot run tests for %s" % modname) #sys.exit(2) elif args.tests: def fix_test_path(x): # fix up test path p = x.split(':') p[0] = os.path.relpath(os.path.abspath(p[0]), test_dir) return ':'.join(p) tests = [fix_test_path(x) for x in args.tests] def test(*a, **kw): extra_argv = kw.pop('extra_argv', ()) extra_argv = extra_argv + tests[1:] kw['extra_argv'] = extra_argv from numpy.testing import Tester return Tester(tests[0]).test(*a, **kw) else: __import__(PROJECT_MODULE) test = sys.modules[PROJECT_MODULE].test # Run the tests under build/test try: shutil.rmtree(test_dir) except OSError: pass try: os.makedirs(test_dir) except OSError: pass cwd = os.getcwd() try: os.chdir(test_dir) result = test(args.mode, verbose=args.verbose, extra_argv=extra_argv, doctests=args.doctests, coverage=args.coverage) finally: os.chdir(cwd) if result.wasSuccessful(): sys.exit(0) else: sys.exit(1)
def load_module(self, name): if LEGACY: imp.acquire_lock() logger.debug("LOADER=================") logger.debug("[+] Loading %s" % name) if name in sys.modules and not RELOAD: logger.info('[+] Module "%s" already loaded!' % name) if LEGACY: imp.release_lock() return sys.modules[name] if name.split('.')[-1] in sys.modules and not RELOAD: logger.info('[+] Module "%s" loaded as a top level module!' % name) if LEGACY: imp.release_lock() return sys.modules[name.split('.')[-1]] if self.is_archive: zip_name = self._mod_to_paths(name) if not zip_name in self._paths: logger.info( '[-] Requested module/package "%s" name not available in Archive file list!' % zip_name) if LEGACY: imp.release_lock() raise ImportError(zip_name) module_url = self.base_url + '%s.py' % name.replace('.', '/') package_url = self.base_url + '%s/__init__.py' % name.replace('.', '/') final_url = None final_src = None if self.is_archive: package_src = _open_archive_file(self.archive, zip_name, 'r', zip_pwd=self.__zip_pwd).read() logger.info('[+] Source from zipped file "%s" loaded!' % zip_name) final_src = package_src else: try: logger.debug("[+] Trying to import as package from: '%s'" % package_url) package_src = None if self.non_source: # Try the .pyc file package_src = self.__fetch_compiled(package_url) if package_src == None: package_src = urlopen(package_url).read() final_src = package_src final_url = package_url except IOError as e: package_src = None logger.info("[-] '%s' is not a package:" % name) if final_src == None: try: logger.debug("[+] Trying to import as module from: '%s'" % module_url) module_src = None if self.non_source: # Try the .pyc file module_src = self.__fetch_compiled(module_url) if module_src == None: # .pyc file not found, falling back to .py module_src = urlopen(module_url).read() final_src = module_src final_url = module_url except IOError as e: module_src = None logger.info("[-] '%s' is not a module:" % name) logger.warning( "[!] '%s' not found in HTTP repository. Moving to next Finder." % name) if LEGACY: imp.release_lock() return None logger.debug("[+] Importing '%s'" % name) if LEGACY: mod = imp.new_module(name) else: mod = types.ModuleType(name) mod.__loader__ = self mod.__file__ = final_url if not package_src: mod.__package__ = name else: mod.__package__ = name.split('.')[0] try: mod.__path__ = ['/'.join(mod.__file__.split('/')[:-1]) + '/'] except: mod.__path__ = self.base_url logger.debug("[+] Ready to execute '%s' code" % name) sys.modules[name] = mod exec(final_src, mod.__dict__) logger.info("[+] '%s' imported succesfully!" % name) if LEGACY: imp.release_lock() return mod
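# Hedged aside (not part of the HTTP importer above): on Python 3 the module
# object that load_module() assembles by hand -- __loader__, __file__,
# __package__, the sys.modules entry -- can be derived from a ModuleSpec so
# that importlib fills in the metadata. StringLoader and import_source_string
# are invented names for this illustration.
import importlib.abc
import importlib.util
import sys

class StringLoader(importlib.abc.Loader):
    """Executes a module whose source is already held as a string."""

    def __init__(self, source):
        self._source = source

    def create_module(self, spec):
        return None                      # use the default module creation

    def exec_module(self, module):
        code = compile(self._source, module.__spec__.origin, 'exec')
        exec(code, module.__dict__)

def import_source_string(fullname, source, origin='<in-memory>'):
    spec = importlib.util.spec_from_loader(fullname, StringLoader(source),
                                           origin=origin)
    module = importlib.util.module_from_spec(spec)   # sets __spec__, __loader__
    sys.modules[fullname] = module
    spec.loader.exec_module(module)
    return module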
import threading import socket import time import yaml import tempfile from contextlib import contextmanager from beets.util import py3_path, bluelet from beetsplug import bpd import confuse # Mock GstPlayer so that the forked process doesn't attempt to import gi: import mock import imp gstplayer = imp.new_module("beetsplug.bpd.gstplayer") def _gstplayer_play(*_): # noqa: 42 bpd.gstplayer._GstPlayer.playing = True return mock.DEFAULT gstplayer._GstPlayer = mock.MagicMock(spec_set=[ "time", "volume", "playing", "run", "play_file", "pause", "stop",
def get_bare_module(modname, modpath): mod = imp.new_module(modname) mod.__file__ = modpath return mod
import pyxb.namespace
import pyxb.binding.generate  # needed for the Generator instances below
import sys
import imp
import os.path

te_generator = pyxb.binding.generate.Generator(allow_absent_module=True, generate_to_files=False)
te_generator.setSchemaRoot( os.path.realpath('%s/../schemas' % (os.path.dirname(__file__), )))
te_generator.addSchemaLocation('test-external.xsd')

# Create a module into which we'll stick the shared types bindings.
# Put it into the sys modules so the import directive in subsequent
# code is resolved.
st = imp.new_module('st')
sys.modules['st'] = st

# Now get the code for the shared types bindings, and evaluate it
# within the new module.
st_generator = pyxb.binding.generate.Generator(allow_absent_module=True, generate_to_files=False)
st_generator.setSchemaRoot( os.path.realpath('%s/../schemas' % (os.path.dirname(__file__), )))
st_generator.addSchemaLocation('shared-types.xsd')
st_modules = st_generator.bindingModules()
assert 1 == len(st_modules)
code = st_modules.pop().moduleContents()
file('st.py', 'w').write(code)
# Pull python code out of vim file _all_code = open(VIM_FILE).read() _match = re.search(r"python << endpython(.*)endpython", _all_code, flags=re.DOTALL) if not _match: raise RuntimeError('Could not find python code in file %s' % VIM_FILE) PY_CODE = _match.groups()[0] # Make something that looks like the vim module assert "vim" not in sys.modules sys.path.insert(0, THIS_DIR) import fakevim sys.modules["vim"] = fakevim # And something that looks like the vim_bridge module. This only needs to give # a null decorator. vim_bridge = imp.new_module('vim_bridge') vim_bridge.bridged = lambda x : x sys.modules['vim_bridge'] = vim_bridge exec(PY_CODE) def test_is_underline(): for char in SECTION_CHARS: for n in range(1,4): line = char * n assert_true(is_underline(line)) assert_false(is_underline('')) assert_false(is_underline('aa')) assert_false(is_underline('+++=+'))
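# Hedged aside: the "fake a dependency before exec()ing the code under test"
# trick used above for vim and vim_bridge can also be scoped with
# unittest.mock.patch.dict so the stub does not leak into other tests.
# 'heavy_dep' is an invented module name for this sketch.
import sys
import types
from unittest import mock

fake_dep = types.ModuleType('heavy_dep')
fake_dep.bridged = lambda f: f           # null decorator, as with vim_bridge

with mock.patch.dict(sys.modules, {'heavy_dep': fake_dep}):
    import heavy_dep                     # resolves to the stub
    assert heavy_dep.bridged(len) is len
# outside the with-block the stub is removed again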
... 'step', ... 'continue', ... ]): ... skip_module() > <doctest test.test_pdb.test_pdb_skip_modules[0]>(4)skip_module() -> string.capwords('FOO') (Pdb) step --Return-- > <doctest test.test_pdb.test_pdb_skip_modules[0]>(4)skip_module()->None -> string.capwords('FOO') (Pdb) continue """ # Module for testing skipping of module that makes a callback mod = imp.new_module('module_to_skip') exec('def foo_pony(callback): x = 1; callback(); return None', mod.__dict__) def test_pdb_skip_modules_with_callback(): """This illustrates skipping of modules that call into other code. >>> def skip_module(): ... def callback(): ... return None ... import pdb; pdb.Pdb(skip=['module_to_skip*'], nosigint=True).set_trace() ... mod.foo_pony(callback) >>> with PdbTestInput([ ... 'step', ... 'step',
def __init__(self, mod_name): self.mod_name = mod_name self.module = imp.new_module(mod_name) self._saved_module = []