def regen_if_needed(src, out_path):
    """Compute source vs. target mtimes for the rst generated from *src*.

    :param src: dotted module name to document.
    :param out_path: path of the generated rst file.
    """
    module = load_module(src)
    # Staleness is driven by whichever changed last: the documented module
    # or this generator script itself.
    cur_time = max([int(os.stat(module.__file__).st_mtime),
                    int(os.stat(__file__).st_mtime)])
    try:
        trg_time = int(os.stat(out_path).st_mtime)
    # `except E as e` replaces the Python 2-only `except E, e` syntax.
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise
        # Target missing: force a mismatch so it reads as stale.
        trg_time = -1
def regen_if_needed(cls, base_path, src, out_name=None, force=False):
    """Compute source vs. target mtimes for the rst generated from *src*.

    :param base_path: directory the generated rst lives in.
    :param src: dotted module name to document.
    :param out_name: output basename; defaults to the last component of *src*.
    :param force: accepted for interface compatibility.
    """
    if out_name is None:
        out_name = src.rsplit(".", 1)[-1]
    out_path = pjoin(base_path, '%s.rst' % (out_name,))
    script_time = int(os.stat(__file__).st_mtime)
    module = load_module(src)
    # Staleness tracks whichever changed last: the module or this script.
    cur_time = int(os.stat(module.__file__).st_mtime)
    cur_time = max([cur_time, script_time])
    try:
        trg_time = int(os.stat(out_path).st_mtime)
    # `except E as e` replaces the Python 2-only `except E, e` syntax.
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise
        # Target missing: force a mismatch so it reads as stale.
        trg_time = -1
def test_it(self):
    """Check that the target namespace re-exports the compiled extension."""
    pkg_name, stub_name = self.ext_namespace.rsplit(".", 1)
    pkg = modules.load_module(pkg_name)
    so_path = os.path.join(
        os.path.dirname(pkg.__file__), '%s.so' % (stub_name,))
    # Without the compiled .so there is nothing to verify.
    if not os.path.exists(so_path):
        raise SkipTest(
            "for extension %r, path %r doesn't exist"
            % (self.ext_namespace, so_path))
    extension = modules.load_module(self.ext_namespace)
    if self.trg_attribute is None:
        return
    target_scope = modules.load_module(self.namespace)
    ext_obj, ext_full_name = extension, self.ext_namespace
    if self.src_attribute is not None:
        # Drill down into the extension to the specific source object.
        ext_obj = getattr(ext_obj, self.src_attribute)
        ext_full_name += '.%s' % (self.src_attribute,)
    trg_obj = getattr(target_scope, self.trg_attribute)
    exp_msg = ("expected to find object from %r at '%s.%s', but "
               "what's there isn't from the extension"
               % (ext_full_name, self.namespace, self.trg_attribute))
    self.assertIdentical(ext_obj, trg_obj, exp_msg)
def test_load_module(self):
    """load_module hands back the identical module object in every scenario."""
    # An already-imported module comes back as the same object.
    self.assertIdentical(
        modules.load_module('snakeoil.modules'), modules)
    # Likewise for a stdlib module.
    self.assertIdentical(modules.load_module('sys'), sys)
    # A missing submodule of an existing package raises FailedImport.
    self.assertRaises(
        modules.FailedImport, modules.load_module, 'snakeoil.__not_there')
    # So does a (hopefully :) missing top-level module/package.
    self.assertRaises(
        modules.FailedImport, modules.load_module, '__not_there')
    # "Unable to import"
    # pylint: disable=F0401
    # Top-level module that has not been imported yet.
    via_helper = modules.load_module('mod_test1')
    import mod_test1
    self.assertIdentical(mod_test1, via_helper)
    # In-package module that has not been imported yet.
    via_helper_pkg = modules.load_module('mod_testpack.mod_test2')
    from mod_testpack import mod_test2
    self.assertIdentical(mod_test2, via_helper_pkg)
def python_namespace_type(value, module=False, attribute=False):
    """
    return the object from python namespace that value specifies

    :param value: python namespace, snakeoil.modules for example
    :param module: if true, the object must be a module
    :param attribute: if true, the object must be a non-module
    :raises argparse.ArgumentTypeError: if the conditions aren't met,
        or import fails
    """
    try:
        if module:
            return modules.load_module(value)
        elif attribute:
            return modules.load_attribute(value)
        return modules.load_any(value)
    # `except E as err` replaces the Python 2-only `except E, err` syntax.
    except modules.FailedImport as err:
        # Chain the import failure onto the argparse-friendly error type.
        compatibility.raise_from(argparse.ArgumentTypeError(str(err)))
def regen_if_needed(src, out_path):
    """Regenerate the rst doc for *src* at *out_path* when it is out of date.

    :param src: dotted module name to document.
    :param out_path: path of the generated rst file.
    """
    module = load_module(src)
    # Stale if either the documented module or this generator script changed.
    cur_time = max([int(os.stat(module.__file__).st_mtime),
                    int(os.stat(__file__).st_mtime)])
    try:
        trg_time = int(os.stat(out_path).st_mtime)
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise
        # Target missing: force a mismatch so regeneration happens below.
        trg_time = -1
    if cur_time != trg_time:
        sys.stdout.write("regenerating rst for %s\n" % (src,))
        with open(out_path, "w") as f:
            generate_rst(src, module, f)
        # 0o644 replaces the Python 2-only octal literal 0644.
        os.chmod(out_path, 0o644)
        # Pin the output mtime to cur_time so the staleness check matches
        # exactly on the next run.
        os.utime(out_path, (cur_time, cur_time))
def regen_if_needed(cls, base_path, src, out_name=None, force=False):
    """Rebuild the rst for *src* when its sources are newer than the output."""
    if out_name is None:
        out_name = src.rsplit(".", 1)[-1]
    out_path = pjoin(base_path, '%s.rst' % (out_name,))
    generator_time = int(os.stat(__file__).st_mtime)
    module = load_module(src)
    # Freshness tracks whichever changed last: the module or this script.
    source_time = max([int(os.stat(module.__file__).st_mtime),
                       generator_time])
    try:
        target_time = int(os.stat(out_path).st_mtime)
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise
        # Missing output: guarantee a mismatch below.
        target_time = -1
    if force or source_time != target_time:
        cls(base_path, out_name, module.argparser, mtime=source_time).run()
def test_load_module(self):
    """load_module hands back the identical module object in every scenario."""
    # An already-imported module comes back as the same object.
    assert modules.load_module('snakeoil.modules') is modules
    # Likewise for a stdlib module.
    assert modules.load_module('sys') is sys
    # A missing submodule of an existing package raises FailedImport.
    with pytest.raises(modules.FailedImport):
        modules.load_module('snakeoil.__not_there')
    # So does a (hopefully :) missing top-level module/package.
    with pytest.raises(modules.FailedImport):
        modules.load_module('__not_there')
    # "Unable to import"
    # pylint: disable=F0401
    # Top-level module that has not been imported yet.
    via_helper = modules.load_module('mod_test1')
    import mod_test1
    assert mod_test1 is via_helper
    # In-package module that has not been imported yet.
    via_helper_pkg = modules.load_module('mod_testpack.mod_test2')
    from mod_testpack import mod_test2
    assert mod_test2 is via_helper_pkg
def module(self):
    """Import and return the backend module for this compression type."""
    # Backends live under snakeoil.compression with a leading underscore.
    backend = 'snakeoil.compression._%s' % (self.name,)
    return modules.load_module(backend)
def module(self):
    """Import and return this handler's underscore-prefixed backend module."""
    target = 'snakeoil.compression._%s' % (self.name, )
    return modules.load_module(target)
def initialize_cache(package, force=False):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: package object whose ``__path__`` entries are scanned
        for plugin modules.
    :param force: if true, ignore stored cache entries and rescan everything.
    :return: immutable mapping of plugin key -> sorted plugin data.
    """
    # package plugin cache, see above.
    package_cache = collections.defaultdict(set)
    # Module basenames already handled from an earlier __path__ entry;
    # earlier entries shadow later ones.
    seen_modnames = set()
    for path in package.__path__:
        # Check if the path actually exists first.
        try:
            modlist = listdir_files(path)
        except OSError as e:
            if e.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            continue
        stored_cache_name = pjoin(path, CACHE_FILENAME)
        stored_cache = _read_cache_file(package, stored_cache_name)
        if force:
            _clean_old_caches(path)
        # Directory cache, mapping modulename to
        # (mtime, set([keys]))
        # Only real plugin modules: .py files other than __init__.py.
        modlist = set(
            x for x in modlist
            if os.path.splitext(x)[1] == '.py'
            and x != '__init__.py')
        modlist.difference_update(seen_modnames)
        # NOTE(review): cache_stale is set below but never read in this
        # view — confirm whether it is vestigial.
        cache_stale = False
        # Hunt for modules.
        actual_cache = collections.defaultdict(set)
        # Memoize os.path.getmtime per file path.
        mtime_cache = mappings.defaultdictkey(
            lambda x: int(os.path.getmtime(x)))
        for modfullname in sorted(modlist):
            modname = os.path.splitext(modfullname)[0]
            # It is an actual module. Check if its cache entry is valid.
            mtime = mtime_cache[pjoin(path, modfullname)]
            vals = stored_cache.get((modname, mtime))
            if vals is None or force:
                # Cache entry is stale.
                logger.debug('stale because of %s: actual %s != stored %s',
                             modname, mtime,
                             stored_cache.get(modname, (0, ()))[0])
                cache_stale = True
                # NOTE(review): `entries` is assigned but never used in this
                # view — confirm whether it is vestigial.
                entries = []
                qualname = '.'.join((package.__name__, modname))
                try:
                    module = modules.load_module(qualname)
                except modules.FailedImport:
                    # This is a serious problem, but if we blow up
                    # here we cripple pkgcore entirely which may make
                    # fixing the problem impossible. So be noisy but
                    # try to continue.
                    logger.exception(
                        'plugin import failed for %s processing %s',
                        package.__name__, modname)
                    continue
                registry = getattr(module, PLUGIN_ATTR, {})
                vals = set()
                for key, plugs in registry.iteritems():
                    for idx, plug_name in enumerate(plugs):
                        if isinstance(plug_name, basestring):
                            # String entry: resolve it to the actual object.
                            plug = _process_plugin(
                                package,
                                _plugin_data(key, 0, qualname, plug_name))
                        else:
                            plug = plug_name
                        if plug is None:
                            # import failure, ignore it, error already logged
                            continue
                        priority = getattr(plug, 'priority', 0)
                        if not isinstance(priority, int):
                            logger.error(
                                "ignoring plugin %s: has a non integer priority: %s",
                                plug, priority)
                            continue
                        if plug_name is plug:
                            # this means it's an object, rather than a string; store
                            # the offset.
                            plug_name = idx
                        data = _plugin_data(key, priority, qualname, plug_name)
                        vals.add(data)
            # Record the (possibly rebuilt) entry and fold it into the
            # package-wide mapping.
            actual_cache[(modname, mtime)] = vals
            seen_modnames.add(modfullname)
            for data in vals:
                package_cache[data.key].add(data)
        # Rewrite the on-disk cache when forced or when the entry set changed.
        if force or set(stored_cache) != set(actual_cache):
            logger.debug('updating cache %r for new plugins', stored_cache_name)
            _write_cache_file(stored_cache_name, actual_cache)
    return mappings.ImmutableDict(
        (k, sort_plugs(v)) for k, v in package_cache.iteritems())
def initialize_cache(package, force=False):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: package object whose ``__path__`` entries are scanned
        for plugin modules.
    :param force: if true, ignore stored cache entries and rescan everything.
    """
    # package plugin cache, see above.
    package_cache = collections.defaultdict(set)
    # Module basenames already handled from an earlier __path__ entry.
    seen_modnames = set()
    for path in package.__path__:
        # Check if the path actually exists first.
        try:
            modlist = listdir_files(path)
        # `except E as e` replaces the Python 2-only `except E, e` syntax.
        except OSError as e:
            if e.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            continue
        stored_cache_name = pjoin(path, CACHE_FILENAME)
        stored_cache = _read_cache_file(package, stored_cache_name)
        if force:
            _clean_old_caches(path)
        # Directory cache, mapping modulename to
        # (mtime, set([keys]))
        # Only real plugin modules: .py files other than __init__.py.
        modlist = set(x for x in modlist
                      if os.path.splitext(x)[1] == '.py' and x != '__init__.py')
        modlist.difference_update(seen_modnames)
        cache_stale = False
        # Hunt for modules.
        actual_cache = collections.defaultdict(set)
        # Memoize os.path.getmtime per file path.
        mtime_cache = mappings.defaultdictkey(lambda x: int(os.path.getmtime(x)))
        for modfullname in sorted(modlist):
            modname = os.path.splitext(modfullname)[0]
            # It is an actual module. Check if its cache entry is valid.
            mtime = mtime_cache[pjoin(path, modfullname)]
            vals = stored_cache.get((modname, mtime))
            if vals is None or force:
                # Cache entry is stale.
                logger.debug(
                    'stale because of %s: actual %s != stored %s',
                    modname, mtime, stored_cache.get(modname, (0, ()))[0])
                cache_stale = True
                entries = []
                qualname = '.'.join((package.__name__, modname))
                try:
                    module = modules.load_module(qualname)
                except modules.FailedImport:
                    # This is a serious problem, but if we blow up
                    # here we cripple pkgcore entirely which may make
                    # fixing the problem impossible. So be noisy but
                    # try to continue.
                    logger.exception('plugin import failed for %s processing %s',
                                     package.__name__, modname)
                    continue
                registry = getattr(module, PLUGIN_ATTR, {})
                vals = set()
                for key, plugs in registry.iteritems():
                    for idx, plug_name in enumerate(plugs):
                        if isinstance(plug_name, basestring):
                            # String entry: resolve it to the actual object.
                            plug = _process_plugin(
                                package, _plugin_data(key, 0, qualname, plug_name))
                        else:
                            plug = plug_name
                        if plug is None:
                            # import failure, ignore it, error already logged
                            continue
                        priority = getattr(plug, 'priority', 0)
                        if not isinstance(priority, int):
                            logger.error(
                                "ignoring plugin %s: has a non integer priority: %s",
                                plug, priority)
                            continue
                        if plug_name is plug:
                            # this means it's an object, rather than a string; store
                            # the offset.
                            plug_name = idx
                        data = _plugin_data(key, priority, qualname, plug_name)
                        vals.add(data)
            # Record the (possibly rebuilt) entry and fold it into the
            # package-wide mapping.
            actual_cache[(modname, mtime)] = vals
            seen_modnames.add(modfullname)
            for data in vals:
                package_cache[data.key].add(data)
        # Rewrite the on-disk cache when forced or when the entry set changed.
        if force or set(stored_cache) != set(actual_cache):
            logger.debug('updating cache %r for new plugins', stored_cache_name)
            _write_cache_file(stored_cache_name, actual_cache)
    # NOTE(review): this variant builds package_cache but returns None —
    # confirm callers do not expect the aggregated mapping back.