def main(query, p, download_cad):
    """
    Look up Akkadian words in the CAD and other dictionaries.

    query should be an akkadian word. Diacritics on consonants count.
    Diacritics on vowels, not so much.
    """
    import configparser
    import shutil

    # --download-cad short-circuits every other option.
    if download_cad:
        from akkdict.fetchcad import download
        download()
        exit()

    if not query:
        print("missing argument [query].", file=sys.stderr)
        exit(1)

    # -p prints the CAD lookup result instead of opening dictionaries.
    if p:
        print(lookup("cad", query))
        exit()

    rcpath = home + "/.akkdictrc"
    cfg = configparser.ConfigParser()
    if cfg.read(rcpath):
        opendictionaries(query, cfg["dicts"], cfg["conf"]["command"])
    else:
        # First run: install the bundled default config, then ask the
        # user to customize it before retrying.
        print("Oops! you don't have a config file yet!",
              "Creating ~/.akkdictrc...", file=sys.stderr)
        from pkg_resources import ResourceManager
        rm = ResourceManager()
        shutil.copy(rm.resource_filename("akkdict", "conf.ini"), rcpath)
        print("Now, go edit ~/.akkdictrc for your local setup and then try",
              "the command again!", file=sys.stderr)
        exit(1)
def installed_location(filename):
    """Returns the full path for the given installed file or None if not found.
    """
    try:
        requirement = Requirement.parse("websnort")
        return ResourceManager().resource_filename(requirement, filename)
    except DistributionNotFound:
        # The websnort distribution is not installed at all.
        return None
def __init__(self, allowed_template):
    """Set up a template-source mock restricted to *allowed_template*."""
    self.allowed_template = allowed_template
    self.provider = get_provider(self.package_name)
    self.manager = ResourceManager()
    # Stand-in for jinja2's PackageLoader.get_source that answers
    # through our template_source hook instead of the real loader.
    mocked = mock.MagicMock(
        spec='jinja2.loaders.PackageLoader.get_source',
        autospec=True)
    mocked.side_effect = self.template_source
    self.src_mock = mocked
def get_package_loader(self, package, package_path):
    """Build a loader callable serving static files from *package*.

    The returned callable maps a request path to ``(basename, opener)``
    or ``(None, None)`` when the resource is missing.
    """
    from pkg_resources import (DefaultProvider, ResourceManager,
                               get_provider)

    loadtime = datetime.utcnow()
    provider = get_provider(package)
    manager = ResourceManager()
    # DefaultProvider means the package lives on the real filesystem,
    # so files can be opened directly instead of read into memory.
    filesystem_bound = isinstance(provider, DefaultProvider)

    def loader(path):
        if path is None:
            return None, None
        full = posixpath.join(package_path, path)
        if not provider.has_resource(full):
            return None, None
        name = posixpath.basename(full)
        if filesystem_bound:
            real_file = provider.get_resource_filename(manager, full)
            return name, self._opener(real_file)
        # Zipped/egg packages: serve the bytes from memory.
        data = provider.get_resource_string(manager, full)
        return name, lambda: (BytesIO(data), loadtime, len(data))

    return loader
def __init__(self, package_name, package_path='templates', encoding='utf-8'):
    """Bind this loader to the templates shipped in *package_name*."""
    provider = get_provider(package_name)
    self.provider = provider
    self.manager = ResourceManager()
    self.package_path = package_path
    self.encoding = encoding
    # True when the package is a plain on-disk directory.
    self.filesystem_bound = isinstance(provider, DefaultProvider)
def load_resource(self, rp=None, package='stackdio'):
    """
    Takes a relative path `rp`, and attempts to pull the full resource
    path using pkg_resources.
    """
    provider = get_provider(package)
    if rp is not None:
        return provider.get_resource_filename(ResourceManager(), rp)
    # No relative path given: hand back the package's own directory.
    return os.path.dirname(provider.module_path)
def __init__(self, module, env=None):
    """Initialize the Environment object

    :param module: The Module that serves as the basis for this botoweb application
    :param env: Optional environment file that overrides any settings in our config
    """
    self.module = module
    self._client_connection = None
    # Fall back to the BOTO_WEB_ENV environment variable when no env
    # file is passed explicitly.
    if not env:
        env = os.environ.get("BOTO_WEB_ENV")
    self.env = env
    # Config setup
    self.config = Config()
    self.config.env = self
    self.dist = get_provider(self.module)
    self.mgr = ResourceManager()
    # Base config ships inside the package under "conf".
    if self.dist.has_resource("conf"):
        self.config.update(self.get_config("conf"))
    # Environment-specific YAML overrides the package config.
    if env and os.path.exists(self.env):
        log.info("Loading environment: %s" % self.env)
        self.config.update(yaml.load(open(self.env, "r")))
    # Set up the DB shortcuts
    # NOTE(review): .has_key() suggests Python 2 or a custom Config
    # mapping type — confirm before porting to Python 3.
    if not self.config.has_key("DB"):
        self.config['DB'] = {
            "db_type": self.config.get("DB", "db_type", "SimpleDB"),
            "db_user": self.config.get("Credentials", "aws_access_key_id"),
            "db_passwd": self.config.get("Credentials", "aws_secret_access_key")
        }
    if self.config.has_key("auth_db"):
        self.config['DB']['User'] = {"db_name": self.config['auth_db']}
    if self.config.has_key("default_db"):
        self.config['DB']['db_name'] = self.config["default_db"]
    if self.config.has_key("session_db"):
        self.config['DB']['Session'] = {
            'db_name': self.config["session_db"]
        }
    # Bootstrap importing all db_classes for XMLize
    if self.config['botoweb'].has_key("handlers"):
        for handler in self.config['botoweb']['handlers']:
            if handler.has_key("db_class"):
                try:
                    db_class = find_class(handler['db_class'])
                except:
                    # Best-effort: log and register nothing for this handler.
                    log.exception("Could not load class: %s" % handler['db_class'])
                    db_class = None
                if db_class:
                    xmlize.register(db_class)
def __init__(self, package_name, package_path="templates", encoding="utf-8"):
    """Template loader reading from *package_name*'s packaged resources."""
    from pkg_resources import DefaultProvider, ResourceManager, get_provider

    provider = get_provider(package_name)
    self.provider = provider
    self.manager = ResourceManager()
    self.package_path = package_path
    self.encoding = encoding
    # Plain on-disk packages can serve template files directly.
    self.filesystem_bound = isinstance(provider, DefaultProvider)
def getTemplate(cls, fname):
    """Return the raw contents of templates/<fname> from this package."""
    module = sys.modules[__name__]
    provider = get_provider(module.__package__)
    manager = ResourceManager()
    resource = "/".join(['templates', fname])
    if provider.has_resource(resource):
        return provider.get_resource_string(manager, resource)
    raise Exception("Template not found: %s", fname)
def get_example_data(dataset_name):
    """
    This is a smart package loader that locates text files inside our package
    :param dataset_name:
    :return:
    """
    manager = ResourceManager()
    provider = get_provider('ebu_tt_live')
    resource_path = 'example_data/' + dataset_name
    return provider.get_resource_string(manager, resource_path)
def __init__(self, package_name, package_path='templates', encoding='utf-8'):
    """Bind this loader to *package_name*, serving from *package_path*."""
    from pkg_resources import DefaultProvider, ResourceManager, get_provider

    provider = get_provider(package_name)
    self._provider = provider
    self._manager = ResourceManager()
    self._encoding = encoding
    self._package_path = package_path
    # Direct filesystem access is only possible for on-disk packages.
    self._filesystem_bound = isinstance(provider, DefaultProvider)
def __init__(self, packages):
    """Set up a template loader spanning the given packages."""
    self.searchpath = ['templates']
    try:
        self.fsl = loaders.FileSystemLoader(self.searchpath)
    except Exception as exc:
        # Best effort: record the failure and continue without a
        # filesystem loader.
        log.error(exc)
    self.modules = packages
    self.packages = {}
    self.manager = ResourceManager()
    self.package_path = "templates"
    self.encoding = 'utf-8'
def __init__(self):
    """Validate configuration attributes and compile the file patterns."""
    # Table-driven validation: (value, expected type, failure message).
    checks = (
        (self.pathDocumentation, str, 'Invalid documentation path %s'),
        (self.packageName, str, 'Invalid package name %s'),
        (self.pathsTemplates, list, 'Invalid templates paths %s'),
        (self.patternTemplate, str, 'Invalid template pattern %s'),
        (self.patternCopy, str, 'Invalid template copy %s'),
        (self.packagePath, str, 'Invalid package path %s'),
    )
    for value, expected, message in checks:
        assert isinstance(value, expected), message % value
    super().__init__()
    self._packageProvider = get_provider(self.packageName)
    self._manager = ResourceManager()
    # Pre-compile the patterns used to classify template vs. copy files.
    self._rPatternTemplate = re.compile(self.patternTemplate)
    self._rPatternCopy = re.compile(self.patternCopy)
def get_data_file(filename):
    """Return full path to specified data file or None if not found.

    If a valid absolute path is provided it will be returned.
    """
    # An existing (absolute or cwd-relative) path wins outright.
    if os.path.exists(filename):
        return filename
    candidate = os.path.join(SOURCE_PATH, filename)
    if os.path.exists(candidate):
        return candidate
    # Fall back to the installed netsink distribution's data files.
    try:
        requirement = Requirement.parse("netsink")
        return ResourceManager().resource_filename(requirement, filename)
    except DistributionNotFound:
        return None
def executeProcess(self, assembly):
    """Run the assembly against the packaged config_test.xml; return the arg."""
    proc = assembly.create(solicit=TestSolicit)
    assert isinstance(proc, Processing)
    # use packageProvider (not os package) to access files from inside
    # the package (like config_test.xml)
    provider = get_provider(__name__)
    manager = ResourceManager()
    self.assertTrue(provider.has_resource('config_test.xml'), 'Xml Config file missing')
    stream = provider.get_resource_stream(manager, 'config_test.xml')
    solicit = proc.ctx.solicit(stream=stream, uri='file://%s' % 'config_test.xml')
    arg = proc.execute(FILL_ALL, solicit=solicit)
    assert isinstance(arg.solicit, TestSolicit)
    stream.close()
    return arg
def get_package_loader(self, package, package_path):
    """Return a loader(path) callable serving static files from *package*.

    The callable maps a request path to ``(basename, opener)``, or
    ``(None, None)`` when the path is missing or None.
    """
    from pkg_resources import DefaultProvider, ResourceManager, get_provider
    loadtime = datetime.utcnow()
    provider = get_provider(package)
    manager = ResourceManager()
    # On-disk packages can hand out real filenames; zipped ones cannot.
    filesystem_bound = isinstance(provider, DefaultProvider)

    def loader(path):
        # BUG FIX: the None check must come BEFORE posixpath.join —
        # join raises TypeError on a None component, so the original
        # `path is None` test after the join was unreachable.
        if path is None:
            return (None, None)
        path = posixpath.join(package_path, path)
        if not provider.has_resource(path):
            return (None, None)
        basename = posixpath.basename(path)
        if filesystem_bound:
            return (basename,
                    self._opener(provider.get_resource_filename(manager, path)))
        return (basename,
                lambda: (provider.get_resource_stream(manager, path),
                         loadtime, 0))

    return loader
def getFilesToImport(self, ext=None, context=None):
    """
    Return list of configuration files of given extension with full pathnames
    """
    if ext in self.pkg_names:
        files = self.pkg_confs[self.pkg_names.index(ext)]
        if files:
            filenames = []
            for f in files:
                filename = ResourceManager().resource_filename(ext, 'conf/' + f)
                filenames.append(filename)
                # if ZMSActions are included but no Provider available - create it
                # BUG FIX: the original read "fox x in ..." (a SyntaxError)
                # in both comprehensions; corrected to "for x in ...".
                if context is not None and '.metacmd.' in f:
                    meta_types = [x.meta_type for x in context.objectValues()]
                    if ('ZMSMetacmdProvider' not in meta_types) \
                            and ('ZMSMetacmdProviderAcquired' not in meta_types):
                        context.REQUEST.set('meta_type', 'ZMSMetacmdProvider')
                        context.manage_customizeSystem('Add', 'Manager', context.REQUEST['lang'], context.REQUEST)
            return filenames
def __init__(self, app=None, prefix='/static', cache_max_age=60, route_name='static'):
    """Register this handler for *app* and set its HTTP defaults."""
    # Track every instance per app in the class-level registry.
    self.__class__.__registry__[app] = self
    self.app = app
    self.route_name = route_name
    self.prefix = prefix
    self.sources = OrderedDict()
    self.resource_manager = ResourceManager()
    #: Default ``Cache-Control: max-age`` value
    self.cache_max_age = cache_max_age
    #: Add "Access-Control-Allow-Origin: *" header?
    self.access_control_allow_origin = '*'
    if app is not None:
        self.init_app(app)
def __init__(self):
    # Parallel lists describing each known extension package; index i in
    # every list refers to the same extension, so each branch below must
    # append exactly once per list to keep them aligned.
    self.pkg = {}
    self.pkg_names = []
    self.pkg_available = []
    self.pkg_hints = []
    self.pkg_infos = []
    self.pkg_ready = []
    self.pkg_confs = []
    self.pkg_installed = []
    self.pkg_urls = []
    # NOTE(review): iteritems() implies Python 2 — confirm before porting.
    for name, info in sorted(EXTENSIONS.iteritems()):
        self.pkg_names.append(name)
        self.pkg_available.append(info[0])
        self.pkg_hints.append(info[1])
        self.pkg_infos.append(info[3])
        self.pkg_urls.append(info[2])
        # str() of a found Distribution is "name version"; the split/len
        # check below detects whether the package is actually installed.
        package = str(WorkingSet().find(Requirement.parse(name))).split()
        if ((name in package) and (len(package) == 2)):
            # TODO: **Normalize Versions** acc. to `PEP 440`: http://legacy.python.org/dev/peps/pep-0440/
            # The version specified requires normalization, consider using '3.2.0.dev3' instead of '3.2.0dev3' etc. +
            # pip v6.0.6 does not append svn revision specified in `setup.cfg` as v1.5.6 before
            # => `zms.zms_version()` have to be adjusted too...
            self.pkg_installed.append(package[1].replace('.dev', 'dev').replace('dev0', 'dev'))
            self.pkg_ready.append(True)
            # Collect any conf/*.xml or conf/*.zip resources shipped with
            # the installed package; best-effort, hence the bare except.
            try:
                confres = ResourceManager().resource_listdir(name, 'conf')
            except:
                confres = None
            if confres:
                confxml = [ob for ob in confres if ob.endswith('.xml') or ob.endswith('.zip')]
                if len(confxml) > 0:
                    self.pkg_confs.append(confxml)
                else:
                    self.pkg_confs.append(None)
            else:
                self.pkg_confs.append(None)
        else:
            # Not installed: placeholders keep the list positions aligned.
            self.pkg_installed.append(None)
            self.pkg_confs.append(None)
            self.pkg_ready.append(False)
def build(output_basename, input_modules, selfinclude=False, separator="."):
    """ Creates a build of javascript files

    Writes <output_basename>.js (the concatenated modules, optionally
    wrapped with the modularjs bootstrap) and a YUI-compressed copy
    named <output_basename><separator>compressed.js.
    """
    distribution = get_distribution('modularjs')
    with open('%s.js' % output_basename, 'w') as output:
        if selfinclude:
            # Mark the file as a build and pull in the loader itself.
            output.write('var __build__ = true;\n')
            include('include', output)
        for input_module in input_modules:
            include(input_module, output)
        if selfinclude:
            output.write('\nmodularjs.init();')
    modularjslogger.info('Wrote %s.js' % output_basename)
    compressed_name = '%s%scompressed.js' % (output_basename, separator)
    with open(compressed_name, 'w') as output:
        # Run the bundled YUI compressor over the uncompressed build.
        yui = os.path.join('lib', 'yuicompressor-2.4.2.jar')
        jar = distribution.get_resource_filename(ResourceManager(), yui)
        p = subprocess.Popen(['java', '-jar', jar, '%s.js' % output_basename],
                             stdout=output)
        p.wait()
    # BUG FIX: the log message hard-coded ".compressed.js" while the
    # file written uses *separator*; log the actual filename.
    modularjslogger.info('Wrote %s' % compressed_name)
def get_htdocs_dirs(self):
    """Expose this plugin's htdocs directory to Trac's static handler."""
    htdocs = ResourceManager().resource_filename(__name__, 'htdocs')
    return [('querystatushelper', htdocs)]
import re
import os
import tw2.core as twc
from pkg_resources import ResourceManager

rm = ResourceManager()

tinymce_dir = twc.DirLink(modname=__name__, filename="static/tiny_mce")
#tinymce_js = twc.JSLink(modname=__name__, filename='static/tinymce.js')
#tinymce_css = twc.CSSLink(modname=__name__, filename='static/tinymce.css')
tinymce_js = twc.JSLink(modname=__name__,
                        filename='static/tiny_mce/tiny_mce_src.js',
                        init=twc.js_function('tinyMCE.init'))


def _get_available_languages():
    """List language codes for which TinyMCE ships a langs/<code>.js file."""
    pattern = re.compile(r'(\w+)\.js')
    locale_dir = rm.resource_filename(__name__, "static/tiny_mce/langs")
    matches = (pattern.match(name) for name in os.listdir(locale_dir))
    return [m.groups(0)[0] for m in matches if m]

from formencode.validators import UnicodeString, Validator
from genshi.core import Markup, stripentities
def get_contact_distance_map(structure_directory=INFO_DIRECTORY, westhof_vector=None, force_rebuild=False):
    """
    Returns contact distance map

    The contact distance map is cached in the user directory and updated
    when newer files are found.

    NOTE(review): Python 2 code (print statements, dict.itervalues) —
    keep running it under Python 2 or port deliberately.

    :param structure_directory: directory to look up structure information text files
    :param westhof_vector: list of factors to apply different weights to the
                           bonding family classes (defaults to ``[1, 1, ... ]``)
    :param force_rebuild: force rebuilding the distance map
    """
    # default: same weight for all families
    if not westhof_vector:
        westhof_vector = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]

    nucleotides = ["A", "U", "G", "C"]

    # build a dict of filenames
    # if a local file is present in the user directory it will take
    # precedence over the system wide shipped version
    structure_filenames = {}
    resource_manager = ResourceManager()
    for nt1 in nucleotides:
        for nt2 in nucleotides:
            ntpair = "%s-%s" % (nt1, nt2)
            local_file = structure_directory + os.sep + ntpair + ".txt"
            if os.path.isfile(local_file):
                structure_filenames[ntpair] = local_file
            else:
                structure_filenames[ntpair] = resource_manager.resource_filename(__name__, "structure_info/%s.txt" % ntpair)

    # try to use a cached version of the distance map if found and recent
    # and force_rebuild is False
    if not force_rebuild:
        try:
            cache_ok = True
            if os.path.isfile(CACHE_DISTANCEMAP):
                # Invalidate the cache if any source file is newer than it.
                cache_timestamp = os.path.getmtime(CACHE_DISTANCEMAP)
                for d in structure_filenames.itervalues():
                    if os.path.getmtime(d) > cache_timestamp:
                        cache_ok = False
                        print "Contact map cache out of date. Rebuilding..."
                        break
                if cache_ok:
                    with open(CACHE_DISTANCEMAP, "r") as f:
                        return pickle.load(f)
        except (IOError, pickle.PickleError, AttributeError, EOFError, IndexError):
            # A corrupt/unreadable cache is not fatal; rebuild from scratch.
            print "Contact map cache broken. Rebuilding..."

    print "Building contact distance map:"

    pdb_structure_dict = {}
    distance_map = {}

    for nt1 in nucleotides:
        for nt2 in nucleotides:
            distance_map_res_pair = {}
            pdb_codes = []
            residues = []

            # read the structures for the 12 edge-to-edge bonding families
            for line in utils.read_file_line_by_line(structure_filenames[nt1 + '-' + nt2]):
                fields = line.split(" ")
                if fields[0] != "-":
                    pdb_codes.append(fields[0].upper())
                    residues.append((int(fields[1]), int(fields[2])))
                else:
                    # "-" marks a family with no reference structure.
                    pdb_codes.append(None)
                    residues.append(None)

            # loop over all pdbcodes and their index in the list (0-11)
            for index, pdb_code in enumerate(pdb_codes):
                # skip if we don't have any entry for this family
                if pdb_code is None:
                    continue

                # download pdb if necessary
                if pdb_code not in pdb_structure_dict:
                    pdb_structure_dict[pdb_code] = pdbtools.parse_pdb(pdb_code, pdbtools.get_pdb_by_code(pdb_code))

                # extract model from pdb
                model = pdb_structure_dict[pdb_code][0]

                # try to find the residue contact specified. this is done by
                # looping over all chains in the model, and checking if the
                # residue is in there and is the correct nucleotide
                def find_res(res, resname):
                    for chain in model:
                        try:
                            if chain[res].get_resname().strip() == resname:
                                return chain[res]
                        except KeyError:
                            pass
                    return None

                res1 = find_res(residues[index][0], nt1)
                res2 = find_res(residues[index][1], nt2)

                if not res1 or not res2:
                    raise Exception("Could not find residue contact in pdb file: %s-%s %s %s %s" % (nt1, nt2, pdb_code, residues[index][0], residues[index][1]))

                print "%s-%s %s %s %s" % (nt1, nt2, pdb_code, residues[index][0], residues[index][1])

                # add all atom-atom contacts to the distance map for the
                # current residue pair (hydrogens are excluded)
                for atom1 in res1:
                    for atom2 in res2:
                        if not (atom1.name.startswith('H') or atom2.name.startswith('H')):
                            contact_key = str(atom1.name) + '-' + str(atom2.name)
                            # Biopython overloads "-" on atoms to mean distance.
                            distance = westhof_vector[index] * (atom1 - atom2)
                            if contact_key not in distance_map_res_pair:
                                distance_map_res_pair[contact_key] = [distance]
                            else:
                                distance_map_res_pair[contact_key].append(distance)

            distance_map[nt1 + nt2] = distance_map_res_pair

    # save distance map in cache
    utils.mkdir_p(CACHE_DIRECTORY)
    with open(CACHE_DISTANCEMAP, "w") as f:
        pickle.dump(distance_map, f)

    return distance_map
def get_htdocs_dirs(self):
    """Map the 'ldrize' URL namespace to this package's htdocs resources."""
    location = ResourceManager().resource_filename(__name__, 'htdocs')
    return [('ldrize', location)]
import zope.interface
from connection import XMLAConnection
import olap.xmla.interfaces as oxi
import olap.interfaces as ooi
from pkg_resources import ResourceManager

# Resolve the WSDL shipped with this package and use it as the default
# connection endpoint.
rm = ResourceManager()
defaultwsdl = "file://"+rm.resource_filename(__name__, "vs.wsdl")


class TREE_OP(object):
    # Bit-flag constants selecting which relatives of an OLAP member to
    # retrieve in tree operations; combine them with bitwise OR.
    CHILDREN = 0x01
    SIBLINGS = 0x02
    PARENT = 0x04
    SELF = 0x08
    DESCENDANTS = 0x10
    ANCESTORS = 0x20


class XMLAProvider(object):
    # NOTE(review): zope.interface.implements() is the Python 2
    # class-advice form; Python 3 needs the @implementer decorator.
    zope.interface.implements(ooi.IProvider)

    def connect(self, url=defaultwsdl, location=None, username=None, password=None, spn=None, sslverify=True, **kwargs):
        """Open an XMLASource for the given WSDL url and credentials."""
        return XMLASource(url, location, username, password, spn, sslverify, **kwargs)


class XMLAClass(object):
    zope.interface.implements(ooi.IOLAPSchemaElement)
def get_htdocs_dirs(self):
    """Tell Trac where this plugin's static files live on disk."""
    path = ResourceManager().resource_filename(__name__, 'htdocs')
    return [('contextchrome', path)]
def __init__(self, package_name, package_path):
    """Bind the loader to *package_name*'s resources under *package_path*."""
    super().__init__()
    from pkg_resources import ResourceManager, get_provider

    self.manager = ResourceManager()
    self.provider = get_provider(package_name)
    self.package_path = package_path
from datetime import datetime from genshi.builder import tag from genshi.filters.transform import Transformer from pkg_resources import ResourceManager from trac.cache import cached from trac.config import ListOption from trac.core import Component, implements from trac.ticket.api import ITicketManipulator, TicketSystem from trac.ticket.model import Ticket from trac.util.datefmt import format_datetime, from_utimestamp, to_timestamp, \ format_date, format_time from trac.web.api import ITemplateStreamFilter, IRequestFilter from trac.web.chrome import add_script, ITemplateProvider, add_stylesheet import re is_trac_ja = ResourceManager().resource_exists('trac.wiki', 'default-pages/TracJa') # Is patch need or not, for trac-ja from interact # https://twitter.com/#!/jun66j5/status/180856879155658753 by @jun66j5; # "まともな方法がなくて、前にやったのは trac/wiki/default-pages/TracJa # があるかどうかを pkg_resources.resource_filename で調べてました" class EpochField(Component): implements(ITemplateStreamFilter, ITemplateProvider, IRequestFilter, ITicketManipulator) date_columns = ListOption('epochfield', 'date_columns', '.*_date', doc=""" field-names you want to translate from epoch to date-string in regular-expressions."""
import zope.interface
from connection import XMLAConnection
import olap.xmla.interfaces as oxi
import olap.interfaces as ooi
from pkg_resources import ResourceManager

# Resolve the WSDL that ships with this package and expose it as the
# default connection endpoint.
rm = ResourceManager()
defaultwsdl = "file://" + rm.resource_filename(__name__, "vs.wsdl")


class TREE_OP(object):
    # Bit-flag constants selecting which relatives of an OLAP member to
    # retrieve in tree operations; combine them with bitwise OR.
    CHILDREN = 0x01
    SIBLINGS = 0x02
    PARENT = 0x04
    SELF = 0x08
    DESCENDANTS = 0x10
    ANCESTORS = 0x20


class XMLAProvider(object):
    # NOTE(review): zope.interface.implements() is the Python 2
    # class-advice form; Python 3 needs the @implementer decorator.
    zope.interface.implements(ooi.IProvider)

    def connect(self, url=defaultwsdl, location=None, username=None, password=None, spn=None, sslverify=True):
        """Create an XMLASource for the given WSDL url and credentials."""
        return XMLASource(url, location, username, password, spn, sslverify)
def get_htdocs_dirs(self):
    """Return the (namespace, directory) pairs of static files we serve."""
    directory = ResourceManager().resource_filename(__name__, 'htdocs')
    return [('statushistorychart', directory)]
'''Conway's Game of Life

Canned patterns.
'''
from pkg_resources import ResourceManager

Patterns = {}

_EXT = '.life'
_rm = ResourceManager()
_pkg = 'GameOfLife.patterns.data'

# Load every packaged *.life file into Patterns, keyed by the
# lower-cased base name of the file.
for fname in _rm.resource_listdir(_pkg, '.'):
    if not fname.endswith(_EXT):
        continue
    with _rm.resource_stream(_pkg, fname) as f:
        key = fname.split('.')[0].lower()
        Patterns.setdefault(key, f.read().decode('utf-8'))


class Pattern(object):

    def __init__(self, name, data=None):
        pass
'''Unit tests for file chooser
'''
from guitest.gtktest import GtkTestCase
from pkg_resources import ResourceManager
from path import path

from baudot.widget import FileFolderChooser

# Directory of sample fixtures shipped with the test package.
SAMPLES = path(ResourceManager().resource_filename(__package__, "samples"))


class FileFolderChooserTest(GtkTestCase):
    '''Unit tests for FileFolderChooser class
    '''

    def test_init(self):
        '''Test __init__ method
        '''
        chooser = FileFolderChooser()
        self.assertIsNotNone(chooser)

    def test_selection(self):
        '''Test file selection scenarios
        '''
        chooser = FileFolderChooser()
        # Nothing is selected right after construction.
        self.assertFalse(chooser.get_filenames())
        chooser.chooser.set_current_folder(SAMPLES / "dir1")
        empty_dir = SAMPLES / "dir1" / "empty"
        self.assertTrue(chooser.chooser.select_filename(empty_dir))
def get_htdocs_dirs(self):
    """Expose the packaged htdocs directory under the 'epochfield' prefix."""
    resolved = ResourceManager().resource_filename(__name__, 'htdocs')
    return [('epochfield', resolved)]
def setUp(self):
    """Create a ConvertCommand and locate the bundled samples directory."""
    self.cmd = ConvertCommand(None, None, None)
    samples_dir = ResourceManager().resource_filename(__package__, "samples")
    self.samples = path(samples_dir)