def GetJob(race):
    """Roll a random career for *race* and look up the class it belongs to.

    Each race has a dict in ``career_table.pickle`` mapping a career name to
    the list of d100 results that select it (e.g. ``'Nun': [4, 5]``);
    ``classes_table.pickle`` maps a career back to its Class grouping.

    @ params: race
    @ returns: (class, career) on a matching roll, otherwise 0
    """
    file_manager = ExitStack()
    atexit.register(file_manager.close)

    career_ref = importlib_resources.files('Pickles') / 'career_table.pickle'
    career_path = file_manager.enter_context(
        importlib_resources.as_file(career_ref))
    class_ref = importlib_resources.files('Pickles') / 'classes_table.pickle'
    class_path = file_manager.enter_context(
        importlib_resources.as_file(class_ref))

    career_table = OpenPickle(career_path)
    class_by_career = OpenPickle(class_path)

    # Percentile roll against this race's career ranges.
    roll = D(100)
    for career, nums in career_table[race].items():
        if roll in nums:
            return class_by_career[career], career
    return 0
def _load_face_detecting_neural_net():
    """Load the serialized face-detection network bundled with the package."""
    logging.info("loading serialized face detector model...")
    prototxt_source = files(face_detector_model).joinpath("deploy.prototxt")
    weights_source = files(face_detector_model).joinpath(
        "res10_300x300_ssd_iter_140000.caffemodel")
    with as_file(prototxt_source) as prototxt_path:
        with as_file(weights_source) as weights_path:
            return cv2.dnn.readNet(str(prototxt_path), str(weights_path))
def load_synthdefs(self, synthdefs_path: Optional[str] = None) -> None:
    """Load SynthDef files from path.

    Parameters
    ----------
    synthdefs_path : str, optional
        Path where the SynthDef files are located.
        If no path provided, load default sc3nb SynthDefs.

    Raises
    ------
    ValueError
        If the provided path does not exist or is not a directory.
    """

    def _load_synthdef(path):
        # Ask sclang to read every *.scsyndef in the directory into the
        # global SynthDesc library.
        # BUG FIX: the postln string referenced ^synthdef_path (singular),
        # but pyvars only defines "synthdefs_path" — the placeholder could
        # never be substituted.  Both references now use ^synthdefs_path.
        self.cmds(
            r"""
            "sc3nb - Loading SynthDefs from ^synthdefs_path".postln;
            PathName.new(^synthdefs_path).files.collect(
            { |path| (path.extension == "scsyndef").if({SynthDescLib.global.read(path); path;})}
            );""",
            pyvars={"synthdefs_path": path.as_posix()},
        )

    if synthdefs_path is None:
        # Default: the SynthDefs shipped inside the sc3nb package.
        ref = libresources.files(sc3nb.resources) / "synthdefs"
        with libresources.as_file(ref) as path:
            _load_synthdef(path)
    else:
        path = Path(synthdefs_path)
        if path.exists() and path.is_dir():
            _load_synthdef(path)
        else:
            raise ValueError(
                f"Provided path {path} does not exist or is not a dir")
def GetPhysicalFeatures(race):
    """Randomly select age, height, eye color, hair color per rule-book tables.

    @ params: race
    @ returns: (age, height, eye color, hair color)
    """
    file_manager = ExitStack()
    atexit.register(file_manager.close)

    def _load_table(pickle_name):
        # Materialize the packaged pickle and unpickle it.
        ref = importlib_resources.files('Pickles') / pickle_name
        return OpenPickle(
            file_manager.enter_context(importlib_resources.as_file(ref)))

    def _roll_color(table):
        # 2d10 against this race's {color: numbers} ranges; last match wins.
        roll = D(10) + D(10)
        picked = ''
        for color, nums in table[race].items():
            if roll in nums:
                picked = color
        return picked

    eye_color = _roll_color(_load_table('eye_table.pickle'))
    hair_color = _roll_color(_load_table('hair_table.pickle'))

    # Age and height formulas vary per race (height converted inches -> cm).
    if race == 'Human':
        age = 15 + D(10)
        height = 2.54 * (4 * 12 + 9 + sum(D(10) for _ in range(2)))
    elif race == "Dwarf":
        age = 15 + sum(D(10) for _ in range(10))
        height = 2.54 * (4 * 12 + 3 + D(10))
    elif race == "Halfling":
        age = 15 + sum(D(10) for _ in range(5))
        height = 2.54 * (3 * 12 + 1 + D(10))
    else:
        age = 30 + sum(D(10) for _ in range(10))
        height = 2.54 * (5 * 12 + 11 + D(10))

    return (age, height, eye_color, hair_color)
def test_entered_path_does_not_keep_open(self):
    # Mimic certifi: enter the as_file context at import time and keep it
    # for the process duration.  The extracted path must not keep the zip
    # archive open, so unlinking it while the context lives must succeed.
    context = resources.as_file(
        resources.files('ziptestdata') / 'binary.file'
    ).__enter__()
    self.zip_path.unlink()
    del context
def resolve_file(mgr, filename):
    """
    Resolve *filename* from the web templates package to a concrete path,
    registering cleanup on the given ExitStack *mgr*.

    See https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename
    for more details.
    """
    resource = resources.files('pmxbot.web.templates') / filename
    concrete = mgr.enter_context(resources.as_file(resource))
    return str(concrete)
def test_natural_path(self):
    """
    Guarantee the internal implementation detail that
    file-system-backed resources do not get the tempdir treatment.
    """
    resource = resources.files(self.data).joinpath('utf-8.file')
    with resources.as_file(resource) as concrete:
        assert 'data' in str(concrete)
def data_path(filename, raise_missing=True):
    """Return the absolute filepath for a given filename in test data."""
    manager = ExitStack()
    # Close the stack (removing any extracted temp file) at interpreter exit.
    atexit.register(manager.close)
    resource = importlib_resources.files("km3net_testdata.data") / filename
    extracted = manager.enter_context(as_file(resource))
    if raise_missing and not extracted.exists():
        raise RuntimeError("Unknown or missing file: {0}".format(filename))
    return str(extracted)
def test_reading(self):
    # The extracted resource should be readable; this also implicitly
    # verifies the returned object is a pathlib.Path instance.
    resource = resources.files(self.data).joinpath('utf-8.file')
    with resources.as_file(resource) as extracted:
        self.assertTrue(extracted.name.endswith("utf-8.file"), repr(extracted))
        with extracted.open('r', encoding='utf-8') as handle:
            contents = handle.read()
        self.assertEqual('Hello, UTF-8 world!\n', contents)
def main(specification: str = 'FIX42.xml', to_dir: str = './src',
         base_module: str = 'pelicanfix.protocol.fix42',
         protocol: str = 'FIX42'):
    """Parse a bundled FIX dictionary and generate protocol code from it."""
    dictionary = files('pelicanfix.dictionary').joinpath(specification)
    with as_file(dictionary) as dict_file:
        fix_dict = FIXDictionaryParser(dict_file).parse()
    generator = ProtocolCodeGenerator(fix_dict, Path(to_dir), base_module,
                                      protocol)
    generator.generate()
def data_path(filename, raise_missing=True):
    """Return a local path for *filename*, delegating known remote files."""
    if remote_files.is_known_remote(filename):
        return remote_files.remote_file(filename, raise_missing=raise_missing)
    manager = ExitStack()
    # Extracted resources live until interpreter exit.
    atexit.register(manager.close)
    resource = importlib_resources.files("skhep_testdata.data") / filename
    local_path = manager.enter_context(as_file(resource))
    if raise_missing and not local_path.exists():
        raise RuntimeError("Unknown or missing file: {0}".format(filename))
    return str(local_path)
def load_face_mask_detector_model():
    """Loads the face mask detector model bundled as package data."""
    model_source = files(
        face_mask_detector.face_mask_detector_model).joinpath(
            "face_mask_detector.model")
    with as_file(model_source) as model_path:
        return load_model(model_path)
def metrics_process(registry, queue):  # pragma: no cover
    """Worker loop: report websocket metrics to New Relic forever.

    Initializes the New Relic agent from the packaged config file, then,
    every METRICS_INTERVAL seconds, records custom metrics computed from
    *queue* inside a read-only database transaction.  Never returns.
    """
    session = db.get_session(registry.settings)
    # NEW_RELIC_CONFIG_REF is a packaged resource; as_file materializes it
    # on the filesystem just long enough for the agent to read it.
    with importlib_resources.as_file(NEW_RELIC_CONFIG_REF) as config_file:
        newrelic.agent.initialize(config_file=config_file)
    newrelic.agent.register_application(timeout=5)
    application = newrelic.agent.application()
    while True:
        with db.read_only_transaction(session):
            application.record_custom_metrics(websocket_metrics(queue))
        # gevent sleep yields to other greenlets between reporting rounds.
        gevent.sleep(METRICS_INTERVAL)
def test_parse_fix50sp1():
    """The bundled FIX50SP1 dictionary parses with the expected metadata."""
    dictionary = files('pelicanfix.dictionary').joinpath('FIX50SP1.xml')
    with as_file(dictionary) as dict_file:
        fix_dict = FIXDictionaryParser(dict_file).parse()
    assert fix_dict.major == 5
    assert fix_dict.minor == 0
    assert fix_dict.service_pack == 1
    order_single = fix_dict.get_message('NewOrderSingle')
    assert order_single.get_msg_type() == 'D'
    assert order_single.get_msg_category() == 'app'
    assert len(order_single.get_elements()) == 92
def GetRaceTalents(race):
    """Return the talents for *race*, rolling any random ones.

    Each race gets each of the talents listed (and with an 'or' picks one).
    A trailing entry such as '3 Random Talents' is replaced by that many
    distinct rolls on the random-talent table.

    @ params: race
    @ returns: list of race talents
    """
    race_talents = {
        "Human": ['Doomed', 'Savvy or Suave', '3 Random Talents'],
        "Dwarf": [
            'Magic Resistance', 'Night Vision', 'Read/Write or Relentless',
            'Resolute or Strong-minded', 'Sturdy'
        ],
        "Halfling": [
            'Acute Sense (Taste)', 'Night Vision', 'Resistance (Chaos)',
            'Small', '2 Random Talents'
        ],
        "High Elf": [
            'Acute Sense (Sight)', 'Coolheaded or Savvy', 'Night Vision',
            'Second Sight or Sixth Sense', 'Read/Write'
        ],
        "Wood Elf": [
            'Acute Sense (Sight)', 'Hardy or Second Sight', 'Night Vision',
            'Read/Write or Very Resilient', 'Rover'
        ]
    }
    my_talents = race_talents[race]
    marker = my_talents[-1].split()
    if marker[0] not in ('2', '3'):
        # No random-talent marker: the static list is complete.
        return my_talents

    # Drop the 'N Random Talents' marker and roll N distinct extras.
    my_talents = my_talents[:-1]
    remaining = int(marker[0])
    file_manager = ExitStack()
    atexit.register(file_manager.close)
    ref = importlib_resources.files('Pickles') / 'RandTalent_table.pickle'
    table_path = file_manager.enter_context(
        importlib_resources.as_file(ref))
    rand_talent_table = OpenPickle(table_path)
    while remaining:
        roll = D(100)
        rolled = ''
        for talent, nums in rand_talent_table.items():
            if roll in nums:
                rolled = talent
        # Duplicates are re-rolled rather than counted.
        if rolled not in my_talents:
            my_talents.append(rolled)
            remaining -= 1
    return my_talents
def apply_style(app, color: str, source: Path):
    """Apply a QSS color theme to *app*.

    *color* names a color-map resource under *source*: its first line names
    the QSS template file, and each following line is a ``value~placeholder``
    pair.  Placeholders are substituted into the template and the result is
    installed as the application's stylesheet.
    """
    with as_file(source.joinpath(color)) as clr:
        tmp = clr.read_text()
    colors = tmp.split('\n')
    # First line of the color file names the QSS template to load.
    with as_file(source.joinpath(colors[0])) as thema:
        qss = thema.read_text()
    dd = []
    # Remaining lines (the slice drops the trailing empty line) are
    # '~'-separated pairs: replacement value, placeholder token.
    for line in colors[1:-1]:
        dd.append(line.split('~')[:2])
    # Sort placeholders in reverse order — presumably so a placeholder that
    # is a prefix of another is not substituted too early; TODO confirm.
    dd.sort(key=lambda x: x[1], reverse=True)
    for d in dd:
        b, a = d
        if a[0] == '@':  # path to resources: icons, etc, may be several paths
            b = source.joinpath(b).as_posix()
        qss = qss.replace(a.strip(), b)
    """ save translated qss """
    save_qss("out-qss.log", qss)
    app.setStyleSheet(qss)
def copy(res: Traversable, dest_dir: str) -> None: if res.name.startswith("__"): return elif res.is_file(): with resources.as_file(res) as file: dest_file = f"{dest_dir}{res.name}" if os.path.exists(dest_file): print(f"Skipping {dest_file} (already exists)") else: print(f"Writing {dest_file}") shutil.copy(file, dest_dir) elif res.is_dir(): subdir = f"{dest_dir}{res.name}/" os.makedirs(subdir, exist_ok=True) for r in res.iterdir(): copy(r, subdir)
def __init__(self):
    """Locate the GROMACS ``gmx`` command-line wrapper and its bin directory.

    Resolution order: the packaged gmxconfig.json (via importlib.resources,
    then the importlib_resources backport), then PATH lookup of 'gmx' /
    'gmx_mpi', then bin directories derived from the GMXBIN, GROMACS_DIR,
    and gmxapi_DIR environment variables.  Results are stored on
    ``self._command`` and ``self._bindir``; either may remain None.
    """
    # Try to use package resources to locate the "gmx" binary wrapper.
    try:
        from importlib.resources import open_text
        with open_text('gmxapi', 'gmxconfig.json') as textfile:
            config = json.load(textfile)
            gmxbindir = config.get('gmx_bindir', None)
            command = config.get('gmx_executable', None)
    except ImportError:
        try:
            # A backport of importlib.resources is available as importlib_resources
            # with a somewhat different interface.
            from importlib_resources import files, as_file
            source = files('gmxapi').joinpath('gmxconfig.json')
            with as_file(source) as gmxconfig:
                with open(gmxconfig, 'r') as fp:
                    config = json.load(fp)
                    gmxbindir = config.get('gmx_bindir', None)
                    command = config.get('gmx_executable', None)
        except ImportError:
            # No resources support at all: fall through to the search below.
            gmxbindir = None
            command = None
    # TODO: Remove fall-back when we can rely on gmxconfig.json via importlib.resources in Py 3.7+.
    allowed_command_names = ['gmx', 'gmx_mpi']
    for command_name in allowed_command_names:
        # Stop as soon as any candidate name has resolved to an executable.
        if command is not None:
            break
        command = shutil.which(command_name)
        if command is None:
            # Not on PATH: try to derive a bin directory from the environment.
            gmxbindir = os.getenv('GMXBIN')
            if gmxbindir is None:
                gromacsdir = os.getenv('GROMACS_DIR')
                if gromacsdir is not None and gromacsdir != '':
                    gmxbindir = os.path.join(gromacsdir, 'bin')
            if gmxbindir is None:
                gmxapidir = os.getenv('gmxapi_DIR')
                if gmxapidir is not None and gmxapidir != '':
                    gmxbindir = os.path.join(gmxapidir, 'bin')
            if gmxbindir is not None:
                gmxbindir = os.path.abspath(gmxbindir)
                command = shutil.which(command_name, path=gmxbindir)
    self._command = command
    self._bindir = gmxbindir
def load_dir(
    cls,
    synthdef_dir: Optional[str] = None,
    completion_msg: Optional[bytes] = None,
    server: Optional["SCServer"] = None,
):
    """Load all SynthDefs from directory.

    Parameters
    ----------
    synthdef_dir : str, optional
        directory with SynthDefs, by default sc3nb default SynthDefs
    completion_msg : bytes, optional
        Message to be executed by the server when loaded, by default None
    server : SCServer, optional
        Server that gets the SynthDefs, by default use the SC default server

    Raises
    ------
    ValueError
        If the provided directory does not exist or is not a directory.
    """
    if server is None:
        server = sc3nb.SC.get_default().server

    def _send_load_dir(directory):
        # Build the d_loadDir message, optionally with a completion message.
        args: List[Union[str, bytes]] = [directory.as_posix()]
        if completion_msg is not None:
            args.append(completion_msg)
        server.msg(
            SynthDefinitionCommand.LOAD_DIR,
            args,
            await_reply=True,
            bundle=True,
        )

    if synthdef_dir is not None:
        path = Path(synthdef_dir)
        if not (path.exists() and path.is_dir()):
            raise ValueError(f"Provided path {path} does not exist or is not a dir")
        _send_load_dir(path)
    else:
        # Default: the SynthDefs shipped with sc3nb.
        ref = libresources.files(sc3nb.resources) / "synthdefs"
        with libresources.as_file(ref) as path:
            _send_load_dir(path)
def test_remove_in_context_manager(self):
    # Removing the temporarily-stashed file inside the `with` stanza is
    # not an error.
    resource = resources.files(self.data).joinpath('utf-8.file')
    with resources.as_file(resource) as extracted:
        extracted.unlink()
def copy_to(self, dst_path):
    """Copy the wrapped resource at ``self.path`` to *dst_path*."""
    with pkg_resources.as_file(self.path) as whl_path:
        source = str(whl_path)
        destination = str(dst_path)
        shutil.copyfile(source, destination)
def load_np_file(file_name):
    """Load a NumPy array bundled in the examples package."""
    resource = files(lmdec.examples).joinpath(file_name)
    with as_file(resource) as concrete:
        return np.load(concrete)
#!/usr/bin/python3 from pathlib import Path from importlib_resources import as_file, files from ruamel.yaml import YAML yaml = YAML(typ="safe") yaml.default_flow_style = False build_context = Path(__file__).resolve().parent.parent with as_file( files("dandi") / "tests" / "data" / "dandiarchive-docker" / "docker-compose.yml" ) as path: with path.open() as fp: compose = yaml.load(fp) del compose["services"]["redirector"]["image"] compose["services"]["redirector"]["build"] = str(build_context) with path.open("w") as fp: yaml.dump(compose, fp)
def generate_car():
    """Generate a new car project directory from a TOML configuration.

    Command-line options:
      --config      car configuration file (default: config.toml)
      --output_dir  output directory prefix (default: the car name)
      --name        car name (default: taken from the config file)
    """
    # Set up the parser for command line arguments specifying parameters
    # for generating your new car.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--config',
        action='store',
        default='config.toml',
        help='specify a car configuration or use the default config.toml')
    parser.add_argument(
        '--output_dir',
        action='store',
        default='',
        help=
        'specifies an output directory or use the default directoy of the car name'
    )
    parser.add_argument('--name',
                        action='store',
                        default='',
                        help='specify a car name or use a random name')
    args = parser.parse_args()
    print(args)

    # Load the car configuration (a bundled package resource) into memory.
    toml_str = files('cargenerator').joinpath(args.config)
    with as_file(toml_str) as toml_file:
        car = toml.load(toml_file, _dict=dict)

    # Check args and apply overrides.
    NAME = args.name if args.name != '' else car["Name"]
    print("CarName: ", NAME)
    CONFIG = args.config if args.config != '' else "config.toml"
    print("Config File: ", CONFIG)
    OUTPUT_DIR = args.output_dir + NAME if args.output_dir != '' else NAME
    print("OUTPUT_DIR: ", OUTPUT_DIR)

    # Create the directory and copy necessary files from the template car.
    print(car["computer"]["kind"])
    print(car["arduino"]["kind"])
    print(car["camera"]["kind"])
    default_template = files('cargenerator').joinpath(
        car["src"]["default_car_dir"])
    copytree(default_template, OUTPUT_DIR)

    # Copy the service files into the generated car.
    # BUG FIX: the glob pattern used to contain a stray space
    # (" /services/*"), so it matched nothing and no services were copied.
    for file in glob.glob(car["src"]["default_car_dir"] + "/services/*"):
        print(f"source: {file}")
        print(f"file: {os.path.basename(file)}")
        print(f"dest: {OUTPUT_DIR}/services/")
        copyfile(file, OUTPUT_DIR + "/services/" + os.path.basename(file))

    # Write the (possibly overridden) configuration into the new car dir.
    # Context manager guarantees the file is closed even on error.
    with open(OUTPUT_DIR + "/" + CONFIG, 'w') as outfile:
        toml.dump(car, outfile)
def as_file(self):
    """Return a context manager yielding a concrete filesystem path for
    the wrapped resource ``self.path`` (importlib.resources.as_file)."""
    return pkg_resources.as_file(self.path)
def execute(self, package, path):
    """Enter and immediately exit the as_file context for a resource."""
    target = resources.files(package).joinpath(path)
    with resources.as_file(target):
        pass
from importlib_resources import as_file, files from selfies import split_selfies as split_selfies_ from SmilesPE.pretokenizer import kmer_tokenizer from SmilesPE.tokenizer import SPE_Tokenizer from ..types import Dict, Tokenizer, Tokens logger = logging.getLogger(__name__) # tokenizer SMILES_TOKENIZER = re.compile( r'(\[[^\]]+]|Br?|Cl?|N|O|S|P|F|I|b|c|n|o|s|p|\(|\)|\.|=|#|' r'-|\+|\\\\|\/|:|~|@|\?|>|\*|\$|\%[0-9]{2}|[0-9])' ) with as_file(files('pytoda.smiles.metadata').joinpath('spe_chembl.txt')) as filepath: SPE_TOKENIZER = SPE_Tokenizer(codecs.open(str(filepath))) def tokenize_smiles(smiles: str, regexp=SMILES_TOKENIZER, *args, **kwargs) -> Tokens: """ Tokenize a character-level SMILES string. Args: smiles (str): a SMILES representation. regexp (re.Pattern): optionally pass a regexp for the tokenization. Defaults to SMILES_TOKENIZER. args (): ignored, for backwards compatibility. kwargs (): ignored, for backwards compatibility. Returns: Tokens: the tokenized SMILES.
def font_path(fontname):
    """Return the filesystem path of a bundled Ubuntu font file.

    Violating the context manager like this means that ttyrec2video
    can't be run from within a zipfile.
    """
    resource = files('ttyrec2video') / 'data' / 'ubuntu-font' / fontname
    with as_file(resource) as path:
        return str(path)
def data_file_path(filename):
    """Resolve a data file shipped with sanskrit_parser to a concrete path.

    The ExitStack is closed at interpreter exit, so any extracted resource
    stays valid for the life of the process.
    """
    stack = ExitStack()
    atexit.register(stack.close)
    resource = importlib_resources.files('sanskrit_parser') / 'data' / filename
    concrete = stack.enter_context(importlib_resources.as_file(resource))
    return str(concrete)
from flask.cli import FlaskGroup from flask_migrate import stamp from importlib_resources import as_file, files from sqlalchemy import inspect from . import __version__ from .app import create_app from .dbutil import add_wheel, add_wheel_from_json, dbcontext, \ get_serial, purge_old_versions, set_serial from .models import EntryPointGroup, OrphanWheel, Wheel, db from .process import process_queue from .pypi_api import PyPIAPI from .scan import scan_changelog, scan_pypi log = logging.getLogger(__name__) with as_file(files('wheelodex') / 'data' / 'entry_points.ini') as ep_path: # Violating the context manager like this means that wheelodex can't be run # from within a zipfile. ep_path = str(ep_path) # FlaskGroup causes all commands to be run inside an application context, # thereby letting `db` do database operations. This does require that # `ctx.obj` be left untouched, though. @click.group(cls=FlaskGroup, create_app=create_app) @click.option( '-l', '--log-level', type=click.Choice(['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']), default='INFO', show_default=True,