async def test_new_issue_gains_data_science_while_processing():
    """An issue that picks up a data-science label mid-processing gets no POST."""
    opened_payload = json.loads(
        importlib_resources.read_text(samples, "issues-opened.json")
    )
    relabeled_payload = json.loads(
        importlib_resources.read_text(samples, "issues-opened-data_science.json")
    )
    gh = FakeGH()
    # Simulate labels appearing between webhook delivery and the re-fetch.
    gh.getiter_response = relabeled_payload["issue"]["labels"]
    event = gidgethub.sansio.Event(opened_payload, event="issues", delivery_id="12345")
    await classify.router.dispatch(event, gh)
    assert not gh.post_
def load_config(options):
    """Build the effective configuration from default, user, and custom files.

    :param options: parsed options selecting which configuration sources to use
    :type options: dict
    :return: merged configuration (always contains ``advanced_config``)
    :rtype: dict
    """
    configurations = []

    # Bundled defaults, unless explicitly disabled.
    if not options.get('no_default_config'):
        configurations.append(
            json.loads(read_text('guessit.config', 'options.json'))
        )

    config_files = []
    if not options.get('no_user_config'):
        # YAML configs are only considered when PyYAML is importable.
        yaml_supported = False
        try:
            import yaml  # pylint:disable=unused-variable,unused-import,import-outside-toplevel
            yaml_supported = True
        except ImportError:
            pass
        candidates = get_options_file_locations(
            os.path.expanduser("~"), os.getcwd(), yaml_supported)
        config_files = [f for f in candidates if os.path.exists(f)]

    custom_config_files = options.get('config')
    if custom_config_files:
        config_files = config_files + custom_config_files

    for config_file in config_files:
        file_options = load_config_file(config_file)
        if file_options:
            configurations.append(file_options)

    config = merge_options(*configurations) if configurations else {}

    if 'advanced_config' not in config:
        # Guessit doesn't work without advanced_config, so fall back to the
        # bundled defaults when no configuration file provides it.
        defaults = json.loads(read_text('guessit.config', 'options.json'))
        config['advanced_config'] = defaults['advanced_config']

    return config
def _prepare_test_data(caplog):
    # Rebuild the SQLite test database from the packaged schema script and
    # seed it with the module-level ip_data / ip_processed_data fixtures.
    global test_db_path
    print("**********************")
    caplog.set_level(logging.DEBUG)
    logging.basicConfig(level=logging.DEBUG)
    conn = sqlite3.connect(test_db_path)
    with conn:
        # Schema/DDL shipped inside the find2deny package.
        sql_script = read_text("find2deny", "log-data.sql")
        # Drop leftovers first so the packaged script starts from a clean slate.
        sql_code = '''\n
        DROP TABLE IF EXISTS log_ip;
        DROP TABLE IF EXISTS block_network;
        DROP TABLE IF EXISTS processed_log_ip;
        VACUUM;
        ''' + sql_script
        conn.executescript(sql_code)
    conn.close()
    # judgment.init_database(test_db_path)
    conn = sqlite3.connect(test_db_path)
    with conn:
        conn.executemany(
            "INSERT INTO log_ip (ip, first_access, last_access, access_count) VALUES (?, ? , ?, ?)",
            ip_data)
        conn.executemany(
            "INSERT INTO processed_log_ip (ip, line, log_file) VALUES (?, ?, ?)",
            ip_processed_data)
    conn.close()
    print("**********************init database done")
def test_pickle(protocol):
    """Ensure that parsers can be pickled"""
    # Bootstrap grammar shipped with the package.
    source = importlib_resources.read_text("bootpeg.grammars", "bpeg.bpeg")
    parser = boot.boot_parser
    # Round-trip the parser through pickle, then prove the unpickled parser
    # can still build a new parser from the grammar source.
    for _ in range(2):
        parser = pickle.loads(pickle.dumps(parser, protocol=protocol))
        parser = create_parser(source, parser, actions)
def init_app(argv=None, gui=True):
    """
    Initialize qt runtime, deal with common issues (such as installing an
    exception handler), and return a ``QApplication`` object. If ``gui`` is
    false, return a ``QCoreApplication`` instead.
    """
    # Re-enable DeprecationWarnings etc. for our own package family.
    warnings.filterwarnings(
        "default", module='(madgui|cpymad|minrpc|pydicti).*')
    set_app_id('hit.madgui')
    init_stdio()
    # QApplication needs a valid argument list:
    if argv is None:
        argv = sys.argv
    if gui:
        # Qt imports are deferred so the non-GUI path never loads PyQt5 widgets.
        from PyQt5.QtWidgets import QApplication
        from madgui.util.qt import load_icon_resource
        from importlib_resources import read_text
        app = QApplication(argv)
        app.setWindowIcon(load_icon_resource('madgui.data', 'icon.xpm'))
        app.setStyleSheet(read_text('madgui.data', 'style.css'))
        # matplotlib must be imported *after* Qt;
        # must be selected before importing matplotlib.backends:
        import matplotlib
        matplotlib.use('Qt5Agg')
    else:
        app = QCoreApplication(argv)
    app.setApplicationName('madgui')
    app.setApplicationVersion(__version__)
    app.setOrganizationName('HIT Betriebs GmbH')
    app.setOrganizationDomain('https://www.klinikum.uni-heidelberg.de/hit')
    # Print uncaught exceptions. This changes the default behaviour on PyQt5,
    # where an uncaught exception would usually cause the program to abort.
    sys.excepthook = traceback.print_exception
    setup_interrupt_handling(app)
    return app
def get_arbinfo():
    """Load the packaged ArbInfo contract description and fill in runtime fields."""
    info = json.loads(read_text("arbitrum.evm", "ArbInfo.json"))
    info["address"] = ARBINFO_ADDRESS_STRING
    info["code"] = "0x100000"
    info["name"] = "ArbInfo"
    return info
def get_arbsys():
    """Load the packaged ArbSys contract description and fill in runtime fields."""
    sys_contract = json.loads(read_text("arbitrum.evm", "ArbSys.json"))
    sys_contract["address"] = ARBSYS_ADDRESS_STRING
    sys_contract["code"] = "0x"
    sys_contract["name"] = "ArbSys"
    return sys_contract
async def test_new_issue_with_label(data_filename):
    """An issue opened already carrying a label must trigger no label POST."""
    payload = json.loads(importlib_resources.read_text(samples, data_filename))
    gh = FakeGH()
    issue_event = gidgethub.sansio.Event(payload, event="issues", delivery_id="12345")
    await classify.router.dispatch(issue_event, gh)
    assert not gh.post_
def _make_config_dirs() -> None:
    """Create CONFIG_DIR and copy the packaged default JSON files into it.

    Existing directories are kept (``exist_ok=True``); existing files are
    overwritten with the packaged defaults.
    """
    os.makedirs(CONFIG_DIR, exist_ok=True)
    # The three resources are copied identically, so one loop replaces the
    # previous triplicated read/write code.
    for filename in ('config.json', 'calibration.json', 'theme.json'):
        contents = importlib_resources.read_text('crowdbike.resources', filename)
        with open(os.path.join(CONFIG_DIR, filename), 'w') as f:
            f.write(contents)
async def test_new_issue_gains_no_status_label_while_processing():
    """If re-fetched labels still lack a status label, exactly one classify POST is made."""
    opened = json.loads(
        importlib_resources.read_text(samples, "issues-opened.json"))
    relabeled = json.loads(
        importlib_resources.read_text(
            samples, "issues-opened-labels_but_no_status.json"))
    gh = FakeGH()
    gh.getiter_response = relabeled["issue"]["labels"]
    event = gidgethub.sansio.Event(opened, event="issues", delivery_id="12345")
    await classify.router.dispatch(event, gh)
    assert len(gh.post_) == 1
    posted = gh.post_[0]
    assert posted[1] == {"labels": [labels.Status.classify.value]}
def __post_init__(self, config: Optional[Path]):
    """Post-init validation: name, file locations, and parameter schema."""
    validate_name(self.name)
    # TODO - support running config/script from another dir with abs paths
    # For now every path is relative and everything runs from the project dir.
    if config:
        assert (
            config.parent == self.script.parent == Path(".")
        ), "All files must be in the main project directory"
    # TODO - move this logic to create_initial
    self.proj_dir = self.script.resolve(strict=False).parent
    assert os.getcwd() == os.path.abspath(
        self.proj_dir), "Please run from source directory"
    # Validate user-supplied parameters against the packaged JSON schema.
    if self.parameters:
        schema = json.loads(
            ir.read_text("datapane.resources",
                         "script_parameter_def.schema.json"))
        jsonschema.validate(self.parameters, schema)
def __init__(self,
             model: Optional[tf.keras.Model] = None,
             stopwords: Optional[List[Text]] = None) -> None:
    """Initialize embed model and list of stopwords.

    Args:
        model: Pretrained model object for text embedding. All pre trained
            tf embeddings: https://tfhub.dev/s?module-type=text-embedding
        stopwords: Stopwords to remove from embedding.

    Attributes:
        k_means: cluster.KMeans object used to cluster keywords.
    """
    # Fall back to a small default TF-Hub embedding when none is supplied.
    if model is not None:
        self.model = model
    else:
        self.model = hub.load("https://tfhub.dev/google/nnlm-en-dim50/2")
    if stopwords is not None:
        self.stopwords_to_remove = stopwords
    else:
        raw = importlib_resources.read_text(preprocess_data, "stopwords_eng.txt")
        # Skip the header line, then drop empty entries.
        candidates = raw.split("\n")[1:]
        self.stopwords_to_remove = [word for word in candidates if word]
    self.k_means = cluster.KMeans
def test_full_parse(self, module_name, count, expected_archive_link):
    """Parsing a full archive-links page yields the expected count and entry."""
    page = importlib_resources.read_text(
        simple_data, f"archive_links.{module_name}.html"
    )
    parsed = simple.parse_archive_links(page)
    assert len(parsed) == count
    assert expected_archive_link in parsed
def _construct_exportscript_cmd(self, checkpoint_name, input_type='image_tensor', fpath=None):
    """Render the export-script .cmd template and write it as an executable file.

    Returns the path the rendered script was written to.
    """
    fpath = fpath or self.config.exportscript_cmd_path
    ckpt_prefix_path = str(
        PureWindowsPath(self.config.model_dir).joinpath(checkpoint_name))
    # Placeholder -> value table replaces the previous chain of .replace calls.
    substitutions = {
        '<TF_RESEARCH_PATH>': str(PureWindowsPath(config.RESEARCH_PATH)),
        '<TF_SLIM_PATH>': str(PureWindowsPath(config.SLIM_PATH)),
        '<MODEL_DIR>': str(PureWindowsPath(self.config.model_dir)),
        '<MODEL_CONFIG_PATH>': str(PureWindowsPath(self.config.model_config_path)),
        '<MODEL_CHECKPOINT_PREFIX>': ckpt_prefix_path,
        '<EXPORT_DIR>': str(PureWindowsPath(self.config.export_dir)),
        '<INPUT_TYPE>': input_type,
    }
    template = resources.read_text(tf_script_templates, 'exportscript.cmd')
    for placeholder, value in substitutions.items():
        template = template.replace(placeholder, value)
    with Path(fpath).open('w') as f:
        f.write(template)
    os.chmod(fpath, 0o755)  # make the generated script executable
    return fpath
def test_full_parse(self, name, count, expected_item):
    """Parsing a full repo index page yields the expected size and entry."""
    page = importlib_resources.read_text(simple_data, f"index.{name}.html")
    parsed_index = simple.parse_repo_index(page)
    assert len(parsed_index) == count
    key, value = expected_item
    assert key in parsed_index
    assert parsed_index[key] == value
def get_templates():
    """Return the packaged contract templates keyed by contract name."""
    raw_templates = json.loads(
        read_text("arbitrum.evm", "contract-templates.json"))
    return {contract["name"]: contract for contract in raw_templates}
async def _init_db(conn):
    """Create the schema if the database is uninitialized.

    Returns:
        True when the database was already initialized (nothing executed),
        False when the packaged init script was just run.
    """
    if await _is_db_initialized(conn):
        return True
    async with conn.transaction():
        # Plain string literal: the original used an f-string with no
        # placeholders here.
        query = read_text(__package__, "init_tables.sql")
        await conn.execute(query)
    return False
def load_data_file(name, load_json=True, as_binary=False):
    """Load a data file.

    Binary mode returns raw bytes; otherwise .json files are decoded when
    ``load_json`` is true, and any other file is returned as text.
    """
    if as_binary:
        return importlib_resources.read_binary(data, name)
    contents = importlib_resources.read_text(data, name)
    wants_json = load_json and name.endswith(".json")
    return json.loads(contents) if wants_json else contents
def get_source(self, environment, template):
    """Jinja loader hook: fetch *template* from the packaged templates module."""
    from parsec.backend import templates  # Self import \o/
    try:
        contents = importlib_resources.read_text(templates, template)
    except FileNotFoundError as exc:
        raise TemplateNotFound(template) from exc
    # Packaged resources never change at runtime, so always report up-to-date.
    return contents, self.path, lambda: True
def do_help(self, *args):
    """Render the documentation for a Redis command.

    Looks up the packaged markdown doc and the summary table, and reports
    whether the command is available on the connected server's version.

    Raises:
        NotRedisCommand: when no doc exists for the given command name.
    """
    command_docs_name = "-".join(args).lower()
    command_summary_name = " ".join(args).upper()
    try:
        doc = read_text(commands_data, f"{command_docs_name}.md")
    except FileNotFoundError:
        # Typo fix: was "valide".
        raise NotRedisCommand(
            f"{command_summary_name} is not a valid Redis command."
        )

    rendered_detail = markdown.render(doc)
    summary_dict = commands_summary[command_summary_name]

    available_version = summary_dict.get("since", "?")
    server_version = config.version
    # FIXME anything strange with single quotes?
    logger.debug(f"[--version--] '{server_version}'")
    try:
        # A command introduced *in* the server's exact version IS available,
        # so compare with >= (the original `>` wrongly excluded equality).
        is_available = StrictVersion(server_version) >= StrictVersion(
            available_version
        )
    except Exception as e:
        # "since" may be "?" or otherwise unparseable: availability unknown.
        logger.exception(e)
        is_available = None

    # Typo fixes below: was "Avaiable"/"avaiable".
    if is_available:
        available_text = f"(Available on your redis-server: {server_version})"
    elif is_available is False:
        available_text = f"(Not available on your redis-server: {server_version})"
    else:
        available_text = ""
    since_text = f"{available_version} {available_text}"

    summary = [
        ("", "\n"),
        ("class:doccommand", " " + command_summary_name),
        ("", "\n"),
        ("class:dockey", " summary: "),
        ("", summary_dict.get("summary", "No summary")),
        ("", "\n"),
        ("class:dockey", " complexity: "),
        ("", summary_dict.get("complexity", "?")),
        ("", "\n"),
        ("class:dockey", " since: "),
        ("", since_text),
        ("", "\n"),
        ("class:dockey", " group: "),
        ("", summary_dict.get("group", "?")),
        ("", "\n"),
        ("class:dockey", " syntax: "),
        ("", command_summary_name),  # command
        *compose_command_syntax(summary_dict, style_class=""),  # command args
        ("", "\n\n"),
    ]
    to_render = FormattedText(summary + rendered_detail)
    if config.raw:
        return convert_formatted_text_to_bytes(to_render)
    return to_render
async def test_keeping_classify_label():
    """A labeled event that already has the classify label must not delete it."""
    payload = json.loads(
        importlib_resources.read_text(samples, "issues-labeled-has_classify.json")
    )
    gh = FakeGH()
    event = gidgethub.sansio.Event(payload, event="issues", delivery_id="1")
    await classify.router.dispatch(event, gh)
    assert not gh.delete_
def write_config(self):
    '''Render and write the user-facing configuration yaml file.'''
    raw_template = resources.read_text(data_files, 'config_template_jinja2.yaml')
    rendered = Template(raw_template).render(config=self.config)
    with open(self.config.config_path, 'w') as f:
        f.write(rendered)
def test_read_text_with_errors(self):
    """read_text raises UnicodeError by default and honors errors='ignore'."""
    # Raises UnicodeError without the 'errors' argument.
    self.assertRaises(UnicodeError, resources.read_text, self.data, 'utf-16.file')
    decoded = resources.read_text(self.data, 'utf-16.file', errors='ignore')
    expected = (
        'H\x00e\x00l\x00l\x00o\x00,\x00 '
        '\x00U\x00T\x00F\x00-\x001\x006\x00 '
        '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00'
    )
    self.assertEqual(decoded, expected)
def notebook():
    """Yield a temp copy of the packaged test notebook; remove it afterwards."""
    fd, tmppath = tempfile.mkstemp(suffix=".ipynb")
    # mkstemp returns an OPEN OS-level descriptor; wrap it with fdopen so it
    # is closed (the original discarded the fd and leaked it).
    with os.fdopen(fd, "w") as f:
        import smarts.core.tests
        f.write(importlib_resources.read_text(smarts.core.tests, NOTEBOOK_NAME))
    yield tmppath
    os.remove(tmppath)
def do_show_new_camera_calibration_extrinsic_dialog2(new_name: str, camera_pose, display_image, core: "PyriWebUIBrowser"):
    # Show the second extrinsic-calibration dialog: computed camera pose
    # (converted to xyz + roll/pitch/yaw in degrees) plus a preview image.
    try:
        camera_calib = core.device_manager.get_device_subscription("vision_camera_calibration").GetDefaultClient()
        dialog2_html = importlib_resources.read_text(__package__, "new_calibrate_extrinsic_dialog2.html")
        # Mount point for the Vue dialog inside the page's #wrapper element.
        el = js.document.createElement('div')
        el.id = "new_calibrate_extrinsic_dialog2_wrapper"
        js.document.getElementById("wrapper").appendChild(el)

        def handle_hidden(*args):
            # Remove the dialog wrapper from the DOM once the modal closes.
            try:
                el.parentElement.removeChild(el)
            except:
                traceback.print_exc()

        geom_util = GeometryUtil(client_obj=camera_calib)
        xyz, rpy1, _, _ = geom_util.named_pose_to_xyz_rpy(camera_pose.pose)
        rpy = np.rad2deg(rpy1)
        # Format pose components in scientific notation for display.
        x = f"{xyz[0]:4e}"
        y = f"{xyz[1]:4e}"
        z = f"{xyz[2]:4e}"
        r_r = f"{rpy[0]:4e}"
        r_p = f"{rpy[1]:4e}"
        r_y = f"{rpy[2]:4e}"
        i = 0  # NOTE(review): unused — looks like leftover code
        # str(b64...)[2:-1] strips the b'...' wrapper from the bytes repr.
        d_encoded = str(base64.b64encode(display_image.data))[2:-1]
        disp_img_src = "data:image/jpeg;base64," + d_encoded
        # TODO: check for png?
        dialog = js.Vue.new(js.python_to_js({
            "el": "#new_calibrate_extrinsic_dialog2_wrapper",
            "template": dialog2_html,
            "data": {
                "x": x,
                "y": y,
                "z": z,
                "r_r": r_r,
                "r_p": r_p,
                "r_y": r_y,
                "disp_img": disp_img_src
            },
            "methods": {
                "handle_hidden": handle_hidden
            }
        }))
        dialog["$bvModal"].show("new_vision_camera_calibrate_extrinsic2")
    except:
        traceback.print_exc()
def _write_file(path, filename):
    """Copy the named iseq data resource into *path*; return the new file's path."""
    import importlib_resources as pkg_resources
    import iseq._data

    contents = pkg_resources.read_text(iseq._data, filename)
    target = path / filename
    with open(target, "w") as f:
        f.write(contents)
    return target
def notebook():
    """Yield a temp copy of test_notebook.ipynb; delete it after the test."""
    fd, tmppath = tempfile.mkstemp(suffix=".ipynb")
    # Wrap the descriptor returned by mkstemp so it gets closed; the original
    # discarded it, leaking one fd per fixture use.
    with os.fdopen(fd, "w") as handle:
        import smarts.core.tests
        handle.write(
            importlib_resources.read_text(smarts.core.tests, "test_notebook.ipynb"))
    yield tmppath
    os.remove(tmppath)
def notebook():
    """Yield a temp copy of the packaged notebook; remove it when done."""
    fd, tmppath = tempfile.mkstemp(suffix=".ipynb")
    # fdopen closes the descriptor mkstemp opened (previously leaked).
    with os.fdopen(fd, "w") as f:
        import smarts.core.tests

        # pytype: disable=module-attr
        f.write(importlib_resources.read_text(smarts.core.tests, NOTEBOOK_NAME))
        # pytype: enable=module-attr
    yield tmppath
    os.remove(tmppath)
def migrate(credentials_file, credentials_blob, table):
    """
    Synchronizes the BigQuery table schema.

    TABLE is a BigQuery table identifier of the form
    ProjectId.DataSetId.TableId.
    """
    bq = _configure_bigquery(credentials_file, credentials_blob)
    schema_text = importlib_resources.read_text("linehaul", "schema.json")
    trio.run(migrate_, bq, table, json.loads(schema_text))
async def do_show_new_image_template_dialog(new_name: str, variable_type: str, variable_tags: str, core: "PyriWebUIBrowser"):
    # Show the "new image template" dialog, populated with the image
    # variables currently stored in the global variable table.
    try:
        db = core.device_manager.get_device_subscription(
            "variable_storage").GetDefaultClient()
        img_var_names = await db.async_filter_variables(
            "globals", "", ["image"], None)
        # Nothing to build a template from: bail out with a user alert.
        if len(img_var_names) <= 0:
            js.alert("No source images available!")
            return
        dialog_html = importlib_resources.read_text(
            __package__, "new_image_template_dialog.html")
        dialog_obj = NewImageTemplateDialog(new_name, core, core.device_manager)
        # Mount point for the Vue dialog inside the page's #wrapper element.
        el = js.document.createElement('div')
        el.id = "new_image_template_dialog_wrapper"
        js.document.getElementById("wrapper").appendChild(el)
        dialog = js.Vue.new(
            js.python_to_js({
                "el": "#new_image_template_dialog_wrapper",
                "template": dialog_html,
                "data": {
                    "image_selected": "",
                    "image_select_options": [],
                },
                "methods": {
                    "handle_create": dialog_obj.handle_create,
                    "handle_hidden": dialog_obj.handle_hidden,
                    "image_select_changed": dialog_obj.image_select_changed,
                    "image_reset": dialog_obj.image_reset,
                    "image_rot_m5": dialog_obj.image_rot_m5,
                    "image_rot_p5": dialog_obj.image_rot_p5
                }
            }))
        dialog_obj.init_vue(dialog)
        img_vars = []
        for v in img_var_names:
            img_vars.append({"value": v, "text": v})
        dialog["$data"].image_select_options = js.python_to_js(img_vars)
        # Pre-select the first image and start loading it asynchronously.
        if len(img_vars) > 0:
            dialog["$data"].image_selected = img_vars[0]["value"]
            core.create_task(dialog_obj.set_image(img_var_names[0]))
    except:
        js.alert(
            f"Image template creating failed:\n\n{traceback.format_exc()}")
    # NOTE(review): this runs after the except block, so if setup raised
    # before `dialog` was bound this line would NameError — confirm whether
    # it was meant to live inside the try block.
    dialog["$bvModal"].show("new_image_template")
def __init__(self, result, configuration, **kwargs):
    """
    Fuse a collective result with a report configuration.

    Parameters
    ----------
    result : memote.MemoteResult
        The dictionary structure of results.
    configuration : memote.MemoteConfiguration
        A memote configuration structure.
    """
    super(Report, self).__init__(**kwargs)
    self.result = result
    self.config = configuration
    self._report_type = None
    # Compile the packaged report template once at construction time.
    index_html = read_text(templates, "index.html", encoding="utf-8")
    self._template = Template(index_html)
def get_copyright_notice() -> str:
    """Return madgui license information."""
    from importlib_resources import read_text
    notice = read_text('madgui.data', 'COPYING.txt')
    return notice
try: from importlib_resources import read_text except ModuleNotFoundError: from importlib.resources import read_text try: import progressbar except ImportError: PROGRESSBAR_EXISTS = False else: PROGRESSBAR_EXISTS = True logger = logging.getLogger(__name__) indices_urls = toml.loads(read_text("planetpy.pdstools.data", "indices_paths.toml")) def list_available_index_files(): print(yaml.dump(indices_urls, default_flow_style=False)) print("Use indices.download('mission:instrument:index') to download in index file.") print("For example: indices.download('cassini:uvis:moon_summary'") def replace_url_suffix(url, new_suffix=".tab"): """Cleanest way to replace the suffix in an URL. Sometimes the indices have upper case filenames, this is taken care of here. Parameters ==========
def get_copyright_notice():
    """Return the cpymad license text bundled with the package."""
    from importlib_resources import read_text
    license_text = read_text('cpymad.COPYING', 'cpymad.rst', encoding='utf-8')
    return license_text
def set_buster():
    """Expose the current revision string as a cache-busting template variable."""
    revision = read_text('resources', 'revision.txt').strip()
    return {'bust': revision}
import string import subprocess import sys from importlib_resources import read_text from .async_ import exec_subprocess, run_bg, Future from .locale import _ from .config import DeviceFilter Gtk = None __all__ = ['password', 'browser'] dialog_definition = read_text(__package__, 'password_dialog.ui') class Dialog(Future): def __init__(self, window): self._enter_count = 0 self.window = window self.window.connect("response", self._result_handler) def _result_handler(self, window, response): self.set_result(response) def __enter__(self): self._enter_count += 1 self._awaken()