def main():
    args = _get_parser().parse_args()

    if args.yaml is not None:
        with open(args.yaml, 'r') as y:
            yaml = YAML(typ='safe', pure=True)
            yaml_args = yaml.load(y)
        ep = electrostatic_potential(**yaml_args)
        # only print the potential if it was not already saved to csv
        if yaml_args.get('save_csv') is False:
            print(ep)
    else:
        ep = electrostatic_potential(locpot=args.locpot,
                                     lattice_vector=args.lattice_vector,
                                     save_csv=args.save_csv,
                                     csv_fname=args.csv_fname,
                                     save_plt=args.save_plt,
                                     plt_fname=args.plt_fname,
                                     dpi=args.dpi,
                                     colors=args.colors,
                                     width=args.width,
                                     height=args.height)
        if not args.save_csv:
            print(ep)
def main():
    args = _get_parser().parse_args()

    if args.yaml is not None:
        with open(args.yaml, 'r') as y:
            yaml = YAML(typ='safe', pure=True)
            yaml_args = yaml.load(y)
        core = core_energy(**yaml_args)
        print(core)
    else:
        if args.ox_states_dict:
            ox_states = args.ox_states_dict
        elif args.ox_states_list:
            ox_states = args.ox_states_list
        else:
            ox_states = None
        core = core_energy(args.core_atom, args.nn,
                           orbital=args.orbital,
                           ox_states=ox_states,
                           nn_method=CrystalNN(),
                           structure=args.structure)
        print(core)
def main():
    args = _get_parser().parse_args()

    if args.yaml is not None:
        with open(args.yaml, 'r') as y:
            yaml = YAML(typ='safe', pure=True)
            yaml_args = yaml.load(y)
        parse_energies(**yaml_args)
    else:
        if not args.hkl or not args.bulk_per_atom:
            raise ValueError('hkl or bulk energy per atom were not supplied')
        # parse e.g. "(0,0,1)", "[0,0,1]" or "0,0,1" into the tuple (0, 0, 1)
        hkl = tuple(map(int, args.hkl.strip('[]()').split(',')))
        path = os.getcwd()
        if args.path is not None:
            path = args.path
        parse_energies(hkl, args.bulk_per_atom,
                       path_to_fols=path,
                       parse_core_energy=args.parse_core,
                       core_atom=args.core,
                       bulk_nn=args.nn,
                       parse_vacuum=args.parse_vacuum,
                       plt_surfen=args.plt_surfen,
                       save_csv=True,
                       csv_fname=args.csv_fname,
                       verbose=args.verbose,
                       remove_first_energy=args.remove)
def main():
    args = _get_parser().parse_args()

    if args.yaml is not None:
        with open(args.yaml, 'r') as y:
            yaml = YAML(typ='safe', pure=True)
            yaml_args = yaml.load(y)
        nn = simple_nn(**yaml_args)
        # only print the data if it was not already saved to csv
        if yaml_args.get('save_csv') is False:
            print(nn)
    else:
        if args.ox_states_dict:
            ox_states = args.ox_states_dict
        elif args.ox_states_list:
            ox_states = args.ox_states_list
        else:
            ox_states = None
        nn = simple_nn(args.start, end=args.end,
                       ox_states=ox_states,
                       nn_method=CrystalNN(),
                       save_csv=args.save_csv,
                       csv_fname=args.csv_fname)
        if not args.save_csv:
            print(nn)
def dump(self, data, stream=None, **kwargs):
    inefficient = False
    if stream is None:
        inefficient = True
        stream = StringIO()
    YAML.dump(self, data, stream, **kwargs)
    if inefficient:
        return stream.getvalue()
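# Typical use of the dump-to-string pattern above -- a minimal, self-contained
# sketch. The subclass name MyYAML and the sample data are assumptions, not
# taken from the source.
from io import StringIO

from ruamel.yaml import YAML


class MyYAML(YAML):
    def dump(self, data, stream=None, **kwargs):
        # buffer into a StringIO only when the caller wants the text back
        inefficient = False
        if stream is None:
            inefficient = True
            stream = StringIO()
        YAML.dump(self, data, stream, **kwargs)
        if inefficient:
            return stream.getvalue()


yaml = MyYAML()
text = yaml.dump({"a": 1, "b": [2, 3]})  # returns the YAML text as a str
print(text)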
def get_config(config_filename: str) -> stp_config.Configuration:
    """Get the configuration."""
    if os.path.exists(config_filename):
        yaml = YAML()
        yaml.default_flow_style = False
        with open(config_filename, encoding="utf-8") as config_file:
            return cast(stp_config.Configuration, yaml.load(config_file.read()))
    print("Missing config file: " + config_filename)
    return {}
def yaml_no_ts() -> YAML:
    """
    Get a YAML loader that won't parse timestamps into datetime objects.

    Such datetime objects can't be easily dumped into JSON.
    """
    yaml = YAML(typ="rt")
    yaml.preserve_quotes = True  # type: ignore
    yaml.Constructor = _RoundTripNoTimeStampConstructor
    return yaml
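# _RoundTripNoTimeStampConstructor is defined elsewhere. A minimal sketch of
# one way to implement it (names and details are assumptions, not the actual
# source): re-register the YAML timestamp tag so such scalars are constructed
# as plain strings instead of datetime objects.
from ruamel.yaml.constructor import RoundTripConstructor


class _RoundTripNoTimeStampConstructor(RoundTripConstructor):
    def construct_yaml_timestamp(self, node, values=None):
        # fall back to string construction instead of building a datetime
        return self.construct_yaml_str(node)


_RoundTripNoTimeStampConstructor.add_constructor(
    "tag:yaml.org,2002:timestamp",
    _RoundTripNoTimeStampConstructor.construct_yaml_timestamp,
)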
def main() -> None:
    """Get the status of current scan."""
    config = get_config(CONFIG_PATH)
    for folder in glob.glob(os.path.join(os.path.expanduser(config["scan_folder"]), "*")):
        # print a dashed separator as wide as the folder name
        print(re.sub(r".", "-", folder))
        print(folder)
        if not os.path.exists(os.path.join(folder, "config.yaml")):
            print("No config")
        else:
            yaml = YAML(typ="safe")
            yaml.default_flow_style = False
            with open(os.path.join(folder, "config.yaml"), encoding="utf-8") as config_file:
                job_config: scan_to_paperless.process_schema.Configuration = yaml.load(
                    config_file.read())
            if os.path.exists(os.path.join(folder, "error.yaml")):
                with open(os.path.join(folder, "error.yaml"), encoding="utf-8") as error_file:
                    error = yaml.load(error_file.read())
                    if error is not None and "error" in error:
                        print(error["error"])
                        if isinstance(error["error"], subprocess.CalledProcessError):
                            print(error["error"].output.decode())
                            if error["error"].stderr:
                                print(error["error"].stderr)
                        if "traceback" in error:
                            print("\n".join(error["traceback"]))
                    else:
                        print("Unknown error")
                        print(error)
            else:
                already_processed = True
                if "transformed_images" not in job_config:
                    already_processed = False
                else:
                    for img in job_config["transformed_images"]:
                        img = os.path.join(folder, os.path.basename(img))
                        if not os.path.exists(img):
                            already_processed = False
                if already_processed:
                    if os.path.exists(os.path.join(folder, "REMOVE_TO_CONTINUE")):
                        print("To be validated")
                    if os.path.exists(os.path.join(folder, "DONE")):
                        print("Process finished")
                    else:
                        print("Waiting to be imported")
                else:
                    print("Not ready")
def test_load_by_yaml_metaschema(metaschema_pre: Any) -> None:
    path = get_data("metaschema/metaschema.yml")
    assert path
    with open(path) as path_handle:
        yaml = YAML()
        yaml.preserve_quotes = True  # type: ignore
        yaml_doc = yaml.load(path_handle)
    doc = cg_metaschema.load_document_by_yaml(
        yaml_doc,
        file_uri(path),
        None,
    )
    saved = [d.save(relative_uris=False) for d in doc]
    assert saved == JsonDiffMatcher(metaschema_pre)
def test_examples() -> None:
    for a in ["field_name", "ident_res", "link_res", "vocab_res"]:
        path = get_data(f"metaschema/{a}_schema.yml")
        assert path
        ldr, _, _, _ = schema_salad.schema.load_schema(path)
        path2 = get_data(f"metaschema/{a}_src.yml")
        assert path2
        yaml = YAML()
        with open(path2) as src_fp:
            src = ldr.resolve_all(yaml.load(src_fp), "", checklinks=False)[0]
        path3 = get_data(f"metaschema/{a}_proc.yml")
        assert path3
        with open(path3) as src_proc:
            proc = yaml.load(src_proc)
        assert proc == src
def load_ioc_yaml(ioc_instance_yaml: Path, no_schema: bool = False) -> Dict:
    """
    Read in an IOC instance entity YAML, convert it to a dictionary and
    validate it against its declared schema.
    """
    entity_dict = YAML().load(ioc_instance_yaml)

    if not no_schema:
        try:
            # the schema URL is declared in a modeline comment of the form
            # '# yaml-language-server: $schema=<url>'
            comment1 = entity_dict.ca.comment[1][0].value
            matches = schema_modeline.match(comment1)
            schema_url = cast(Match, matches).group(1)

            # allow relative file paths so that tests can use this
            parts = schema_url.split(url_f)
            if len(parts) > 1:
                schema_url = url_f + str(Path(parts[1]).absolute()).strip()

            with urllib.request.urlopen(schema_url) as url:
                entity_schema = json.loads(url.read().decode())
        except Exception:
            log.error(
                f"Error getting schema for {ioc_instance_yaml}. "
                "Make sure it has '# yaml-language-server: $schema='"
            )
            raise

        validate(entity_dict, entity_schema)

    return entity_dict
def test_rfi_add_integration(rfi_content: str, match_error: str) -> None:
    # pydantic messages end with the type of the error in parentheses.
    regex = re.compile(
        (
            "^test.rfi:0:0: parser: error: 1 validation error for "
            rf"IntegrationFile.*{match_error} \([^()]*\)$"
        ),
        re.DOTALL,
    )
    yaml = YAML()
    content = yaml.load(rfi_content)
    error = RecordFluxError()
    integration = Integration()
    with pytest.raises(RecordFluxError, match=regex):
        # pylint: disable = protected-access
        integration._add_integration_object(Path("test.rfi"), content, error)
        error.propagate()
def main():
    args = _get_parser().parse_args()

    if args.yaml is not None:
        with open(args.yaml, 'r') as y:
            yaml = YAML(typ='safe', pure=True)
            yaml_args = yaml.load(y)
        df = pd.read_csv(yaml_args['filename'])
        plot_surfen(df=df, **yaml_args)
    else:
        df = pd.read_csv(args.filename)
        plot_surfen(df, colors=args.colors,
                    dpi=args.dpi,
                    width=args.width,
                    height=args.height,
                    plt_fname=args.plt_fname)
def _validate_message(message_path: Path, valid_original_message: bool,
                      message_value: MessageValue) -> "ValidationResult":
    if not message_path.is_file():
        raise ValidationError(f"{message_path} is not a regular file")

    parameters_path = message_path.with_suffix(".yaml")
    message_parameters: Dict[str, Union[bool, int, str]] = {}
    if parameters_path.is_file():
        yaml = YAML()
        message_parameters = yaml.load(parameters_path)

    original_message = message_path.read_bytes()
    parsed_message = message_value.clone()
    parser_error = None

    try:
        parsed_message.add_parameters(message_parameters)
    except PyRFLXError as e:
        raise ValidationError(f"{message_path}: {e}") from e

    try:
        parsed_message.parse(original_message)
        valid_parser_result = parsed_message.bytestring == original_message
        if not valid_parser_result:
            assert parsed_message.valid_message
            assert len(parsed_message.bytestring) <= len(original_message)
            assert original_message.startswith(parsed_message.bytestring)
            parser_error = "message parsed by PyRFLX is shorter than the original message"
    except PyRFLXError as e:
        parser_error = str(e)
        valid_parser_result = False

    return ValidationResult(
        valid_original_message == valid_parser_result,
        parsed_message,
        parser_error,
        message_path,
        original_message,
        valid_original_message,
        valid_parser_result,
    )
def load_integration_file(self, spec_file: Path, error: RecordFluxError) -> None:
    integration_file = (
        spec_file.with_suffix(".rfi")
        if self._integration_files_dir is None
        else self._integration_files_dir / (spec_file.stem + ".rfi")
    )
    if integration_file.exists():
        yaml = YAML()
        try:
            content = yaml.load(integration_file)
        except MarkedYAMLError as e:
            location = Location(
                start=(
                    (0, 0)
                    if e.problem_mark is None
                    else (e.problem_mark.line + 1, e.problem_mark.column + 1)
                ),
                source=integration_file,
            )
            error.extend([(str(e), Subsystem.PARSER, Severity.ERROR, location)])
            return
        self._add_integration_object(integration_file, content, error)
def yaml_load(file, as_namespace=False):
    if as_namespace:
        yaml = YAML(typ='safe', pure=True)
        yaml.Constructor = YAMLNamespaceConstructor
    else:
        yaml = YAML(typ='safe')
    return yaml.load(file)
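# YAMLNamespaceConstructor is defined elsewhere. A minimal sketch of one way
# such a constructor could work (an assumption, not the actual source): build
# SimpleNamespace objects instead of dicts for YAML mappings.
from types import SimpleNamespace

from ruamel.yaml.constructor import SafeConstructor


class YAMLNamespaceConstructor(SafeConstructor):
    def construct_yaml_namespace(self, node):
        # two-step construction, as ruamel.yaml expects for mappings
        data = SimpleNamespace()
        yield data
        data.__dict__.update(self.construct_mapping(node))


YAMLNamespaceConstructor.add_constructor(
    'tag:yaml.org,2002:map',
    YAMLNamespaceConstructor.construct_yaml_namespace,
)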
def main():
    args = _get_parser().parse_args()

    if args.yaml is not None:
        with open(args.yaml, 'r') as y:
            yaml = YAML(typ='safe', pure=True)
            yaml_args = yaml.load(y)
        # get hkl first as a list, then convert it to a tuple (or a list of
        # tuples) of Miller indices
        hkl = list(yaml_args.pop('hkl'))
        miller = _hkl(hkl)
        generate_slabs(hkl=miller, **yaml_args)
    else:
        if args.ox_states_dict:
            ox_states = args.ox_states_dict
        elif args.ox_states_list:
            ox_states = args.ox_states_list
        else:
            ox_states = None
        miller = _hkl(args.hkl)
        generate_slabs(args.structure, miller, args.thicknesses, args.vacuums,
                       make_fols=args.fols,
                       make_input_files=args.files,
                       max_size=args.max_size,
                       center_slab=args.center_slab,
                       name=args.name,
                       ox_states=ox_states,
                       is_symmetric=args.is_symmetric,
                       fmt=args.fmt,
                       config_dict=args.config_dict,
                       user_incar_settings=args.incar,
                       user_potcar_settings=args.potcar,
                       user_kpoints_settings=args.kpoints,
                       layers_to_relax=args.sd)
def fetch(
    self,
    url: str,
    inject_ids: bool = True,
    content_types: Optional[List[str]] = None,
) -> IdxResultType:
    if url in self.idx:
        return self.idx[url]
    try:
        text = self.fetch_text(url, content_types=content_types)
        if isinstance(text, bytes):
            textIO = StringIO(text.decode("utf-8"))
        else:
            textIO = StringIO(text)
        textIO.name = str(url)
        yaml = YAML()
        yaml.preserve_quotes = True  # type: ignore
        attachments = yaml.load_all(textIO)
        result = cast(Union[CommentedSeq, CommentedMap], next(attachments))

        if self.allow_attachments is not None and self.allow_attachments(result):
            i = 1
            for a in attachments:
                self.idx[f"{url}#attachment-{i}"] = a
                i += 1
        add_lc_filename(result, url)
    except MarkedYAMLError as e:
        raise to_validation_exception(e) from e
    if isinstance(result, CommentedMap) and inject_ids and bool(self.identifiers):
        for identifier in self.identifiers:
            if identifier not in result:
                result[identifier] = url
            self.idx[self.expand_url(result[identifier], url, scoped_id=True)] = result
    self.idx[url] = result
    return result
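# Minimal illustration (not from the source) of the load_all behaviour that
# fetch() relies on: ruamel.yaml yields one document per '---' separator, so
# the first document is the main result and the rest become attachments.
from io import StringIO

from ruamel.yaml import YAML

yaml = YAML()
yaml.preserve_quotes = True
stream = StringIO("main: doc\n---\nattachment: 1\n---\nattachment: 2\n")
docs = yaml.load_all(stream)
main_doc = next(docs)      # the primary document
attachments = list(docs)   # the remaining documents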
def _document_load_by_url(loader, url, loadingOptions):
    # type: (_Loader, str, LoadingOptions) -> Any
    if url in loadingOptions.idx:
        return _document_load(loader, loadingOptions.idx[url], url, loadingOptions)

    text = loadingOptions.fetcher.fetch_text(url)
    if isinstance(text, bytes):
        textIO = StringIO(text.decode("utf-8"))
    else:
        textIO = StringIO(text)
    textIO.name = str(url)
    yaml = YAML()
    yaml.preserve_quotes = True  # type: ignore
    result = yaml.load(textIO)
    add_lc_filename(result, url)

    loadingOptions.idx[url] = result

    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)

    return _document_load(loader, result, url, loadingOptions)
def create_boot_script(ioc_instance_yaml: Path, definition_yaml: List[Path]) -> str:
    """
    Create the boot script for an IOC
    """
    # Read and load the support module definitions
    for yaml in definition_yaml:
        support = Support.deserialize(YAML(typ="safe").load(yaml))
        make_entity_classes(support)

    # Create an IOC instance from it
    ioc_instance = IOC.deserialize(YAML(typ="safe").load(ioc_instance_yaml))

    # Open the jinja template for the startup script and fill it in with
    # script elements and database elements described by the IOC's Entity
    # objects
    with open(TEMPLATES / "ioc.boot.jinja", "r") as f:
        template = Template(f.read())

    return template.render(
        env_var_elements=render_environment_variable_elements(ioc_instance),
        script_elements=render_script_elements(ioc_instance),
        database_elements=render_database_elements(ioc_instance),
        post_ioc_init_elements=render_post_ioc_init_elements(ioc_instance),
    )
    if not author:
        raise ValueError("Missing author?!")
    guild_id = guild.id
    author_id = author.id
    state = cast(PlayerScore, session.query(cls).get((guild_id, author_id)))
    if state is None:
        state = PlayerScore(guild=guild_id, player=author_id)
        session.add(state)
        session.commit()
    return state


yaml = YAML(typ="safe")


@yaml_object(yaml)
class Question:
    yaml_tag = "!question"

    def __init__(self, question: str, answer: str, options: List[str]) -> None:
        self.question = question
        self.answer = answer
        self.options = options

    def __repr__(self) -> str:
        return (f"Question(question={self.question!r}, "
                f"answer={self.answer!r}, options={self.options!r})")
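# Illustrative round trip (not from the source): @yaml_object registers
# Question's yaml_tag with both the representer and the constructor of this
# YAML instance, so instances survive a dump/load cycle.
from io import StringIO

q = Question("Capital of France?", "Paris", ["Paris", "Lyon", "Nice"])
buf = StringIO()
yaml.dump([q], buf)
loaded = yaml.load(buf.getvalue())
assert isinstance(loaded[0], Question)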
def get_setting(setting_name):
    # use a context manager so the config file handle is closed promptly
    with open('config.yml') as config_file:
        return YAML().load(config_file.read()).get(setting_name)
def main() -> None:
    """Scan a new document."""
    parser = argparse.ArgumentParser()
    presets = [
        e[len(CONFIG_PATH) - 4:-5] for e in glob.glob(f"{CONFIG_PATH[:-5]}-*.yaml")
    ]  # noqa
    parser.add_argument(
        "--mode",
        choices=("adf", "one", "multi", "double"),
        default="adf",
        help="The scan mode: 'adf': use the Auto Document Feeder (default), "
        "'one': scan one page, 'multi': scan multiple pages, "
        "'double': scan a double-sided document using the ADF",
    )
    parser.add_argument(
        "--preset",
        choices=presets,
        help="Use an alternate configuration",
    )
    parser.add_argument(
        "--append-credit-card",
        action="store_true",
        help="Append the credit card vertically",
    )
    parser.add_argument("--assisted-split", action="store_true", help="Split operation, see help")
    parser.add_argument(
        "--config",
        action="store_true",
        help="Print the configuration and exit",
    )
    parser.add_argument(
        "--set-config",
        nargs=2,
        action="append",
        default=[],
        help="Set a configuration option",
    )
    argcomplete.autocomplete(parser)
    args = parser.parse_args()

    config_filename = CONFIG_PATH if args.preset is None else f"{CONFIG_PATH[:-5]}-{args.preset}.yaml"
    config: stp_config.Configuration = get_config(config_filename)

    if args.config:
        yaml = YAML()
        yaml.default_flow_style = False
        print("Config from file: " + config_filename)
        yaml.dump(config, sys.stdout)
        sys.exit()

    dirty = False
    for conf in args.set_config:
        config[conf[0]] = conf[1]  # type: ignore
        dirty = True
    if dirty:
        yaml = YAML()
        yaml.default_flow_style = False
        with open(config_filename, "w", encoding="utf-8") as config_file:
            config_file.write(
                "# yaml-language-server: $schema=https://raw.githubusercontent.com/sbrunner/scan-to-paperless"
                "/master/scan_to_paperless/config_schema.json\n\n")
            yaml.dump(config, config_file)

    if "scan_folder" not in config:
        print("""The scan folder isn't set, use:
    scan --set-config scan_folder <a_folder>
This should be shared with the process container in 'source'.""")
        sys.exit(1)

    rand_int = str(random.randint(0, 999999))  # nosec
    base_folder = os.path.join(os.path.expanduser(config["scan_folder"]), rand_int)
    while os.path.exists(base_folder):
        rand_int = str(random.randint(0, 999999))  # nosec
        base_folder = os.path.join(os.path.expanduser(config["scan_folder"]), rand_int)

    destination = f"/destination/{rand_int}.pdf"
    root_folder = os.path.join(base_folder, "source")
    os.makedirs(root_folder)

    try:
        scanimage: List[str] = [config.get("scanimage", "scanimage")]
        scanimage += config.get(
            "scanimage_arguments", ["--format=png", "--mode=color", "--resolution=300"])
        scanimage += [f"--batch={root_folder}/image-%d.png"]
        if args.mode in ("adf", "double"):
            scanimage += ["--source=ADF"]
        if args.mode == "multi":
            scanimage += ["--batch-prompt"]
        if args.mode == "one":
            scanimage += ["--batch-count=1"]

        if args.mode == "double":
            call(scanimage + ["--batch-start=1", "--batch-increment=2"])
            odd = os.listdir(root_folder)
            input(
                "Put your document in the automatic document feeder for the other side, "
                "and press enter.")
            call(scanimage + [
                f"--batch-start={len(odd) * 2}",
                "--batch-increment=-2",
                f"--batch-count={len(odd)}",
            ])
            # the pages scanned from the back are upside down: rotate them
            for img in os.listdir(root_folder):
                if img not in odd:
                    path = os.path.join(root_folder, img)
                    image = io.imread(path)
                    image = np.rot90(image, 2)
                    io.imsave(path, image.astype(np.uint8))
        else:
            call(scanimage)

        args_: stp_config.Arguments = {}
        args_.update(config.get("default_args", {}))
        args_cmd = dict(args._get_kwargs())  # pylint: disable=protected-access
        del args_cmd["mode"]
        del args_cmd["preset"]
        del args_cmd["config"]
        del args_cmd["set_config"]
        args_.update(cast(stp_config.Arguments, args_cmd))
    except subprocess.CalledProcessError as exception:
        print(exception)
        sys.exit(1)

    print(root_folder)
    subprocess.call([config.get("viewer", "eog"), root_folder])  # nosec

    images = []
    for img in os.listdir(root_folder):
        if not img.startswith("image-"):
            continue
        images.append(os.path.join("source", img))

    regex = re.compile(r"^source\/image\-([0-9]+)\.png$")

    def image_match(image_name: str) -> int:
        match = regex.match(image_name)
        assert match
        return int(match.group(1))

    images = sorted(images, key=image_match)
    if images:
        process_config = {
            "images": images,
            "destination": destination,
            "args": args_,
        }
        yaml = YAML(typ="safe")
        yaml.default_flow_style = False
        with open(os.path.join(os.path.dirname(root_folder), "config.yaml"),
                  "w", encoding="utf-8") as process_file:
            process_file.write(
                "# yaml-language-server: $schema=https://raw.githubusercontent.com/sbrunner/scan-to-paperless"
                "/master/scan_to_paperless/process_schema.json\n\n")
            yaml.dump(process_config, process_file)
    else:
        os.rmdir(root_folder)
        os.rmdir(base_folder)
def test_yaml_float_test() -> None:
    assert YAML().load("float-test: 2e-10")["float-test"] == 2e-10
def get_metaschema() -> Tuple[Names, List[Dict[str, str]], Loader]:
    """Instantiate the metaschema."""
    loader = ref_resolver.Loader(
        {
            "Any": saladp + "Any",
            "ArraySchema": saladp + "ArraySchema",
            "Array_symbol": saladp + "ArraySchema/type/Array_symbol",
            "DocType": saladp + "DocType",
            "Documentation": saladp + "Documentation",
            "Documentation_symbol": saladp + "Documentation/type/Documentation_symbol",
            "Documented": saladp + "Documented",
            "EnumSchema": saladp + "EnumSchema",
            "Enum_symbol": saladp + "EnumSchema/type/Enum_symbol",
            "JsonldPredicate": saladp + "JsonldPredicate",
            "NamedType": saladp + "NamedType",
            "PrimitiveType": saladp + "PrimitiveType",
            "RecordField": saladp + "RecordField",
            "RecordSchema": saladp + "RecordSchema",
            "Record_symbol": saladp + "RecordSchema/type/Record_symbol",
            "SaladEnumSchema": saladp + "SaladEnumSchema",
            "SaladRecordField": saladp + "SaladRecordField",
            "SaladRecordSchema": saladp + "SaladRecordSchema",
            "SchemaDefinedType": saladp + "SchemaDefinedType",
            "SpecializeDef": saladp + "SpecializeDef",
            "_container": saladp + "JsonldPredicate/_container",
            "_id": {"@id": saladp + "_id", "@type": "@id", "identity": True},
            "_type": saladp + "JsonldPredicate/_type",
            "abstract": saladp + "SaladRecordSchema/abstract",
            "array": saladp + "array",
            "boolean": "http://www.w3.org/2001/XMLSchema#boolean",
            "dct": "http://purl.org/dc/terms/",
            "default": {"@id": saladp + "default", "noLinkCheck": True},
            "doc": "rdfs:comment",
            "docAfter": {"@id": saladp + "docAfter", "@type": "@id"},
            "docChild": {"@id": saladp + "docChild", "@type": "@id"},
            "docParent": {"@id": saladp + "docParent", "@type": "@id"},
            "documentRoot": saladp + "SchemaDefinedType/documentRoot",
            "documentation": saladp + "documentation",
            "double": "http://www.w3.org/2001/XMLSchema#double",
            "enum": saladp + "enum",
            "extends": {"@id": saladp + "extends", "@type": "@id", "refScope": 1},
            "fields": {
                "@id": saladp + "fields",
                "mapPredicate": "type",
                "mapSubject": "name",
            },
            "float": "http://www.w3.org/2001/XMLSchema#float",
            "identity": saladp + "JsonldPredicate/identity",
            "inVocab": saladp + "NamedType/inVocab",
            "int": "http://www.w3.org/2001/XMLSchema#int",
            "items": {"@id": saladp + "items", "@type": "@vocab", "refScope": 2},
            "jsonldPredicate": "sld:jsonldPredicate",
            "long": "http://www.w3.org/2001/XMLSchema#long",
            "mapPredicate": saladp + "JsonldPredicate/mapPredicate",
            "mapSubject": saladp + "JsonldPredicate/mapSubject",
            "name": "@id",
            "noLinkCheck": saladp + "JsonldPredicate/noLinkCheck",
            "null": saladp + "null",
            "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
            "rdfs": "http://www.w3.org/2000/01/rdf-schema#",
            "record": saladp + "record",
            "refScope": saladp + "JsonldPredicate/refScope",
            "sld": saladp,
            "specialize": {
                "@id": saladp + "specialize",
                "mapPredicate": "specializeTo",
                "mapSubject": "specializeFrom",
            },
            "specializeFrom": {
                "@id": saladp + "specializeFrom",
                "@type": "@id",
                "refScope": 1,
            },
            "specializeTo": {
                "@id": saladp + "specializeTo",
                "@type": "@id",
                "refScope": 1,
            },
            "string": "http://www.w3.org/2001/XMLSchema#string",
            "subscope": saladp + "JsonldPredicate/subscope",
            "symbols": {"@id": saladp + "symbols", "@type": "@id", "identity": True},
            "type": {
                "@id": saladp + "type",
                "@type": "@vocab",
                "refScope": 2,
                "typeDSL": True,
            },
            "typeDSL": saladp + "JsonldPredicate/typeDSL",
            "xsd": "http://www.w3.org/2001/XMLSchema#",
        }
    )

    for salad in SALAD_FILES:
        with resource_stream("schema_salad", "metaschema/" + salad) as stream:
            loader.cache["https://w3id.org/cwl/" + salad] = stream.read().decode("UTF-8")
    with resource_stream("schema_salad", "metaschema/metaschema.yml") as stream:
        loader.cache["https://w3id.org/cwl/salad"] = stream.read().decode("UTF-8")

    yaml = YAML()
    j = yaml.load(loader.cache["https://w3id.org/cwl/salad"])
    add_lc_filename(j, "metaschema.yml")
    j2 = loader.resolve_all(j, saladp)[0]

    if not isinstance(j2, list):
        _logger.error("%s", j2)
        raise SchemaParseException(f"Not a list: {j2}")

    sch_obj = make_avro(j2, loader, loader.vocab)
    try:
        sch_names = make_avro_schema_from_avro(sch_obj)
    except SchemaParseException:
        _logger.error("Metaschema error, avro was:\n%s", json_dumps(sch_obj, indent=4))
        raise
    validate_doc(sch_names, j2, loader, strict=True)
    return (sch_names, j2, loader)