def test_input_glob(self):
    """Test simple glob expressions, one for each input"""
    config_yaml_original = yaml.as_document(self.config_dict)
    pipeline_config_original = PipelineConfig(config_yaml_original)
    pipeline_config_original.validate()

    # replace the inputs with glob expressions
    self.config_dict["inputs"]["image"] = {
        "glob": "vast_pipeline/tests/data/epoch??.fits"
    }
    self.config_dict["inputs"]["selavy"] = {
        "glob": "vast_pipeline/tests/data/epoch??.selavy.components.txt"
    }
    self.config_dict["inputs"]["noise"] = {
        "glob": "vast_pipeline/tests/data/epoch??.noiseMap.fits"
    }
    self.config_dict["inputs"]["background"] = {
        "glob": "vast_pipeline/tests/data/epoch??.meanMap.fits"
    }
    config_yaml_globs = yaml.as_document(self.config_dict)
    pipeline_config_globs = PipelineConfig(config_yaml_globs)
    pipeline_config_globs.validate()

    # after validation, the glob expressions should be resolved and be identical to
    # the original config
    self.assertDictEqual(
        pipeline_config_original._yaml.data, pipeline_config_globs._yaml.data
    )
def test_alfasim_schema_is_usable():
    """Smoke test to ensure that the schema has valid Python syntax."""
    from strictyaml import as_document

    as_document({"name": "Name"}, case_description_schema)
def _plugin_config_file_content(
    self,
    caption: str,
    plugin_id: str,
    author_email: str,
    author_name: str,
    extras: dict,
) -> str:
    """
    Return a string that represents the content of a valid configuration for a plugin.
    """
    file_content = dedent(
        f"""\
        author: '{author_name}'
        caption: '{caption}'
        email: '{author_email}'
        id: '{plugin_id}'
        version: '1.0.0'
        """
    )
    if extras:
        import strictyaml

        extras_dict = {"extras": extras}
        file_content += strictyaml.as_document(extras_dict).as_yaml()
    return file_content
def test_write(self):
    """[System config reader | HumanReadableTimePeriod] Read write"""
    yaml = strictyaml.as_document(
        {"test": 7}, schema=self.schema
    ).as_yaml()
    self.assertEqual("test: 7 seconds", yaml.strip())
def test_association_method_value(self):
    # test valid options
    for method in PipelineConfig._VALID_ASSOC_METHODS:
        with self.subTest(method=method):
            self.config_dict["source_association"]["method"] = method
            config_yaml = yaml.as_document(self.config_dict)
            pipeline_config = PipelineConfig(config_yaml)
            pipeline_config.validate()

    # test invalid option
    method = "foo"
    with self.subTest(method=method):
        self.config_dict["source_association"]["method"] = method
        config_yaml = yaml.as_document(self.config_dict)
        with self.assertRaises(PipelineConfigError):
            pipeline_config = PipelineConfig(config_yaml)
            pipeline_config.validate()
def create(path: str = CONFIG_FILE) -> None:
    with open(path, 'w') as file:
        config = as_document({
            'version': __CONFIG_VERSION,
            'twitch': {
                'client_id': '<your client-id>',
            },
            'main': {
                'channel': '<twitch channel>',
                'quality': '<quality>',
                'temp_dir': '<path to temporary directory>',
            },
            'storage': {
                'path': '<path where vods should be stored>',
                'vod_path': '<"{{channel}}/{{id}} {{date:%Y-%m-%d}} {{title}}.ts">',
            },
            'telegram': {
                'enabled': False,
                'api_token': '<Telegram bot API token>',
                'chat_id': '<your chat id>',
            },
        })
        comment_quality = 'Depends on stream. Leave blank for source (chunked) quality.'
        comment_vod_path = (
            'Python 3.6 f-string. Valid arguments: {{title}} {{id}} {{type}} {{channel}} {{date}}\n'
            '\'*\' will be added to the new filename if the file already exists in storage'
        )
        config.as_marked_up()['main'].yaml_set_comment_before_after_key(
            'quality', before=comment_quality, indent=2)
        config.as_marked_up()['storage'].yaml_set_comment_before_after_key(
            'vod_path', before=comment_vod_path, indent=2)
        file.write(config.as_yaml())
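# A minimal standalone sketch of the comment-insertion pattern used above,
# assuming only that strictyaml (which wraps ruamel.yaml) is installed.
# as_marked_up() exposes the underlying ruamel.yaml CommentedMap, so ruamel's
# comment API can annotate keys before the document is serialized.
from strictyaml import as_document

doc = as_document({'main': {'quality': 'source'}})
doc.as_marked_up()['main'].yaml_set_comment_before_after_key(
    'quality', before='Leave blank for source (chunked) quality.', indent=2)
print(doc.as_yaml())  # the emitted YAML carries the comment above 'quality'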
def test_background_optional(self):
    """Background inputs are optional if source monitoring is false."""
    self.config_dict["source_monitoring"]["monitor"] = False
    del self.config_dict["inputs"]["background"]
    config_yaml = yaml.as_document(self.config_dict)
    pipeline_config = PipelineConfig(config_yaml)
    pipeline_config.validate()
def test_write_valid(self):
    """[System config reader | PyEnum] Write valid"""
    yaml = strictyaml.as_document(
        {"test": self.MyEnum.FIRST}, schema=self.schema
    ).as_yaml()
    self.assertEqual("test: FIRST", yaml.strip())
def test_background_for_source_monitoring(self):
    """Background input images must be provided if source monitoring is true."""
    self.config_dict["source_monitoring"]["monitor"] = True
    del self.config_dict["inputs"]["background"]
    config_yaml = yaml.as_document(self.config_dict)
    with self.assertRaises(PipelineConfigError):
        pipeline_config = PipelineConfig(config_yaml)
        pipeline_config.validate()
def load_string(data: bytes, schema, path: str):
    """Load and validate yaml data."""
    # yaml_load raises YAMLError on invalid input; let it propagate to the caller.
    return yaml_load(data, Map(schema), path)
def test_minimum_two_inputs(self):
    for input_type in PipelineConfig._REQUIRED_INPUT_TYPES:
        self.config_dict["inputs"][input_type] = [
            self.config_dict["inputs"][input_type][0],
        ]
    config_yaml = yaml.as_document(self.config_dict)
    with self.assertRaises(PipelineConfigError):
        pipeline_config = PipelineConfig(config_yaml)
        pipeline_config.validate()
def test_write_invalid(self):
    """[System config reader | PyEnum] Write invalid"""
    self.assertRaises(
        YAMLSerializationError,
        lambda: strictyaml.as_document(
            {"test": "FIRST"}, schema=self.schema
        ).as_yaml(),
    )
def test_input_files_exist(self):
    # add a fake input file to each input list
    for input_type in PipelineConfig._REQUIRED_INPUT_TYPES:
        input_file_list = self.config_dict["inputs"][input_type]
        input_file_list.append(input_file_list[0].replace("01", "0x"))
    config_yaml = yaml.as_document(self.config_dict)
    with self.assertRaises(PipelineConfigError):
        pipeline_config = PipelineConfig(config_yaml)
        pipeline_config.validate()
def test_duplicated_files(self):
    for input_type in PipelineConfig._REQUIRED_INPUT_TYPES:
        with self.subTest(input_type=input_type):
            # duplicate the first input file
            input_file_list = self.config_dict["inputs"][input_type]
            input_file_list[1] = input_file_list[0]
            config_yaml = yaml.as_document(self.config_dict)
            with self.assertRaises(PipelineConfigError):
                pipeline_config = PipelineConfig(config_yaml)
                pipeline_config.validate()
def test_nr_files_differs(self):
    for input_type in PipelineConfig._REQUIRED_INPUT_TYPES:
        with self.subTest(input_type=input_type):
            # add a new unique input file
            input_file_list = self.config_dict["inputs"][input_type]
            input_file_list.append(input_file_list[0].replace("01", "0x"))
            config_yaml = yaml.as_document(self.config_dict)
            with self.assertRaises(PipelineConfigError):
                pipeline_config = PipelineConfig(config_yaml)
                pipeline_config.validate()
def load(path: Path, schema_pointer):
    """Load and validate a .yaml file."""
    schema = copy.deepcopy(schema_pointer)
    with path.open() as f:
        yaml = f.read()
    data = yaml_load(yaml, Any())
    is_template = path.name == "template.yaml"

    # Replace real Country and Timezone values with fakes
    if is_template:
        schema["woo/woocommerce_default_country"] = Enum(["LL"])
        schema["wp/timezone_string"] = Enum(["Region/Country"])
        schema["wp/DEFAULT_WPLANG"] = Enum(["ll_LL"])
        schema["woo/woocommerce_currency"] = Enum(["LLL"])

    if "woo/woocommerce_tax_classes" in data:
        # Check that tax classes and taxes match:
        # create an enum for taxes from the defined tax classes
        tax_classes = [
            str(tax).lower().replace(" ", "-")
            for tax in data["woo/woocommerce_tax_classes"]
        ]
        # +1 is for the standard class, which is never defined in tax classes
        for x in range(len(tax_classes) + 1):
            # start counting with 1
            schema[f"wootax/{x + 1}"] = Map({
                "country": Enum(["LL"]) if is_template else Enum(COUNTRIES),
                "state": Str(),
                "rate": Decimal(),
                "name": Str(),
                "priority": Int(),
                "compound": Int(),
                "shipping": Int(),
                "order": Int(),
                "class": Enum([""]) if x == 0 else Enum(tax_classes),
                "locations": Map({}),
            })

    # yaml_load raises YAMLError on validation failure; let it propagate.
    return yaml_load(yaml, Map(schema), path)
def to_yaml(self) -> str:
    # don't try-except here. we have a bug if the schema validation fails and want
    # to see the stack trace.
    # cast here because mypy thinks this is an Any
    return cast(
        str,
        strictyaml.as_document(
            data=self.to_python_object(),
            schema=self._yaml_schema(),
        ).as_yaml(),
    )
def test_maximum_input_images(self):
    max_files = settings.MAX_PIPERUN_IMAGES
    user = AnonymousUser()
    n_files_to_add = max_files - len(self.config_dict["inputs"]["image"]) + 1
    for input_type in PipelineConfig._REQUIRED_INPUT_TYPES:
        input_file_list = self.config_dict["inputs"][input_type]
        input_file_list.extend(
            [str(uuid.uuid4()) for _ in range(n_files_to_add)]
        )
    config_yaml = yaml.as_document(self.config_dict)
    with self.assertRaises(PipelineConfigError):
        pipeline_config = PipelineConfig(config_yaml)
        pipeline_config.validate(user=user)  # type: ignore[arg-type]
def test_input_multiple_globs(self):
    """Test multiple consecutive glob expressions"""
    config_yaml_original = yaml.as_document(self.config_dict)
    pipeline_config_original = PipelineConfig(config_yaml_original)
    pipeline_config_original.validate()

    # replace the inputs with glob expressions
    self.config_dict["inputs"]["image"] = {
        "glob": [
            "vast_pipeline/tests/data/epoch0[12].fits",
            "vast_pipeline/tests/data/epoch0[34].fits",
        ],
    }
    self.config_dict["inputs"]["selavy"] = {
        "glob": [
            "vast_pipeline/tests/data/epoch0[12].selavy.components.txt",
            "vast_pipeline/tests/data/epoch0[34].selavy.components.txt",
        ],
    }
    self.config_dict["inputs"]["noise"] = {
        "glob": [
            "vast_pipeline/tests/data/epoch0[12].noiseMap.fits",
            "vast_pipeline/tests/data/epoch0[34].noiseMap.fits",
        ],
    }
    self.config_dict["inputs"]["background"] = {
        "glob": [
            "vast_pipeline/tests/data/epoch0[12].meanMap.fits",
            "vast_pipeline/tests/data/epoch0[34].meanMap.fits",
        ],
    }
    config_yaml_globs = yaml.as_document(self.config_dict)
    pipeline_config_globs = PipelineConfig(config_yaml_globs)
    pipeline_config_globs.validate()

    # after validation, the glob expressions should be resolved and be identical to
    # the original config
    self.assertDictEqual(
        pipeline_config_original._yaml.data, pipeline_config_globs._yaml.data
    )
def WriteYamlToFile(filename: str, entity_yaml_dict: Dict[str, object]) -> None:
    """Converts a dictionary of entity instances to a yaml file.

    Args:
        filename: Building Config instance name which is being validated.
        entity_yaml_dict: Dictionary with entity yaml blocks keyed by entity
            instance code or GUID.
    """
    try:
        with open(filename, 'w', encoding='utf-8') as file:
            file.write(syaml.as_document(entity_yaml_dict).as_yaml())
    except PermissionError:
        print(f'Permission denied when writing to {filename}')
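# Hypothetical call for illustration; the entity block contents below are
# invented, not taken from any real Building Config.
WriteYamlToFile(
    'building_config.yaml',
    {'AHU-1-GUID': {'code': 'AHU-1', 'type': 'HVAC/AHU'}},
)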
@classmethod
def from_file(
    cls,
    yaml_path: str,
    label: str = "run config",
    validate: bool = True,
    add_defaults: bool = True,
) -> "PipelineConfig":
    """Create a PipelineConfig object from a run configuration YAML file.

    Args:
        yaml_path: Path to the run config YAML file.
        label: A label for the config object that will be used in error
            messages. Default is "run config".
        validate: Perform config schema validation immediately after loading
            the config file. If set to False, the full schema validation will
            not be performed until PipelineConfig.validate() is explicitly
            called. The inputs are always validated regardless. Defaults to
            True.
        add_defaults: Add missing configuration parameters using configured
            defaults. The defaults are read from the Django settings file.
            Defaults to True.

    Raises:
        PipelineConfigError: The run config YAML file fails schema validation.
    """
    schema = PipelineConfig.SCHEMA if validate else yaml.Any()
    with open(yaml_path) as fh:
        config_str = fh.read()
    try:
        config_yaml = yaml.load(config_str, schema=schema, label=label)
    except yaml.YAMLValidationError as e:
        raise PipelineConfigError(e)
    if add_defaults:
        # make a template config based on defaults
        config_defaults_str = make_config_template(
            cls.TEMPLATE_PATH,
            **settings.PIPE_RUN_CONFIG_DEFAULTS,
        )
        config_defaults_dict: Dict[str, Any] = yaml.load(config_defaults_str).data
        # merge configs
        config_dict = dict_merge(config_defaults_dict, config_yaml.data)
        config_yaml = yaml.as_document(config_dict, schema=schema, label=label)
    return cls(config_yaml)
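# Hedged usage sketch: the file name is hypothetical, but the call follows the
# documented signature above. Validation is deferred, then run explicitly.
config = PipelineConfig.from_file("run_config.yaml", validate=False)
config.validate()  # full schema validation happens here instead of at load time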
def Save(self, output_file_path):
    """Save the config to a file."""
    lab_config_pb = lab_config_pb2.LabConfig()
    lab_config_pb.CopyFrom(self.lab_config_pb)
    del lab_config_pb.cluster_configs[:]
    cluster_config_pb = lab_config_pb2.ClusterConfig()
    cluster_config_pb.CopyFrom(self.cluster_config_pb)
    del cluster_config_pb.host_configs[:]
    cluster_config_pb.host_configs.add().CopyFrom(self.host_config_pb)
    lab_config_pb.cluster_configs.add().CopyFrom(cluster_config_pb)
    with open(output_file_path, 'w') as f:
        lab_config_dict = json_format.MessageToDict(
            lab_config_pb, preserving_proto_field_name=True)
        f.write(syaml.as_document(lab_config_dict, schema=_YAML_SCHEMA).as_yaml())
def main() -> None:
    """Serialize typecasts.casts to YAML-LD."""
    serialized = map(
        operator.methodcaller('dict', by_alias=True, exclude_defaults=True),
        serialize(),
    )
    document = {
        '$context': {
            'casts': '$included',
            'seeAlso': 'rdfs:seeAlso',
            'label': 'rdfs:label',
        },
        '$id': 'python://typecasts.casts',
        'casts': list(serialized),
    }
    print(strictyaml.as_document(document).as_yaml())
def add_requirement(flavour_package_name, platform_fam, version, yaml_hash):
    flavour_file = "app.flavour"
    with Path(flavour_file).open("r") as f:
        yml = f.read()

    yaml_data = libflavour.Application(yml)._data

    if "addons" in yaml_data and flavour_package_name in yaml_data["addons"]:
        log("can not add to configuration, addon entry already exists")
    else:
        log(f"adding new {flavour_package_name}")
        yaml_data["addons"][f"{flavour_package_name}:{version}"] = as_document(
            OrderedDict([("manager", platform_fam), ("hash", yaml_hash)])
        )
        with Path(flavour_file).open("w") as f:
            f.write(yaml_data.as_yaml())
def load_schema(json_schema):
    return get_schema(as_document(json_schema, JSONSCHEMA_SCHEMA).data)
def config_data_to_config(data):  # type: ignore
    return as_document(data, schema_v4)
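# A minimal sketch of the round trip this helper performs. schema_v4 lives
# elsewhere in the codebase; the Map below is a hypothetical stand-in.
from strictyaml import Int, Map, Str, as_document

schema = Map({"name": Str(), "port": Int()})
doc = as_document({"name": "api", "port": 8080}, schema)
assert doc["port"].data == 8080  # schema-typed access survives the round trip
print(doc.as_yaml())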
def test_source_finder_value(self):
    self.config_dict["measurements"]["source_finder"] = "foo"
    config_yaml = yaml.as_document(self.config_dict)
    with self.assertRaises(PipelineConfigError):
        pipeline_config = PipelineConfig(config_yaml)
        pipeline_config.validate()
def dumps(demes_graph):
    d = demes_graph.asdict_compact()
    doc = as_document(d, schema=_deme_graph_schema)
    return doc.as_yaml()
def do_list(name, output, pretend, force):
    if not pretend:
        os.makedirs(output, exist_ok=True)
    listbase = urllib.parse.urljoin(base, 'audioplayer/stations/{}/'.format(name))
    listindex = urllib.parse.urljoin(listbase, '~list.js')
    matcher = re.compile(
        r'^{0}Array\[{0}Array.length\]\s*=\s*([^;]+);\s*$'.format(re.escape(name)),
        re.I)
    stub = collections.OrderedDict()
    stub['prefix'] = '../{}'.format(name)
    stub['music'] = []
    with requests.get(listindex) as f:
        f.raise_for_status()
        if not f.encoding:
            f.encoding = f.apparent_encoding
        for line in f.iter_lines():
            if not line:
                continue
            line = line.decode(f.encoding)
            m = matcher.match(line)
            if not m:
                raise RuntimeError('line failed to match: {!r}'.format(line))
            fname = ast.literal_eval(m.group(1))
            furl = urllib.parse.urljoin(listbase, urllib.parse.quote(fname + '.mp3'))
            fullfname = os.path.join(output, fname + '.ogg')
            print(fullfname)
            if ' - ' in fname:
                # split only on the first separator so titles that themselves
                # contain ' - ' still unpack into exactly two values
                artist, title = fname.split(' - ', 1)
                meta = collections.OrderedDict()
                meta['path'] = fname
                meta['title'] = title.strip()
                meta['artist'] = artist.strip()
                meta['pre'] = '0:00'
                meta['post'] = '0:00'
                stub['music'].append(meta)
            if not pretend and (force or not os.path.exists(fullfname)):
                with requests.get(furl, stream=True) as r:
                    r.raise_for_status()
                    with tempfile.TemporaryDirectory() as tmpdir:
                        dest = os.path.join(tmpdir, 'input')
                        with open(dest, 'wb') as w:
                            for chunk in r.iter_content(chunk_size=8192):
                                if chunk:
                                    w.write(chunk)
                        # now, convert dest to fname
                        subprocess.check_output([
                            'ffmpeg', '-i', dest, '-c:a', 'libvorbis', '-vn',
                            '-q:a', '4', fullfname
                        ], stderr=subprocess.PIPE)
    if stub['music']:
        y = strictyaml.as_document(stub)
        with open(os.path.join(output, 'stub.yaml'), 'w') as f:
            f.write(y.as_yaml())
def ydump(data: Mapping) -> str:
    return strictyaml.as_document(data).as_yaml()
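# Example call, assuming strictyaml is installed. String values keep the
# schemaless serialization unambiguous.
print(ydump({"name": "demo", "tags": ["a", "b"]}))
# name: demo
# tags:
# - a
# - b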