def rmap_apply(self, func, *args, **keys):
    """Apply `func()` to *args and **keys, adding the pmap, imap, and rmap values
    associated with the elaboration of args.source_context, args.instruments,
    args.types.
    """
    keywords = dict(keys)
    self._setup_source_context()
    if self.args.rmaps:
        for rmap_name in self.args.rmaps:
            with log.error_on_exception("Failed processing rmap", srepr(rmap_name)):
                log.info("=" * 20, "Refactoring rmap", srepr(rmap_name), "=" * 20)
                rmapping = rmap.load_mapping(rmap_name)
                new_filename = self._process_rmap(func, rmapping=rmapping, **keywords)
                self._diff_and_certify(rmapping=rmapping, new_filename=new_filename,
                                       source_context=self.source_context, **keywords)
    else:
        pmapping = rmap.load_mapping(self.source_context)
        instruments = (pmapping.selections.keys()
                       if "all" in self.args.instruments else self.args.instruments)
        for instr in instruments:
            with log.augment_exception("Failed loading imap for", repr(instr),
                                       "from", repr(self.source_context)):
                imapping = pmapping.get_imap(instr)
            types = (imapping.selections.keys()
                     if "all" in self.args.types else self.args.types)
            for filekind in types:
                with log.error_on_exception("Failed processing rmap for", repr(filekind)):
                    try:
                        rmapping = imapping.get_rmap(filekind).copy()
                    except crds.exceptions.IrrelevantReferenceTypeError:
                        log.info("Skipping type", srepr(filekind), "as N/A")
                        continue
                    log.info("=" * 20, "Refactoring rmap", srepr(rmapping.basename), "=" * 20)
                    new_filename = self._process_rmap(func, rmapping=rmapping, **keywords)
                    self._diff_and_certify(rmapping=rmapping, source_context=self.source_context,
                                           new_filename=new_filename, **keywords)
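# Hedged sketch of how the "all" expansion in rmap_apply() resolves concrete
# instruments and filekinds from a pipeline context.  The context name "hst.pmap"
# is a hypothetical example; the attribute and method names are taken from the
# code above.
def _example_expand_all(source_context="hst.pmap"):
    """Illustrative only: enumerate every rmap reachable from `source_context`."""
    pmapping = rmap.load_mapping(source_context)
    for instr in pmapping.selections.keys():          # --instruments all
        imapping = pmapping.get_imap(instr)
        for filekind in imapping.selections.keys():   # --types all
            yield imapping.get_rmap(filekind)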
def get_data_model_flat_dict(filepath, needed_keys=()):
    """Get the header from `filepath` using the jwst data model."""
    from jwst import datamodels
    with log.augment_exception("JWST Data Model (jwst.datamodels)"):
        with datamodels.open(filepath) as d_model:
            flat_dict = d_model.to_flat_dict(include_arrays=False)
    return flat_dict
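# Hedged usage sketch for get_data_model_flat_dict(); the file name below is a
# hypothetical JWST exposure and "meta.instrument.name" a typical flattened keyword.
def _example_flat_header():
    """Illustrative only: fetch a flattened JWST header and read one keyword."""
    header = get_data_model_flat_dict("jw00001001001_01101_00001_nrca1_uncal.fits")
    return header.get("meta.instrument.name")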
def _x_schema_to_flat(schema):
    """Recursively flatten `schema` without addressing case issues."""
    results = {}
    for feature in ["oneOf", "allOf", "$ref"]:
        if feature in schema:
            log.verbose_warning("Schema item has unhandled feature", repr(feature),
                                verbosity=80)
            return None
    if schema["type"] == "object":
        subprops = schema["properties"]
        for prop in subprops:
            with log.augment_exception("In schema property", repr(prop)):
                sub_tree = _schema_to_flat(subprops[prop])
                if sub_tree is None:
                    continue
                if isinstance(sub_tree, dict):
                    for subprop, val in list(sub_tree.items()):
                        results[prop + "." + subprop] = val
                else:
                    results[prop] = sub_tree
    elif schema["type"] in BASIC_TYPES:
        return schema
    elif schema["type"] in OPTIONAL_TYPES:
        return schema
    elif schema["type"] == "array":
        return None
    elif schema["type"] in ["any", "null"]:
        return None
    else:
        log.verbose_warning("Schema item has unhandled type", repr(schema["type"]),
                            verbosity=80)
    return results
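# Hedged illustration of the schema shape _x_schema_to_flat() walks.  BASIC_TYPES
# is assumed to include "string"; how nested sub-trees flatten ultimately depends
# on _schema_to_flat(), which is not shown here.  Nested property names are joined
# with ".", producing flat keys such as "meta.telescope".
_EXAMPLE_SCHEMA = {
    "type": "object",
    "properties": {
        "meta": {
            "type": "object",
            "properties": {
                "telescope": {"type": "string", "title": "Telescope used."},
            },
        },
    },
}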
def determine_contexts(self):
    """Support explicit specification of contexts, context id range, or all."""
    log.verbose("Determining contexts.", verbosity=55)
    if self.args.contexts:
        # permit instrument and reference mappings, not just pipelines:
        contexts = [self.resolve_context(ctx) for ctx in self.args.contexts]
    elif self.args.all:
        contexts = self._list_mappings("*.pmap")
    elif self.args.last_n_contexts:
        contexts = self._list_mappings("*.pmap")[-self.args.last_n_contexts:]
    elif self.args.range:
        rmin, rmax = self.args.range
        contexts = []
        all_contexts = self._list_mappings("*.pmap")
        for context in all_contexts:
            match = re.match(r"\w+_(\d+)\.pmap", context)
            if match:
                serial = int(match.group(1))
                if rmin <= serial <= rmax:
                    contexts.append(context)
    elif self.args.up_to_context:
        pmaps = self._list_mappings("*.pmap")
        with log.augment_exception("Invalid --up-to-context", repr(self.args.up_to_context[0]),
                                   exc_class=exceptions.CrdsError):
            up_to_context = self.resolve_context(self.args.up_to_context[0])
            up_to_ix = pmaps.index(up_to_context) + 1
            contexts = pmaps[:up_to_ix]
    elif self.args.after_context:
        pmaps = self._list_mappings("*.pmap")
        with log.augment_exception("Invalid --after-context", repr(self.args.after_context[0]),
                                   exc_class=exceptions.CrdsError):
            after_context = self.resolve_context(self.args.after_context[0])
            after_ix = pmaps.index(after_context)
            contexts = pmaps[after_ix:]
    else:
        contexts = [self.resolve_context(config.get_crds_env_context() or
                                         self.observatory + "-operational")]
    log.verbose("Determined contexts: ", contexts, verbosity=55)
    return sorted(contexts)
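# Hedged sketch of the serial-number filter behind the --range branch above;
# the context names are hypothetical.
def _example_range_filter(all_contexts=("hst_0001.pmap", "hst_0002.pmap", "hst_0003.pmap"),
                          rmin=2, rmax=3):
    """Illustrative only: keep contexts whose serial number falls in [rmin, rmax]."""
    kept = []
    for context in all_contexts:
        match = re.match(r"\w+_(\d+)\.pmap", context)
        if match and rmin <= int(match.group(1)) <= rmax:
            kept.append(context)
    return kept   # ["hst_0002.pmap", "hst_0003.pmap"]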
def parse_mapping(filename):
    """Parse mapping `filename`.  Return parsing."""
    global parsley, MAPPING_PARSER
    if parsley is None:
        raise NotImplementedError("Parsley parsing package must be installed.")
    if MAPPING_PARSER is None:
        MAPPING_PARSER = parsley.makeGrammar(MAPPING_GRAMMAR, selectors.SELECTORS)
    log.verbose("Parsing", repr(filename))
    filename = rmap.locate_mapping(filename)
    with log.augment_exception("Parsing error in", repr(filename),
                               exception_class=exceptions.MappingFormatError):
        with open(filename) as pfile:
            parser = MAPPING_PARSER(pfile.read())
            header, selector, comment = parser.mapping()
        return Parsing(header, selector, comment)
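# Hedged usage sketch for parse_mapping(); "hst.pmap" is a hypothetical mapping
# name, and Parsing is assumed to be a namedtuple with header/selector/comment
# fields, as suggested by the constructor call above.
def _example_parse():
    """Illustrative only: parse a mapping and return its header."""
    parsing = parse_mapping("hst.pmap")
    return parsing.header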
def get_object(*args):
    """Import the given `dotted_name` and return the object.

    >>> rmap = get_object("crds.rmap")
    >>> fail = get_object("crds.rmap; eval('2+2')")
    Traceback (most recent call last):
    ...
    AssertionError: Invalid dotted name for get_object() : "crds.rmap; eval('2+2')"
    >>> rmap = get_object("crds","rmap")
    """
    dotted_name = ".".join(args)
    assert MODULE_PATH_RE.match(dotted_name), \
        "Invalid dotted name for get_object() : " + repr(dotted_name)
    parts = dotted_name.split(".")
    pkgpath = ".".join(parts[:-1])
    cls = parts[-1]
    namespace = {}
    import_cmd = "from " + pkgpath + " import " + cls
    with log.augment_exception("Error importing", repr(import_cmd)):
        exec(import_cmd, namespace, namespace)
        return namespace[cls]
def rmap_delete_references(old_rmap, new_rmap, deleted_references):
    """Given the full path of starting rmap `old_rmap`, modify it by deleting
    all files in `deleted_references` and write out the result to `new_rmap`.
    If no actions are performed, don't write out `new_rmap`.

    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.load_mapping(old_rmap, ignore_checksum=True)
    for reference in deleted_references:
        baseref = os.path.basename(reference)
        log.info("Deleting", srepr(baseref), "from", srepr(new.name))
        with log.augment_exception("In reference", srepr(baseref)):
            new = new.delete(reference)
    new.header["derived_from"] = old.basename
    log.verbose("Writing", srepr(new_rmap))
    new.write(new_rmap)
    formatted = new.format()
    for reference in deleted_references:
        reference = os.path.basename(reference)
        assert reference not in formatted, \
            "Rules update failure. Deleted " + srepr(reference) + " still appears in new rmap."
    return new
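# Hedged usage sketch for rmap_delete_references(); the rmap and reference file
# names are hypothetical.
def _example_delete():
    """Illustrative only: drop one reference and write a new rmap version."""
    return rmap_delete_references(
        "hst_cos_deadtab_0250.rmap",      # starting rmap (full path in practice)
        "hst_cos_deadtab_0251.rmap",      # refactored rmap to write out
        ["s7g1700gl_dead.fits"],          # references to delete
    )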
def rmap_insert_references_by_matches(old_rmap, new_rmap, references_headers):
    """Given the full path of starting rmap `old_rmap`, modify it by inserting
    or replacing all files in dict `references_headers` which maps a reference
    file basename onto a list of headers under which it should be matched.
    Write out the result to `new_rmap`.  If no actions are performed, don't
    write out `new_rmap`.

    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.load_mapping(old_rmap, ignore_checksum=True)
    for baseref, header in references_headers.items():
        with log.augment_exception("In reference", srepr(baseref)):
            log.info("Inserting", srepr(baseref), "into", srepr(old_rmap))
            log.verbose("Inserting", srepr(baseref), "match case", srepr(header),
                        "into", srepr(old_rmap))
            new = new.insert_header_reference(header, baseref)
    new.header["derived_from"] = old.basename
    log.verbose("Writing", srepr(new_rmap))
    new.write(new_rmap)
    formatted = new.format()
    for baseref in references_headers:
        assert baseref in formatted, \
            "Rules update failure. " + srepr(baseref) + " does not appear in new rmap." \
            " May be identical match with other submitted references."
    return new
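# Hedged usage sketch for rmap_insert_references_by_matches(); file names and
# match parameters are hypothetical, and the exact header form expected by
# insert_header_reference() may differ from the plain dict assumed here.
def _example_insert_by_matches():
    """Illustrative only: insert one reference under an explicit match header."""
    references_headers = {
        "s7g1700gl_dead.fits": {"DETECTOR": "FUV", "USEAFTER": "1996-10-01 00:00:00"},
    }
    return rmap_insert_references_by_matches(
        "hst_cos_deadtab_0250.rmap",
        "hst_cos_deadtab_0251.rmap",
        references_headers,
    )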
def rmap_insert_references(old_rmap, new_rmap, inserted_references):
    """Given the full path of starting rmap `old_rmap`, modify it by inserting
    or replacing all files in `inserted_references` and write out the result to
    `new_rmap`.  If no actions are performed, don't write out `new_rmap`.

    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.fetch_mapping(old_rmap, ignore_checksum=True)
    for reference in inserted_references:
        baseref = os.path.basename(reference)
        with log.augment_exception("In reference", srepr(baseref)):
            log.info("Inserting", srepr(baseref), "into", srepr(new.name))
            new = new.insert_reference(reference)
    new.header["derived_from"] = old.basename
    log.verbose("Writing", srepr(new_rmap))
    new.write(new_rmap)
    formatted = new.format()
    for reference in inserted_references:
        reference = os.path.basename(reference)
        assert reference in formatted, \
            "Rules update failure. " + srepr(reference) + " does not appear in new rmap." \
            " May be identical match with other submitted references."
    return new
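# Hedged usage sketch for rmap_insert_references(); the paths are hypothetical.
# Match parameters are presumably derived from each reference file itself by
# insert_reference(), since only the file path is passed above.
def _example_insert():
    """Illustrative only: insert one reference file and write a new rmap version."""
    return rmap_insert_references(
        "hst_cos_deadtab_0250.rmap",
        "hst_cos_deadtab_0251.rmap",
        ["/grp/crds/references/hst/s7g1700gl_dead.fits"],
    )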