def list_dataset_headers(self):
    """List dataset header info for self.args.dataset_headers with respect to self.args.context.

    For each configured context, fetches the matching-parameter headers for the
    requested dataset ids from the CRDS server and either prints the expanded ids
    (--id-expansions-only) or logs the (optionally minimized) header dictionaries.
    Errors per-context are logged, not raised, via log.error_on_exception.
    """
    for context in self.contexts:
        # Fetch failures for one context should not abort the remaining contexts.
        with log.error_on_exception("Failed fetching dataset parameters with respect to", repr(context),
                                    "for", repr(self.args.dataset_headers)):
            pars = api.get_dataset_headers_by_id(context, self.args.dataset_headers)
            pmap = crds.get_cached_mapping(context)
            for requested_id in self.args.dataset_headers:
                for returned_id in sorted(pars.keys()):
                    # Requested ids may be partial;  substring match (case-insensitive)
                    # expands each request to all matching returned ids.
                    if requested_id.upper() in returned_id.upper():
                        header = pars[returned_id]
                        if isinstance(header, python23.string_types):
                            # A string "header" is an error reason from the server, not a header.
                            log.error("No header for", repr(returned_id), ":", repr(header))  # header is reason
                            continue
                        if self.args.id_expansions_only:
                            # Only qualify with the context when listing more than one.
                            print(returned_id, context if len(self.contexts) > 1 else "")
                        else:
                            if self.args.minimize_headers:
                                header2 = pmap.minimize_header(header)
                            else:
                                header2 = dict(header)
                            header2.pop("REFTYPE", None)
                            header2["dataset_id"] = returned_id
                            log.info("Dataset pars for", repr(returned_id), "with respect to",
                                     repr(context) + ":\n", log.PP(header2))
                        if self.args.first_id_expansion_only:
                            break
def get_free_header(filepath, needed_keys=(), original_name=None, observatory=None):
    """Return the complete unconditioned header dictionary of a reference file.

    Does not hijack warnings.

    Original name is used to determine file type for web upload temporary files
    which have no distinguishable extension.  Original name is browser-side name
    for file.
    """
    if original_name is None:
        original_name = os.path.basename(filepath)
    filetype = get_filetype(original_name, filepath)
    # Dispatch on file type.  NOTE: look up the reader *before* calling it so a
    # KeyError raised inside a reader is not mistaken for "unknown file type"
    # and silently re-routed to the FITS/datamodel fallback below.
    header_func = {
        "asdf": get_asdf_header,
        "json": get_json_header,
        "yaml": get_yaml_header,
        "geis": get_geis_header,
    }.get(filetype)
    if header_func is not None:
        header = header_func(filepath, needed_keys)
    else:
        # No special reader:  choose between JWST data models and plain FITS.
        if observatory is None:
            observatory = get_observatory(filepath, original_name)
        if observatory == "jwst":
            header = get_data_model_header(filepath, needed_keys)
        else:
            header = get_fits_header_union(filepath, needed_keys)
    log.verbose("Header of", repr(filepath), "=", log.PP(header), verbosity=90)
    return header
def log_affected(self, i, affected):
    """PLUGIN: Banner log and debug output for each context switch."""
    banner = "#" * 100
    if log.get_verbose():
        # Verbose mode:  full effects dump to the debug log.
        print(banner, file=sys.stderr)
        log.debug("History:", i, "Effects:\n", log.PP(affected))
        return
    if self.args.quiet:
        return
    # Normal mode:  short error summary on stderr.
    print(banner, file=sys.stderr)
    print(affected.bestrefs_err_summary, file=sys.stderr)
def __call__(self, *args, **kwargs):
    """Issue the JSON-RPC call, raising a classified exception on an error
    response, otherwise returning the decoded and string-fixed result.
    """
    jsonrpc = self._call(*args, **kwargs)
    error = jsonrpc["error"]
    if error:
        # Unescape the server-side message and map it onto a CRDS exception class.
        raise self.classify_exception(str(python23.unescape(error["message"])))
    result = fix_strings(crds_decode(jsonrpc["result"]))
    log.verbose("RPC OK", log.PP(result) if log.get_verbose() >= 70 else "")
    return result
def _cat_header(self, path):
    """Dump out the header associated with a reference file.

    If --keywords was given, only keywords containing one of the specified
    substrings are printed.  Schema violations are temporarily permitted while
    reading the header;  the prior setting is always restored.
    """
    old = config.ALLOW_SCHEMA_VIOLATIONS.set(True)
    try:
        header = data_file.get_unconditioned_header(path)
        if self.args.keywords:
            # Keep only keywords matching at least one requested substring.
            header2 = {}
            for keyword in header:
                if any(substr in keyword for substr in self.args.keywords):
                    header2[keyword] = header[keyword]
        else:
            header2 = header
        self._print_lines(path, str(log.PP(header2)).splitlines())
    finally:
        # Restore the global flag even if header reading or printing fails;
        # the original restored it only on the success path, leaking the setting.
        config.ALLOW_SCHEMA_VIOLATIONS.set(old)
def test_0_recursive_modify_rmap(self):  # , header, value, classes):
    """Exercise recursive insertion through all Selector classes in one go.

    Load the test rmap from a string (the top level selector must exist),
    insert a reference, write the result out, and verify exactly one
    non-Selector difference between original and result.  This is not a
    "realistic" test case.
    """
    log.verbose("-" * 60)
    r = rmap.ReferenceMapping.from_string(self.rmap_str, "./test.rmap", ignore_checksum=True)
    log.verbose("insert_header:", log.PP(self.insert_header))
    result = r.insert(self.insert_header, "foo.fits")
    result.write(self.result_filename)
    diffs = r.difference(result)
    log.verbose("diffs:", diffs)
    # Selector-structure diffs are expected noise;  only the real change counts.
    diffs = [diff for diff in diffs if "Selector" not in diff[-1]]
    assert len(diffs) == 1, "Fewer/more differences than expected: " + repr(diffs)
    log.verbose("recursive insert result rmap:")
    # Use a context manager so the result file handle is closed deterministically
    # (the original open(...).read() leaked the handle).
    with open(self.result_filename) as result_file:
        log.verbose(result_file.read())
def get_ingested_files(self):
    """Return the server-side JSON info on the files already in the submitter's ingest directory."""
    log.info("Determining existing files.")
    ingested = self.connection.get('/upload/list/').json()
    log.verbose("JSON info on existing ingested files:\n", log.PP(ingested))
    # Index the per-file info records by file name.
    by_name = {}
    for info in ingested:
        by_name[info["name"]] = info
    return by_name