def get_schema_tpninfos(*key):
    """Load the TPN info tuples that constrain the reference type named by
    `key`, derived from that type's schema.

    On any failure a warning is logged and [] is returned.
    """
    with log.warn_on_exception("Failed loading schema constraints for", repr(key)):
        schema_name = reference_to_schema_name(key[1])
        all_tpns = get_schema_tpns(schema_name)
        relevant = tpninfos_key_to_parkeys(key)
        selected = []
        for info in all_tpns:
            if info.name in relevant:
                selected.append(info)
        return selected
    # Reached only when the load failed and warn_on_exception suppressed it.
    return []
def get_schema_tpninfos(*key):
    """Load the list of TPN info tuples corresponding to `instrument` and
    `filekind` from its .tpn file, keeping only infos whose names appear in
    the parkeys for `key`.  Returns [] when loading fails (warning logged).
    """
    with log.warn_on_exception("Failed loading schema constraints for", repr(key)):
        tpns = get_schema_tpns(reference_to_schema_name(key[1]))
        parkeys = tpninfos_key_to_parkeys(key)
        return list(filter(lambda info: info.name in parkeys, tpns))
    return []
def get_context_mappings(self):
    """Return the set of mappings which are pointed to by the mappings
    in `self.contexts`.

    Side effect: resets `self.contexts` to the subset of contexts that
    could actually be loaded (sorted, in the non ``--all`` branch).

    Returns a sorted list of mapping names.

    NOTE(review): the empty-contexts early exit returns the literal []
    while the normal path returns sorted(files) -- both are lists, but the
    early exit skips the final verbose log; confirm this asymmetry is
    intentional.
    """
    files = set()
    useable_contexts = []
    if not self.contexts:
        return []
    log.verbose("Getting all mappings for specified contexts.", verbosity=55)
    if self.args.all:
        # --all: list every mapping on the server rather than walking contexts.
        files = self._list_mappings("*.*map")
        pmaps = self._list_mappings("*.pmap")
        useable_contexts = []
        if pmaps and files:
            # Best effort: cache all mappings relative to the latest .pmap.
            with log.warn_on_exception("Failed dumping mappings for", repr(self.contexts)):
                self.dump_files(pmaps[-1], files)
        for context in self.contexts:
            # A context is "useable" iff it loads; failures are logged and skipped.
            with log.warn_on_exception("Failed loading context", repr(context)):
                pmap = crds.get_cached_mapping(context)  # NOTE(review): pmap unused here; load is a validity check
                useable_contexts.append(context)
    else:
        for context in self.contexts:
            with log.warn_on_exception("Failed listing mappings for", repr(context)):
                try:
                    # Prefer the locally cached mapping's closure of names.
                    pmap = crds.get_cached_mapping(context)
                    files |= set(pmap.mapping_names())
                except Exception:
                    # Fall back to asking the server for the mapping closure.
                    files |= set(api.get_mapping_names(context))
                useable_contexts.append(context)
        useable_contexts = sorted(useable_contexts)
        if useable_contexts and files:
            # Best effort: cache the collected mappings relative to the newest context.
            with log.warn_on_exception("Failed dumping mappings for", repr(self.contexts)):
                self.dump_files(useable_contexts[-1], files)
    self.contexts = useable_contexts  # XXXX reset self.contexts to the loadable subset
    files = sorted(files)
    log.verbose("Got mappings from specified (usable) contexts: ", files, verbosity=55)
    return files
def _get_crds_ini_parser():
    """Load and return the environment from the CRDS rc file.

    The parsed result is cached in the module global CRDS_INI_PARSER; the
    cache is only populated after a successful read, so a missing or
    unreadable rc file is re-attempted on the next call.
    """
    global CRDS_INI_PARSER
    if CRDS_INI_PARSER is not None:
        return CRDS_INI_PARSER
    parser = python23.configparser.SafeConfigParser()
    with log.warn_on_exception("Failed reading CRDS rc file"):
        ini_path = _get_crds_ini_path()
        if not os.path.exists(ini_path):
            log.verbose("No CRDS .ini file found at", repr(ini_path))
        else:
            # Refuse to read credentials that group/other can access.
            if os.stat(ini_path).st_mode & 0o077:
                raise exceptions.CrdsWebAuthenticationError("You must 'chmod 0600 $HOME/.crds.ini' to make it secret.")
            parser.read(ini_path)
            CRDS_INI_PARSER = parser
    return parser
def load_specs(spec_path):
    """Load either the combined .json formatted type specs (preferred) or
    specs from individual type spec files (if necessary), returning a
    2-level mapping {instrument: {filekind: TypeSpec}}.
    """
    combined = os.path.join(spec_path, "combined_specs.json")
    if not os.path.exists(combined):
        # No combined file yet:  load the raw per-type specs and try to
        # cache them as combined .json for next time (best effort).
        headers = load_raw_specs(spec_path)
        with log.warn_on_exception("Failed to save type specs .json at", repr(combined)):
            save_json_specs(headers, combined)
    else:
        headers = python23.unicode_to_str(load_json_specs(combined))
    specs = {}
    for instr in headers:
        by_filekind = headers[instr]
        specs[instr] = {filekind: TypeSpec(by_filekind[filekind]) for filekind in by_filekind}
    return specs
def _get_crds_ini_parser():
    """Load and return the environment from the CRDS rc file.

    Caches the parser in module global CRDS_INI_PARSER once the rc file has
    been read successfully; otherwise a fresh (possibly empty) parser is
    returned and reading is retried on subsequent calls.
    """
    global CRDS_INI_PARSER
    if CRDS_INI_PARSER is None:
        cfg = python23.configparser.SafeConfigParser()
        with log.warn_on_exception("Failed reading CRDS rc file"):
            rc_path = _get_crds_ini_path()
            if os.path.exists(rc_path):
                perms = os.stat(rc_path).st_mode
                if perms & 0o077:
                    # Credentials file must not be group/other accessible.
                    raise exceptions.CrdsWebAuthenticationError(
                        "You must 'chmod 0600 $HOME/.crds.ini' to make it secret."
                    )
                cfg.read(rc_path)
                CRDS_INI_PARSER = cfg
            else:
                log.verbose("No CRDS .ini file found at", repr(rc_path))
        return cfg
    return CRDS_INI_PARSER
def newer(name1, name2):
    """Determine if `name1` is a more recent file than `name2` accounting for
    limited differences in naming conventions.  Official CDBS and CRDS names
    are comparable using a simple text comparison, just not to each other.

    >>> newer("s7g1700gl_dead.fits", "hst_cos_deadtab_0001.fits")
    False
    >>> newer("hst_cos_deadtab_0001.fits", "s7g1700gl_dead.fits")
    True
    >>> newer("s7g1700gl_dead.fits", "bbbbb.fits")
    Traceback (most recent call last):
    ...
    NameComparisonError: Failed to classify name 'bbbbb.fits' for determining time order.
    >>> newer("bbbbb.fits", "s7g1700gl_dead.fits")
    Traceback (most recent call last):
    ...
    NameComparisonError: Failed to classify name 'bbbbb.fits' for determining time order.
    >>> newer("hst_cos_deadtab_0001.rmap", "hst_cos_deadtab_0002.rmap")
    False
    >>> newer("hst_cos_deadtab_0002.rmap", "hst_cos_deadtab_0001.rmap")
    True
    >>> newer("hst_cos_deadtab_0001.asdf", "hst_cos_deadtab_0050.fits")
    True
    >>> newer("hst_cos_deadtab_0051.fits", "hst_cos_deadtab_0050.asdf")
    False
    >>> newer("hst_cos_deadtab_0001.fits", "hst_cos_deadtab_99991.fits")
    False
    >>> newer("hst_cos_deadtab_99991.fits", "hst_cos_deadtab_0001.fits")
    True
    >>> newer("07g1700gl_dead.fits", "s7g1700gl_dead.fits")
    True
    >>> newer("s7g1700gl_dead.fits", "07g1700gl_dead.fits")
    False
    >>> newer("N/A", "s7g1700gl_dead.fits")
    False
    >>> newer("07g1700gl_dead.fits", "N/A")
    True
    >>> newer("N/A", "hst_cos_deadtab_0002.rmap")
    False
    >>> newer("hst_cos_deadtab_0002.rmap", "N/A")
    True
    >>> newer("hst_cos_deadtab_0001.fits", "17g1700gl_dead.fits")
    Traceback (most recent call last):
    ...
    NameComparisonError: Unhandled name comparison case: ('crds', 'newcdbs')
    >>> newer("17g1700gl_dead.fits", "hst_cos_deadtab_0001.fits")
    Traceback (most recent call last):
    ...
    NameComparisonError: Unhandled name comparison case: ('newcdbs', 'crds')
    """
    # Dispatch table keyed by (classification-of-name1, classification-of-name2).
    # Values are either a comparison strategy name or a literal bool result;
    # "raise" falls through the elif chain to the NameComparisonError below.
    cases = {
        ("crds", "crds") : "compare_crds",
        ("oldcdbs", "oldcdbs") : "compare",
        ("newcdbs", "newcdbs") : "compare",
        ("crds", "oldcdbs") : True,       # any CRDS name is newer than old CDBS
        ("oldcdbs", "crds") : False,
        ("newcdbs", "oldcdbs") : True,
        ("oldcdbs", "newcdbs") : False,
        ("crds", "newcdbs") : "raise",    # no defined ordering, see doctests
        ("newcdbs", "crds") : "raise",
    }
    name1, name2 = crds_basename(name1), crds_basename(name2)
    class1 = classify_name(name1)
    class2 = classify_name(name2)
    case = cases[(class1, class2)]
    if name1 == "N/A":
        # NOTE(review): this path returns directly and skips the final
        # log.verbose trace that every other path emits -- confirm intended.
        return False
    elif name2 =="N/A":
        result = True
    elif case == "compare_crds":
        if extension_rank(name1) == extension_rank(name2):
            serial1, serial2 = newstyle_serial(name1), newstyle_serial(name2)
            result = serial1 > serial2   # same extension compares by counter
        else:
            # Different extensions: ordering is by extension precedence,
            # e.g. .asdf outranks .fits regardless of serial (see doctests).
            result = extension_rank(name1) > extension_rank(name2)
    elif case == "compare":
        # Same-era CDBS names are lexically ordered.
        result = name1 > name2
    elif case in [True, False]:
        result = case
    elif case == "query":
        # NOTE(review): dead branch -- no entry in `cases` maps to "query".
        # Kept for reference: it would ask the server for activation dates,
        # defaulting to True if the query fails.
        result = True
        with log.warn_on_exception("Failed obtaining file activation dates for files", repr(name1),
                                   "and", repr(name2), "from server. can't determine time order."):
            info_map = api.get_file_info_map("hst", [name1, name2], fields=["activation_date"])
            result = info_map[name1]["activation_date"] > info_map[name2]["activation_date"]
    else:
        # The "raise" table entries land here.
        raise NameComparisonError("Unhandled name comparison case: ", repr((class1, class2)))
    log.verbose("Comparing filename time order:", repr(name1), ">", repr(name2), "-->", result)
    return result
    # NOTE(review): the elif chain below (through `return result`) is a
    # duplicated tail fragment of newer() -- it begins mid-function and is
    # not a complete definition; it appears to be a copy/paste or extraction
    # artifact and should likely be removed.  Reproduced unchanged here.
    elif name2 =="N/A":
        result = True
    elif case == "compare_crds":
        if extension_rank(name1) == extension_rank(name2):
            serial1, serial2 = newstyle_serial(name1), newstyle_serial(name2)
            result = serial1 > serial2   # same extension compares by counter
        else:
            result = extension_rank(name1) > extension_rank(name2)
    elif case == "compare":
        result = name1 > name2
    elif case in [True, False]:
        result = case
    elif case == "query":
        result = True
        with log.warn_on_exception("Failed obtaining file activation dates for files", repr(name1),
                                   "and", repr(name2), "from server. can't determine time order."):
            info_map = api.get_file_info_map("hst", [name1, name2], fields=["activation_date"])
            result = info_map[name1]["activation_date"] > info_map[name2]["activation_date"]
    else:
        raise NameComparisonError("Unhandled name comparison case: ", repr((class1, class2)))
    log.verbose("Comparing filename time order:", repr(name1), ">", repr(name2), "-->", result)
    return result

def crds_basename(name):
    """basename() accounting for N/A pass thru.

    Returns "N/A" unchanged so sentinel values survive path stripping;
    otherwise returns os.path.basename(name).
    """
    if name == "N/A":
        return "N/A"
    else:
        return os.path.basename(name)
def fetch_sqlite_db(self):
    """Download a SQLite version of the CRDS catalog from the server.

    Pure I/O: any download failure is logged as a warning rather than
    raised (best-effort update of the local database).
    """
    log.info("Downloading CRDS catalog database file.")
    with log.warn_on_exception("Failed updating local CRDS database"):
        downloaded_path = api.get_sqlite_db(self.observatory)
        # Only logged on success; skipped when the download raises.
        log.info("Sqlite3 database file downloaded to:", downloaded_path)