def sync_files(self, files, context=None, ignore_cache=None):
    """Like dump_files(), but dumps recursive closure of any mappings
    rather than just the listed mapping.
    """
    # Partition the inputs into mapping vs. reference basenames in one pass.
    mappings, references = [], []
    for filename in files:
        basename = os.path.basename(filename)
        if config.is_mapping(filename):
            mappings.append(basename)
        else:
            references.append(basename)
    # Mappings are dumped with their full recursive closure; references
    # are dumped individually under `context`.
    if mappings:
        self.dump_mappings(mappings, ignore_cache)
    if references:
        self.dump_files(context, references, ignore_cache)
def are_all_mappings(self, files):
    """Return True IFF every file in files is a mapping."""
    # Vacuously True for an empty `files`, matching the original loop.
    return all(config.is_mapping(filename) for filename in files)
def get_file_properties(filename):
    """Figure out (instrument, filekind) based on `filename` which
    should be a mapping or FITS reference file.

    >>> get_file_properties("./hst_acs_biasfile_0001.rmap")
    ('acs', 'biasfile')

    NOTE(review): the examples below use ``>>`` (not ``>>>``) so doctest
    does not execute them; presumably they document failure modes only.

    >> get_file_properties("./hst_acs_biasfile_0001.pmap")
    Traceback (most recent call last):
    ...
    IOError: [Errno 2] No such file or directory: './hst_acs_biasfile_0001.pmap'

    >> get_file_properties("test_data/s7g1700gl_dead.fits")
    """
    if data_file.is_geis_data(filename):
        # determine GEIS data file properties from corresponding header file.
        filename = filename[:-1] + "h"
    if config.is_mapping(filename):
        # Prefer decoding properties from the standard name; fall back to
        # reading them out of the mapping file itself.
        try:
            return decompose_newstyle_name(filename)[2:4]
        except Exception:
            return properties_inside_mapping(filename)
    elif config.is_reference(filename):
        result = get_reference_properties(filename)[2:4]
    else:
        # Unrecognized extension: try treating it as a mapping first,
        # then as a reference.
        try:
            result = properties_inside_mapping(filename)
        except Exception:
            result = get_reference_properties(filename)[2:4]
    # Sanity-check the derived properties; "" is accepted as "unknown".
    assert result[0] in INSTRUMENTS+[""], "Bad instrument " + \
        repr(result[0]) + " in filename " + repr(filename)
    assert result[1] in FILEKINDS+[""], "Bad filekind " + \
        repr(result[1]) + " in filename " + repr(filename)
    return result
def get_url(self, filename):
    """Return the URL used to fetch `filename` of `pipeline_context`."""
    info = get_server_info()
    # Mappings and references are served from different base URLs,
    # keyed by observatory.
    key = "mapping_url" if config.is_mapping(filename) else "reference_url"
    base = info[key][self.observatory]
    if not base.endswith("/"):
        base = base + "/"
    return base + filename
def dump_files(pipeline_context, files, ignore_cache=False, raise_exceptions=True):
    """Unified interface to dump any file in `files`, mapping or reference.

    Returns localpaths, downloads count, bytes downloaded
    """
    if files is None:
        files = get_mapping_names(pipeline_context)
    # Split basenames into mappings and references in a single pass.
    mappings, references = [], []
    for name in files:
        bucket = mappings if config.is_mapping(name) else references
        bucket.append(os.path.basename(name))
    m_paths, m_downloads, m_bytes = {}, 0, 0
    if mappings:
        m_paths, m_downloads, m_bytes = dump_mappings(
            pipeline_context, mappings=mappings, ignore_cache=ignore_cache,
            raise_exceptions=raise_exceptions, api=2)
    r_paths, r_downloads, r_bytes = {}, 0, 0
    if references:
        r_paths, r_downloads, r_bytes = dump_references(
            pipeline_context, baserefs=references, ignore_cache=ignore_cache,
            raise_exceptions=raise_exceptions, api=2)
    # Merge path maps; reference entries win on key collision, matching
    # the original dict(list(m)+list(r)) construction order.
    localpaths = dict(m_paths)
    localpaths.update(r_paths)
    return localpaths, m_downloads + r_downloads, m_bytes + r_bytes
def archive_url(self, filename):
    """Return the URL used to fetch `filename` from the archive.

    Mappings are fetched from `self.mapping_url`, references from
    `self.reference_url`.

    Fix: build the URL with explicit "/" joining instead of
    os.path.join(), which is platform dependent and would emit
    backslash separators on Windows; this also matches how get_url()
    constructs URLs elsewhere in this module.
    """
    base = self.mapping_url if config.is_mapping(filename) else self.reference_url
    if not base.endswith("/"):
        base += "/"
    return base + filename
def mapping(filename):
    """Ensure `filename` is a CRDS mapping file."""
    error_message = "A .rmap, .imap, or .pmap file is required but got: '%s'" % filename
    assert config.is_mapping(filename), error_message
    return filename