Beispiel #1
0
def update_file_bestrefs(context, dataset, updates):
    """Write the best reference recommendations in `updates` into the FITS
    primary header of `dataset`,  also recording the CRDS context and version.
    """
    if not updates:
        return

    instrument = updates[0].instrument
    locator = utils.instrument_to_locator(instrument)
    env_prefix = locator.get_env_prefix(instrument)
    version_info = heavy_client.version_info()

    with data_file.fits_open(dataset, mode="update",
                             do_not_scale_image_data=True,
                             checksum=False) as hdus:

        def assign(keyword, value):
            """Log and store one primary-header keyword,  bound to outer `hdus`."""
            log.verbose("Setting", repr(dataset), keyword, "=", value)
            hdus[0].header[keyword] = value

        assign("CRDS_CTX", context)
        assign("CRDS_VER", version_info)

        for update in sorted(updates):
            bestref = update.new_reference.upper()
            if bestref != "N/A":
                bestref = (env_prefix + bestref).lower()
            assign(locator.filekind_to_keyword(update.filekind), bestref)

        # Workaround for an astropy.io.fits bug with header-only updates
        # that extend the header.  Touching each HDU's data looks like a
        # no-op but *is not* pointless.
        for hdu in hdus:
            hdu.data
Beispiel #2
0
 def rewrite(self, filename, uniqname):
     """Rewrite `filename` as `uniqname`,  optionally verifying,  adding
     renaming keywords,  and adding FITS checksums.  Partial output is
     removed on failure and a chained CrdsError is raised.
     """
     verify_mode = "fix+exception" if self.args.fits_errors else "fix+warn"
     with data_file.fits_open(filename, mode="readonly",
                              checksum=self.args.verify_file,
                              do_not_scale_image_data=True) as hdus:
         if self.args.verify_file:
             hdus.verify(verify_mode)
         basefile = os.path.basename(filename)
         baseuniq = os.path.basename(uniqname)
         if self.args.add_keywords:
             now = datetime.datetime.utcnow()
             primary = hdus[0].header
             primary["FILENAME"] = baseuniq
             primary["ROOTNAME"] = os.path.splitext(baseuniq)[0].upper()
             primary["HISTORY"] = "{0} renamed to {1} on {2} {3} {4}".format(
                 basefile, baseuniq, MONTHS[now.month - 1], now.day, now.year)
         if self.args.output_path:
             uniqname = os.path.join(self.args.output_path, baseuniq)
         try:
             log.info("Rewriting", self.format_file(filename), "-->", self.format_file(uniqname))
             hdus.writeto(uniqname, output_verify=verify_mode, checksum=self.args.add_checksum)
         except Exception as exc:
             # Don't leave a partially written output file behind.
             if os.path.exists(uniqname):
                 os.remove(uniqname)
             if "buffer is too small" in str(exc):
                 raise CrdsError(
                     "Failed to rename/rewrite", repr(basefile),
                     "as", repr(baseuniq), ":",
                     "probable file truncation", ":", str(exc)) from exc
             raise CrdsError("Failed to rename/rewrite", repr(basefile),
                             "as", repr(baseuniq), ":",
                             str(exc)) from exc
Beispiel #3
0
 def rewrite(self, filename, uniqname):
     """Rewrite `filename` as `uniqname`,  optionally verifying,  adding
     renaming keywords,  and adding FITS checksums.

     On any failure the partially written output file is removed and a
     CrdsError chained to the original exception is raised.
     """
     with data_file.fits_open(filename, mode="readonly", checksum=self.args.verify_file, do_not_scale_image_data=True) as hdus:
         verify_mode = "fix+warn" if not self.args.fits_errors else "fix+exception"
         if self.args.verify_file:
             hdus.verify(verify_mode)
         basefile = os.path.basename(filename)
         baseuniq = os.path.basename(uniqname)
         if self.args.add_keywords:
             now = datetime.datetime.utcnow()
             hdus[0].header["FILENAME"] = baseuniq
             hdus[0].header["ROOTNAME"] = os.path.splitext(baseuniq)[0].upper()
             hdus[0].header["HISTORY"] = "{0} renamed to {1} on {2} {3} {4}".format(
                 basefile, baseuniq, MONTHS[now.month - 1], now.day, now.year)
         if self.args.output_path:
             # BUG FIX:  was self.args.outpath which does not match the
             # attribute tested on the line above (output_path).
             uniqname = os.path.join(self.args.output_path, baseuniq)
         try:
             log.info("Rewriting", self.format_file(filename), "-->", self.format_file(uniqname))
             hdus.writeto(uniqname, output_verify=verify_mode, checksum=self.args.add_checksum)
         except Exception as exc:
             # Don't leave a partially written output file behind.
             if os.path.exists(uniqname):
                 os.remove(uniqname)
             # Chain with `from exc` so the original traceback is preserved.
             if "buffer is too small" in str(exc):
                 raise CrdsError("Failed to rename/rewrite", repr(basefile), "as", repr(baseuniq), ":",
                                 "probable file truncation", ":", str(exc)) from exc
             else:
                 raise CrdsError("Failed to rename/rewrite", repr(basefile), "as", repr(baseuniq), ":", str(exc)) from exc
Beispiel #4
0
def ntables(filename):
    """Count the table segments in `filename`:  one per FITS extension HDU
    for .fits files,  otherwise 1 for any other file type.
    """
    if not filename.endswith(".fits"):
        return 1
    with data_file.fits_open(filename) as hdus:
        return len(hdus) - 1
Beispiel #5
0
def ntables(filename):
    """Return the number of segments / hdus in `filename`:  the count of
    extension HDUs for a FITS file,  or 1 for anything else.
    """
    if filename.endswith(".fits"):
        with data_file.fits_open(filename) as hdus:
            hdu_count = len(hdus)
        return hdu_count - 1
    return 1
Beispiel #6
0
 def check_header(self, filename, header):
     """Run the throughput,  parametrization,  and component-filename checks
     against `filename`.  All three checks always execute;  the combined
     and-chained result is returned.
     """
     with data_file.fits_open(filename) as hdul:
         combined = self._check_throughput_first_and_last(hdul)
         parametrized = self._check_parametrization(hdul)
         combined = parametrized and combined
         named_ok = _check_component_filename(
             self.context, utils.THROUGHPUT_REFTYPE,
             utils.THROUGHPUT_FILENAME_SUFFIX, filename, header)
         return named_ok and combined
Beispiel #7
0
def tables(filename):
    """Return [ SimpleTable(filename, segment), ... ] for each table segment in filename.

    This function is self-cached.    Clear the cache using clear_cache().
    """
    if not filename.endswith(".fits"):
        return [SimpleTable(filename, segment=1)]
    with data_file.fits_open(filename) as hdus:
        return [SimpleTable(filename, segment) for segment in range(1, len(hdus))]
Beispiel #8
0
def tables(filename):
    """Build one SimpleTable per table segment of `filename`:  each FITS
    extension HDU yields a segment,  any non-FITS file yields segment 1.

    This function is self-cached.    Clear the cache using clear_cache().
    """
    if filename.endswith(".fits"):
        with data_file.fits_open(filename) as hdus:
            return [SimpleTable(filename, seg + 1)
                    for seg in range(len(hdus) - 1)]
    return [SimpleTable(filename, segment=1)]
Beispiel #9
0
def ntables(filename):
    """Return the number of table HDUs in FITS `filename`,  or 1 for any
    non-FITS file.

    A table HDU is any HDU whose class name contains "TABLEHDU"
    (e.g. astropy's TableHDU or BinTableHDU).
    """
    if filename.endswith(".fits"):
        with data_file.fits_open(filename) as hdus:
            # sum() over a generator replaces the manual counter loop;
            # the enumerate() index in the original version was unused.
            return sum(1 for hdu in hdus
                       if "TABLEHDU" in hdu.__class__.__name__.upper())
    else:
        return 1
Beispiel #10
0
def ntables(filename):
    """Return the number of table HDUs in `filename` when it is a FITS
    file,  or 1 for any other file type.
    """
    if not filename.endswith(".fits"):
        return 1
    count = 0
    with data_file.fits_open(filename) as hdus:
        for hdu in hdus:
            if "TABLEHDU" in type(hdu).__name__.upper():
                count += 1
    return count
Beispiel #11
0
 def _cat_array_properties(self, path):
     """Dump the CRDS interpretation of each extension array in `path`,  currently FITS only."""
     with data_file.fits_open(path) as hdulist:
         for index, hdu in enumerate(hdulist):
             # warn_on_exception is entered for every HDU (including the
             # primary) to mirror the per-HDU error reporting.
             with log.warn_on_exception("Can't load array properties for HDU[" + str(index) + "]"):
                 if index > 0:
                     extname = hdu.header.get("EXTNAME", str(index))
                     self._cat_banner("CRDS Array Info [" + repr(extname) + "]:",
                                      delim="-", bottom_delim=None)
                     raw_props = data_file.get_array_properties(path, extname)
                     defined = {name: val for (name, val) in raw_props.items()
                                if val is not None}
                     self._print_lines(path, _pp_lines(defined))
Beispiel #12
0
def checksum_exists(filename):
    """Return True IFF any HDU of `filename` already defines a FITS
    CHECKSUM or DATASUM keyword;  existing checksums should generally be
    maintained across content updates required by renaming.
    """
    with data_file.fits_open(filename, mode="readonly", do_not_scale_image_data=True) as hdus:
        # any() short-circuits exactly like the original break/else loop.
        return any("CHECKSUM" in hdu.header or "DATASUM" in hdu.header
                   for hdu in hdus)
Beispiel #13
0
def has_checksum(filename):
    """Return True IFF `filename` names a FITS file in which some HDU
    already carries a CHECKSUM or DATASUM keyword.
    """
    found = False
    with data_file.fits_open(filename, mode="readonly", do_not_scale_image_data=True) as hdus:
        for hdu in hdus:
            header = hdu.header
            if "CHECKSUM" in header or "DATASUM" in header:
                found = True
                break
    return found
Beispiel #14
0
 def __init__(self, filename, segment=1):
     """Load table `segment` of `filename` into immutable colnames/rows."""
     self.filename = filename
     self.segment = segment
     self.basename = os.path.basename(filename)
     self._columns = None  # computed dynamically,  independent of astropy
     if filename.endswith(".fits"):
         with data_file.fits_open(filename) as hdus:
             data = hdus[segment].data
             self.colnames = tuple(name.upper() for name in data.columns.names)
             self.rows = tuple(tuple(row) for row in data)  # readonly
     else:
         data = table.Table.read(filename)
         self.colnames = tuple(name.upper() for name in data.columns)
         self.rows = tuple(tuple(row) for row in data)  # readonly
     log.verbose("Creating", repr(self), verbosity=60)
Beispiel #15
0
 def __init__(self, filename, segment=1):
     """Capture `segment` of `filename` as immutable column names and rows."""
     self.filename = filename
     self.segment = segment
     self.basename = os.path.basename(filename)
     self._columns = None  # lazily computed,  independent of astropy
     if filename.endswith(".fits"):
         with data_file.fits_open(filename) as hdus:
             tab = hdus[segment].data
             names = tab.columns.names
             self.colnames = tuple(n.upper() for n in names)
             self.rows = tuple(tuple(r) for r in tab)  # readonly
     else:
         tab = table.Table.read(filename)
         self.colnames = tuple(n.upper() for n in tab.columns)
         self.rows = tuple(tuple(r) for r in tab)  # readonly
     log.verbose("Creating", repr(self), verbosity=60)
Beispiel #16
0
def tables(filename):
    """Return [ SimpleTable(filename, segment), ... ] for each table segment in filename.

    This function is self-cached.    Clear the cache using clear_cache().
    """
    if not filename.endswith(".fits"):
        return [SimpleTable(filename, segment=1)]
    found = []
    with data_file.fits_open(filename) as hdus:
        for index, hdu in enumerate(hdus):
            classname = hdu.__class__.__name__.upper()
            if "TABLEHDU" not in classname:
                continue
            found.append(SimpleTable(filename, index))
            if classname == "TABLEHDU":
                # Exact match means an ASCII table,  which is unusual.
                log.warning("ASCII Table detected in HDU#", str(index) +
                            ".  Particularly for HST, verify that it should not be a BIN Table HDU.")
    return found
Beispiel #17
0
 def _cat_array_properties(self, path):
     """Print the CRDS interpretation of every extension array in `path` (FITS only)."""
     with data_file.fits_open(path) as hdulist:
         for i, hdu in enumerate(hdulist):
             warner = log.warn_on_exception(
                 "Can't load array properties for HDU[" + str(i) + "]")
             with warner:
                 if i > 0:
                     extname = hdu.header.get("EXTNAME", str(i))
                     banner = "CRDS Array Info [" + repr(extname) + "]:"
                     self._cat_banner(banner, delim="-", bottom_delim=None)
                     all_props = data_file.get_array_properties(path, extname)
                     # Drop undefined (None) properties before printing.
                     props = {}
                     for prop, value in all_props.items():
                         if value is not None:
                             props[prop] = value
                     self._print_lines(path, _pp_lines(props))
Beispiel #18
0
    def generate(self, reftype, delivered_files):
        """Generate and return an HDUList containing an updated lookup table
        for `reftype` covering `delivered_files`.
        """
        # Historically ReDCaT has always stamped with current local time,
        # not UTC,  so today() is deliberate here.
        timestamp = datetime.datetime.today().strftime(_TIMESTAMP_FORMAT)
        original_ref_path = utils.get_cache_path(
            self._context, reftype, error_on_missing=False)
        if original_ref_path is None:
            # No existing reference:  build the table from scratch.
            return self._generate_hdul(reftype, delivered_files, timestamp)
        with data_file.fits_open(original_ref_path) as original_ref:
            return self._generate_hdul(
                reftype, delivered_files, timestamp, original_ref=original_ref)
Beispiel #19
0
def tables(filename):
    """Return [ SimpleTable(filename, segment), ... ] for each table segment in filename.

    This function is self-cached.    Clear the cache using clear_cache().
    """
    if filename.endswith(".fits"):
        with data_file.fits_open(filename) as hdus:
            result = []
            for position, hdu in enumerate(hdus):
                kind = hdu.__class__.__name__.upper()
                if "TABLEHDU" in kind:
                    result.append(SimpleTable(filename, position))
                    if kind == "TABLEHDU":
                        # Exact class name means an ASCII table HDU.
                        log.warning(
                            "ASCII Table detected in HDU#",
                            str(position) +
                            ".  Particularly for HST, verify that it should not be a BIN Table HDU."
                        )
        return result
    else:
        return [SimpleTable(filename, segment=1)]
Beispiel #20
0
 def check_header(self, filename, header):
     """Check that every HDU of `filename` named `self.name` has the same
     data shape and dtype as the first such HDU,  and that the final HDU
     ver matches `self.max_ver` when one is specified.

     Raises AssertionError on any shape,  dtype,  or ver mismatch.
     `header` is part of the validator interface but unused here.
     """
     max_ver = 0
     with data_file.fits_open(filename) as hdus:
         first = dict()
         for hdu in hdus:
             if hdu.name != self.name:
                 continue
             self.verbose(filename, "ver=" + str(hdu.ver),
                          "Array has shape=" + str(hdu.data.shape),
                          "and dtype=" + repr(str(hdu.data.dtype)) + ".")
             if hdu.name not in first:
                 # Remember the reference shape/dtype from the first matching HDU.
                 first[hdu.name] = (hdu.data.shape, hdu.data.dtype)
             else:
                 ref_shape, ref_dtype = first[hdu.name]
                 got_shape = hdu.data.shape
                 # FIX:  "mismtatch" typo and missing spaces around
                 # "relative to" corrected in both messages below.
                 assert ref_shape == got_shape, \
                     "Shape mismatch for " + repr((hdu.name, hdu.ver)) + \
                     " relative to " + repr((self.name, 1)) + ". Expected " + \
                     str(ref_shape) + " but got " + str(got_shape) + "."
                 got_dtype = hdu.data.dtype
                 assert ref_dtype == got_dtype, \
                     "Data type mismatch for " + \
                     repr((hdu.name, hdu.ver)) + \
                     " relative to " + repr((self.name, 1)) + \
                     ". Expected " + str(ref_dtype) + \
                     " but got " + str(got_dtype) + "."
             max_ver = hdu.ver
         if self.max_ver is not None:
             assert self.max_ver == max_ver, \
                 "Bad maximum HDU ver for " + repr(self.name) + \
                 ". Expected " + str(self.max_ver) + \
                 ", got " + str(max_ver) + "."
Beispiel #21
0
 def check_header(self, filename, header):
     """Verify shape/dtype consistency of all HDUs named `self.name`.

     Every HDU in `filename` whose name equals `self.name` must have the
     same data shape and dtype as the first matching HDU;  additionally,
     when `self.max_ver` is set,  the ver of the last matching HDU must
     equal it.  Any mismatch raises AssertionError.  `header` is part of
     the validator interface but unused in this method.
     """
     array_name = self.complex_name  # NOTE(review): unused local — confirm before removing
     max_ver = 0
     with data_file.fits_open(filename) as hdus:
         # Maps HDU name -> (shape, dtype) of the first matching HDU.
         first = dict()
         for hdu in hdus:
             if hdu.name != self.name:
                 continue
             self.verbose(filename, "ver=" + str(hdu.ver),
                          "Array has shape=" + str(hdu.data.shape),
                          "and dtype=" + repr(str(hdu.data.dtype)) + ".")
             if hdu.name not in first:
                 # First matching HDU establishes the reference shape/dtype.
                 first[hdu.name] = (hdu.data.shape, hdu.data.dtype)
             else:
                 expected = first[hdu.name][0]
                 got = hdu.data.shape
                 # NOTE(review): message typo "mismtatch" and the missing
                 # spaces around "relative to" are preserved here verbatim.
                 assert expected == got, \
                     "Shape mismtatch for " + repr((hdu.name, hdu.ver)) + \
                     "relative to" + repr((self.name,1)) + ". Expected " + \
                     str(expected) + " but got " + str(got) + "."
                 expected = first[hdu.name][1]
                 got = hdu.data.dtype
                 assert expected == got, \
                     "Data type mismtatch for " + \
                     repr((hdu.name,hdu.ver)) + \
                     " relative to " + repr((self.name,1)) + \
                     ". Expected " + str(expected) + \
                     " but got " + str(got) + "."
             # Track the ver of the last matching HDU seen.
             max_ver = hdu.ver
         if self.max_ver is not None:
             assert self.max_ver == max_ver, \
                 "Bad maximum HDU ver for " + repr(self.name) + \
                 ". Expected " +  str(self.max_ver) + \
                 ", got " + str(max_ver) + "."
Beispiel #22
0
    def _generate_hdul(self,
                       reftype,
                       delivered_files,
                       timestamp,
                       original_ref=None):
        """Build a new lookup-table HDUList for `reftype`.

        Starts from the last table of `original_ref` when given (otherwise
        from empty columns),  then for each delivered file either updates
        the existing row for its COMPNAME or appends a new row stamped
        with `timestamp`.  Returns an HDUList of [primary, bintable].
        """
        if original_ref is not None:
            # Seed the working columns from the existing lookup table,
            # which is assumed to live in the last HDU.
            original_table = original_ref[-1].data
            new_time = original_table["TIME"]
            new_compname = original_table["COMPNAME"]
            new_filename = original_table["FILENAME"]
            new_comment = original_table["COMMENT"]
        else:
            # No prior reference:  start with empty string arrays.
            new_time = np.chararray((0, ), unicode=True)
            new_compname = np.chararray((0, ), unicode=True)
            new_filename = np.chararray((0, ), unicode=True)
            new_comment = np.chararray((0, ), unicode=True)

        updated_instruments = set()
        created_instruments = set()
        for file in delivered_files:
            with data_file.fits_open(file) as hdul:
                component = hdul[0].header["COMPNAME"]
                lookup_filename = utils.get_lookup_filename(
                    component, os.path.basename(file))
                description = hdul[0].header.get("DESCRIP", "")
                if component in new_compname:
                    # Component already present:  update its row in place.
                    idx = np.argwhere(new_compname == component)[0][0]
                    new_time[idx] = timestamp
                    new_filename[idx] = lookup_filename
                    new_comment[idx] = description
                    updated_instruments.add(utils.get_instrument(component))
                else:
                    # New component:  append a fresh row.
                    new_time = np.append(new_time, timestamp)
                    new_compname = np.append(new_compname, component)
                    new_filename = np.append(new_filename, lookup_filename)
                    new_comment = np.append(new_comment, description)
                    created_instruments.add(utils.get_instrument(component))

        # Emit rows sorted by component name.
        ind = np.argsort(new_compname)
        columns = [
            fits.Column(name="TIME",
                        format="A26",
                        array=new_time[ind],
                        disp="A26"),
            fits.Column(name="COMPNAME",
                        format="A18",
                        array=new_compname[ind],
                        disp="A18"),
            fits.Column(name="FILENAME",
                        format="A56",
                        array=new_filename[ind],
                        disp="A56"),
            fits.Column(name="COMMENT",
                        format="A68",
                        array=new_comment[ind],
                        disp="A68")
        ]

        new_table_hdu = fits.BinTableHDU.from_columns(columns)

        new_primary_hdu = fits.PrimaryHDU()
        header = new_primary_hdu.header
        header["USEAFTER"] = timestamp
        header["INSTRUME"] = "HST"
        header["COMMENT"] = "Reference file automatically generated by CRDS"
        header["DBTABLE"] = _DBTABLE_BY_REFTYPE[reftype]
        header["DESCRIP"] = _DESCRIP_BY_REFTYPE[reftype]
        header["PEDIGREE"] = self._make_pedigree(new_table_hdu)

        if original_ref:
            # Carry the prior HISTORY forward before recording the update.
            for item in original_ref[0].header["HISTORY"]:
                header.add_history(item)

            header.add_history(" ")
            header.add_history("Updated on {}".format(timestamp))
        else:
            header.add_history("Created on {}".format(timestamp))

        header.add_history(" ")
        self._add_instrument_history(reftype, header, updated_instruments,
                                     created_instruments)

        return fits.HDUList([new_primary_hdu, new_table_hdu])
Beispiel #23
0
 def check_header(self, filename, header):
     """Open `filename` and run the filename cross-checks on its HDUs."""
     with data_file.fits_open(filename) as hdul:
         result = self._check_filenames(hdul)
     return result
Beispiel #24
0
 def check_header(self, filename, header):
     """Run the connectivity and direction checks on `filename`.  Both
     checks always execute;  the combined and-chained result is returned.
     """
     with data_file.fits_open(filename) as hdul:
         connected = self._check_connectivity(hdul)
         directed = self._check_direction(hdul)
         return directed and connected