Example #1
def get_deleted_references(old_pmap, new_pmap, cached=True):
    """Return the sorted list of references which are in `old_pmap` but not in `new_pmap`."""
    old_pmap = rmap.asmapping(old_pmap, cached=cached)
    new_pmap = rmap.asmapping(new_pmap, cached=cached)
    return sorted(set(old_pmap.reference_names()) - set(new_pmap.reference_names()))
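
A minimal usage sketch (the context names are hypothetical); asmapping accepts either a mapping filename or an already-loaded Mapping object:

# Hypothetical pmap names for illustration.
deleted = get_deleted_references("hst_0001.pmap", "hst_0002.pmap")
for ref in deleted:
    print("deleted:", ref)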
Example #2
def mapping_check_references(mapping, derived_from):
    """Regardless of matching criteria,  do a simple check listing added or deleted
    references as appropritate.
    """
    mapping = rmap.asmapping(mapping, cached="readonly")
    derived_from = rmap.asmapping(derived_from, cached="readonly")
    old_refs = set(derived_from.reference_names())
    new_refs = set(mapping.reference_names())
    if old_refs - new_refs:
        log.warning("Deleted references for", repr(derived_from.filename), "and", repr(mapping.filename), "=",
                 list(old_refs - new_refs))
    if new_refs - old_refs:
        log.warning("Added references for", repr(derived_from.filename), "and", repr(mapping.filename), "=",
                 list(new_refs - old_refs))
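
A usage sketch with hypothetical rmap names; log.warning is the CRDS logger, which joins its positional arguments print-style:

# Compare a new rmap against the rmap it was derived from (names hypothetical).
mapping_check_references("hst_acs_darkfile_0252.rmap", "hst_acs_darkfile_0251.rmap")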
Example #3
def mapping_check_diffs(mapping, derived_from):
    """Issue warnings for *deletions* in self relative to parent derived_from
    mapping.  Issue warnings for *reversions*,  defined as replacements which
    where the replacement is older than the original,  as defined by the names.

    This is intended to check for missing modes and for inadvertent reversions
    to earlier versions of files.   For speed and simplicity,  file time order
    is currently determined by the names themselves,  not file contents, file
    system,  or database info.
    """
    mapping = rmap.asmapping(mapping, cached="readonly")
    derived_from = rmap.asmapping(derived_from, cached="readonly")
    log.info("Checking diffs from", repr(derived_from.basename), "to", repr(mapping.basename))
    diffs = derived_from.difference(mapping)
    mapping_check_diffs_core(diffs)
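
A companion usage sketch (hypothetical context names), checking a new context against the context it derives from:

# Warn about deletions and name-order reversions introduced by the new context.
mapping_check_diffs("hst_0002.pmap", "hst_0001.pmap")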
Example #4
def get_refactoring_header(rmapping, refname, required_keywords):
    """Create a composite header which is derived from the file contents overidden by any values
    as they appear in the rmap.
    """
    rmapping = rmap.asmapping(rmapping)
    # A fallback source of information is the reference file headers
    header = rmapping.get_refactor_header(
        rmap.locate_file(refname, rmapping.observatory),
        extra_keys=("META.OBSERVATION.DATE", "META.OBSERVATION.TIME", "DATE-OBS","TIME-OBS") + required_keywords)
    # The primary source of information is the original rmap and the matching values defined there
    headers2 = matches.find_match_paths_as_dict(rmapping.filename, refname)
    # Combine the two,  using the rmap values to override anything duplicated in the reffile header
    assert len(headers2) == 1, "Can't refactor file with more than one match: " + srepr(refname)
    header.update(headers2[0])
    return header
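
A usage sketch; the rmap and reference filenames are hypothetical, and required_keywords must be a tuple since it is concatenated with the built-in extra keys:

# Build the composite header used to re-insert a reference during refactoring.
header = get_refactoring_header("hst_cos_deadtab_0250.rmap", "x1v17414l_dead.fits",
                                required_keywords=("DETECTOR",))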
Example #5
    def __init__(
        self,
        context,
        graph_file=None,
        throughput_lookup_file=None,
        thermal_lookup_file=None,
        delivered_throughput_files=None,
        delivered_thermal_files=None
    ):
        self.context = context

        if graph_file is None:
            self.graph_file = utils.get_cache_path(context, utils.GRAPH_REFTYPE)
        else:
            self.graph_file = graph_file

        if throughput_lookup_file is None:
            self.throughput_lookup_file = utils.get_cache_path(context, utils.THROUGHPUT_LOOKUP_REFTYPE)
        else:
            self.throughput_lookup_file = throughput_lookup_file

        if thermal_lookup_file is None:
            self.thermal_lookup_file = utils.get_cache_path(context, utils.THERMAL_LOOKUP_REFTYPE)
        else:
            self.thermal_lookup_file = thermal_lookup_file

        if delivered_throughput_files is None:
            self.delivered_throughput_files = []
        else:
            self.delivered_throughput_files = delivered_throughput_files

        if delivered_thermal_files is None:
            self.delivered_thermal_files = []
        else:
            self.delivered_thermal_files = delivered_thermal_files

        self.imap = rmap.asmapping(context).get_imap(utils.SYNPHOT_INSTRUMENT)

        self.throughput_compname_to_path = self._build_compname_to_path(
            utils.THROUGHPUT_REFTYPE,
            self.delivered_throughput_files
        )

        self.thermal_compname_to_path = self._build_compname_to_path(
            utils.THERMAL_REFTYPE,
            self.delivered_thermal_files
        )
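
A construction sketch; only __init__ is shown above, so the class name used here is hypothetical. Any file argument left as None falls back to the reference file cached for `context`:

# Hypothetical class name; delivered files override the cached references.
checker = SynphotComponentChecker("hst_0001.pmap",
                                  delivered_throughput_files=["new_throughput.fits"])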
Example #6
    def insert_references(self):
        """Insert files specified by --references into the appropriate rmaps identified by --source-context."""
        self._setup_source_context()
        categorized = self.categorize_files(self.args.references)
        pmap = crds.get_pickled_mapping(self.source_context)  # reviewed
        self.args.rmaps = []
        for (instrument, filekind) in categorized:
            try:
                self.args.rmaps.append(pmap.get_imap(instrument).get_rmap(filekind).filename)
            except crexc.CrdsError:
                log.info("Existing rmap for", (instrument, filekind), "not found.  Trying empty spec.")
                spec_file = os.path.join(
                    os.path.dirname(self.obs_pkg.__file__), "specs", instrument + "_" + filekind + ".rmap")
                rmapping = rmap.asmapping(spec_file)  # loading verifies the spec file parses
                log.info("Loaded spec file from", repr(spec_file))
                self.args.rmaps.append(spec_file)
        self.rmap_apply(insert_rmap_references, categorized=categorized)
Example #7
def get_cache_path(context, reftype, error_on_missing=True):
    """
    Fetch the full path to the single reference file of reftype.
    """
    context = rmap.asmapping(context)
    imap = context.get_imap(SYNPHOT_INSTRUMENT)
    mapping = imap.get_rmap(reftype)
    filenames = mapping.reference_names()

    if len(filenames) == 0 and not error_on_missing:
        return None

    if len(filenames) != 1:
        raise RuntimeError(
            "Expected '{}' rmap to contain one reference file".format(reftype))

    filename = filenames[0]
    return config.locate_file(filename, observatory="hst")
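
A usage sketch with a hypothetical context name, using the graph reftype constant referenced as utils.GRAPH_REFTYPE in the __init__ example above:

# Returns None rather than raising when the rmap lists no reference files.
graph_path = get_cache_path("hst_0001.pmap", GRAPH_REFTYPE, error_on_missing=False)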
Example #8
def insert_mapping(context, mapping):
    """Replace the filename in `context` which has the same generic name as
    `mapping` with `mapping`, then re-write `context` in place.

    If `mapping` is of the form <instrument>_<type>_"n/a", then it specifies
    that <type> of <instrument> should be set to "N/A".
    """
    # e.g. replaces an entry like 'ACS' : 'hst_acs.imap' in a pmap.
    where = config.locate_mapping(context)
    # readonly caching is ok because this call is always made on a newly named
    # copy of the original rmap;  the only thing mutated is the uncached new mapping.
    loaded = rmap.asmapping(context, cached="readonly")
    if mapping.endswith(("_n/a", "_N/A")):
        instrument, key, special = mapping.split("_")
        replacement = special.upper()
    else:
        key = loaded.get_item_key(mapping)
        replacement = mapping
    key, replaced = loaded.set_item(key, replacement)
    loaded.write(where)
    return key, replaced, replacement
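
A usage sketch with hypothetical filenames, covering both the ordinary replacement form and the <instrument>_<type>_"n/a" special form:

# Swap a renamed rmap into a copy of an imap...
key, replaced, replacement = insert_mapping("hst_acs_0123.imap", "hst_acs_darkfile_0252.rmap")
# ...or set an entire type to N/A.
key, replaced, replacement = insert_mapping("hst_acs_0123.imap", "acs_darkfile_n/a")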
Example #9
def insert_mapping(context, mapping):
    """Replace the filename in `context` which has the same generic name as
    `mapping` with `mapping`, then re-write `context` in place.

    If `mapping` is of the form <instrument>_<type>_"n/a", then it specifies
    that <type> of <instrument> should be set to "N/A".
    """
    # e.g. replaces an entry like 'ACS' : 'hst_acs.imap' in a pmap.
    where = rmap.locate_mapping(context)
    # readonly caching is ok because this call is always made on a newly named
    # copy of the original rmap;  the only thing mutated is the uncached new mapping.
    loaded = rmap.asmapping(context, cached="readonly")
    if mapping.endswith(("_n/a", "_N/A")):
        instrument, key, special = mapping.split("_")
        replacement = special.upper()
    else:
        key = loaded.get_item_key(mapping)
        replacement = mapping
    key, replaced = loaded.set_item(key, replacement)
    loaded.write(where)
    return key, replaced, replacement
Example #10
def is_reprocessing_required(dataset, dataset_parameters, old_context, new_context, update):
    """This is the top level interface to crds.bestrefs running in "Affected Datasets" mode.

    It determines if reprocessing `dataset` with parameters `dataset_parameters` should be performed as
    a consequence of switching from `update.old_reference` to `update.new_reference`.  old_reference is
    assigned to dataset by old_context, and new_reference is assigned to dataset by new_context.

    Parameters
    ----------
    dataset : str
        id of dataset being reprocessed, in <assoc>:<member> or <unassoc>:<unassoc> format

    dataset_parameters : dict
        { parameter : value, ... } for all matching parameters and row selection parameters

        XXX row selection parameters not used in file selection may not be present until
        XXX explicitly added to the CRDS interface to the DADSOPS parameter database...
        XXX and possibly even to DADSOPS itself. Normally the row selections have only been
        XXX done with direct access to dataset .fits files.

    old_context : loaded pmap or name of old context, possibly for metadata, or None

    new_context : loaded pmap or name of new context, possibly for metadata

    update : Update object

    Returns
    -------
    bool
        True IFF reprocessing should be done as a consequence of the table change.
    """

    log.verbose('is_reprocessing_required: Called with:\n',
                dataset, '\n',
                dataset_parameters, '\n',
                old_context, '\n',
                new_context, '\n',
                update,
                verbosity=100)
                
    # no old_context means "single context" mode,  always reprocess.
    if old_context is None:   
        return True
    
    # NOTE: non-tables are treated in DeepLook as filekinds which aren't (or maybe someday are) handled,  
    # hence reprocessed for now.
    
    # Reprocess for non-file special values.  Other code will decide what to do with the updates,
    # the point here is that table comparison isn't possible so filtering shouldn't be done.
    old_ref = update.old_reference.lower()
    new_ref = update.new_reference.lower()
    incomparable = ('n/a', 'undefined', 'not found')
    if old_ref.startswith(incomparable) or new_ref.startswith(incomparable):
        return True

    # mostly debug wrappers here,  allows simple string parameters to work and resolves cache paths.
    old_context = rmap.asmapping(old_context, cached=True)   
    new_context = rmap.asmapping(new_context, cached=True)
    old_reference = old_context.locate_file(old_ref)
    new_reference = new_context.locate_file(new_ref)
    
    # Log that deep examination is occurring.
    log.verbose('Deep Reference examination between {} and {} initiated.'.format(old_reference, new_reference), 
                verbosity=25)
    
    with log.error_on_exception("Failed fetching comparison reference tables:", repr([old_ref, new_ref])):
        api.dump_files(new_context.name, [old_ref, new_ref])

    # See if deep checking into the reference is possible.
    try:
        deep_look = DeepLook.from_filekind(update.instrument, update.filekind)

        dataset_id = dataset.split(':')[0]

        # **DEBUG**
        # ** Since we are not getting full headers, if this is a test
        # ** dataset, replace the headers.
        #log.verbose_warning('Forcing use of LBYX01010, regardless...', verbosity=25)
        #dataset_id = 'LBYX01010'           #***DEBUG: force headers regardless of actual data

        if dataset_id in deep_look.stub_input:
            log.verbose_warning('Substituting header for dataset "{}"'.format(dataset))
            dataset_parameters = deep_look.stub_input[dataset_id]['headers']
            log.verbose_warning('headers = ', dataset_parameters, verbosity=25)

        log.verbose(deep_look.preamble, 'Dataset headers = {}'.format(dataset_parameters), verbosity=75)
        log.verbose(deep_look.preamble, 'Comparing references {} and {}.'.format(old_reference, new_reference), verbosity=75)
        deep_look.are_different(dataset_parameters, old_reference, new_reference)
        
        log.verbose(deep_look.preamble, 'Reprocessing is {}required.'.format('' if deep_look.is_different else 'not '), verbosity=25)
        log.verbose(deep_look.preamble, deep_look.message, verbosity=25)
        return deep_look.is_different

    except DeepLookError as error:

        # Could not determine difference, therefore presume so.
        log.verbose_warning('Deep examination error: {}'.format(error.message), verbosity=25)
        log.verbose_warning('Deep examination failed, presuming reprocessing.', verbosity=25)
        return True
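
An invocation sketch; the dataset id, header dictionary, and context names are hypothetical, and `update` stands in for one of the bestrefs Update objects carrying the .instrument, .filekind, .old_reference, and .new_reference attributes used above:

# `update` comes from the surrounding bestrefs machinery (illustrative only).
reprocess = is_reprocessing_required(
    "LBYX01010:LBYX01010",                     # <assoc>:<member> dataset id
    {"DETECTOR": "FUV", "OPT_ELEM": "G130M"},  # matching parameter header
    "hst_0001.pmap", "hst_0002.pmap", update)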