Example #1
def header_to_pipelines(header, context=None):
    """Given a dataset `header`,  extract the EXP_TYPE or META.EXPOSURE.TYPE keyword
    from and use it to look up the pipelines required to process it.

    Return a list of pipeline .cfg names.
    """
    with log.augment_exception(
            "Failed determining exp_type, cal_ver from header",
            log.PP(header)):
        exp_type, cal_ver = _header_to_exptype_calver(header)
    config_manager = _get_config_manager(context, cal_ver)
    pipelines = _get_pipelines(exp_type, cal_ver, context)  # uncorrected
    if config_manager.pipeline_exceptions:  # correction based on extra non-EXP_TYPE params
        pipelines2 = []
        for cfg in pipelines:
            for param, exceptions in config_manager.pipeline_exceptions.items():
                exceptions = dict(exceptions)
                dont_replace = exceptions.pop("dont_replace")
                default_missing = exceptions.pop("default_missing")
                paramval = header.get(param.upper(), default_missing)
                if paramval not in dont_replace:
                    cfg = exceptions.get(cfg, cfg)
            pipelines2.append(cfg)
        pipelines = pipelines2
    log.verbose("Applicable pipelines for", srepr(exp_type), "are",
                srepr(pipelines))
    return pipelines
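
For orientation, a hypothetical call might look like this; the header keyword values and the returned .cfg names are illustrative, not taken from a real SYSTEM CRDSCFG reference:

# Hypothetical usage -- keyword values and the result are illustrative only.
header = {"EXP_TYPE": "NRC_IMAGE", "CAL_VER": "0.7.0"}
pipelines = header_to_pipelines(header)
# e.g. ['calwebb_detector1.cfg', 'calwebb_image2.cfg']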
Example #2
    def rmap_apply(self, func, *args, **keys):
        """Apply `func()` to *args and **keys,  adding the pmap, imap, and rmap values
        associated with the elaboration of args.source_context, args.instruments, args.types.
        """
        keywords = dict(keys)
        self._setup_source_context()
        if self.args.rmaps:
            for rmap_name in self.args.rmaps:
                with log.error_on_exception("Failed processing rmap", srepr(rmap_name)):
                    log.info("="*20, "Refactoring rmap", srepr(rmap_name), "="*20)
                    rmapping = rmap.load_mapping(rmap_name)
                    new_filename = self._process_rmap(func, rmapping=rmapping, **keywords)
                    self._diff_and_certify(rmapping=rmapping, new_filename=new_filename,
                                           source_context=self.source_context, **keywords)
        else:
            pmapping = rmap.load_mapping(self.source_context)
            instruments = pmapping.selections.keys() if "all" in self.args.instruments else self.args.instruments
            for instr in instruments:
                with log.augment_exception("Failed loading imap for", repr(instr), "from",
                                           repr(self.source_context)):
                    imapping = pmapping.get_imap(instr)
                types = imapping.selections.keys() if "all" in self.args.types else self.args.types
                for filekind in types:
                    with log.error_on_exception("Failed processing rmap for", repr(filekind)):
                        try:
                            rmapping = imapping.get_rmap(filekind).copy()
                        except crds.exceptions.IrrelevantReferenceTypeError:
                            log.info("Skipping type", srepr(filekind), "as N/A")
                            continue
                        log.info("="*20, "Refactoring rmap", srepr(rmapping.basename), "="*20)
                        new_filename = self._process_rmap(func, rmapping=rmapping, **keywords)
                        self._diff_and_certify(rmapping=rmapping, source_context=self.source_context,
                                               new_filename=new_filename, **keywords)
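
Two context managers drive the control flow above: log.error_on_exception logs the failure and suppresses it so the loop moves on, while log.augment_exception re-raises with extra context prepended. A minimal sketch of that behavior (the real crds.log implementations differ in detail):

from contextlib import contextmanager

@contextmanager
def augment_exception(*parts, exception_class=None):
    # Re-raise any exception with `parts` prepended as context.
    try:
        yield
    except Exception as exc:
        cls = exception_class or exc.__class__
        raise cls(" ".join(map(str, parts)) + " : " + str(exc)) from exc

@contextmanager
def error_on_exception(*parts):
    # Log the failure and suppress it so the caller's loop can continue.
    try:
        yield
    except Exception as exc:
        print("ERROR:", " ".join(map(str, parts)), ":", str(exc))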
Example #3
def header_to_pipelines(header, context=None):
    """Given a dataset `header`,  extract the EXP_TYPE or META.EXPOSURE.TYPE keyword
    from and use it to look up the pipelines required to process it.

    Return a list of reftype names.
    """
    with log.augment_exception("Failed determining exp_type, cal_ver from header", log.PP(header)):
        exp_type, cal_ver = _header_to_exptype_calver(header)
    return get_pipelines(exp_type, cal_ver, context)
Example #4
def reftype_to_pipelines(reftype, cal_ver=None, context=None):
    """Given `exp_type` and `cal_ver` and `context`,  locate the appropriate SYSTEM CRDSCFG
    reference file and determine the sequence of pipeline .cfgs required to process that
    exp_type.
    """
    with log.augment_exception("Failed determining required pipeline .cfgs for",
                               "REFTYPE", srepr(reftype)):
        config_manager = _get_config_manager(context, cal_ver)
        return config_manager.reftype_to_pipelines(reftype)
Example #5
def reftype_to_pipelines(reftype, cal_ver=None, context=None):
    """Given `exp_type` and `cal_ver` and `context`,  locate the appropriate SYSTEM CRDSCFG
    reference file and determine the sequence of pipeline .cfgs required to process that
    exp_type.
    """
    context = _get_missing_context(context)
    cal_ver = _get_missing_calver(cal_ver)
    with log.augment_exception("Failed determining required pipeline .cfgs for",
                               "EXP_TYPE", srepr(reftype), "CAL_VER", srepr(cal_ver)):
        config_manager = _get_config_manager(context, cal_ver)
        return config_manager.reftype_to_pipelines(reftype)
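
A hypothetical call, assuming _get_missing_context and _get_missing_calver fall back to the operational context and current calibration software version:

# Hypothetical usage -- the reftype and result are illustrative only.
cfgs = reftype_to_pipelines("flat")   # context and cal_ver default
# e.g. ['calwebb_detector1.cfg', 'calwebb_image2.cfg']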
Example #6
def get_pipelines(exp_type, cal_ver=None, context=None):
    """Given `exp_type` and `cal_ver` and `context`,  locate the appropriate SYSTEM CRDSCFG
    reference file and determine the sequence of pipeline .cfgs required to process that
    exp_type.
    """
    context = _get_missing_context(context)
    cal_ver = _get_missing_calver(cal_ver)
    with log.augment_exception("Failed determining required pipeline .cfgs for",
                               "EXP_TYPE", srepr(exp_type), "CAL_VER", srepr(cal_ver)):
        config_manager = _get_config_manager(context, cal_ver)
        return config_manager.exptype_to_pipelines(exp_type)
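
By contrast, get_pipelines keys directly off an exposure type. A hypothetical call, with illustrative values:

# Hypothetical usage -- values shown are illustrative only.
cfgs = get_pipelines("MIR_IMAGE", cal_ver="0.7.0")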
Example #7
def _get_pipelines(exp_type, cal_ver=None, context=None):
    """Given `exp_type` and `cal_ver` and `context`,  locate the appropriate SYSTEM CRDSCFG
    reference file and determine the sequence of pipeline .cfgs required to process that
    exp_type.

    NOTE:  This is an uncorrected result;  config_manager.pipeline_exceptions is used to
    alter it based on other non-EXP_TYPE header parameters.
    """
    with log.augment_exception("Failed determining required pipeline .cfgs for",
                               "EXP_TYPE", srepr(exp_type)):
        config_manager = _get_config_manager(context, cal_ver)
        return config_manager.exptype_to_pipelines(exp_type)
Example #8
    def rmap_apply(self, func, *args, **keys):
        """Apply `func()` to *args and **keys,  adding the pmap, imap, and rmap values
        associated with the elaboration of args.source_context, args.instruments, args.types.
        """
        keywords = dict(keys)
        self._setup_source_context()
        if self.args.rmaps:
            for rmap_name in self.args.rmaps:
                with log.error_on_exception("Failed processing rmap",
                                            srepr(rmap_name)):
                    log.info("=" * 20, "Refactoring rmap", srepr(rmap_name),
                             "=" * 20)
                    rmapping = rmap.load_mapping(rmap_name)
                    new_filename = self._process_rmap(func,
                                                      rmapping=rmapping,
                                                      **keywords)
                    self._diff_and_certify(rmapping=rmapping,
                                           new_filename=new_filename,
                                           source_context=self.source_context,
                                           **keywords)
        else:
            pmapping = rmap.load_mapping(self.source_context)
            instruments = (pmapping.selections.keys()
                           if "all" in self.args.instruments
                           else self.args.instruments)
            for instr in instruments:
                with log.augment_exception("Failed loading imap for",
                                           repr(instr), "from",
                                           repr(self.source_context)):
                    imapping = pmapping.get_imap(instr)
                types = (imapping.selections.keys()
                         if "all" in self.args.types
                         else self.args.types)
                for filekind in types:
                    with log.error_on_exception("Failed processing rmap for",
                                                repr(filekind)):
                        try:
                            rmapping = imapping.get_rmap(filekind).copy()
                        except crds.exceptions.IrrelevantReferenceTypeError:
                            log.info("Skipping type", srepr(filekind),
                                     "as N/A")
                            continue
                        log.info("=" * 20, "Refactoring rmap",
                                 srepr(rmapping.basename), "=" * 20)
                        new_filename = self._process_rmap(func,
                                                          rmapping=rmapping,
                                                          **keywords)
                        self._diff_and_certify(
                            rmapping=rmapping,
                            source_context=self.source_context,
                            new_filename=new_filename,
                            **keywords)
Example #9
def parse_mapping(filename):
    """Parse mapping `filename`.   Return parsing."""
    global parsley, MAPPING_PARSER
    
    if parsley is None:
        raise NotImplementedError("Parsley parsing package must be installed.")

    if MAPPING_PARSER is None:
        MAPPING_PARSER = parsley.makeGrammar(MAPPING_GRAMMAR, selectors.SELECTORS)

    log.verbose("Parsing", repr(filename))
    filename = rmap.locate_mapping(filename)
    with log.augment_exception("Parsing error in", repr(filename), exception_class=exceptions.MappingFormatError):
        with open(filename) as pfile:
            parser = MAPPING_PARSER(pfile.read())
            header, selector, comment = parser.mapping()
            return Parsing(header, selector, comment)
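
A hypothetical invocation, assuming Parsing behaves like a namedtuple so the triple unpacks directly:

# Hypothetical usage -- the mapping name is illustrative.
header, selector, comment = parse_mapping("hst.pmap")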
Example #10
    def _check_pedigree_start_stop(self, filename, value):
        """Check the start + stop dates for INFLIGHT PEDIGREE values,  and
        return the PEDIGREE "kind" for all values.
        """
        with log.augment_exception("Invalid value:", repr(value)):
            
            values = value.split()
            if len(values) == 0:
                raise ValueError("Missing value.")
            else:
                pedigree = values[0].upper()
            if len(values) == 1:
                if pedigree not in self._values_wo_date:
                    raise ValueError("Value " + repr(pedigree) +
                                     " cannot be used as a simple value.")
                else:
                    return pedigree
            elif len(values) == 3:
                if pedigree not in self._values_w_date:
                    raise ValueError(
                        "Value " + repr(pedigree) +
                        " cannot be specified with <start> and <stop> dates.")
            else:
                raise ValueError(
                    "Invalid format for PEDIGREE: " + repr(value))
            
            start, stop = values[1:]

            # Time can't appear in either date string.
            for char in start+stop:
                if char in ["T"," ",":"]:
                    raise self._inflight_exc(
                        "Time should not appear in INFLIGHT dates.")
        
            try:
                start_dt = self.validate_date(start)
                stop_dt = self.validate_date(stop)
            except Exception as exc:
                raise self._inflight_exc(str(exc)) from exc
        
            if not (start_dt <= stop_dt):
                raise self._inflight_exc("Start date > stop date")

            return pedigree
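
Illustrative inputs and outcomes, assuming validate_date accepts ISO dates and that "GROUND" and "INFLIGHT" belong to the checker's _values_wo_date and _values_w_date sets respectively:

# Hypothetical calls on a validator instance `checker`:
checker._check_pedigree_start_stop("x.fits", "INFLIGHT 2017-01-01 2017-02-01")  # -> "INFLIGHT"
checker._check_pedigree_start_stop("x.fits", "GROUND")                          # -> "GROUND"
checker._check_pedigree_start_stop("x.fits", "INFLIGHT 2017-01-01T00:00:00 2017-02-01")  # raises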
Example #11
    def _check_pedigree_start_stop(self, filename, value):
        """Check the start + stop dates for INFLIGHT PEDIGREE values,  and
        return the PEDIGREE "kind" for all values.
        """
        with log.augment_exception("Invalid value:", repr(value)):

            values = value.split()
            if len(values) == 0:
                raise ValueError("Missing value.")
            else:
                pedigree = values[0].upper()
            if len(values) == 1:
                if pedigree not in self._values_wo_date:
                    raise ValueError("Value " + repr(pedigree) +
                                     " cannot be used as a simple value.")
                else:
                    return pedigree
            elif len(values) == 3:
                if pedigree not in self._values_w_date:
                    raise ValueError(
                        "Value " + repr(pedigree) +
                        " cannot be specified with <start> and <stop> dates.")
            else:
                raise ValueError("Invalid format for PEDIGREE: ", repr(value))

            start, stop = values[1:]

            # Time can't appear in either date string.
            for char in start + stop:
                if char in ["T", " ", ":"]:
                    raise self._inflight_exc(
                        "Time should not appear in INFLIGHT dates.")

            try:
                start_dt = self.validate_date(start)
                stop_dt = self.validate_date(stop)
            except Exception as exc:
                raise self._inflight_exc(str(exc)) from exc

            if not (start_dt <= stop_dt):
                raise self._inflight_exc("Start date > stop date")

            return pedigree
Example #12
def parse_mapping(filename):
    """Parse mapping `filename`.   Return parsing."""
    global parsley, MAPPING_PARSER

    if parsley is None:
        raise NotImplementedError("Parsley parsing package must be installed.")

    if MAPPING_PARSER is None:
        MAPPING_PARSER = parsley.makeGrammar(MAPPING_GRAMMAR,
                                             selectors.SELECTORS)

    log.verbose("Parsing", repr(filename))
    filename = rmap.locate_mapping(filename)
    with log.augment_exception("Parsing error in",
                               repr(filename),
                               exception_class=exceptions.MappingFormatError):
        with open(filename) as pfile:
            parser = MAPPING_PARSER(pfile.read())
            header, selector, comment = parser.mapping()
            return Parsing(header, selector, comment)
Example #13
def rmap_delete_references(old_rmap, new_rmap, deleted_references):
    """Given the full path of starting rmap `old_rmap`,  modify it by deleting 
    all files in `deleted_references` and write out the result to
    `new_rmap`.    If no actions are performed, don't write out `new_rmap`.
    
    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.load_mapping(old_rmap, ignore_checksum=True)
    for reference in deleted_references:
        baseref = os.path.basename(reference)
        log.info("Deleting", srepr(baseref), "from", srepr(new.name))
        with log.augment_exception("In reference", srepr(baseref)):
            new = new.delete(reference)
    new.header["derived_from"] = old.basename
    log.verbose("Writing", srepr(new_rmap))
    new.write(new_rmap)
    formatted = new.format()
    for reference in deleted_references:
        reference = os.path.basename(reference)
        assert reference not in formatted, \
            "Rules update failure.  Deleted " + srepr(reference) + " still appears in new rmap."
    return new
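
A hypothetical invocation; the file names are illustrative:

# Hypothetical usage -- names are illustrative only.
new_mapping = rmap_delete_references(
    "jwst_miri_flat_0003.rmap",     # starting rmap
    "jwst_miri_flat_0004.rmap",     # result written here
    ["jwst_miri_flat_0021.fits"],   # references to delete
)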
Example #14
def _x_schema_to_flat(schema):
    """Recursively flatten `schema` without addressing case issues."""
    results = {}
    for feature in ["oneOf", "allOf", "$ref"]:
        if feature in schema:
            log.verbose_warning("Schema item has unhandled feature {}.",
                                verbosity=80)
            return None

    if "anyOf" in schema and "type" in schema["anyOf"]:
        schema_type = schema["anyOf"]["type"]
    else:
        schema_type = schema.get("type", "null")

    if schema_type == "object":
        subprops = schema["properties"]
        for prop in subprops:
            with log.augment_exception("In schema property", repr(prop)):
                sub_tree = _schema_to_flat(subprops[prop])
                if sub_tree is None:
                    continue
                if isinstance(sub_tree, dict):
                    for subprop, val in list(sub_tree.items()):
                        results[prop + "." + subprop] = val
                else:
                    results[prop] = sub_tree
    elif schema_type in BASIC_TYPES:
        return schema
    elif schema_type in OPTIONAL_TYPES:
        return schema
    elif schema_type == "array":
        return None
    elif schema_type in ["any", "null"]:
        return None
    else:
        log.verbose_warning("Schema item has unhandled type",
                            repr(schema_type),
                            verbosity=80)
    return results
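
To make the flattening concrete, a sketch of an input and the general shape of the result; the exact leaf values depend on the _schema_to_flat helper, which is not shown here:

# Illustrative input -- not taken from a real datamodel schema.
schema = {
    "type": "object",
    "properties": {
        "meta": {
            "type": "object",
            "properties": {
                "exposure": {"type": "string"},
            },
        },
    },
}
# _x_schema_to_flat(schema) folds nested object properties into dotted
# keys, so the result is keyed by paths such as "meta.exposure" rather
# than by nested dicts.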
Example #15
def rmap_insert_references(old_rmap, new_rmap, inserted_references):
    """Given the full path of starting rmap `old_rmap`,  modify it by inserting 
    or replacing all files in `inserted_references` and write out the result to
    `new_rmap`.    If no actions are performed, don't write out `new_rmap`.
    
    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.fetch_mapping(old_rmap, ignore_checksum=True)
    new.header["derived_from"] = old.basename
    for reference in inserted_references:
        baseref = os.path.basename(reference)
        with log.augment_exception("In reference", srepr(baseref)):
            log.info("Inserting", srepr(baseref), "into", srepr(new.name))
            new = new.insert_reference(reference)
            log.verbose("Writing", srepr(new_rmap))
            new.write(new_rmap)
    formatted = new.format()
    for reference in inserted_references:
        reference = os.path.basename(reference)
        assert reference in formatted, \
            "Rules update failure. " + srepr(reference) + " does not appear in new rmap." \
            "  May be identical match with other submitted references."
    return new
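
A hypothetical invocation; note that new.write(new_rmap) runs inside the loop, so a failure partway through still leaves the last successfully updated rmap on disk:

# Hypothetical usage -- paths are illustrative only.
new_mapping = rmap_insert_references(
    "jwst_nircam_dark_0010.rmap",
    "jwst_nircam_dark_0011.rmap",
    ["/refs/jwst_nircam_dark_0123.fits"],
)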
Example #16
def rmap_insert_references_by_matches(old_rmap, new_rmap, references_headers):
    """Given the full path of starting rmap `old_rmap`,  modify it by inserting 
    or replacing all files in dict `references_headers` which maps a reference file basename
    onto a list of headers under which it should be  matched.  Write out the result to
    `new_rmap`.    If no actions are performed, don't write out `new_rmap`.
    
    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.load_mapping(old_rmap, ignore_checksum=True)
    for baseref, header in references_headers.items():
        with log.augment_exception("In reference", srepr(baseref)):
            log.info("Inserting", srepr(baseref), "into", srepr(old_rmap))
            log.verbose("Inserting", srepr(baseref), "match case", srepr(header), "into", srepr(old_rmap))
            new = new.insert_header_reference(header, baseref)
    new.header["derived_from"] = old.basename
    log.verbose("Writing", srepr(new_rmap))
    new.write(new_rmap)
    formatted = new.format()
    for baseref in references_headers:
        assert baseref in formatted, \
            "Rules update failure. " + srepr(baseref) + " does not appear in new rmap." \
            "  May be identical match with other submitted references."
    return new
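
A hypothetical invocation; the matching parameter names and values are illustrative, not verified rmap parkeys:

# Hypothetical usage -- parameters are illustrative only.
rmap_insert_references_by_matches(
    "jwst_miri_flat_0003.rmap",
    "jwst_miri_flat_0004.rmap",
    {"jwst_miri_flat_0050.fits": {"DETECTOR": "MIRIMAGE", "FILTER": "F770W"}},
)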
Example #17
def rmap_insert_references_by_matches(old_rmap, new_rmap, references_headers):
    """Given the full path of starting rmap `old_rmap`,  modify it by inserting 
    or replacing all files in dict `references_headers` which maps a reference file basename
    onto a list of headers under which it should be  matched.  Write out the result to
    `new_rmap`.    If no actions are performed, don't write out `new_rmap`.
    
    Return new ReferenceMapping named `new_rmap`
    """
    new = old = rmap.load_mapping(old_rmap, ignore_checksum=True)
    for baseref, header in references_headers.items():
        with log.augment_exception("In reference", srepr(baseref)):
            log.info("Inserting", srepr(baseref), "into", srepr(old_rmap))
            log.verbose("Inserting", srepr(baseref), "match case",
                        srepr(header), "into", srepr(old_rmap))
            new = new.insert_header_reference(header, baseref)
    new.header["derived_from"] = old.basename
    log.verbose("Writing", srepr(new_rmap))
    new.write(new_rmap)
    formatted = new.format()
    for baseref in references_headers:
        assert baseref in formatted, \
            "Rules update failure. " + srepr(baseref) + " does not appear in new rmap." \
            "  May be identical match with other submitted references."
    return new
Example #18
def _x_schema_to_flat(schema):
    """Recursively flatten `schema` without addressing case issues."""
    results = {}
    for feature in ["oneOf","allOf","$ref"]:
        if feature in schema:
            log.verbose_warning("Schema item has unhandled feature {}.", verbosity=80)
            return None
        
    if "anyOf" in schema and "type" in schema["anyOf"]:
        schema_type = schema["anyOf"]["type"]
    else:
        schema_type = schema.get("type", "null")
        
    if schema_type == "object":
        subprops = schema["properties"]
        for prop in subprops:
            with log.augment_exception("In schema property", repr(prop)):
                sub_tree = _schema_to_flat(subprops[prop])
                if sub_tree is None:
                    continue
                if isinstance(sub_tree, dict):
                    for subprop, val in list(sub_tree.items()):
                        results[prop + "." + subprop] = val
                else:
                    results[prop] = sub_tree
    elif schema_type in BASIC_TYPES:
        return schema
    elif schema_type in OPTIONAL_TYPES:
        return schema
    elif schema_type == "array":
        return None
    elif schema_type in ["any", "null"]:
        return None
    else:
        log.verbose_warning("Schema item has unhandled type", repr(schema_type), verbosity=80)
    return results