Example #1
def _flat_to_tpns(flat=None, schema_name=None):
    """Convert flat representation of DM schema to list of all TpnInfo objects."""
    if flat is None:
        flat = _schema_to_flat(_load_schema(schema_name))
    tpns = []
    for key, value in flat.items():
        if key.endswith(".TYPE"):
            basekey = str(key[:-len(".TYPE")])
            legal_values = [
                str(val) for val in flat.get(basekey + ".ENUM", [])
            ]
            if legal_values:
                legal_values += ["ANY", "N/A"]
            legal_values = sorted(set(legal_values))
            if isinstance(value, list):
                value = tuple(value)
            datatype = SCHEMA_TYPE_TO_TPN.get(value, None)
            if datatype is not None:
                tpn = TpnInfo(name=basekey.upper(),
                              keytype="H",
                              datatype=datatype[0],
                              presence=datatype[1],
                              values=legal_values)
                log.verbose("Adding tpn constraint from DM schema:",
                            repr(tpn),
                            verbosity=65)
                tpns.append(tpn)
            else:
                log.warning("No TPN form for", repr(key), repr(value))
    return sorted(tpns)
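For orientation, a minimal sketch of the flat-schema input this function consumes; the key names and values below are hypothetical, not taken from a real DM schema:

# Each ".TYPE" entry yields one TpnInfo; a sibling ".ENUM" entry supplies the
# legal values, which the function extends with "ANY" and "N/A".
flat = {
    "meta.instrument.name.TYPE": "string",
    "meta.instrument.name.ENUM": ["NIRCAM", "MIRI"],
}
# _flat_to_tpns(flat) would emit one "META.INSTRUMENT.NAME" constraint with
# values ["ANY", "MIRI", "N/A", "NIRCAM"], assuming SCHEMA_TYPE_TO_TPN maps
# "string" to a (datatype, presence) pair.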
Example #2
def difference(observatory, old_file, new_file, *args, **keys):
    """Difference different kinds of CRDS files (mappings, FITS references, etc.)
    named `old_file` and `new_file` against one another and print out the results 
    on stdout.
    
    Returns:
    
    0 no differences
    1 some differences
    2 errors in subshells
    
    """
    filetype = config.filetype(old_file)
    differs = {
        "mapping": MappingDifferencer,
        "fits": FitsDifferencer,
        "text": TextDifferencer,
        "yaml": TextDifferencer,
        "json": JsonDifferencer,
    }
    differ_class = differs.get(filetype, None)
    if differ_class is None:
        log.warning("Cannot difference file of type", repr(filetype), ":",
                    repr(old_file), repr(new_file))
        status = 2  #  arguably, this should be an error not a warning.  wary of changing now.
    else:
        differ = differ_class(observatory, old_file, new_file, *args, **keys)
        status = differ.difference()
    return status
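A hedged usage sketch; the mapping names are illustrative:

# Branch on the status codes documented in the docstring.
status = difference("hst", "hst_0001.pmap", "hst_0002.pmap")
if status == 0:
    print("no differences")
elif status == 1:
    print("some differences")
else:
    print("unsupported filetype or subshell errors")   # status == 2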
Example #3
File: api.py Project: nden/crds
def get_crds_server():
    """Return the base URL for the CRDS JSON RPC server.
    """
    url = URL[:-len(URL_SUFFIX)]
    if not url.startswith("https://") and "localhost" not in url:
        log.warning("CRDS_SERVER_URL does not start with https://  ::", url)
    return url
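The function simply strips the module-level URL_SUFFIX from URL; a sketch assuming URL_SUFFIX is "/json/" (its real value is not shown in this excerpt):

URL_SUFFIX = "/json/"                                # assumed value
URL = "https://hst-crds.stsci.edu" + URL_SUFFIX
assert URL[:-len(URL_SUFFIX)] == "https://hst-crds.stsci.edu"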
Example #4
    def _call(self, *args, **kwargs):
        """Core of RPC dispatch without error interpretation, logging, or return value decoding."""
        params = kwargs if len(kwargs) else args
        # if Any.kind(params) == Object and self.__version != '2.0':
        #   raise Exception('Unsupported arg type for JSON-RPC 1.0 '
        #                  '(the default version for this client, '
        #                  'pass version="2.0" to use keyword arguments)')
        jsonrpc_params = {"jsonrpc": self.__version,
                          "method": self.__service_name,
                          'params': params,
                          'id': message_id()
                         }
        
        parameters = json.dumps(jsonrpc_params)
        
        url = self._get_url(jsonrpc_params)
        
        if "serverless" in url or "server-less" in url:
            raise exceptions.ServiceError("Configured for server-less mode.  Skipping JSON RPC " + repr(self.__service_name))

        if log.get_verbose() <= 50:
            log.verbose("CRDS JSON RPC", self.__service_name, params if len(str(params)) <= 60 else "(...)", "-->")
        else:
            log.verbose("CRDS JSON RPC to", url, "parameters", params, "-->")
        
        response = apply_with_retries(self._call_service, parameters, url)

        try:
            rval = json.loads(response)
        except Exception:
            log.warning("Invalid CRDS jsonrpc response:\n", response)
            raise
        
        return rval
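For reference, the request body serialized above follows the standard JSON-RPC shape; the method name, params, and id below are illustrative:

import json

payload = {"jsonrpc": "1.0", "method": "list_mappings", "params": ["hst"], "id": 1}
print(json.dumps(payload))
# {"jsonrpc": "1.0", "method": "list_mappings", "params": ["hst"], "id": 1}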
Example #5
File: sync.py Project: nden/crds
 def organize_references(self, new_mode):
     """Find all references in the CRDS cache and relink them to the paths which are implied by `new_mode`.   
     This is used to reorganize existing file caches into new layouts,  e.g. flat -->  by instrument.
     """
     old_refpaths = rmap.list_references("*", observatory=self.observatory, full_path=True)
     old_mode = config.get_crds_ref_subdir_mode(self.observatory)
     log.info("Reorganizing", len(old_refpaths), "references from", repr(old_mode), "to", repr(new_mode))
     config.set_crds_ref_subdir_mode(new_mode, observatory=self.observatory)
     new_mode = config.get_crds_ref_subdir_mode(self.observatory)  # did it really change?
     for refpath in old_refpaths:
         with log.error_on_exception("Failed relocating:", repr(refpath)):
             desired_loc = rmap.locate_file(os.path.basename(refpath), observatory=self.observatory)
             if desired_loc != refpath:
                 if os.path.exists(desired_loc):
                     if not self.args.organize_delete_junk:
                         log.warning("Link or directory already exists at", repr(desired_loc), "Skipping", repr(refpath))
                         continue
                     utils.remove(desired_loc, observatory=self.observatory)
                 if config.writable_cache_or_info("Skipping file relocation from", repr(refpath), "to", repr(desired_loc)):
                     log.info("Relocating", repr(refpath), "to", repr(desired_loc))
                     shutil.move(refpath, desired_loc)
             else:
                 if old_mode != new_mode:
                     log.verbose_warning("Keeping existing cached file", repr(desired_loc), "already in target mode", repr(new_mode))
                 else:
                     log.verbose_warning("No change in subdirectory mode", repr(old_mode), "skipping reorganization of", repr(refpath))
     if new_mode == "flat" and old_mode == "instrument":
         log.info("Reorganizing from 'instrument' to 'flat' cache,  removing instrument directories.")
         for instrument in self.locator.INSTRUMENTS:
             self.remove_dir(instrument)
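The two cache layouts being converted between look roughly like this (paths illustrative):

# flat mode:        <CRDS cache>/references/hst/w3m1716tj_drk.fits
# instrument mode:  <CRDS cache>/references/hst/wfc3/w3m1716tj_drk.fits
# organize_references relinks each reference from one form to the other,
# honoring the organize_delete_junk option when something already occupies
# the target path.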
Example #6
    def main(self):
        """Perform the differencing."""
        self.args.files = [ self.args.old_file, self.args.new_file ]   # for defining self.observatory
        self.old_file = self.locate_file(self.args.old_file)
        self.new_file = self.locate_file(self.args.new_file)
        if self.args.brief:
            self.args.lowest_mapping_only = True
            self.args.remove_paths = True
            self.args.hide_boring_diffs = True
            self.args.include_header_diffs = True
        if self.args.sync_files:
            assert not (self.args.cache1 or self.args.cache2), \
                "--sync-files is not compatible with cache-to-cache differences."
            if self.args.print_all_new_files:
                serial_old = naming.newstyle_serial(self.old_file)
                serial_new = naming.newstyle_serial(self.new_file)
                if None not in [serial_old, serial_new]:
                    # apply the +1 only after the None check to avoid a TypeError
                    errs = sync.SyncScript("crds.sync --range {0}:{1}".format(serial_old, serial_new + 1))()
                    assert not errs, "Errors occurred while syncing all rules to CRDS cache."
                else:
                    log.warning("Cannot sync non-standard mapping names,  results may be incomplete.")
            else:
                self.sync_files([self.old_file, self.new_file])
        elif self.args.print_all_new_files:
            log.warning("--print-all-new-files requires a complete set of rules.  suggest --sync-files.")
            
        # self.args.files = [ self.old_file, self.new_file ]   # for defining self.observatory
    
        assert (self.args.cache1 and self.args.cache2) or (not self.args.cache1 and not self.args.cache2), \
            "Cache-to-cache comparison requires both --cache1 and --cache2;  otherwise neither for single cache comparison."

        if self.args.print_new_files:
            status = self.print_new_files()
        elif self.args.print_all_new_files:
            status = self.print_all_new_files()
        elif self.args.print_affected_instruments:
            status = self.print_affected_instruments()
        elif self.args.print_affected_types:
            status = self.print_affected_types()
        elif self.args.print_affected_modes:
            status = self.print_affected_modes()
        else:
            status = difference(self.observatory, self.old_file, self.new_file, 
                                primitive_diffs=self.args.primitive_diffs, 
                                check_diffs=self.args.check_diffs,
                                check_references=self.args.check_references,
                                mapping_text_diffs=self.args.mapping_text_diffs,
                                include_header_diffs=self.args.include_header_diffs,
                                hide_boring_diffs=self.args.hide_boring_diffs,
                                recurse_added_deleted=self.args.recurse_added_deleted,
                                lowest_mapping_only=self.args.lowest_mapping_only,
                                remove_paths=self.args.remove_paths,
                                squash_tuples=self.args.squash_tuples,
                                cache1=self.args.cache1,
                                cache2=self.args.cache2)
        if log.errors() or log.warnings():
            return 2
        else:
            return status
Example #7
File: api.py Project: nden/crds
def set_crds_server(url):
    """Configure the CRDS JSON services server to `url`,  
    e.g. 'http://localhost:8000'
    """
    if not url.startswith("https://") and "localhost" not in url:
        log.warning("CRDS_SERVER_URL does not start with https://  ::", url)
    if url.endswith("/"):
        url = url[:-1]
    global URL, S
    URL = url + URL_SUFFIX
    S = CheckingProxy(URL, version="1.0")
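A hedged usage sketch; URL_SUFFIX is assumed to be "/json/":

set_crds_server("https://hst-crds.stsci.edu")
# URL is now "https://hst-crds.stsci.edu/json/" and S is a fresh CheckingProxy;
# a trailing "/" on the input would have been stripped first.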
Example #8
File: sync.py Project: nden/crds
 def verify_file(self, file, info, bytes_so_far, total_bytes, nth_file, total_files):
     """Check one `file` against the provided CRDS database `info` dictionary."""
     path = rmap.locate_file(file, observatory=self.observatory)
     base = os.path.basename(file)
     n_bytes = int(info["size"])
     log.verbose(api.file_progress("Verifying", base, path, n_bytes, bytes_so_far, total_bytes, nth_file, total_files),
                 verbosity=10)
     if not os.path.exists(path):
         log.error("File", repr(base), "doesn't exist at", repr(path))
         return
     size = os.stat(path).st_size
     if int(info["size"]) != size:
         self.error_and_repair(path, "File", repr(base), "length mismatch LOCAL size=" + srepr(size), 
                               "CRDS size=" + srepr(info["size"]))
     elif self.args.check_sha1sum:
         log.verbose("Computing checksum for", repr(base), "of size", repr(size), verbosity=100)
         sha1sum = utils.checksum(path)
         if info["sha1sum"] == "none":
             log.warning("CRDS doesn't know the checksum for", repr(base))
         elif info["sha1sum"] != sha1sum:
             self.error_and_repair(path, "File", repr(base), "checksum mismatch CRDS=" + repr(info["sha1sum"]), 
                                   "LOCAL=" + repr(sha1sum))
     if info["state"] not in ["archived", "operational"]:
         log.warning("File", repr(base), "has an unusual CRDS file state", repr(info["state"]))
     if info["rejected"] != "false":
         log.warning("File", repr(base), "has been explicitly rejected.")
         if self.args.purge_rejected:
             self.remove_files([path], "files")
         return
     if info["blacklisted"] != "false":
         log.warning("File", repr(base), "has been blacklisted or is dependent on a blacklisted file.")
         if self.args.purge_blacklisted:
             self.remove_files([path], "files")
         return
     return
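The `info` dictionary holds string-valued catalog fields; a sketch of the keys this method actually reads (values illustrative):

info = {
    "size": "10240",         # compared against os.stat().st_size
    "sha1sum": "none",       # the literal "none" draws a warning, not a compare
    "state": "operational",  # anything outside archived/operational warns
    "rejected": "false",     # "true" warns and optionally purges the file
    "blacklisted": "false",  # likewise, for blacklisted files
}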
Example #9
def mapping_check_references(mapping, derived_from):
    """Regardless of matching criteria,  do a simple check listing added or deleted
    references as appropriate.
    """
    mapping = rmap.asmapping(mapping, cached="readonly")
    derived_from = rmap.asmapping(derived_from, cached="readonly")
    old_refs = set(derived_from.reference_names())
    new_refs = set(mapping.reference_names())
    if old_refs - new_refs:
        log.warning("Deleted references for", repr(derived_from.filename), "and", repr(mapping.filename), "=",
                 list(old_refs - new_refs))
    if new_refs - old_refs:
        log.warning("Added references for", repr(derived_from.filename), "and", repr(mapping.filename), "=",
                 list(new_refs - old_refs))
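The set arithmetic behind the two warnings, in isolation:

old_refs = {"a.fits", "b.fits"}
new_refs = {"b.fits", "c.fits"}
assert old_refs - new_refs == {"a.fits"}   # deleted references
assert new_refs - old_refs == {"c.fits"}   # added references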
Example #10
def hijacked_showwarning(message, category, filename, lineno, *args, **keys):
    """Map the warnings.showwarning plugin function parameters onto log.warning."""
    try:
        scat = str(category).split(".")[-1].split("'")[0]
    except Exception:
        scat = category
    try:
        sfile = str(filename).split(".egg")[-1].split("site-packages")[-1].replace("/",".").replace(".py", "")
        while sfile.startswith(("/",".")):
            sfile = sfile[1:]
    except Exception:
        sfile = filename
    message = str(message).replace("\n","")
    log.warning(scat, ":", sfile, ":", message)
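warnings.showwarning is the standard library's override point for exactly this; a sketch of the hookup (where CRDS actually installs it is not shown here):

import warnings

warnings.showwarning = hijacked_showwarning   # route warnings through CRDS log
warnings.warn("demo warning")                 # now emitted via log.warning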
Example #11
def mapping_check_references(mapping, derived_from):
    """Regardless of matching criteria,  do a simple check listing added or deleted
    references as appropriate.
    """
    mapping = rmap.asmapping(mapping, cached="readonly")
    derived_from = rmap.asmapping(derived_from, cached="readonly")
    old_refs = set(derived_from.reference_names())
    new_refs = set(mapping.reference_names())
    if old_refs - new_refs:
        log.warning("Deleted references for", repr(derived_from.filename),
                    "and", repr(mapping.filename), "=",
                    list(old_refs - new_refs))
    if new_refs - old_refs:
        log.warning("Added references for", repr(derived_from.filename), "and",
                    repr(mapping.filename), "=", list(new_refs - old_refs))
Example #12
def hijacked_showwarning(message, category, filename, lineno, *args, **keys):
    """Map the warnings.showwarning plugin function parameters onto log.warning."""
    try:
        scat = str(category).split(".")[-1].split("'")[0]
    except Exception:
        scat = category
    try:
        sfile = str(filename).split(".egg")[-1].split(
            "site-packages")[-1].replace("/", ".").replace(".py", "")
        while sfile.startswith(("/", ".")):
            sfile = sfile[1:]
    except Exception:
        sfile = filename
    message = str(message).replace("\n", "")
    log.warning(scat, ":", sfile, ":", message)
Example #13
def reformat_useafter(rmapping, header):
    """Reformat a USEAFTER date in a standard CRDS form which can be split into
    DATE-OBS and TIME-OBS.   Honor the ALLOW_BAD_USEAFTER to provide a safe default
    for early junk USEAFTER values;  1900-01-01T00:00:00.
    """
    useafter = str(header["USEAFTER"])
    try:
        return reformat_date(useafter)
    except Exception:
        if config.ALLOW_BAD_USEAFTER:
            log.warning("Can't parse USEAFTER =", repr(useafter),
                        "in", repr(rmapping.filename), "faking as '1900-01-01T00:00:00'")
            return reformat_date("1900-01-01T00:00:00")
        else:
            raise exceptions.InvalidUseAfterFormat("Bad USEAFTER time format =", repr(useafter))
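Behavior sketch; the header value is illustrative:

header = {"USEAFTER": "not-a-date"}
# With config.ALLOW_BAD_USEAFTER set, reformat_useafter(rmapping, header) logs
# a warning and returns reformat_date("1900-01-01T00:00:00"); otherwise it
# raises exceptions.InvalidUseAfterFormat.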
Example #14
def reformat_useafter(rmapping, header):
    """Reformat a USEAFTER date in a standard CRDS form which can be split into
    DATE-OBS and TIME-OBS.   Honor the ALLOW_BAD_USEAFTER to provide a safe default
    for early junk USEAFTER values;  1900-01-01T00:00:00.
    """
    useafter = str(header["USEAFTER"])
    try:
        return reformat_date(useafter)
    except Exception:
        if config.ALLOW_BAD_USEAFTER:
            log.warning("Can't parse USEAFTER =", repr(useafter), "in",
                        repr(rmapping.filename),
                        "faking as '1900-01-01T00:00:00'")
            return reformat_date("1900-01-01T00:00:00")
        else:
            raise exceptions.InvalidUseAfterFormat(
                "Bad USEAFTER time format =", repr(useafter))
Example #15
def wfc3_darkfile_filter(kmap_orig):
    """Filter to customize DARKFILE for hst_gentools/gen_rmap.py.
    
    Removes dead SUBTYPE='' darkfiles.   Driven by CDBS reffile_ops database defects.
    """
    darkfile_match_keys = ('DETECTOR', 'CCDAMP', 'BINAXIS1', 'BINAXIS2', 'CCDGAIN', 'SAMP_SEQ', 'SUBTYPE')
    kmap = copy.deepcopy(kmap_orig)
    for match in kmap_orig:
        header = dict(list(zip(darkfile_match_keys, match)))
        if header["SUBTYPE"] == '':
            header["SUBTYPE"] = "N/A"
            new_match = tuple(header[key] for key in darkfile_match_keys)
            for filemap in kmap[match]:
                log.warning("Re-mapping match with SUBTYPE='' to SUBTYPE='N/A' for", filemap)
            kmap[new_match] = kmap[match]
            del kmap[match]
    return kmap, []
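A sketch of the kmap shape this filter rewrites; the match tuple values are hypothetical but follow darkfile_match_keys order:

kmap = {
    ("UVIS", "ABCD", "1", "1", "1.5", "NONE", ""): ["<filemap placeholder>"],
}
# wfc3_darkfile_filter(kmap) re-keys the entry under SUBTYPE "N/A":
# ("UVIS", "ABCD", "1", "1", "1.5", "NONE", "N/A")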
Example #16
File: wfc3.py Project: nden/crds
def wfc3_darkfile_filter(kmap_orig):
    """Filter to customize DARKFILE for hst_gentools/gen_rmap.py.
    
    Removes dead SUBTYPE='' darkfiles.   Driven by CDBS reffile_ops database defects.
    """
    darkfile_match_keys = ('DETECTOR', 'CCDAMP', 'BINAXIS1', 'BINAXIS2',
                           'CCDGAIN', 'SAMP_SEQ', 'SUBTYPE')
    kmap = copy.deepcopy(kmap_orig)
    for match in kmap_orig:
        header = dict(list(zip(darkfile_match_keys, match)))
        if header["SUBTYPE"] == '':
            header["SUBTYPE"] = "N/A"
            new_match = tuple(header[key] for key in darkfile_match_keys)
            for filemap in kmap[match]:
                log.warning(
                    "Re-mapping match with SUBTYPE='' to SUBTYPE='N/A' for",
                    filemap)
            kmap[new_match] = kmap[match]
            del kmap[match]
    return kmap, []
Example #17
 def organize_references(self, new_mode):
     """Find all references in the CRDS cache and relink them to the paths which are implied by `new_mode`.   
     This is used to reorganize existing file caches into new layouts,  e.g. flat -->  by instrument.
     """
     old_refpaths = rmap.list_references("*", observatory=self.observatory, full_path=True)
     old_mode = config.get_crds_ref_subdir_mode(self.observatory)
     log.info("Reorganizing", len(old_refpaths), "references from", repr(old_mode), "to", repr(new_mode))
     config.set_crds_ref_subdir_mode(new_mode, observatory=self.observatory)
     new_mode = config.get_crds_ref_subdir_mode(self.observatory)  # did it really change?
     for refpath in old_refpaths:
         with log.error_on_exception("Failed relocating:", repr(refpath)):
             desired_loc = rmap.locate_file(os.path.basename(refpath), observatory=self.observatory)
             if desired_loc != refpath:
                 if os.path.exists(desired_loc):
                     if not self.args.organize_delete_junk:
                         log.warning("Link or directory already exists at", repr(desired_loc), "Skipping", repr(refpath))
                         continue
                     utils.remove(desired_loc, observatory=self.observatory)
                 if config.writable_cache_or_info("Skipping file relocation from", repr(refpath), "to", repr(desired_loc)):
                     log.info("Relocating", repr(refpath), "to", repr(desired_loc))
                     shutil.move(refpath, desired_loc)
             else:
                 if old_mode != new_mode:
                     log.warning("Keeping existing cached file", repr(desired_loc), "already in target mode", repr(new_mode))
                 else:
                     log.warning("No change in subdirectory mode", repr(old_mode), "skipping reorganization of", repr(refpath))
     if new_mode == "flat" and old_mode == "instrument":
         log.info("Reorganizing from 'instrument' to 'flat' cache,  removing instrument directories.")
         for instrument in self.locator.INSTRUMENTS:
             self.remove_dir(instrument)
Example #18
    def _call(self, *args, **kwargs):
        """Core of RPC dispatch without error interpretation, logging, or return value decoding."""
        params = kwargs if len(kwargs) else args
        # if Any.kind(params) == Object and self.__version != '2.0':
        #   raise Exception('Unsupported arg type for JSON-RPC 1.0 '
        #                  '(the default version for this client, '
        #                  'pass version="2.0" to use keyword arguments)')
        jsonrpc_params = {
            "jsonrpc": self.__version,
            "method": self.__service_name,
            'params': params,
            'id': message_id()
        }

        parameters = json.dumps(jsonrpc_params)

        url = self._get_url(jsonrpc_params)

        if "serverless" in url or "server-less" in url:
            raise exceptions.ServiceError(
                "Configured for server-less mode.  Skipping JSON RPC " +
                repr(self.__service_name))

        if log.get_verbose() <= 50:
            log.verbose("CRDS JSON RPC", self.__service_name,
                        params if len(str(params)) <= 60 else "(...)", "-->")
        else:
            log.verbose("CRDS JSON RPC to", url, "parameters", params, "-->")

        response = apply_with_retries(self._call_service, parameters, url)

        try:
            rval = json.loads(response)
        except Exception:
            log.warning("Invalid CRDS jsonrpc response:\n", response)
            raise

        return rval
Example #19
def _flat_to_tpns(flat=None, schema_name=None):
    """Convert flat representation of DM schema to list of all TpnInfo objects."""
    if flat is None:
        flat = _schema_to_flat(_load_schema(schema_name))
    tpns = []
    for key, value in flat.items():
        if key.endswith(".TYPE"):
            basekey = str(key[:-len(".TYPE")])
            legal_values = [str(val) for val in flat.get(basekey + ".ENUM", [])]
            if legal_values:
                legal_values += ["ANY", "N/A"]
            legal_values = sorted(set(legal_values))
            if isinstance(value, list):
                value = tuple(value)
            datatype = SCHEMA_TYPE_TO_TPN.get(value, None)
            if datatype is not None:
                tpn = TpnInfo(name=basekey.upper(), keytype="H", datatype=datatype[0], 
                              presence=datatype[1], values=legal_values)
                log.verbose("Adding tpn constraint from DM schema:", repr(tpn), verbosity=65)
                tpns.append(tpn)
            else:
                log.warning("No TPN form for", repr(key), repr(value))
    return sorted(tpns)
Example #20
File: monitor.py Project: nden/crds
 def handle_unknown(self, message):
     """Handle unknown `message` types by issuing a warning and continuing monitoring."""
     log.warning(
         self.format_remote("Unknown message type", repr(message.type),
                            "in", repr(message)))
     return False
Example #21
File: monitor.py Project: nden/crds
 def handle_warning(self, message):
     """Generic "warning" handler issues a  warning from remote process and 
     contiues monitoring.
     """
     log.warning(self.format_remote(message.data))
     return False
Example #22
File: monitor.py Project: nden/crds
 def handle_cancel(self, message):
     """Generic "cancel" handler reports on commanded cancellation of remote process
     and possibly why it was cancelled.   Then stops monitoring and exits.
     """
     log.warning(self.format_remote("Processing cancelled:", message.data))
     return message.data["result"]
Example #23
def mapping_check_diffs_core(diffs):
    """Perform the core difference checks on difference tuples `diffs`."""
    categorized = sorted([(diff_action(d), d) for d in diffs])
    for action, msg in categorized:
        if "header" in action:
            log.verbose("In", _diff_tail(msg)[:-1], msg[-1])
        elif action == "add":
            log.verbose("In", _diff_tail(msg)[:-1], msg[-1])
        elif "rule" in action:
            log.warning("Rule change at", _diff_tail(msg)[:-1], msg[-1])
        elif action == "replace":
            old_val, new_val = diff_replace_old_new(msg)
            if old_val and new_val:
                if naming.newer(new_val, old_val):
                    log.verbose("In", _diff_tail(msg)[:-1], msg[-1])
                else:
                    log.warning("Reversion at", _diff_tail(msg)[:-1], msg[-1])
            else:
                log.warning("Unusual replacement",
                            _diff_tail(msg)[:-1], msg[-1])
        elif action == "delete":
            log.warning("Deletion at", _diff_tail(msg)[:-1], msg[-1])
        elif action == "parkey_difference":
            log.warning("Different lookup parameters",
                        _diff_tail(msg)[:-1], msg[-1])
        elif action == "class_difference":
            log.warning("Different classes at", _diff_tail(msg)[:-1], msg[-1])
        else:
            raise ValueError("Unexpected difference action:", action)
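For quick reference, how the action categories above are routed, as named in this code:

# verbose: "header*", "add", and "replace" when the new value is newer
# warning: "*rule*", reversions, unusual replacements, "delete",
#          "parkey_difference", "class_difference"
# anything else raises ValueError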
Example #24
    def main(self):
        """Perform the differencing."""
        self.args.files = [self.args.old_file,
                           self.args.new_file]  # for defining self.observatory
        self.old_file = self.locate_file(self.args.old_file)
        self.new_file = self.locate_file(self.args.new_file)
        if self.args.brief:
            self.args.lowest_mapping_only = True
            self.args.remove_paths = True
            self.args.hide_boring_diffs = True
            self.args.include_header_diffs = True
        if self.args.sync_files:
            assert not (self.args.cache1 or self.args.cache2), \
                "--sync-files is not compatible with cache-to-cache differences."
            if self.args.print_all_new_files:
                serial_old = naming.newstyle_serial(self.old_file)
                serial_new = naming.newstyle_serial(self.new_file)
                if None not in [serial_old, serial_new]:
                    # apply the +1 only after the None check to avoid a TypeError
                    errs = sync.SyncScript("crds.sync --range {0}:{1}".format(
                        serial_old, serial_new + 1))()
                    assert not errs, "Errors occurred while syncing all rules to CRDS cache."
                else:
                    log.warning(
                        "Cannot sync non-standard mapping names,  results may be incomplete."
                    )
            else:
                self.sync_files([self.old_file, self.new_file])
        elif self.args.print_all_new_files:
            log.warning(
                "--print-all-new-files requires a complete set of rules.  suggest --sync-files."
            )

        # self.args.files = [ self.old_file, self.new_file ]   # for defining self.observatory

        assert (self.args.cache1 and self.args.cache2) or (not self.args.cache1 and not self.args.cache2), \
            "Cache-to-cache comparison requires both --cache1 and --cache2;  otherwise neither for single cache comparison."

        if self.args.print_new_files:
            status = self.print_new_files()
        elif self.args.print_all_new_files:
            status = self.print_all_new_files()
        elif self.args.print_affected_instruments:
            status = self.print_affected_instruments()
        elif self.args.print_affected_types:
            status = self.print_affected_types()
        elif self.args.print_affected_modes:
            status = self.print_affected_modes()
        else:
            status = difference(
                self.observatory,
                self.old_file,
                self.new_file,
                primitive_diffs=self.args.primitive_diffs,
                check_diffs=self.args.check_diffs,
                check_references=self.args.check_references,
                mapping_text_diffs=self.args.mapping_text_diffs,
                include_header_diffs=self.args.include_header_diffs,
                hide_boring_diffs=self.args.hide_boring_diffs,
                recurse_added_deleted=self.args.recurse_added_deleted,
                lowest_mapping_only=self.args.lowest_mapping_only,
                remove_paths=self.args.remove_paths,
                squash_tuples=self.args.squash_tuples,
                cache1=self.args.cache1,
                cache2=self.args.cache2)
        if log.errors() or log.warnings():
            return 2
        else:
            return status
Example #25
def mapping_check_diffs_core(diffs):
    """Perform the core difference checks on difference tuples `diffs`."""
    categorized = sorted([ (diff_action(d), d) for d in diffs ])
    for action, msg in categorized:
        if "header" in action:
            log.verbose("In", _diff_tail(msg)[:-1], msg[-1])
        elif action == "add":
            log.verbose("In", _diff_tail(msg)[:-1], msg[-1])
        elif "rule" in action:
            log.warning("Rule change at", _diff_tail(msg)[:-1], msg[-1])
        elif action == "replace":
            old_val, new_val = diff_replace_old_new(msg)
            if old_val and new_val:
                if naming.newer(new_val, old_val):
                    log.verbose("In", _diff_tail(msg)[:-1], msg[-1])
                else:
                    log.warning("Reversion at", _diff_tail(msg)[:-1], msg[-1])
            else:
                log.warning("Unusual replacement", _diff_tail(msg)[:-1], msg[-1])
        elif action == "delete":
            log.warning("Deletion at", _diff_tail(msg)[:-1], msg[-1])
        elif action == "parkey_difference":
            log.warning("Different lookup parameters", _diff_tail(msg)[:-1], msg[-1])
        elif action == "class_difference":
            log.warning("Different classes at", _diff_tail(msg)[:-1], msg[-1])
        else:
            raise ValueError("Unexpected difference action:", action)
Example #26
 def handle_unknown(self, message):
     """Handle unknown `message` types by issuing a warning and continuing monitoring."""
     log.warning(self.format_remote("Unknown message type", repr(message.type), "in", repr(message)))
     return False
Example #27
File: locate.py Project: nden/crds
def check_naming_consistency(checked_instrument=None,
                             exhaustive_mapping_check=False):
    """Dev function to compare the properties returned by name decomposition
    to the properties determined by file contents and make sure they're the same.
    Also checks rmap membership.

    >> from crds.tests import test_config
    >> old_config = test_config.setup()
    >> check_naming_consistency("acs")
    >> check_naming_consistency("cos")
    >> check_naming_consistency("nicmos")
    >> check_naming_consistency("stis")
    >> check_naming_consistency("wfc3")
    >> check_naming_consistency("wfpc2")
    >> test_config.cleanup(old_config)
    """
    from crds import certify

    for ref in rmap.list_references("*", observatory="hst", full_path=True):
        with log.error_on_exception("Failed processing:", repr(ref)):

            _path, _observ, instrument, filekind, _serial, _ext = ref_properties_from_cdbs_path(
                ref)

            if checked_instrument is not None and instrument != checked_instrument:
                continue

            if data_file.is_geis_data(ref):
                if os.path.exists(data_file.get_conjugate(ref)):
                    continue
                else:
                    log.warning("No GEIS header for", repr(ref))

            log.verbose("Processing:", instrument, filekind, ref)

            _path2, _observ2, instrument2, filekind2, _serial2, _ext2 = ref_properties_from_header(
                ref)
            if instrument != instrument2:
                log.error("Inconsistent instruments", repr(instrument), "vs.",
                          repr(instrument2), "for", repr(ref))
            if filekind != filekind2:
                log.error("Inconsistent filekinds", repr(filekind), "vs.",
                          repr(filekind2), "for", repr(ref))

            for pmap_name in reversed(
                    sorted(rmap.list_mappings("*.pmap", observatory="hst"))):

                pmap = crds.get_cached_mapping(pmap_name)

                r = certify.find_governing_rmap(pmap_name, ref)

                if not r:
                    continue

                if r.instrument != instrument:
                    log.error("Rmap instrument", repr(r.instrument),
                              "inconsistent with name derived instrument",
                              repr(instrument), "for", repr(ref), "in",
                              repr(pmap_name))
                if r.filekind != filekind:
                    log.error("Rmap filekind", repr(r.filekind),
                              "inconsistent with name derived filekind",
                              repr(filekind), "for", repr(ref), "in",
                              repr(pmap_name))
                if r.instrument != instrument2:
                    log.error("Rmap instrument", repr(r.instrument),
                              "inconsistent with content derived instrument",
                              repr(instrument2), "for", repr(ref), "in",
                              repr(pmap_name))
                if r.filekind != filekind2:
                    log.error("Rmap filekind", repr(r.filekind),
                              "inconsistent with content derived filekind",
                              repr(filekind2), "for", repr(ref), "in",
                              repr(pmap_name))

                if not exhaustive_mapping_check:
                    break

            else:
                log.error("Orphan reference", repr(ref),
                          "not found under any context.")
Example #28
def check_naming_consistency(checked_instrument=None, exhaustive_mapping_check=False):
    """Dev function to compare the properties returned by name decomposition
    to the properties determined by file contents and make sure they're the same.
    Also checks rmap membership.

    >> from crds.tests import test_config
    >> old_config = test_config.setup()
    >> check_naming_consistency("acs")
    >> check_naming_consistency("cos")
    >> check_naming_consistency("nicmos")
    >> check_naming_consistency("stis")
    >> check_naming_consistency("wfc3")
    >> check_naming_consistency("wfpc2")
    >> test_config.cleanup(old_config)
    """
    from crds import certify

    for ref in rmap.list_references("*", observatory="hst", full_path=True):
        with log.error_on_exception("Failed processing:", repr(ref)):

            _path, _observ, instrument, filekind, _serial, _ext = ref_properties_from_cdbs_path(ref)

            if checked_instrument is not None and instrument != checked_instrument:
                continue

            if data_file.is_geis_data(ref):
                if os.path.exists(data_file.get_conjugate(ref)):
                    continue
                else:
                    log.warning("No GEIS header for", repr(ref))

            log.verbose("Processing:", instrument, filekind, ref)
            
            _path2, _observ2, instrument2, filekind2, _serial2, _ext2 = ref_properties_from_header(ref)
            if instrument != instrument2:
                log.error("Inconsistent instruments", repr(instrument), "vs.", repr(instrument2), 
                          "for", repr(ref))
            if filekind != filekind2:
                log.error("Inconsistent filekinds", repr(filekind), "vs.", repr(filekind2), 
                          "for", repr(ref))

            for pmap_name in reversed(sorted(rmap.list_mappings("*.pmap", observatory="hst"))):

                pmap = crds.get_cached_mapping(pmap_name)

                r = certify.find_governing_rmap(pmap_name, ref)

                if not r:
                    continue

                if r.instrument != instrument:
                    log.error("Rmap instrument", repr(r.instrument), 
                              "inconsistent with name derived instrument", repr(instrument), "for", repr(ref), "in", repr(pmap_name))
                if r.filekind != filekind:
                    log.error("Rmap filekind", repr(r.filekind), 
                              "inconsistent with name derived filekind", repr(filekind), "for", repr(ref), "in", repr(pmap_name))
                if r.instrument != instrument2:
                    log.error("Rmap instrument", repr(r.instrument), 
                              "inconsistent with content derived instrument", repr(instrument2), "for", repr(ref), "in", repr(pmap_name))
                if r.filekind != filekind2:
                    log.error("Rmap filekind", repr(r.filekind), 
                              "inconsistent with content derived filekind", repr(filekind2), "for", repr(ref), "in", repr(pmap_name))
                
                if not exhaustive_mapping_check:
                    break

            else:
                log.error("Orphan reference", repr(ref), "not found under any context.")
Example #29
# ----------------------------------------------------------------------

import gc

# ----------------------------------------------------------------------

import six  # assumed import, not shown in this excerpt; _flatten_dict below uses six.string_types

import crds
from crds import log, config

try:
    from crds.core import crds_cache_locking
except ImportError:
    crds_cache_locking = None
    log.warning(
        "CRDS needs to be updated to v7.1.4 or greater to support cache locking and association based CRDS cache updates.  Try 'conda update crds'."
    )

# ----------------------------------------------------------------------


def _flatten_dict(nested):
    def flatten(root, path, output):
        for key, val in root.items():
            if isinstance(key, six.string_types):
                if isinstance(val, dict):
                    flatten(val, path + [key], output)
                else:
                    output['.'.join(path + [key])] = val

    output = {}
    flatten(nested, [], output)
    return output
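A short usage sketch of _flatten_dict:

# Nested string-keyed dicts collapse to dotted keys.
nested = {"meta": {"instrument": {"name": "MIRI"}}}
assert _flatten_dict(nested) == {"meta.instrument.name": "MIRI"}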