def _initial_recommendations(
        name, parameters, reftypes=None, context=None, ignore_cache=False,
        observatory="jwst", fast=False):
    """Shared logic for getreferences() and getrecommendations().

    Validates the inputs, resolves the effective CRDS context, and computes
    best references either locally or remotely via the CRDS server.

    Parameters
    ----------
    name : str
        Caller name, used only to label verbose log output.
    parameters : dict
        Dataset matching parameters (header keyword -> value).
    reftypes : sequence of str or None
        Reference types to compute; None means all applicable types.
    context : str or None
        Explicit CRDS context file, or None to let CRDS choose one.
    ignore_cache : bool
        If True, bypass the local CRDS cache.
    observatory : str
        Observatory name, e.g. "jwst" or "hst".
    fast : bool
        If True, skip verbose logging, config updates, and bad-file warnings.

    Returns
    -------
    (final_context, bestrefs) : (str, dict)
        The effective context actually used and the best references mapping.
    """
    if not fast:
        log.verbose("=" * 120)
        log.verbose(name + "() CRDS version: ", version_info())
        log.verbose(name + "() server:", api.get_crds_server())
        log.verbose(name + "() observatory:", observatory)
        log.verbose(name + "() parameters:\n", log.PP(parameters), verbosity=65)
        log.verbose(name + "() reftypes:", reftypes)
        log.verbose(name + "() context:", repr(context))
        log.verbose(name + "() ignore_cache:", ignore_cache)
        # Dump every CRDS-related environment variable for debugging.
        for var in os.environ:
            if var.upper().startswith("CRDS"):
                log.verbose(var, "=", repr(os.environ[var]))
    check_observatory(observatory)
    # BUG FIX: check_parameters() returns the cleaned parameter dict (see the
    # other copy of this function in this file); the original discarded the
    # return value and passed the raw, uncleaned parameters downstream.
    parameters = check_parameters(parameters)
    check_reftypes(reftypes)
    check_context(context)
    mode, final_context = get_processing_mode(observatory, context)
    log.verbose("Final effective context is", repr(final_context))
    if mode == "local":
        log.verbose("Computing best references locally.")
        bestrefs = local_bestrefs(
            parameters, reftypes=reftypes, context=final_context,
            ignore_cache=ignore_cache)
    else:
        log.verbose("Computing best references remotely.")
        bestrefs = api.get_best_references(final_context, parameters, reftypes=reftypes)
    if not fast:
        # Nominally fast=True (this is skipped) in crds.bestrefs, used for HST
        # and reprocessing, because bestrefs does reprocessing determinations
        # for two contexts which run on 100's of thousands of datasets and
        # should only report bad files once and only when arising from the
        # new vs. the old context.
        update_config_info(observatory)
        log.verbose(name + "() results:\n", log.PP(bestrefs), verbosity=65)
        instrument = utils.header_to_instrument(parameters)
        warn_bad_context(observatory, final_context, instrument)
        warn_bad_references(observatory, bestrefs)
    return final_context, bestrefs
def _initial_recommendations(
        name, parameters, reftypes=None, context=None, ignore_cache=False,
        observatory="jwst", fast=False):
    """Shared logic for getreferences() and getrecommendations().

    Validates the inputs, resolves the effective CRDS context, and computes
    best references either locally or remotely via the CRDS server.

    Parameters
    ----------
    name : str
        Caller name, used only to label verbose log output.
    parameters : dict
        Dataset matching parameters (header keyword -> value).
    reftypes : sequence of str or None
        Reference types to compute; None means all applicable types.
    context : str or None
        Explicit CRDS context file, or None to let CRDS choose one.
    ignore_cache : bool
        If True, bypass the local CRDS cache.
    observatory : str
        Observatory name, e.g. "jwst" or "hst".
    fast : bool
        If True, skip verbose logging, config updates, and bad-file warnings.

    Returns
    -------
    (final_context, bestrefs) : (str, dict)
        The effective context actually used and the best references mapping.
    """
    if not fast:
        log.verbose("="*120)
        log.verbose(name + "() CRDS version: ", version_info())
        log.verbose(name + "() server:", api.get_crds_server())
        log.verbose(name + "() observatory:", observatory)
        log.verbose(name + "() parameters:\n", log.PP(parameters), verbosity=65)
        log.verbose(name + "() reftypes:", reftypes)
        log.verbose(name + "() context:", repr(context))
        log.verbose(name + "() ignore_cache:", ignore_cache)
        # Only the two primary CRDS environment variables are logged here
        # (the old loop over all CRDS* variables was removed as dead code).
        log.verbose("CRDS_PATH =", os.environ.get("CRDS_PATH", "UNDEFINED"))
        log.verbose("CRDS_SERVER_URL =", os.environ.get("CRDS_SERVER_URL", "UNDEFINED"))
    check_observatory(observatory)
    # check_parameters() returns the cleaned parameter dict; use it downstream.
    parameters = check_parameters(parameters)
    check_reftypes(reftypes)
    check_context(context)
    mode, final_context = get_processing_mode(observatory, context)
    log.verbose("Final effective context is", repr(final_context))
    if mode == "local":
        log.verbose("Computing best references locally.")
        bestrefs = local_bestrefs(
            parameters, reftypes=reftypes, context=final_context,
            ignore_cache=ignore_cache)
    else:
        log.verbose("Computing best references remotely.")
        bestrefs = api.get_best_references(final_context, parameters, reftypes=reftypes)
    if not fast:
        # Nominally fast=True (this is skipped) in crds.bestrefs, used for HST
        # and reprocessing, because bestrefs does reprocessing determinations
        # for two contexts which run on 100's of thousands of datasets and
        # should only report bad files once and only when arising from the
        # new vs. the old context.
        update_config_info(observatory)
        log.verbose(name + "() results:\n", log.PP(bestrefs), verbosity=65)
        instrument = utils.header_to_instrument(parameters)
        warn_bad_context(observatory, final_context, instrument)
        warn_bad_references(observatory, bestrefs)
    return final_context, bestrefs