Code example #1
def runpipeline(pl, convdict, conf):
    """Runs the quicklook pipeline as configured.

    Args:
        pl: a list of [pa, qas] pairs, where pa is a pipeline step and qas is the list of
            QAs to run for that pa
        convdict: converted dictionary; e.g. conf["IMAGE"] holds the file name on disk,
            while convdict["IMAGE"] holds the corresponding desispec.image.Image object, and so on.
            See the setup_pipeline method below for examples.
        conf: the configuration dictionary, read from the configuration yaml file,
            e.g. conf = configdict = yaml.load(open('configfile.yaml','rb'))
    """

    qlog = qllogger.QLLogger("QuickLook", 20)
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    paconf = conf["PipeLine"]
    qlog = qllogger.QLLogger("QuickLook", 0)
    log = qlog.getlog()
    for s, step in enumerate(pl):
        log.info("Starting to run step %s" % (paconf[s]["StepName"]))
        pa = step[0]
        pargs = mapkeywords(step[0].config["kwargs"], convdict)
        try:
            hb.start("Running %s" % (step[0].name))
            inp = pa(inp, **pargs)
        except Exception as e:
            log.critical("Failed to run PA %s error was %s" %
                         (step[0].name, e))
            sys.exit("Failed to run PA %s" % (step[0].name))
        qaresult = {}
        for qa in step[1]:
            try:
                qargs = mapkeywords(qa.config["kwargs"], convdict)
                hb.start("Running %s" % (qa.name))
                res = qa(inp, **qargs)
                log.debug("%s %s" % (qa.name, inp))
                qaresult[qa.name] = res
            except Exception as e:
                log.warning("Failed to run QA %s error was %s" % (qa.name, e))
        if len(qaresult):
            yaml.dump(qaresult, open(paconf[s]["OutputFile"], "wb"))
            hb.stop("Step %s finished. Output is in %s " %
                    (paconf[s]["StepName"], paconf[s]["OutputFile"]))
        else:
            hb.stop("Step %s finished." % (paconf[s]["StepName"]))
    hb.stop("Pipeline processing finished. Serializing result")
    return inp
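
A minimal driver sketch for the function above, assuming a YAML configuration of the shape produced by testconfig (example #14) and the setup_pipeline helper shown in example #12; the file name qlconfig.yaml is illustrative:

import yaml
from desispec.quicklook import quicklook

#- "conf" is the configuration dictionary described in the docstring above
with open("qlconfig.yaml") as f:
    configdict = yaml.safe_load(f)
#- setup_pipeline turns the configuration into the [pa, qas] list plus the converted dictionary
pipeline, convdict = quicklook.setup_pipeline(configdict)
#- the return value is the output of the last pipeline step
result = quicklook.runpipeline(pipeline, convdict, configdict)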
Code example #2
File: qlconfig.py Project: sdss/lvmspec
def check_config(outconfig, singqa):
    """
    Given the expanded config, check for all possible file existence etc....
    """
    if singqa is None:
        qlog = qllogger.QLLogger(name="QLConfig")
        log = qlog.getlog()
        log.info("Checking if all the necessary files exist.")

        if outconfig["Flavor"] == 'science':
            files = [
                outconfig["RawImage"], outconfig["FiberMap"],
                outconfig["FiberFlatFile"]
            ]
            for thisfile in files:
                if not os.path.exists(thisfile):
                    sys.exit("File does not exist: {}".format(thisfile))
                else:
                    log.info("File check: Okay: {}".format(thisfile))
        elif outconfig["Flavor"] == "flat":
            files = [outconfig["RawImage"], outconfig["FiberMap"]]
            for thisfile in files:
                if not os.path.exists(thisfile):
                    sys.exit("File does not exist: {}".format(thisfile))
                else:
                    log.info("File check: Okay: {}".format(thisfile))
        log.info("All necessary files exist for {} configuration.".format(
            outconfig["Flavor"]))

    return
Code example #3
File: qas.py Project: secroun/desispec
 def __init__(self, name, inptype, config, logger=None):
     if logger is None:
         self.m_log = qllogger.QLLogger().getlog(name)
     else:
         self.m_log = logger
     self.__inpType__ = type(inptype)
     self.name = name
     self.config = config
     self.m_log.debug("initializing Monitoring alg %s" % name)
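
Every example on this page follows the same logging pattern: construct a QLLogger, optionally with a name and a numeric level (20 corresponds to logging.INFO), then call getlog() to obtain a standard Python logger. A minimal sketch:

from desispec.quicklook import qllogger

qlog = qllogger.QLLogger("QuickLook", 20)   #- 20 == logging.INFO
log = qlog.getlog()
log.info("QuickLook logger ready")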
Code example #4
File: pas.py Project: sdss/lvmspec
 def __init__(self, name, inptype, outtype, config, logger=None):
     if logger is None:
         qll = qllogger.QLLogger()
         self.m_log = qll.getlog(name)
     else:
         self.m_log = logger
     self.__inpType__ = type(inptype)
     self.__outType__ = type(outtype)
     self.name = name
     self.config = config
     self.m_log.debug("initializing Monitoring alg {}".format(name))
Code example #5
 def __init__(self,thislist=None,algorithms=None,flavor=None,mode=None):
     """
     thislist: given list of PAs
     algorithms: algorithm list coming from the config file, e.g. desispec/data/quicklook/qlconfig_dark.yaml
     flavor: only needed if a new list is to be built.
     mode: 'online' or 'offline'
     """
     self.flavor=flavor
     self.mode=mode
     self.thislist=thislist
     self.algorithms=algorithms
     self.palist=self._palist()
     self.qalist=self._qalist()
     qlog=qllogger.QLLogger(name="QLConfig")
     self.log=qlog.getlog()
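
A hypothetical construction of this Palist, following the Config.__init__ examples below where both the pipeline and the algorithm lists come from the same YAML configuration file:

import yaml
from desispec.quicklook import qlconfig

with open("desispec/data/quicklook/qlconfig_dark.yaml") as f:
    conf = yaml.safe_load(f)
#- pass the "Pipeline" section as thislist and the "Algorithms" section as algorithms
palist = qlconfig.Palist(conf["Pipeline"], conf["Algorithms"])
print(palist.palist)   #- the PA list built from the configuration
print(palist.qalist)   #- the QAs attached to each PA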
Code example #6
   def __init__(self, configfile, night, camera, expid, singqa, amps=True,rawdata_dir=None,specprod_dir=None, outdir=None,qlf=False):
       """
        configfile: a configuration file for QL, e.g. desispec/data/quicklook/qlconfig_dark.yaml
        night: night of the data to process, e.g. '20191015'
        camera: which camera to process, e.g. 'r0'
        expid: exposure id for the image to be processed
        amps: whether to output amp-level QA
        Note:
        rawdata_dir and specprod_dir: if not None, override the standard DESI convention
       """
 
       #- load the config file and extract
       self.conf = yaml.load(open(configfile,"r"))
       self.night = night
       self.expid = expid
       self.camera = camera
       self.singqa = singqa
       self.amps = amps
       self.rawdata_dir = rawdata_dir 
       self.specprod_dir = specprod_dir
       self.outdir = outdir
       self.dumpintermediates = self.conf["WriteIntermediatefiles"]
       self.writepixfile = self.conf["WritePixfile"]
       self.writeskymodelfile = self.conf["WriteSkyModelfile"]
       self.writestaticplots = self.conf["WriteStaticPlots"]
       self.usesigma = self.conf["UseResolution"]
       self.pipeline = self.conf["Pipeline"]
       self.algorithms = self.conf["Algorithms"]
       self._palist = Palist(self.pipeline,self.algorithms)
       self.pamodule = self._palist.pamodule
       self.qamodule = self._palist.qamodule
        self.wavelength = None
        if "BoxcarExtract" in self.algorithms.keys():
            if "wavelength" in self.algorithms["BoxcarExtract"].keys():
                self.wavelength = self.algorithms["BoxcarExtract"]["wavelength"][self.camera[0]]
        self.rescut = None
        self.sigmacut = None
        if "SkySub_QL" in self.algorithms.keys():
            if "Calculate_SNR" in self.algorithms["SkySub_QL"]["QA"].keys():
                if "Residual_Cut" in self.algorithms["SkySub_QL"]["QA"]["Calculate_SNR"].keys():
                    self.rescut = self.algorithms["SkySub_QL"]["QA"]["Calculate_SNR"]["Residual_Cut"]
                if "Sigma_Cut" in self.algorithms["SkySub_QL"]["QA"]["Calculate_SNR"].keys():
                    self.sigmacut = self.algorithms["SkySub_QL"]["QA"]["Calculate_SNR"]["Sigma_Cut"]
       self._qlf=qlf
       qlog=qllogger.QLLogger(name="QLConfig")
       self.log=qlog.getlog()
       self._qaRefKeys={"Bias_From_Overscan":"BIAS_AMP", "Get_RMS":"NOISE_AMP", "Count_Pixels":"LITFRAC_AMP", "Calc_XWSigma":"XWSIGMA", "CountSpectralBins":"NGOODFIB", "Sky_Peaks":"PEAKCOUNT", "Sky_Continuum":"SKYCONT", "Integrate_Spec":"DELTAMAG_TGT", "Sky_Residual":"MED_RESID", "Calculate_SNR":"FIDSNR_TGT"}
Code example #7
def mapkeywords(kw, kwmap):
    """
    Maps the keyword in the configuration to the corresponding object
    returned by the desispec.io module.
    e.g  Bias Image file is mapped to biasimage object... for the same keyword "BiasImage"
    """

    newmap = {}
    qlog = qllogger.QLLogger("QuickLook", 20)
    log = qlog.getlog()
    for k, v in kw.items():
        if isinstance(v, str) and len(v) >= 3 and v[0:2] == "%%":
            if v[2:] in kwmap:
                newmap[k] = kwmap[v[2:]]
            else:
                log.warning("Can't find key %s in conversion map. Skipping" %
                            (v[2:]))
        else:
            newmap[k] = v
    return newmap
Code example #8
File: quicklook.py Project: michaelJwilson/LBGCMB
def mapkeywords(kw,kwmap):
    """
    Maps the keyword in the configuration to the corresponding object
    returned by the desispec.io module.
    e.g  Bias Image file is mapped to biasimage object... for the same keyword "BiasImage"
    """

    newmap={}
    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    for k,v in kw.items():
        if isinstance(v,str) and len(v)>=3 and  v[0:2]=="%%": #- For direct configuration
            if v[2:] in kwmap:
                newmap[k]=kwmap[v[2:]]
            else:
                log.warning("Can't find key {} in conversion map. Skipping".format(v[2:]))
        elif k in kwmap: #- for configs generated via desispec.quicklook.qlconfig
            newmap[k]=kwmap[k]          
        else:
            newmap[k]=v
    return newmap
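
A hypothetical illustration of the %% convention that both versions of mapkeywords resolve; the plain object below stands in for something like a loaded desispec.image.Image:

biasimage = object()                                  #- stand-in for a loaded image object
kw = {"BiasImage": "%%BiasImage", "BoxWidth": 2.5}    #- a PA's "kwargs" block from the configuration
convdict = {"BiasImage": biasimage}                   #- built by setup_pipeline from the file on disk
pargs = mapkeywords(kw, convdict)
assert pargs == {"BiasImage": biasimage, "BoxWidth": 2.5}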
Code example #9
File: quicklook.py Project: michaelJwilson/LBGCMB
def ql_main(args=None):

    qlog = qllogger.QLLogger("QuickLook", 20)
    log = qlog.getlog()

    if args is None:
        args = parse()

    if args.config is not None:

        if args.rawdata_dir:
            rawdata_dir = args.rawdata_dir
        else:
            if 'QL_SPEC_DATA' not in os.environ:
                sys.exit(
                    "must set ${} environment variable or provide rawdata_dir".
                    format('QL_SPEC_DATA'))
            rawdata_dir = os.getenv('QL_SPEC_DATA')

        if args.specprod_dir:
            specprod_dir = args.specprod_dir
        else:
            if 'QL_SPEC_REDUX' not in os.environ:
                sys.exit(
                    "must set ${} environment variable or provide specprod_dir"
                    .format('QL_SPEC_REDUX'))
            specprod_dir = os.getenv('QL_SPEC_REDUX')

        log.info("Running Quicklook using configuration file {}".format(
            args.config))
        if os.path.exists(args.config):
            if "yaml" in args.config:
                config = qlconfig.Config(args.config,
                                         args.night,
                                         args.camera,
                                         args.expid,
                                         rawdata_dir=rawdata_dir,
                                         specprod_dir=specprod_dir)
                configdict = config.expand_config()
            else:
                log.critical("Can't open config file {}".format(args.config))
                sys.exit("Can't open config file")
        else:
            sys.exit("File does not exist: {}".format(args.config))

    elif args.fullconfig is not None:  #- This is mostly for development/debugging purpose
        log.info("Running Quicklook using full configuration file {}".format(
            args.fullconfig))
        if os.path.exists(args.fullconfig):
            if "yaml" in args.fullconfig:
                configdict = yaml.load(open(args.fullconfig, "r"))
            else:
                log.critical("Can't open config file {}".format(args.config))
                sys.exit("Can't open config file")
        else:
            sys.exit("File does not exist: {}".format(args.config))
    else:
        sys.exit(
            "Must provide a valid config file. See desispec/data/quicklook for an example"
        )

    #- save the expanded config to a file
    if args.save:
        if "yaml" in args.save:
            f = open(args.save, "w")
            yaml.dump(configdict, f)
            log.info("Output saved for this configuration to {}".format(
                args.save))
            f.close()
        else:
            log.info(
                "Can save config to only yaml output. Put a yaml in the argument"
            )

    pipeline, convdict = quicklook.setup_pipeline(configdict)
    res = quicklook.runpipeline(pipeline,
                                convdict,
                                configdict,
                                mergeQA=args.mergeQA)
    inpname = configdict["RawImage"]
    night = configdict["Night"]
    camera = configdict["Camera"]
    expid = configdict["Expid"]

    if isinstance(res, image.Image):
        if configdict["OutputFile"]:
            finalname = configdict["OutputFile"]
        else:
            finalname = "image-{}-{:08d}.fits".format(camera, expid)
            log.critical(
                "No final outputname given. Writing to a image file {}".format(
                    finalname))
        imIO.write_image(finalname, res, meta=None)
    elif isinstance(res, frame.Frame):
        if configdict["Flavor"] == 'arcs':
            from desispec.io.meta import findfile
            finalname = "psfnight-{}.fits".format(camera)
            finalframe = findfile('frame',
                                  night=night,
                                  expid=expid,
                                  camera=camera)
            frIO.write_frame(finalframe, res, header=None)
        else:
            if configdict["OutputFile"]:
                finalname = configdict["OutputFile"]
            else:
                finalname = "frame-{}-{:08d}.fits".format(camera, expid)
                log.critical(
                    "No final outputname given. Writing to a frame file {}".
                    format(finalname))
            frIO.write_frame(finalname, res, header=None)
    else:
        log.error(
            "Result of pipeline is an unknown type {}. Don't know how to write"
            .format(type(res)))
        sys.exit("Unknown pipeline result type {}.".format(type(res)))
    log.info("Pipeline completed. Final result is in {}".format(finalname))
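
A hypothetical programmatic call of the entry point above, naming only the attributes that ql_main actually reads; the environment variables stand in for rawdata_dir and specprod_dir, and all paths and values are illustrative:

import os
from argparse import Namespace

os.environ.setdefault("QL_SPEC_DATA", "/scratch/rawdata")
os.environ.setdefault("QL_SPEC_REDUX", "/scratch/specprod")
args = Namespace(config="qlconfig_dark.yaml", fullconfig=None,
                 night="20191015", camera="r0", expid=4,
                 rawdata_dir=None, specprod_dir=None,
                 save=None, mergeQA=False)
ql_main(args)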
Code example #10
File: quicklook.py Project: gnizq64/desispec
def runpipeline(pl, convdict, conf, mergeQA=False):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: is a list of [pa,qas] where pa is a pipeline step and qas the corresponding
            qas for that pa
        convdict: converted dictionary e.g : conf["IMAGE"] is the real psf file
            but convdict["IMAGE"] is like desispec.image.Image object and so on.
            details in setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file.
            e.g: conf=configdict=yaml.load(open('configfile.yaml','rb'))
        mergeQA: if True, outputs the merged QA after the pipeline executes. Perhaps this
            should always be True, but it is left as an option until configuration and IO settle.
    """

    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    singqa = conf["singleqa"]
    paconf = conf["PipeLine"]
    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    passqadict = None  #- pass this dict to QAs downstream
    schemaMerger = QL_QAMerger(conf['Night'], conf['Expid'], conf['Flavor'],
                               conf['Camera'], conf['Program'])
    QAresults = [
    ]  #- merged QA list for the whole pipeline. This will be reorganized for databasing after the pipeline executes
    if singqa is None:
        for s, step in enumerate(pl):
            log.info("Starting to run step {}".format(paconf[s]["StepName"]))
            pa = step[0]
            pargs = mapkeywords(step[0].config["kwargs"], convdict)
            schemaStep = schemaMerger.addPipelineStep(paconf[s]["StepName"])
            try:
                hb.start("Running {}".format(step[0].name))
                oldinp = inp  #-  copy for QAs that need to see earlier input
                inp = pa(inp, **pargs)
            except Exception as e:
                log.critical("Failed to run PA {} error was {}".format(
                    step[0].name, e),
                             exc_info=True)
                sys.exit("Failed to run PA {}".format(step[0].name))
            qaresult = {}
            for qa in step[1]:
                try:
                    qargs = mapkeywords(qa.config["kwargs"], convdict)
                    hb.start("Running {}".format(qa.name))
                    qargs[
                        "dict_countbins"] = passqadict  #- pass this to all QA downstream

                    if qa.name == "RESIDUAL" or qa.name == "Sky_Residual":
                        res = qa(inp[0], inp[1], **qargs)
                    else:
                        if isinstance(inp, tuple):
                            res = qa(inp[0], **qargs)
                        else:
                            res = qa(inp, **qargs)

                    if qa.name == "COUNTBINS" or qa.name == "CountSpectralBins":  #TODO -must run this QA for now. change this later.
                        passqadict = res
                    if "qafile" in qargs:
                        qawriter.write_qa_ql(qargs["qafile"], res)
                    log.debug("{} {}".format(qa.name, inp))
                    qaresult[qa.name] = res
                    schemaStep.addParams(res['PARAMS'])
                    schemaStep.addMetrics(res['METRICS'])
                except Exception as e:
                    log.warning("Failed to run QA {}. Got Exception {}".format(
                        qa.name, e),
                                exc_info=True)
            if len(qaresult):
                if conf["DumpIntermediates"]:
                    f = open(paconf[s]["OutputFile"], "w")
                    f.write(yaml.dump(yamlify(qaresult)))
                    hb.stop("Step {} finished. Output is in {} ".format(
                        paconf[s]["StepName"], paconf[s]["OutputFile"]))
            else:
                hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
            QAresults.append([pa.name, qaresult])
        hb.stop("Pipeline processing finished. Serializing result")
    else:
        import numpy as np
        qa = None
        qas = [
            'Bias_From_Overscan', ['Get_RMS', 'Calc_XWSigma', 'Count_Pixels'],
            'CountSpectralBins', ['Sky_Continuum', 'Sky_Peaks'],
            ['Sky_Residual', 'Integrate_Spec', 'Calculate_SNR']
        ]
        for palg in range(len(qas)):
            if singqa in qas[palg]:
                pa = pl[palg][0]
                pac = paconf[palg]
                if singqa == 'Bias_From_Overscan' or singqa == 'CountSpectralBins':
                    qa = pl[palg][1][0]
                else:
                    for qalg in range(len(qas[palg])):
                        if qas[palg][qalg] == singqa:
                            qa = pl[palg][1][qalg]
        if qa is None:
            log.critical("Unknown input... Valid QAs are: {}".format(qas))
            sys.exit()

        log.info("Starting to run step {}".format(pac["StepName"]))
        pargs = mapkeywords(pa.config["kwargs"], convdict)
        schemaStep = schemaMerger.addPipelineStep(pac["StepName"])
        qaresult = {}
        try:
            qargs = mapkeywords(qa.config["kwargs"], convdict)
            hb.start("Running {}".format(qa.name))
            if singqa == "Sky_Residual":
                res = qa(inp[0], inp[1], **qargs)
            else:
                if isinstance(inp, tuple):
                    res = qa(inp[0], **qargs)
                else:
                    res = qa(inp, **qargs)
            if singqa == "CountSpectralBins":
                passqadict = res
            if "qafile" in qargs:
                qawriter.write_qa_ql(qargs["qafile"], res)
            log.debug("{} {}".format(qa.name, inp))
            schemaStep.addMetrics(res['METRICS'])
        except Exception as e:
            log.warning("Failed to run QA {}. Got Exception {}".format(
                qa.name, e),
                        exc_info=True)
        if len(qaresult):
            if conf["DumpIntermediates"]:
                f = open(pac["OutputFile"], "w")
                f.write(yaml.dump(yamlify(qaresult)))
                log.info("{} finished".format(qa.name))

    #- merge QAs for this pipeline execution
    if mergeQA is True:
        # from desispec.quicklook.util import merge_QAs
        # log.info("Merging all the QAs for this pipeline execution")
        # merge_QAs(QAresults,conf)
        log.debug("Dumping mergedQAs")
        from desispec.io import findfile
        ftype = 'ql_mergedQA_file'
        specprod_dir = os.environ[
            'QL_SPEC_REDUX'] if 'QL_SPEC_REDUX' in os.environ else ""
        if conf['Flavor'] == 'arcs':
            ftype = 'ql_mergedQAarc_file'
        destFile = findfile(ftype,
                            night=conf['Night'],
                            expid=conf['Expid'],
                            camera=conf['Camera'],
                            specprod_dir=specprod_dir)
        # this will overwrite the file. above function returns same name for different QL executions
        # results will be erased.
        schemaMerger.writeToFile(destFile)
        log.info("Wrote merged QA file {}".format(destFile))
        schemaMerger.writeTojsonFile(destFile)
        log.info("Wrote merged QA file {}".format(
            destFile.split('.yaml')[0] + '.json'))
    if isinstance(inp, tuple):
        return inp[0]
    else:
        return inp
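
A hypothetical sketch of what a single QA returns inside the loop above: a dictionary carrying at least 'PARAMS' and 'METRICS', which the schema merger collects per pipeline step while QAresults accumulates one [pa.name, qaresult] pair per step (names and values are illustrative):

res = {
    "PARAMS":  {"NOISE_AMP_NORMAL_RANGE": [-1.0, 1.0]},
    "METRICS": {"NOISE_AMP": [2.1, 2.0, 2.2, 2.1]},
}
qaresult = {"Get_RMS": res}
QAresults = [["Preproc", qaresult]]   #- shape after one pipeline step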
Code example #11
File: qlconfig.py Project: sdss/lvmspec
    def __init__(self,
                 configfile,
                 night,
                 camera,
                 expid,
                 singqa,
                 amps=True,
                 rawdata_dir=None,
                 specprod_dir=None,
                 outdir=None,
                 qlf=False,
                 psfid=None,
                 flatid=None,
                 templateid=None,
                 templatenight=None,
                 qlplots=False,
                 store_res=None):
        """
        configfile: a configuration file for QL, e.g. desispec/data/quicklook/qlconfig_dark.yaml
        night: night of the data to process, e.g. '20191015'
        camera: which camera to process, e.g. 'r0'
        expid: exposure id for the image to be processed
        amps: whether to output amp-level QA
        Note:
        rawdata_dir and specprod_dir: if not None, override the standard DESI convention
        """
        with open(configfile, 'r') as f:
            self.conf = yaml.safe_load(f)
            f.close()
        self.night = night
        self.expid = expid
        self.psfid = psfid
        self.flatid = flatid
        self.templateid = templateid
        self.templatenight = templatenight
        self.camera = camera
        self.singqa = singqa
        self.amps = amps
        self.rawdata_dir = rawdata_dir
        self.specprod_dir = specprod_dir
        self.outdir = outdir
        self.flavor = self.conf["Flavor"]

        #- Options to write out frame, fframe, preproc, and sky model files
        self.dumpintermediates = False
        self.writepreprocfile = self.conf["WritePreprocfile"]
        self.writeskymodelfile = False

        self.plotconf = None
        self.hardplots = False
        #- Load plotting configuration file
        if qlplots != 'noplots' and qlplots is not None:
            with open(qlplots, 'r') as pf:
                self.plotconf = yaml.safe_load(pf)
                pf.close()
        #- Use hard coded plotting algorithms
        elif qlplots is None:
            self.hardplots = True

        # Use --resolution to store full resolution informtion
        if store_res:
            self.usesigma = True
        else:
            self.usesigma = False

        self.pipeline = self.conf["Pipeline"]
        self.algorithms = self.conf["Algorithms"]
        self._palist = Palist(self.pipeline, self.algorithms)
        self.pamodule = self._palist.pamodule
        self.qamodule = self._palist.qamodule

        algokeys = self.algorithms.keys()

        # Extract mapping of scalar/refence key names for each QA
        qaRefKeys = {}
        for i in algokeys:
            for k in self.algorithms[i]["QA"].keys():
                if k == "Check_HDUs":
                    qaRefKeys[k] = "CHECKHDUS"
                qaparams = self.algorithms[i]["QA"][k]["PARAMS"]
                for par in qaparams.keys():
                    if "NORMAL_RANGE" in par:
                        scalar = par.replace("_NORMAL_RANGE", "")
                        qaRefKeys[k] = scalar

        # Special additional parameters to read in.
        self.wavelength = None
        for key in ["BoxcarExtract", "Extract_QP"]:
            if key in self.algorithms.keys():
                if "wavelength" in self.algorithms[key].keys():
                    self.wavelength = self.algorithms[key]["wavelength"][
                        self.camera[0]]

        self._qlf = qlf
        qlog = qllogger.QLLogger(name="QLConfig")
        self.log = qlog.getlog()
        self._qaRefKeys = qaRefKeys
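
A hypothetical, self-contained illustration of the qaRefKeys extraction above: a QA whose PARAMS contain a *_NORMAL_RANGE entry is mapped to the corresponding scalar name, reproducing entries of the hard-coded _qaRefKeys table in example #6 (the input dictionary is made up for the demonstration):

algorithms = {
    "Initialize": {
        "QA": {
            "Get_RMS": {"PARAMS": {"NOISE_AMP_NORMAL_RANGE": [-1.0, 1.0]}},
        }
    }
}
qaRefKeys = {}
for i in algorithms:
    for k, qaconf in algorithms[i]["QA"].items():
        for par in qaconf["PARAMS"]:
            if "NORMAL_RANGE" in par:
                qaRefKeys[k] = par.replace("_NORMAL_RANGE", "")
assert qaRefKeys == {"Get_RMS": "NOISE_AMP"}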
Code example #12
File: quicklook.py Project: michaelJwilson/LBGCMB
def setup_pipeline(config):
    """
    Given a configuration from QLF, this sets up a pipeline [pa,qa] and also returns a
    conversion dictionary from the configuration dictionary so that Pipeline steps (PA) can
    take them. This is required for runpipeline.
    """
    import astropy.io.fits as fits
    import desispec.io.fibermap as fibIO
    import desispec.io.sky as skyIO
    import desispec.io.fiberflat as ffIO
    import desispec.fiberflat as ff
    import desispec.io.image as imIO
    import desispec.image as im
    import desispec.io.frame as frIO
    import desispec.frame as dframe
    from desispec.quicklook import procalgs
    from desispec.boxcar import do_boxcar

    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    if config is None:
        return None
    log.info("Reading Configuration")
    if "RawImage" not in config:
        log.critical("Config is missing \"RawImage\" key.")
        sys.exit("Missing \"RawImage\" key.")
    inpname=config["RawImage"]
    if "FiberMap" not in config:
        log.critical("Config is missing \"FiberMap\" key.")
        sys.exit("Missing \"FiberMap\" key.")
    fibname=config["FiberMap"]
    proctype="Exposure"
    if "Camera" in config:
        camera=config["Camera"]
    if "DataType" in config:
        proctype=config["DataType"]
    debuglevel=20
    if "DebugLevel" in config:
        debuglevel=config["DebugLevel"]
        log.setLevel(debuglevel)
    hbeat=QLHB.QLHeartbeat(log,config["Period"],config["Timeout"])
    if config["Timeout"]> 200.0:
        log.warning("Heartbeat timeout exceeding 200.0 seconds")
    dumpintermediates=False
    if "DumpIntermediates" in config:
        dumpintermediates=config["DumpIntermediates"]

    biasimage=None #- This will be the converted dictionary key
    biasfile=None
    if "BiasImage" in config:
        biasfile=config["BiasImage"]

    darkimage=None
    darkfile=None
    if "DarkImage" in config:
        darkfile=config["DarkImage"]

    pixelflatfile=None
    pixflatimage=None
    if "PixelFlat" in config:
        pixelflatfile=config["PixelFlat"]

    fiberflatimagefile=None
    fiberflatimage=None
    if "FiberFlatImage" in config:
        fiberflatimagefile=config["FiberFlatImage"]

    arclampimagefile=None
    arclampimage=None
    if "ArcLampImage" in config:
        arclampimagefile=config["ArcLampImage"]

    fiberflatfile=None
    fiberflat=None
    if "FiberFlatFile" in config:
        if config["Flavor"] == 'arcs':
            pass
        else:
            fiberflatfile=config["FiberFlatFile"]

    skyfile=None
    skyimage=None
    if "SkyFile" in config:
        skyfile=config["SkyFile"]

    psf=None
    if config["Flavor"] == 'arcs':
        if not os.path.exists(os.path.join(os.environ['QL_SPEC_REDUX'],'calib2d','psf',config["Night"])):
            os.mkdir(os.path.join(os.environ['QL_SPEC_REDUX'],'calib2d','psf',config["Night"]))
        pass
    elif "PSFFile" in config:
        #from specter.psf import load_psf
        import desispec.psf
        psf=desispec.psf.PSF(config["PSFFile"])
        #psf=load_psf(config["PSFFile"])

    if "basePath" in config:
        basePath=config["basePath"]

    hbeat.start("Reading input file {}".format(inpname))
    inp=fits.open(inpname) #- reading raw image directly from astropy.io.fits
    hbeat.start("Reading fiberMap file {}".format(fibname))
    fibfile=fibIO.read_fibermap(fibname)
    fibhdr=fibfile.meta

    convdict={"FiberMap":fibfile}

    if psf is not None:
        convdict["PSFFile"]=psf

    if biasfile is not None:
        hbeat.start("Reading Bias Image {}".format(biasfile))
        biasimage=imIO.read_image(biasfile)
        convdict["BiasImage"]=biasimage

    if darkfile is not None:
        hbeat.start("Reading Dark Image {}".format(darkfile))
        darkimage=imIO.read_image(darkfile)
        convdict["DarkImage"]=darkimage

    if pixelflatfile:
        hbeat.start("Reading PixelFlat Image {}".format(pixelflatfile))
        pixelflatimage=imIO.read_image(pixelflatfile)
        convdict["PixelFlat"]=pixelflatimage

    if fiberflatimagefile:
        hbeat.start("Reading FiberFlat Image {}".format(fiberflatimagefile))
        fiberflatimage=imIO.read_image(fiberflatimagefile)
        convdict["FiberFlatImage"]=fiberflatimage

    if arclampimagefile:
        hbeat.start("Reading ArcLampImage {}".format(arclampimagefile))
        arclampimage=imIO.read_image(arclampimagefile)
        convdict["ArcLampImage"]=arclampimage

    if fiberflatfile:
        hbeat.start("Reading FiberFlat {}".format(fiberflatfile))
        fiberflat=ffIO.read_fiberflat(fiberflatfile)
        convdict["FiberFlatFile"]=fiberflat

    if skyfile:
        hbeat.start("Reading SkyModel file {}".format(skyfile))
        skymodel=skyIO.read_sky(skyfile)
        convdict["SkyFile"]=skymodel

    if dumpintermediates:
        convdict["DumpIntermediates"]=dumpintermediates
   
    hbeat.stop("Finished reading all static files")

    img=inp
    convdict["rawimage"]=img
    pipeline=[]
    for step in config["PipeLine"]:
        pa=getobject(step["PA"],log)
        if len(pipeline) == 0:
            if not pa.is_compatible(type(img)):
                log.critical("Pipeline configuration is incorrect! check configuration {} {}".format(img,pa.is_compatible(img)))
                sys.exit("Wrong pipeline configuration")
        else:
            if not pa.is_compatible(pipeline[-1][0].get_output_type()):
                log.critical("Pipeline configuration is incorrect! check configuration")
                log.critical("Can't connect input of {} to output of {}. Incompatible types".format(pa.name,pipeline[-1][0].name))
                sys.exit("Wrong pipeline configuration")
        qas=[]
        for q in step["QAs"]:
            qa=getobject(q,log)
            if not qa.is_compatible(pa.get_output_type()):
                log.warning("QA {} can not be used for output of {}. Skipping expecting {} got {} {}".format(qa.name,pa.name,qa.__inpType__,pa.get_output_type(),qa.is_compatible(pa.get_output_type())))
            else:
                qas.append(qa)
        pipeline.append([pa,qas])
    return pipeline,convdict
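
setup_pipeline only relies on a small interface from each PA object it builds via getobject: a name, a config carrying a "kwargs" block for mapkeywords, is_compatible, get_output_type, and being callable on the previous step's output. A hypothetical minimal PA satisfying that contract (not part of desispec, purely illustrative):

class DummyPA:
    def __init__(self, name, config):
        self.name = name
        self.config = config              #- must contain a "kwargs" dictionary
    def is_compatible(self, intype):
        return True                       #- accept any input type in this sketch
    def get_output_type(self):
        return object                     #- advertised output type, checked by the next step
    def __call__(self, inp, **kwargs):
        return inp                        #- pass the input through unchanged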
Code example #13
File: quicklook.py Project: michaelJwilson/LBGCMB
def runpipeline(pl,convdict,conf,mergeQA=False):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: is a list of [pa,qas] where pa is a pipeline step and qas the corresponding
            qas for that pa
        convdict: converted dictionary e.g : conf["IMAGE"] is the real psf file
            but convdict["IMAGE"] is like desispec.image.Image object and so on.
            details in setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file.
            e.g: conf=configdict=yaml.load(open('configfile.yaml','rb'))
        mergeQA: if True, outputs the merged QA after the pipeline executes. Perhaps this
            should always be True, but it is left as an option until configuration and IO settle.
    """

    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    hb=QLHB.QLHeartbeat(log,conf["Period"],conf["Timeout"])

    inp=convdict["rawimage"]
    paconf=conf["PipeLine"]
    qlog=qllogger.QLLogger("QuickLook",0)
    log=qlog.getlog()
    passqadict=None #- pass this dict to QAs downstream

    QAresults=[] #- merged QA list for the whole pipeline. This will be reorganized for databasing after the pipeline executes
    for s,step in enumerate(pl):
        log.info("Starting to run step {}".format(paconf[s]["StepName"]))
        pa=step[0]
        pargs=mapkeywords(step[0].config["kwargs"],convdict)
        try:
            hb.start("Running {}".format(step[0].name))
            oldinp=inp #-  copy for QAs that need to see earlier input
            inp=pa(inp,**pargs)
        except Exception as e:
            log.critical("Failed to run PA {} error was {}".format(step[0].name,e))
            sys.exit("Failed to run PA {}".format(step[0].name))
        qaresult={}
        for qa in step[1]:
            try:
                qargs=mapkeywords(qa.config["kwargs"],convdict)
                hb.start("Running {}".format(qa.name))
                qargs["dict_countbins"]=passqadict #- pass this to all QA downstream

                if qa.name=="RESIDUAL" or qa.name=="Sky_Residual":
                    res=qa(inp[0],inp[1],**qargs)
                    
                else:
                    if isinstance(inp,tuple):
                        res=qa(inp[0],**qargs)
                    else:
                        res=qa(inp,**qargs)

                if qa.name=="COUNTBINS" or qa.name=="CountSpectralBins":         #TODO -must run this QA for now. change this later.
                    passqadict=res
                log.debug("{} {}".format(qa.name,inp))
                qaresult[qa.name]=res

            except Exception as e:
                log.warning("Failed to run QA {} error was {}".format(qa.name,e))
        if len(qaresult):
            if conf["DumpIntermediates"]:
                f = open(paconf[s]["OutputFile"],"w")
                f.write(yaml.dump(yamlify(qaresult)))
                hb.stop("Step {} finished. Output is in {} ".format(paconf[s]["StepName"],paconf[s]["OutputFile"]))
        else:
            hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
        QAresults.append([pa.name,qaresult])
    hb.stop("Pipeline processing finished. Serializing result")

    #- merge QAs for this pipeline execution
    if mergeQA is True:
        from desispec.quicklook.util import merge_QAs
        log.info("Merging all the QAs for this pipeline execution")
        merge_QAs(QAresults)

    if isinstance(inp,tuple):
       return inp[0]
    else:
       return inp
Code example #14
File: quicklook.py Project: michaelJwilson/LBGCMB
def testconfig(outfilename="qlconfig.yaml"):
    """
    Make a test Config file, should be provided by the QL framework
    Below the %% variables are replaced by actual object when the respective
    algorithm is executed.
    """
    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    url=None #- QA output will be posted to QLF if set true

    conf={'BiasImage':os.environ['BIASIMAGE'],# path to bias image
          'DarkImage':os.environ['DARKIMAGE'],# path to dark image
          'DataType':'Exposure',# type of input ['Exposure','Arc','Dark']
          'DebugLevel':20, # debug level
          'Period':5.0, # Heartbeat Period (Secs)
          'Timeout': 120.0, # Heartbeat Timeout (Secs)
          'DumpIntermediates':False, # whether to dump output of each step
          'FiberFlatFile':os.environ['FIBERFLATFILE'], # path to fiber flat field file
          'FiberFlatImage':os.environ['FIBERFLATIMAGE'], # for psf calibration
          'ArcLampImage':os.environ['ARCLAMPIMAGE'], # for psf calibration
          'SkyFile':os.environ['SKYFILE'], # path to Sky file
          'FiberMap':os.environ['FIBERMAP'],# path to fiber map
          'RawImage':os.environ['PIXIMAGE'],#path to input image
          'PixelFlat':os.environ['PIXELFLAT'], #path to pixel flat image
          'PSFFile':os.environ['PSFFILE'],  # for boxcar this can be bootcalib psf or specter psf file
          #'PSFFile_sp':os.environ['PSFFILE_sp'], # .../desimodel/data/specpsf/psf-r.fits (for running 2d extraction)
          'basePath':os.environ['DESIMODEL'],
          'OutputFile':'lastframe_QL-r0-00000004.fits', # output file from last pipeline step. Need to output intermediate steps? Most likely after boxcar extraction?
          'PipeLine':[{'PA':{"ModuleName":"desispec.quicklook.procalgs",
                             "ClassName":"BiasSubtraction",
                             "Name":"Bias Subtraction",
                             "kwargs":{"BiasImage":"%%BiasImage"}
                             },
                       'QAs':[{"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Get_RMS",
                               "Name":"Get RMS",
                               "kwargs":{},
                               },
                              {"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Count_Pixels",
                               "Name":"Count Pixels",
                               "kwargs":{'Width':3.}
                               }
                              ],
                       "StepName":"Preprocessing-Bias Subtraction",
                       "OutputFile":"QA_biassubtraction.yaml"
                       },
                      {'PA':{"ModuleName":"desispec.quicklook.procalgs",
                             "ClassName":"DarkSubtraction",
                             "Name":"Dark Subtraction",
                             "kwargs":{"DarkImage":"%%DarkImage"}
                             },
                       'QAs':[{"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Get_RMS",
                               "Name":"Get RMS",
                               "kwargs":{},
                               },
                              {"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Count_Pixels",
                               "Name":"Count Pixels",
                               "kwargs":{'Width':3.},
                               }
                              ],
                       "StepName":"Preprocessing-Dark Subtraction",
                       "OutputFile":"QA_darksubtraction.yaml"
                       },
                      {'PA':{"ModuleName":"desispec.quicklook.procalgs",
                             "ClassName":"PixelFlattening",
                             "Name":"Pixel Flattening",
                             "kwargs":{"PixelFlat":"%%PixelFlat"}
                             },
                       'QAs':[{"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Get_RMS",
                               "Name":"Get RMS",
                               "kwargs":{},
                               },
                              {"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Count_Pixels",
                               "Name":"Count Pixels",
                               "kwargs":{'Width':3.},
                               }
                              ],
                       "StepName":"Preprocessing-Pixel Flattening",
                       "OutputFile":"QA_pixelflattening.yaml"
                       },
                      #{'PA':{"ModuleName":"desispec.quicklook.procalgs",
                      #       "ClassName":"BoxcarExtraction",
                      #       "Name":"Boxcar Extraction",
                      #       "kwargs":{"PSFFile":"%%PSFFile",
                      #                 "BoxWidth":2.5,
                      #                 "DeltaW":0.5,
                      #                 "Nspec":500
                      #                 }
                      #       },
                      # 'QAs':[],
                      # "StepName":"Boxcar Extration",
                      # "OutputFile":"QA_boxcarextraction.yaml"
                      # },
                      {'PA':{"ModuleName":"desispec.quicklook.procalgs",
                             "ClassName":"Extraction_2d",
                             "Name":"2D Extraction",
                             "kwargs":{"PSFFile_sp":"/home/govinda/Desi/desimodel/data/specpsf/psf-r.fits",
                                       "Nspec":10,
                                       "Wavelength": "5630,7740,0.5",
                                       "FiberMap":"%%FiberMap" #need this for qa_skysub downstream as well.
                                       }
                             },
                       'QAs':[{"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"CountSpectralBins",
                               "Name":"Count Bins above n",
                               "kwargs":{'thresh':100,
                                         'camera':"r0",
                                         'expid':"%08d"%2,
                                         'url':url
                                        }
                               }
                             ],
                       "StepName":"2D Extraction",
                       "OutputFile":"qa-extract-r0-00000002.yaml"
                       },
                      {'PA':{"ModuleName":"desispec.quicklook.procalgs",
                             "ClassName": "ApplyFiberFlat",
                             "Name": "Apply Fiberflat",
                             "kwargs":{"FiberFlatFile":"%%FiberFlatFile"
                                      }
                             },
                       'QAs':[],
                       "StepName":"Apply Fiberflat",
                       "Outputfile":"apply_fiberflat_QA.yaml"
                      },
                      {'PA':{"ModuleName":"desispec.quicklook.procalgs",
                             "ClassName":"SubtractSky",
                             "Name": "Sky Subtraction",
                             "kwargs":{"SkyFile":"%%SkyFile"
                                      }
                             },
                       'QAs':[{"ModuleName":"desispec.qa.qa_quicklook",
                               "ClassName":"Calculate_SNR",
                               "Name":"Calculate Signal-to-Noise ratio",
                               "kwargs":{'SkyFile':"%%SkyFile",
                                         'camera':"r0",
                                         'expid':"%08d"%2,
                                         'url':url
                                        }
                               }
                             ],
                       "StepName": "Sky Subtraction",
                       "OutputFile":"qa-r0-00000002.yaml"
                      }
                      ]
          }

    if "yaml" in outfilename:
        f=open(outfilename,"w")
        yaml.dump(conf,f)
        f.close()
    else:
        log.warning("Only yaml defined. Use yaml format in the output config file")
        sys.exit(0)
Code example #15
File: quicklook.py Project: CManser/desispec
def runpipeline(pl, convdict, conf):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: is a list of [pa,qas] where pa is a pipeline step and qas the corresponding
            qas for that pa
        convdict: converted dictionary e.g : conf["IMAGE"] is the real psf file
            but convdict["IMAGE"] is like desispec.image.Image object and so on.
            details in setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file.
            e.g: conf=configdict=yaml.load(open('configfile.yaml','rb'))
    """

    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    singqa = conf["singleqa"]
    paconf = conf["PipeLine"]
    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    passqadict = None  #- pass this dict to QAs downstream
    schemaMerger = QL_QAMerger(conf['Night'], conf['Expid'], conf['Flavor'],
                               conf['Camera'], conf['Program'], convdict)
    QAresults = []
    if singqa is None:
        for s, step in enumerate(pl):
            log.info("Starting to run step {}".format(paconf[s]["StepName"]))
            pa = step[0]
            pargs = mapkeywords(step[0].config["kwargs"], convdict)
            schemaStep = schemaMerger.addPipelineStep(paconf[s]["StepName"])
            try:
                hb.start("Running {}".format(step[0].name))
                oldinp = inp  #-  copy for QAs that need to see earlier input
                inp = pa(inp, **pargs)
                if step[0].name == 'Initialize':
                    schemaStep.addMetrics(inp[1])
            except Exception as e:
                log.critical("Failed to run PA {} error was {}".format(
                    step[0].name, e),
                             exc_info=True)
                sys.exit("Failed to run PA {}".format(step[0].name))
            qaresult = {}
            for qa in step[1]:
                try:
                    qargs = mapkeywords(qa.config["kwargs"], convdict)
                    hb.start("Running {}".format(qa.name))
                    qargs[
                        "dict_countbins"] = passqadict  #- pass this to all QA downstream

                    if qa.name == "RESIDUAL" or qa.name == "Sky_Residual":
                        res = qa(inp[0], inp[1], **qargs)
                    else:
                        if isinstance(inp, tuple):
                            res = qa(inp[0], **qargs)
                        else:
                            res = qa(inp, **qargs)

                    if qa.name == "COUNTBINS" or qa.name == "CountSpectralBins":
                        passqadict = res
                    if "qafile" in qargs:
                        qawriter.write_qa_ql(qargs["qafile"], res)
                    log.debug("{} {}".format(qa.name, inp))
                    qaresult[qa.name] = res
                    schemaStep.addParams(res['PARAMS'])
                    schemaStep.addMetrics(res['METRICS'])
                except Exception as e:
                    log.warning("Failed to run QA {}. Got Exception {}".format(
                        qa.name, e),
                                exc_info=True)
            hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
            QAresults.append([pa.name, qaresult])
        hb.stop("Pipeline processing finished. Serializing result")
    else:
        import numpy as np
        qa = None
        qas = [[],
               [
                   'Bias_From_Overscan', 'Get_RMS', 'Count_Pixels',
                   'Calc_XWSigma'
               ], 'Trace_Shifts', 'CountSpectralBins',
               ['Sky_Continuum', 'Sky_Peaks'], ['Calculate_SNR'],
               ['Sky_Rband', 'Integrate_Spec']]

        singleqaperpa = [
            'Bias_From_Overscan', 'Check_HDUs', 'Trace_Shifts',
            'CountSpectralBins'
        ]
        for palg in range(len(qas)):
            if singqa in qas[palg]:
                pa = pl[palg][0]
                pac = paconf[palg]
                if singqa in singleqaperpa:
                    qa = pl[palg][1][0]
                else:
                    for qalg in range(len(qas[palg])):
                        if qas[palg][qalg] == singqa:
                            qa = pl[palg][1][qalg]
        if qa is None:
            log.critical("Unknown input QA... Valid QAs are: {}".format(qas))
            sys.exit()

        log.info("Starting to run step {}".format(pac["StepName"]))
        pargs = mapkeywords(pa.config["kwargs"], convdict)
        schemaStep = schemaMerger.addPipelineStep(pac["StepName"])
        qaresult = {}
        try:
            qargs = mapkeywords(qa.config["kwargs"], convdict)
            hb.start("Running {}".format(qa.name))
            if singqa == "Sky_Residual":
                res = qa(inp[0], inp[1], **qargs)
            else:
                if isinstance(inp, tuple):
                    res = qa(inp[0], **qargs)
                else:
                    res = qa(inp, **qargs)
            if singqa == "CountSpectralBins":
                passqadict = res
            if "qafile" in qargs:
                qawriter.write_qa_ql(qargs["qafile"], res)
            log.debug("{} {}".format(qa.name, inp))
            schemaStep.addMetrics(res['METRICS'])
        except Exception as e:
            log.warning("Failed to run QA {}. Got Exception {}".format(
                qa.name, e),
                        exc_info=True)
        if len(qaresult):
            if conf["DumpIntermediates"]:
                f = open(pac["OutputFile"], "w")
                f.write(yaml.dump(yamlify(qaresult)))
                log.info("{} finished".format(qa.name))

    #- merge QAs for this pipeline execution
    #- RS: don't write merged file if running single QA
    if singqa is None:
        log.debug("Dumping mergedQAs")
        from desispec.io import findfile
        specprod_dir = os.environ[
            'QL_SPEC_REDUX'] if 'QL_SPEC_REDUX' in os.environ else ""
        destFile = findfile('ql_mergedQA_file',
                            night=conf['Night'],
                            expid=conf['Expid'],
                            camera=conf['Camera'],
                            specprod_dir=specprod_dir)

        schemaMerger.writeTojsonFile(destFile)
        log.info("Wrote merged QA file {}".format(destFile))
        if isinstance(inp, tuple):
            return inp[0]
        else:
            return inp
Code example #16
import tempfile
import numpy as np
import os
from desispec.qa import qalib
from desispec.qa import qa_quicklook as QA
from pkg_resources import resource_filename
import desispec.sky
from desispec.preproc import _parse_sec_keyword
from specter.psf import load_psf
import astropy.io.fits as fits
from desispec.quicklook import qllogger
import desispec.io
import desispec.image
from desitarget.targetmask import desi_mask

qlog=qllogger.QLLogger("QuickLook",0)
log=qlog.getlog()

def xy2hdr(xyslice):
    '''
    convert 2D slice into IRAF style [a:b,c:d] hdr value
    
    e.g. xy2hdr(np.s_[0:10, 5:20]) -> '[6:20,1:10]'
    '''
    yy, xx = xyslice
    value = '[{}:{},{}:{}]'.format(xx.start+1, xx.stop, yy.start+1, yy.stop)
    return value

#- 2D gaussian function to model sky peaks
def gaussian2D(x,y,amp,xmu,ymu,xsigma,ysigma):
    x,y = np.meshgrid(x,y)
    #- assumed completion: standard 2D Gaussian profile
    gauss = amp*np.exp(-(x-xmu)**2/(2*xsigma**2)-(y-ymu)**2/(2*ysigma**2))
    return gauss
Code example #17
def ql_main(args=None):

    qlog = qllogger.QLLogger("QuickLook", 20)
    log = qlog.getlog()

    if args is None:
        args = parse()

    if args.dotest is not None:
        quicklook.testconfig(args.dotest)

    if args.config is not None:
        if os.path.exists(args.config):
            if "yaml" in args.config:
                configdict = yaml.load(open(args.config, 'rb'))
        else:
            log.critical("Can't open config file %s" % (args.config))
            sys.exit("Can't open config file")
    else:
        log.warning(
            "No config file given. Trying to create config from other options")
        PAs = qlconfig.Palist(args.flavor)

        config = qlconfig.Make_Config(args.night,
                                      args.flavor,
                                      args.expid,
                                      args.camera,
                                      PAs,
                                      psfboot=args.psfboot,
                                      rawdata_dir=args.rawdata_dir,
                                      specprod_dir=args.specprod_dir,
                                      fiberflat=args.fiberflat,
                                      qlf=args.qlf)
        configdict = qlconfig.build_config(config)

        #- save this config to a file
        if args.save:
            if "yaml" in args.save:
                yaml.dump(configdict, open(args.save, "wb"))
                log.info("Output saved for this configuration to %s " %
                         args.save)
            else:
                log.info(
                    "Can save config to only yaml output. Put a yaml in the argument"
                )

    pipeline, convdict = quicklook.setup_pipeline(configdict)
    res = quicklook.runpipeline(pipeline, convdict, configdict)
    inpname = configdict["RawImage"]
    camera = configdict["Camera"]
    chan, spectrograph, expid = quicklook.get_chan_spec_exp(
        inpname, camera=camera)  #- may be other ways to get it as well

    if isinstance(res, image.Image):
        if configdict["OutputFile"]: finalname = configdict["OutputFile"]
        else: finalname = "image-%s%d-%08d.fits" % (chan, spectrograph, expid)
        imIO.write_image(finalname, res, meta=None)
    elif isinstance(res, frame.Frame):
        if configdict["OutputFile"]: finalname = configdict["OutputFile"]
        else: finalname = "frame-%s%d-%08d.fits" % (chan, spectrograph, expid)
        frIO.write_frame(finalname, res, header=None)
    else:
        log.error(
            "Result of pipeline is an unknown type %s. Don't know how to write"
            % (type(res)))
        sys.exit("Unknown pipeline result type %s." % (type(res)))
    log.info("Pipeline completed. Final result is in %s" % finalname)
Code example #18
File: quicklook.py Project: secroun/desispec
def runpipeline(pl, convdict, conf):
    """runs the quicklook pipeline as configured

    Args:
        pl: is a list of [pa,qas] where pa is a pipeline step and qas the corresponding
            qas for that pa
        convdict: converted dictionary e.g : conf["IMAGE"] is the real psf file
            but convdict["IMAGE"] is like desispec.image.Image object and so on.
            details in setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file.
            e.g: conf=configdict=yaml.load(open('configfile.yaml','rb'))
    """

    qlog = qllogger.QLLogger("QuickLook", 20)
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    paconf = conf["PipeLine"]
    qlog = qllogger.QLLogger("QuickLook", 0)
    log = qlog.getlog()
    passqadict = None  #- pass this dict to QAs downstream
    for s, step in enumerate(pl):
        log.info("Starting to run step %s" % (paconf[s]["StepName"]))
        pa = step[0]
        pargs = mapkeywords(step[0].config["kwargs"], convdict)
        try:
            hb.start("Running %s" % (step[0].name))
            oldinp = inp  #-  copy for QAs that need to see earlier input
            inp = pa(inp, **pargs)
        except Exception as e:
            log.critical("Failed to run PA %s error was %s" %
                         (step[0].name, e))
            sys.exit("Failed to run PA %s" % (step[0].name))
        qaresult = {}
        for qa in step[1]:
            try:
                qargs = mapkeywords(qa.config["kwargs"], convdict)
                hb.start("Running %s" % (qa.name))
                qargs[
                    "dict_countbins"] = passqadict  #- pass this to all QA downstream

                if qa.name == "RESIDUAL" or qa.name == "Sky_Residual":
                    res = qa(oldinp, inp[1], **qargs)

                else:
                    if isinstance(inp, tuple):
                        res = qa(inp[0], **qargs)
                    else:
                        res = qa(inp, **qargs)

                if qa.name == "COUNTBINS" or qa.name == "CountSpectralBins":  #TODO -must run this QA for now. change this later.
                    passqadict = res
                log.debug("%s %s" % (qa.name, inp))
                qaresult[qa.name] = res

            except Exception as e:
                log.warning("Failed to run QA %s error was %s" % (qa.name, e))
        if len(qaresult):
            #- TODO - This dump of QAs for each PA should be reorganised. Dumping everything now.
            yaml.dump(qaresult, open(paconf[s]["OutputFile"], "wb"))
            hb.stop("Step %s finished. Output is in %s " %
                    (paconf[s]["StepName"], paconf[s]["OutputFile"]))
        else:
            hb.stop("Step %s finished." % (paconf[s]["StepName"]))
    hb.stop("Pipeline processing finished. Serializing result")
    if isinstance(inp, tuple):
        return inp[0]
    else:
        return inp
Code example #19
File: quicklook.py Project: gnizq64/desispec
def ql_main(args=None):
    from desispec.quicklook import quicklook, qllogger, qlconfig
    import desispec.image as image
    import desispec.frame as frame
    import desispec.io.frame as frIO
    import desispec.io.image as imIO

    if args is None:
        args = parse()

    qlog = qllogger.QLLogger(name="QuickLook", loglevel=args.loglvl)
    log = qlog.getlog()
    # Sami
    # quiet down DESI logs. We don't want DESI_LOGGER to print messages unless they are important
    # initialize singleton with WARNING level
    quietDesiLogger(args.loglvl + 10)
    if args.config is not None:

        if args.rawdata_dir:
            rawdata_dir = args.rawdata_dir
        else:
            if 'QL_SPEC_DATA' not in os.environ:
                sys.exit(
                    "must set ${} environment variable or provide rawdata_dir".
                    format('QL_SPEC_DATA'))
            rawdata_dir = os.getenv('QL_SPEC_DATA')

        if args.specprod_dir:
            specprod_dir = args.specprod_dir
        else:
            if 'QL_SPEC_REDUX' not in os.environ:
                sys.exit(
                    "must set ${} environment variable or provide specprod_dir"
                    .format('QL_SPEC_REDUX'))
            specprod_dir = os.getenv('QL_SPEC_REDUX')

        log.debug("Running Quicklook using configuration file {}".format(
            args.config))
        if os.path.exists(args.config):
            if "yaml" in args.config:
                config = qlconfig.Config(args.config,
                                         args.night,
                                         args.camera,
                                         args.expid,
                                         args.singqa,
                                         rawdata_dir=rawdata_dir,
                                         specprod_dir=specprod_dir)
                configdict = config.expand_config()
            else:
                log.critical("Can't open config file {}".format(args.config))
                sys.exit("Can't open config file")
        else:
            sys.exit("File does not exist: {}".format(args.config))

    elif args.fullconfig is not None:  #- This is mostly for development/debugging purpose
        log.debug("Running Quicklook using full configuration file {}".format(
            args.fullconfig))
        if os.path.exists(args.fullconfig):
            if "yaml" in args.fullconfig:
                configdict = yaml.load(open(args.fullconfig, "r"))
            else:
                log.critical("Can't open config file {}".format(args.config))
                sys.exit("Can't open config file")
        else:
            sys.exit("File does not exist: {}".format(args.config))
    else:
        sys.exit(
            "Must provide a valid config file. See desispec/data/quicklook for an example"
        )

    #- save the expanded config to a file
    if args.save:
        if "yaml" in args.save:
            f = open(args.save, "w")
            yaml.dump(configdict, f)

            log.info("Output saved for this configuration to {}".format(
                args.save))
            f.close()
        else:
            log.warning(
                "Config can only be saved as yaml output. Provide a yaml filename as the save argument"
            )

    pipeline, convdict = quicklook.setup_pipeline(configdict)
    res = quicklook.runpipeline(pipeline,
                                convdict,
                                configdict,
                                mergeQA=args.mergeQA)
    inpname = configdict["RawImage"]
    night = configdict["Night"]
    camera = configdict["Camera"]
    expid = configdict["Expid"]

    if configdict["OutputFile"] is None:
        log.warning(
            "Output filename is None and the result is an object of type {}. SKIPPING FINAL OUTPUT"
            .format(type(res)))
        return
    if isinstance(res, image.Image):
        if configdict["OutputFile"]:
            finalname = configdict["OutputFile"]
        else:
            finalname = "image-{}-{:08d}.fits".format(camera, expid)
            log.critical(
                "No final output name given. Writing to an image file {}".format(
                    finalname))
        imIO.write_image(finalname, res, meta=None)
    elif isinstance(res, frame.Frame):
        if configdict["OutputFile"]:
            finalname = configdict["OutputFile"]
        else:
            finalname = "frame-{}-{:08d}.fits".format(camera, expid)
            log.critical(
                "No final output name given. Writing to a frame file {}".format(
                    finalname))
        frIO.write_frame(finalname, res, header=None)
    elif configdict["Flavor"] == 'arcs':
        if configdict["OutputFile"]:
            finalname = configdict["OutputFile"]
        else:
            finalname = "psfnight-{}.fits".format(camera)
    elif configdict["Flavor"] == 'flat':
        if configdict["OutputFile"]:
            finalname = configdict["OutputFile"]
        else:
            finalname = "fiberflat-{}-{:08d}.fits".format(camera, expid)
    else:
        if args.singqa:
            sys.exit()
        else:
            log.error(
                "Result of pipeline is an unknown type {}. Don't know how to write"
                .format(type(res)))
            sys.exit("Unknown pipeline result type {}.".format(type(res)))
    log.info("Pipeline completed. Final result is in {}".format(finalname))
Code example #20
File: quicklook.py  Project: sdss/lvmspec
def ql_main(args=None):

    from desispec.util import set_backend
    _matplotlib_backend = None
    set_backend()
    from desispec.quicklook import quicklook, qllogger, qlconfig

    if args is None:
        args = parse()

    qlog = qllogger.QLLogger(name="QuickLook", loglevel=args.loglvl)
    log = qlog.getlog()

    # quiet down DESI logs. We don't want DESI_LOGGER to print messages unless they are important
    # initialize singleton with WARNING level
    quietDesiLogger(args.loglvl + 10)

    if args.config is not None:
        #RS: have command line arguments for finding files via old datamodel
        psfid = args.psfid if args.psfid else None
        flatid = args.flatid if args.flatid else None
        templateid = args.templateid if args.templateid else None
        templatenight = args.templatenight if args.templatenight else None

        if args.rawdata_dir:
            rawdata_dir = args.rawdata_dir
        else:
            if 'QL_SPEC_DATA' not in os.environ:
                sys.exit(
                    "must set ${} environment variable or provide rawdata_dir".
                    format('QL_SPEC_DATA'))
            rawdata_dir = os.getenv('QL_SPEC_DATA')

        if args.specprod_dir:
            specprod_dir = args.specprod_dir
        else:
            if 'QL_SPEC_REDUX' not in os.environ:
                sys.exit(
                    "must set ${} environment variable or provide specprod_dir"
                    .format('QL_SPEC_REDUX'))
            specprod_dir = os.getenv('QL_SPEC_REDUX')

        log.debug("Running Quicklook using configuration file {}".format(
            args.config))
        if os.path.exists(args.config):
            if "yaml" in args.config:
                config = qlconfig.Config(args.config,
                                         args.night,
                                         args.camera,
                                         args.expid,
                                         args.singqa,
                                         rawdata_dir=rawdata_dir,
                                         specprod_dir=specprod_dir,
                                         psfid=psfid,
                                         flatid=flatid,
                                         templateid=templateid,
                                         templatenight=templatenight,
                                         qlplots=args.qlplots,
                                         store_res=args.resolution)
                configdict = config.expand_config()
            else:
                log.critical("Can't open config file {}".format(args.config))
                sys.exit("Can't open config file")
        else:
            sys.exit("File does not exist: {}".format(args.config))
    else:
        sys.exit(
            "Must provide a valid config file. See desispec/data/quicklook for an example"
        )

    pipeline, convdict = quicklook.setup_pipeline(configdict)
    res = quicklook.runpipeline(pipeline, convdict, configdict)
    log.info("QuickLook Pipeline completed")