Example No. 1
def write_qa_prod(outroot, qaprod, indent=True):
    """Write QA for a given production

    Args:
        outroot : str
          filename without format extension
        qaprod : QA_Prod object

    Returns:
        outfile: str
          output filename
    """
    from desiutil.io import combine_dicts
    log = get_logger()
    outfile = outroot + '.json'
    outfile = makepath(outfile, 'qa')

    # Loop on exposures
    odict = {}
    for qaexp in qaprod.qa_exps:
        # Get the exposure dict (outroot is unused when ret_dict=True)
        idict = write_qa_exposure('foo', qaexp, ret_dict=True)
        odict = combine_dicts(odict, idict)
    ydict = yamlify(odict)  # This works well for JSON too
    # Simple json
    with open(outfile, 'wt') as fh:
        json.dump(ydict, fh, indent=indent)
    log.info('Wrote QA_Prod file: {:s}'.format(outfile))

    return outfile
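
A minimal usage sketch of how combine_dicts() accumulates the per-exposure dicts above into a single production dict. The recursive-merge behavior shown is an assumption inferred from how the function is used here, and the input dicts are illustrative:

from desiutil.io import combine_dicts

night_a = {'20200101': {1: {'flavor': 'science'}}}
night_b = {'20200101': {2: {'flavor': 'arc'}}}
merged = combine_dicts(night_a, night_b)
# expected: {'20200101': {1: {'flavor': 'science'}, 2: {'flavor': 'arc'}}}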
Example No. 2
def write_qa_exposure(outroot, qaexp, ret_dict=False):
    """Write QA for a given exposure

    Args:
        outroot : str
          filename without format extension
        qaexp : QA_Exposure object
        ret_dict : bool, optional
          Return dict only?  [for qa_prod, mainly]
    Returns:
        outfile or odict : str or dict
    """
    # Generate the dict
    odict = {qaexp.night: {qaexp.expid: {}}}
    odict[qaexp.night][qaexp.expid]['flavor'] = qaexp.flavor
    cameras = qaexp.data['frames'].keys()
    for camera in cameras:
        odict[qaexp.night][qaexp.expid][camera] = qaexp.data['frames'][camera]
    # Return dict only?
    if ret_dict:
        return odict
    # Simple yaml
    ydict = yamlify(odict)
    outfile = outroot+'.yaml'
    outfile = makepath(outfile, 'qa')
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
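
A runnable sketch of the dict write_qa_exposure() builds when ret_dict=True, using a stand-in SimpleNamespace in place of a real QA_Exposure object (attribute names are taken from the code above; the values are illustrative):

from types import SimpleNamespace

fake_exp = SimpleNamespace(night='20200101', expid=42, flavor='science',
                           data={'frames': {'b0': {'SKYSUB': {}}}})
odict = write_qa_exposure('unused-root', fake_exp, ret_dict=True)
# odict == {'20200101': {42: {'flavor': 'science', 'b0': {'SKYSUB': {}}}}}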
Example No. 3
def write_qa_frame(outfile, qaframe, verbose=False):
    """Write QA for a given frame

    Args:
        outfile : str
          filename
        qaframe : QA_Frame object, with the following attributes
            qa_data: dict of QA info
    """
    log = get_logger()
    outfile = makepath(outfile, 'qa')

    # Generate the dict
    odict = {
        qaframe.night: {
            qaframe.expid: {
                qaframe.camera: {},
                'flavor': qaframe.flavor
            }
        }
    }
    odict[qaframe.night][qaframe.expid][qaframe.camera] = qaframe.qa_data
    ydict = yamlify(odict)
    # Simple yaml
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))
    if verbose:
        log.info("Wrote QA frame file: {:s}".format(outfile))

    return outfile
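
Reading such a QA file back is plain PyYAML usage; the filename below is a hypothetical example:

import yaml

with open('qa-b0-00000042.yaml') as fh:  # hypothetical QA frame file
    qadata = yaml.safe_load(fh)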
Example No. 4
def write_qa_ql(outfile, qaresult):
    """Write QL output files

       Args:
           outfile : str
             filename to be written (yaml; a json copy is written alongside)
           qaresult : dict
             QAresults from run_qa()

       Returns:
           outfile : str
    """
    # Take in QL input and output to yaml
    qadict = yamlify(qaresult)
    with open(outfile, 'w') as f:
        f.write(yaml.dump(qadict))

    with open(outfile.split('.yaml')[0] + '.json', 'w') as g:
        json.dump(qadict, g, sort_keys=True, indent=4)

    return outfile
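
A standalone sketch of why yamlify() is applied before dumping in these examples: QA dicts often carry numpy scalars, which the json module cannot serialize, and yamlify() (from desiutil) is assumed to coerce them to plain Python types:

import json
import numpy as np
from desiutil.io import yamlify

qa = {'METRICS': {'NPIX': np.int64(512), 'RMS': np.float32(1.2)}}
# json.dumps(qa) would raise TypeError: Object of type int64 is not JSON serializable
print(json.dumps(yamlify(qa), sort_keys=True, indent=4))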
Example No. 5
def write_qa_frame(outfile, qaframe):
    """Write QA for a given frame

    Args:
        outfile : str
          filename
        qaframe : QA_Frame object, with the following attributes
            qa_data: dict of QA info
    """
    outfile = makepath(outfile, 'qa')

    # Generate the dict
    odict = {
        qaframe.night: {
            qaframe.expid: {
                qaframe.camera: {},
                'flavor': qaframe.flavor
            }
        }
    }
    odict[qaframe.night][qaframe.expid][qaframe.camera] = qaframe.qa_data
    ydict = yamlify(odict)
    # Simple yaml
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
Example No. 6
def write_qa_prod(outroot, qaprod):
    """Write QA for a given production

    Args:
        outroot : str
          filename without format extension
        qaprod : QA_Prod object

    Returns:
        outfile : str
    """
    from desiutil.io import combine_dicts
    log = get_logger()  # log is used below but was never defined
    outfile = outroot+'.yaml'
    outfile = makepath(outfile, 'qa')

    # Loop on exposures
    odict = {}
    for qaexp in qaprod.qa_exps:
        # Get the exposure dict (outroot is unused when ret_dict=True)
        idict = write_qa_exposure('foo', qaexp, ret_dict=True)
        odict = combine_dicts(odict, idict)
    ydict = yamlify(odict)
    # Simple yaml
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))
    log.info('Wrote QA_Prod file: {:s}'.format(outfile))

    return outfile
Example No. 7
def write_qa_multiexp(outroot, qa_mexp, indent=True, skip_rebuild=False):
    """Write QA for multiple exposures

    Args:
        outroot : str
          filename without format extension
        qa_mexp : QA_MultiExp object
        skip_rebuild : bool, optional
          Do not rebuild the data dict

    Returns:
        outfile: str
          output filename
    """
    log = get_logger()
    outfile = outroot + '.json'
    outfile = makepath(outfile, 'qa')

    if not skip_rebuild:
        qa_mexp.build_data()
    ydict = yamlify(qa_mexp.data)  # This works well for JSON too
    # Simple json
    with open(outfile, 'wt') as fh:
        json.dump(ydict, fh, indent=indent)
    log.info('Wrote QA Multi-Exposure file: {:s}'.format(outfile))

    return outfile
Example No. 8
def write_qa_exposure(outroot, qaexp, ret_dict=False):
    """Write QA for a given exposure

    Args:
        outroot : str
          filename without format extension
        qaexp : QA_Exposure object
        ret_dict : bool, optional
          Return dict only?  [for qa_prod, mainly]
    Returns:
        outfile or odict : str or dict
    """
    # Generate the dict
    odict = {qaexp.night: {qaexp.expid: {}}}
    odict[qaexp.night][qaexp.expid]['flavor'] = qaexp.flavor
    odict[qaexp.night][qaexp.expid]['meta'] = qaexp.meta
    cameras = list(qaexp.data['frames'].keys())
    for camera in cameras:
        odict[qaexp.night][qaexp.expid][camera] = qaexp.data['frames'][camera]
    # Return dict only?
    if ret_dict:
        return odict
    # Simple yaml
    ydict = yamlify(odict)
    outfile = outroot+'.yaml'
    outfile = makepath(outfile, 'qa')
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
Example No. 9
def write_qa_ql(outfile, qaresult):
    """Write QL output files

       Args:
           outfile : str
             filename to be written (json)
           qaresult : dict
             QAresults from run_qa()

       Returns:
           outfile : str
    """
    # Take in QL input and output to json
    # SE: No yaml creation as of May 2018
    qadict = yamlify(qaresult)

    with open(outfile, 'w') as g:
        json.dump(qadict, g, sort_keys=True, indent=4)

    return outfile
Example No. 10
def write_qa_ql(outfile, qaresult):
    """Write QL output files

       Args:
           outfile : str
             filename to be written (json)
           qaresult : dict
             QAresults from run_qa()

       Returns:
           outfile : str
    """
    # Take in QL input and output to json
    # SE: No yaml creation as of May 2018
    qadict = yamlify(qaresult)

    with open(outfile, 'w') as g:
        json.dump(qadict, g, sort_keys=True, indent=4)

    return outfile
Example No. 11
    def writeTojsonFile(self, fileName):
        myDict = yamlify(self.__schema)
        #reOrderDict(myDict)

        # remove lists ... after this step there is no list of dictionaries
        EditDic(myDict)

        # this step modifies Tasks, renames them, and rearranges Metrics and their corresponding Params
        myDict = taskMaker(myDict)

        with open(fileName, 'w') as g:
            json.dump(myDict, g, sort_keys=True, indent=4)
Example No. 12
 def writeTojsonFile(self, fileName):
     myDict = yamlify(self.__schema)
     #reOrderDict(myDict)

     # remove lists ... after this step there is no list of dictionaries
     EditDic(myDict)

     # this step modifies Tasks, renames them, and rearranges Metrics and their corresponding Params
     myDict = taskMaker(myDict)

     with open(fileName, 'w') as g:
         json.dump(myDict, g, sort_keys=True, indent=4)
Example No. 13
def write_qa_brick(outfile, qabrick):
    """Write QA for a given brick

    Args:
        outfile : filename
        qabrick : QA_Brick object
            _data: dict of QA info
    """
    outfile = makepath(outfile, 'qa')

    # Simple yaml
    ydict = yamlify(qabrick.data)
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
Example No. 14
def write_qa_brick(outfile, qabrick):
    """Write QA for a given brick

    Args:
        outfile : filename
        qabrick : QA_Brick object
            _data: dict of QA info
    """
    outfile = makepath(outfile, 'qa')

    # Simple yaml
    ydict = yamlify(qabrick.data)
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
Example No. 15
def merge_QAs(qaresult):
    """
    Per-pipeline-level merging of QL QA results.

    qaresult: list of [pa, qa] entries, where pa is the PA name and qa is
        the dict of QA results for that PA. This list is created inside
        the QL pipeline.
    """
    mergedQA = {}

    for result in qaresult:
        pa = result[0].upper()
        # dict.values() is not indexable in Python 3; wrap it in list()
        first_qa = list(result[1].values())[0]
        night = first_qa['NIGHT']
        expid = int(first_qa['EXPID'])
        camera = first_qa['CAMERA']
        flavor = first_qa['FLAVOR']

        if night not in mergedQA:
            mergedQA[night] = {}  #- top level key
        if expid not in mergedQA[night]:
            mergedQA[night][expid] = {}
        if camera not in mergedQA[night][expid]:
            mergedQA[night][expid][camera] = {}
        if 'flavor' not in mergedQA[night][expid]:
            mergedQA[night][expid]['flavor'] = flavor
        mergedQA[night][expid][camera][pa] = {}
        mergedQA[night][expid][camera][pa]['PARAMS'] = {}
        mergedQA[night][expid][camera][pa]['METRICS'] = {}

        #- now merge PARAM and QA metrics for all QAs
        for qa in result[1]:
            if 'PARAMS' in result[1][qa]:
                mergedQA[night][expid][camera][pa]['PARAMS'].update(
                    result[1][qa]['PARAMS'])
            if 'METRICS' in result[1][qa]:
                mergedQA[night][expid][camera][pa]['METRICS'].update(
                    result[1][qa]['METRICS'])
    import yaml
    from desiutil.io import yamlify
    qadict = yamlify(mergedQA)
    #- IO/file naming should move from here.
    with open('mergedQA-{}-{:08d}.yaml'.format(camera, expid), 'w') as f:
        f.write(yaml.dump(qadict))
    return
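
For reference, the qaresult structure merge_QAs() expects looks roughly like the sketch below (PA and QA names are illustrative); each per-QA dict carries NIGHT/EXPID/CAMERA/FLAVOR at its top level plus optional PARAMS and METRICS sub-dicts:

qaresult = [
    ['preproc', {
        'Get_RMS': {'NIGHT': '20200101', 'EXPID': '42',
                    'CAMERA': 'b0', 'FLAVOR': 'science',
                    'PARAMS': {'RMS_WARN_RANGE': [-1.0, 1.0]},
                    'METRICS': {'RMS': 0.3}},
    }],
]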
Example No. 16
def write_qa_ql(outfile, qaresult):
    """Write QL output files

       Args:
           outfile : str
             filename to be written (yaml)
           qaresult : dict
             QAresults from run_qa()

       Returns:
           outfile : str
    """
    import yaml
    from desiutil.io import yamlify
    # Take in QL input and output to yaml
    qadict = yamlify(qaresult)
    # use a context manager so the file is flushed and closed before returning
    with open(outfile, 'w') as f:
        f.write(yaml.dump(qadict))
    return outfile
Example No. 17
def write_qa_frame(outfile, qaframe):
    """Write QA for a given frame

    Args:
        outfile : str
          filename
        qaframe : QA_Frame object, with the following attributes
            qa_data: dict of QA info
    """
    outfile = makepath(outfile, 'qa')

    # Generate the dict
    odict = {qaframe.night: {qaframe.expid: {qaframe.camera: {}, 'flavor': qaframe.flavor}}}
    odict[qaframe.night][qaframe.expid][qaframe.camera] = qaframe.qa_data
    ydict = yamlify(odict)
    # Simple yaml
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
Example No. 18
def write_qa_multiexp(outroot, mdict, indent=True):
    """Write QA for multiple exposures

    Args:
        outroot : str
          filename without format extension
        mdict : dict

    Returns:
        outfile: str
          output filename
    """
    log = get_logger()
    outfile = outroot+'.json'
    outfile = makepath(outfile, 'qa')

    ydict = yamlify(mdict)  # This works well for JSON too
    # Simple json
    with open(outfile, 'wt') as fh:
        json.dump(ydict, fh, indent=indent)
    log.info('Wrote QA Multi-Exposure file: {:s}'.format(outfile))

    return outfile
Example No. 19
def write_qa_multiexp(outroot, mdict, indent=True):
    """Write QA for multiple exposures

    Args:
        outroot : str
          filename without format extension
        mdict : dict

    Returns:
        outfile: str
          output filename
    """
    log = get_logger()
    outfile = outroot + '.json'
    outfile = makepath(outfile, 'qa')

    ydict = yamlify(mdict)  # This works well for JSON too
    # Simple json
    with open(outfile, 'wt') as fh:
        json.dump(ydict, fh, indent=indent)
    log.info('Wrote QA Multi-Exposure file: {:s}'.format(outfile))

    return outfile
Example No. 20
 def getYaml(self):
     yres = yamlify(self.__schema)
     reOrderDict(yres)
     return yaml.dump(yres)
Example No. 21
 def writeTojsonFile(self, fileName):
     myDict = yamlify(self.__schema)
     reOrderDict(myDict)
     with open(fileName.split('.yaml')[0] + '.json', 'w') as g:
         json.dump(myDict, g, sort_keys=True, indent=4)
Example No. 22
 def getJson(self):
     import json
     return json.dumps(yamlify(self.__schema))
Example No. 23
def main(args):
    import os.path
    import sys
    import yaml
    import numpy as np
    import matplotlib
    matplotlib.use('agg')

    import desispec.io
    from desiutil.log import get_logger

    from desisim.spec_qa import redshifts as dsqa_z
    from desiutil.io import yamlify
    import desiutil.depend

    log = get_logger()

    # Initialize
    if args.qaprod_dir is not None:
        qaprod_dir = args.qaprod_dir
    else:
        qaprod_dir = desispec.io.meta.qaprod_root()


    if args.load_simz_table is not None:
        from astropy.table import Table
        log.info("Loading simz info from {:s}".format(args.load_simz_table))
        simz_tab = Table.read(args.load_simz_table)
    else:
        # Grab list of fibermap files
        fibermap_files = []
        zbest_files = []
        nights = desispec.io.get_nights()
        for night in nights:
            for exposure in desispec.io.get_exposures(night, raw=True, rawdata_dir=args.rawdir):
                # Ignore exposures with no fibermap, assuming they are calibration data.
                fibermap_path = desispec.io.findfile(filetype='fibermap', night=night,
                                                     expid=exposure, rawdata_dir=args.rawdir)
                if not os.path.exists(fibermap_path):
                    log.debug('Skipping exposure %08d with no fibermap.' % exposure)
                    continue
                # Load data
                fibermap_data = desispec.io.read_fibermap(fibermap_path)
                # Skip calib
                objtype = fibermap_data['OBJTYPE'][0]
                if objtype in ['FLAT', 'FLT', 'ARC', 'BIAS', 'BIA']:
                    continue
                elif objtype in ['SKY', 'TGT', 'BAD']:
                    pass
                else:
                    err_message = 'Unrecognized OBJTYPE {}'.format(objtype)
                    log.critical(err_message)
                    raise ValueError(err_message)

                # Append fibermap file
                fibermap_files.append(fibermap_path)
                # Slurp the zbest_files
                zbest_files += dsqa_z.find_zbest_files(fibermap_data)

        # Cut down zbest_files to unique ones
        zbest_files = list(set(zbest_files))

        if len(zbest_files) == 0:
            log.fatal('No zbest files found')
            sys.exit(1)

        # Load the table; optionally write it to disk
        simz_tab, zbtab = dsqa_z.load_z(fibermap_files, zbest_files=zbest_files)
        dsqa_z.match_truth_z(simz_tab, zbtab)
        if args.write_simz_table is not None:
            simz_tab.write(args.write_simz_table, overwrite=True)

    # Meta data
    meta = dict(
        DESISIM=desiutil.depend.getdep(simz_tab.meta, 'desisim'),
        SPECPROD=os.getenv('SPECPROD', 'unknown'),
        PIXPROD=os.getenv('PIXPROD', 'unknown'),
    )
    # Include specter version if it was used to generate input files
    # (it isn't used for specsim inputs so that dependency may not exist)
    try:
        meta['SPECTER'] = desiutil.depend.getdep(simz_tab.meta, 'specter')
    except KeyError:
        pass
    
    # Run stats
    log.info("Running stats...")
    summ_dict = dsqa_z.summ_stats(simz_tab)
    if args.yaml_file is not None:
        log.info("Generating yaml file of stats: {:s}".format(args.yaml_file))
        # yamlify
        # Write yaml
        desispec.io.util.makepath(args.yaml_file)
        with open(args.yaml_file, 'w') as outfile:
            outfile.write(yaml.dump(yamlify(meta), default_flow_style=False))
            outfile.write(yaml.dump(yamlify(summ_dict), default_flow_style=False))

    log.info("Generating QA files")
    # Summary for dz of all types
    outfile = qaprod_dir+'/QA_dzsumm.png'
    desispec.io.util.makepath(outfile)
    dsqa_z.dz_summ(simz_tab, outfile=outfile)
    # Summary of individual types
    #outfile = args.qafig_root+'_summ_fig.png'
    #dsqa_z.summ_fig(simz_tab, summ_dict, meta, outfile=outfile)
    for objtype in ['BGS', 'MWS', 'ELG', 'LRG', 'QSO_T', 'QSO_L']:
        outfile = qaprod_dir+'/QA_zfind_{:s}.png'.format(objtype)
        desispec.io.util.makepath(outfile)
        dsqa_z.obj_fig(simz_tab, objtype, summ_dict, outfile=outfile)
Example No. 24
def runpipeline(pl,convdict,conf):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: a list of [pa, qas] pairs, where pa is a pipeline step and qas is
            the list of corresponding QAs for that pa
        convdict: converted dictionary, e.g. conf["IMAGE"] is the real image file
            on disk whereas convdict["IMAGE"] is a desispec.image.Image object,
            and so on. See the setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file,
            e.g. conf = configdict = yaml.load(open('configfile.yaml', 'rb'))
    """

    qlog=qllogger.QLLogger()
    log=qlog.getlog()
    hb=QLHB.QLHeartbeat(log,conf["Period"],conf["Timeout"])

    inp=convdict["rawimage"]
    singqa=conf["singleqa"]
    paconf=conf["PipeLine"]
    qlog=qllogger.QLLogger()
    log=qlog.getlog()
    passqadict=None #- pass this dict to QAs downstream
    schemaMerger=QL_QAMerger(conf['Night'],conf['Expid'],conf['Flavor'],conf['Camera'],conf['Program'],convdict)
    QAresults=[] 
    if singqa is None:
        for s,step in enumerate(pl):
            log.info("Starting to run step {}".format(paconf[s]["StepName"]))
            pa=step[0]
            pargs=mapkeywords(step[0].config["kwargs"],convdict)
            schemaStep=schemaMerger.addPipelineStep(paconf[s]["StepName"])
            try:
                hb.start("Running {}".format(step[0].name))
                oldinp=inp #-  copy for QAs that need to see earlier input
                inp=pa(inp,**pargs)
                if step[0].name == 'Initialize':
                    schemaStep.addMetrics(inp[1])
            except Exception as e:
                log.critical("Failed to run PA {} error was {}".format(step[0].name,e),exc_info=True)
                sys.exit("Failed to run PA {}".format(step[0].name))
            qaresult={}
            for qa in step[1]:
                try:
                    qargs=mapkeywords(qa.config["kwargs"],convdict)
                    hb.start("Running {}".format(qa.name))
                    qargs["dict_countbins"]=passqadict #- pass this to all QA downstream
    
                    if qa.name=="RESIDUAL" or qa.name=="Sky_Residual":
                        res=qa(inp[0],inp[1],**qargs)
                    else:
                        if isinstance(inp,tuple):
                            res=qa(inp[0],**qargs)
                        else:
                            res=qa(inp,**qargs)
    
                    if qa.name=="COUNTBINS" or qa.name=="CountSpectralBins":         
                        passqadict=res
                    if "qafile" in qargs:
                        qawriter.write_qa_ql(qargs["qafile"],res)
                    log.debug("{} {}".format(qa.name,inp))
                    qaresult[qa.name]=res
                    schemaStep.addParams(res['PARAMS'])
                    schemaStep.addMetrics(res['METRICS'])
                except Exception as e:
                    log.warning("Failed to run QA {}. Got Exception {}".format(qa.name,e),exc_info=True)
            hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
            QAresults.append([pa.name,qaresult])
        hb.stop("Pipeline processing finished. Serializing result")
    else:
        import numpy as np
        qa=None
        qas=[[],
             ['Bias_From_Overscan','Get_RMS','Count_Pixels','Calc_XWSigma'],
             'Trace_Shifts','CountSpectralBins',
             ['Sky_Continuum','Sky_Peaks'],
             ['Calculate_SNR'],
             ['Sky_Rband','Integrate_Spec']]

        singleqaperpa=['Bias_From_Overscan','Check_HDUs','Trace_Shifts','CountSpectralBins']
        for palg in range(len(qas)):
            if singqa in qas[palg]:
                pa=pl[palg][0]
                pac=paconf[palg]
                if singqa in singleqaperpa:
                    qa = pl[palg][1][0]
                else:
                    for qalg in range(len(qas[palg])):
                        if qas[palg][qalg] == singqa:
                            qa=pl[palg][1][qalg]
        if qa is None:
            log.critical("Unknown input QA... Valid QAs are: {}".format(qas))
            sys.exit()

        log.info("Starting to run step {}".format(pac["StepName"]))
        pargs=mapkeywords(pa.config["kwargs"],convdict)
        schemaStep=schemaMerger.addPipelineStep(pac["StepName"])
        qaresult={}
        try:
            qargs=mapkeywords(qa.config["kwargs"],convdict)
            hb.start("Running {}".format(qa.name))
            if singqa=="Sky_Residual":
                res=qa(inp[0],inp[1],**qargs)
            else:
                if isinstance(inp,tuple):
                    res=qa(inp[0],**qargs)
                else:
                    res=qa(inp,**qargs)
            if singqa=="CountSpectralBins":
                passqadict=res
            if "qafile" in qargs:
                qawriter.write_qa_ql(qargs["qafile"],res)
            log.debug("{} {}".format(qa.name,inp))
            schemaStep.addMetrics(res['METRICS'])
        except Exception as e:
            log.warning("Failed to run QA {}. Got Exception {}".format(qa.name,e),exc_info=True)
        if len(qaresult):
            if conf["DumpIntermediates"]:
                with open(pac["OutputFile"],"w") as f:
                    f.write(yaml.dump(yamlify(qaresult)))
                log.info("{} finished".format(qa.name))

    #- merge QAs for this pipeline execution
    #- RS: don't write merged file if running single QA
    if singqa is None:
        log.debug("Dumping mergedQAs")
        from desispec.io import findfile
        specprod_dir=os.environ['QL_SPEC_REDUX'] if 'QL_SPEC_REDUX' in os.environ else ""
        destFile=findfile('ql_mergedQA_file',night=conf['Night'],
                          expid=conf['Expid'],
                          camera=conf['Camera'],
                          specprod_dir=specprod_dir)

        schemaMerger.writeTojsonFile(destFile)
        log.info("Wrote merged QA file {}".format(destFile))
        if isinstance(inp,tuple):
            return inp[0]
        else:
            return inp
Example No. 25
def runpipeline(pl, convdict, conf, mergeQA=False):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: a list of [pa, qas] pairs, where pa is a pipeline step and qas is
            the list of corresponding QAs for that pa
        convdict: converted dictionary, e.g. conf["IMAGE"] is the real image file
            on disk whereas convdict["IMAGE"] is a desispec.image.Image object,
            and so on. See the setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file,
            e.g. conf = configdict = yaml.load(open('configfile.yaml', 'rb'))
        mergeQA: if True, output the merged QA after the pipeline executes.
            Perhaps this should always be True, but it is left as an option
            until configuration and IO settle.
    """

    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    singqa = conf["singleqa"]
    paconf = conf["PipeLine"]
    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    passqadict = None  #- pass this dict to QAs downstream
    schemaMerger = QL_QAMerger(conf['Night'], conf['Expid'], conf['Flavor'],
                               conf['Camera'], conf['Program'])
    QAresults = [
    ]  #- merged QA list for the whole pipeline. This will be reorganized for databasing after the pipeline executes
    if singqa is None:
        for s, step in enumerate(pl):
            log.info("Starting to run step {}".format(paconf[s]["StepName"]))
            pa = step[0]
            pargs = mapkeywords(step[0].config["kwargs"], convdict)
            schemaStep = schemaMerger.addPipelineStep(paconf[s]["StepName"])
            try:
                hb.start("Running {}".format(step[0].name))
                oldinp = inp  #-  copy for QAs that need to see earlier input
                inp = pa(inp, **pargs)
            except Exception as e:
                log.critical("Failed to run PA {} error was {}".format(
                    step[0].name, e),
                             exc_info=True)
                sys.exit("Failed to run PA {}".format(step[0].name))
            qaresult = {}
            for qa in step[1]:
                try:
                    qargs = mapkeywords(qa.config["kwargs"], convdict)
                    hb.start("Running {}".format(qa.name))
                    qargs[
                        "dict_countbins"] = passqadict  #- pass this to all QA downstream

                    if qa.name == "RESIDUAL" or qa.name == "Sky_Residual":
                        res = qa(inp[0], inp[1], **qargs)
                    else:
                        if isinstance(inp, tuple):
                            res = qa(inp[0], **qargs)
                        else:
                            res = qa(inp, **qargs)

                    if qa.name == "COUNTBINS" or qa.name == "CountSpectralBins":  #TODO -must run this QA for now. change this later.
                        passqadict = res
                    if "qafile" in qargs:
                        qawriter.write_qa_ql(qargs["qafile"], res)
                    log.debug("{} {}".format(qa.name, inp))
                    qaresult[qa.name] = res
                    schemaStep.addParams(res['PARAMS'])
                    schemaStep.addMetrics(res['METRICS'])
                except Exception as e:
                    log.warning("Failed to run QA {}. Got Exception {}".format(
                        qa.name, e),
                                exc_info=True)
            if len(qaresult):
                if conf["DumpIntermediates"]:
                    with open(paconf[s]["OutputFile"], "w") as f:
                        f.write(yaml.dump(yamlify(qaresult)))
                    hb.stop("Step {} finished. Output is in {} ".format(
                        paconf[s]["StepName"], paconf[s]["OutputFile"]))
            else:
                hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
            QAresults.append([pa.name, qaresult])
        hb.stop("Pipeline processing finished. Serializing result")
    else:
        import numpy as np
        qa = None
        qas = [
            'Bias_From_Overscan', ['Get_RMS', 'Calc_XWSigma', 'Count_Pixels'],
            'CountSpectralBins', ['Sky_Continuum', 'Sky_Peaks'],
            ['Sky_Residual', 'Integrate_Spec', 'Calculate_SNR']
        ]
        for palg in range(len(qas)):
            if singqa in qas[palg]:
                pa = pl[palg][0]
                pac = paconf[palg]
                if singqa == 'Bias_From_Overscan' or singqa == 'CountSpectralBins':
                    qa = pl[palg][1][0]
                else:
                    for qalg in range(len(qas[palg])):
                        if qas[palg][qalg] == singqa:
                            qa = pl[palg][1][qalg]
        if qa is None:
            log.critical("Unknown input... Valid QAs are: {}".format(qas))
            sys.exit()

        log.info("Starting to run step {}".format(pac["StepName"]))
        pargs = mapkeywords(pa.config["kwargs"], convdict)
        schemaStep = schemaMerger.addPipelineStep(pac["StepName"])
        qaresult = {}
        try:
            qargs = mapkeywords(qa.config["kwargs"], convdict)
            hb.start("Running {}".format(qa.name))
            if singqa == "Sky_Residual":
                res = qa(inp[0], inp[1], **qargs)
            else:
                if isinstance(inp, tuple):
                    res = qa(inp[0], **qargs)
                else:
                    res = qa(inp, **qargs)
            if singqa == "CountSpectralBins":
                passqadict = res
            if "qafile" in qargs:
                qawriter.write_qa_ql(qargs["qafile"], res)
            log.debug("{} {}".format(qa.name, inp))
            schemaStep.addMetrics(res['METRICS'])
        except Exception as e:
            log.warning("Failed to run QA {}. Got Exception {}".format(
                qa.name, e),
                        exc_info=True)
        if len(qaresult):
            if conf["DumpIntermediates"]:
                with open(pac["OutputFile"], "w") as f:
                    f.write(yaml.dump(yamlify(qaresult)))
                log.info("{} finished".format(qa.name))

    #- merge QAs for this pipeline execution
    if mergeQA is True:
        # from desispec.quicklook.util import merge_QAs
        # log.info("Merging all the QAs for this pipeline execution")
        # merge_QAs(QAresults,conf)
        log.debug("Dumping mergedQAs")
        from desispec.io import findfile
        ftype = 'ql_mergedQA_file'
        specprod_dir = os.environ[
            'QL_SPEC_REDUX'] if 'QL_SPEC_REDUX' in os.environ else ""
        if conf['Flavor'] == 'arcs':
            ftype = 'ql_mergedQAarc_file'
        destFile = findfile(ftype,
                            night=conf['Night'],
                            expid=conf['Expid'],
                            camera=conf['Camera'],
                            specprod_dir=specprod_dir)
        # this will overwrite the file. above function returns same name for different QL executions
        # results will be erased.
        schemaMerger.writeToFile(destFile)
        log.info("Wrote merged QA file {}".format(destFile))
        schemaMerger.writeTojsonFile(destFile)
        log.info("Wrote merged QA file {}".format(
            destFile.split('.yaml')[0] + '.json'))
    if isinstance(inp, tuple):
        return inp[0]
    else:
        return inp
Example No. 26
def main(args):
    import os.path
    import sys
    import yaml
    import numpy as np
    import matplotlib
    matplotlib.use('agg')

    import desispec.io
    from desiutil.log import get_logger

    from desisim.spec_qa import redshifts as dsqa_z
    from desiutil.io import yamlify
    import desiutil.depend
    from desimodel.footprint import radec2pix

    log = get_logger()

    if args.load_simz_table is not None:
        from astropy.table import Table
        log.info("Loading simz info from {:s}".format(args.load_simz_table))
        simz_tab = Table.read(args.load_simz_table)
    else:
        # Grab list of fibermap files
        fibermap_files = []
        zbest_files = []
        nights = desispec.io.get_nights()
        for night in nights:
            for exposure in desispec.io.get_exposures(night,
                                                      raw=True,
                                                      rawdata_dir=args.rawdir):
                # Ignore exposures with no fibermap, assuming they are calibration data.
                fibermap_path = desispec.io.findfile(filetype='fibermap',
                                                     night=night,
                                                     expid=exposure,
                                                     rawdata_dir=args.rawdir)
                if not os.path.exists(fibermap_path):
                    log.debug('Skipping exposure %08d with no fibermap.' %
                              exposure)
                    continue
                # Load data
                fibermap_data = desispec.io.read_fibermap(fibermap_path)
                # Skip calib
                objtype = fibermap_data['OBJTYPE'][0]
                if objtype in ['FLAT', 'ARC', 'BIAS']:
                    continue
                elif objtype in ['SKY', 'STD', 'SCIENCE']:
                    pass
                else:
                    # was pdb.set_trace(); raise instead of dropping into a debugger
                    err_message = 'Unrecognized OBJTYPE {}'.format(objtype)
                    log.critical(err_message)
                    raise ValueError(err_message)
                # Append fibermap file
                fibermap_files.append(fibermap_path)
                # Search for zbest files with healpy
                ra_targ = fibermap_data['RA_TARGET'].data
                dec_targ = fibermap_data['DEC_TARGET'].data
                # Getting some NAN in RA/DEC
                good = np.isfinite(ra_targ) & np.isfinite(dec_targ)
                pixels = radec2pix(64, ra_targ[good], dec_targ[good])
                uni_pixels = np.unique(pixels)
                for uni_pix in uni_pixels:
                    zbest_files.append(
                        desispec.io.findfile('zbest',
                                             groupname=uni_pix,
                                             nside=64))

        # Cut down zbest_files to unique ones
        zbest_files = list(set(zbest_files))

        if len(zbest_files) == 0:
            log.fatal('No zbest files found')
            sys.exit(1)

        # Load the table; optionally write it to disk
        simz_tab = dsqa_z.load_z(fibermap_files, zbest_files)
        if args.write_simz_table is not None:
            simz_tab.write(args.write_simz_table, overwrite=True)

    # Meta data
    meta = dict(
        DESISIM=desiutil.depend.getdep(simz_tab.meta, 'desisim'),
        SPECTER=desiutil.depend.getdep(simz_tab.meta, 'specter'),
        SPECPROD=os.getenv('SPECPROD', 'unknown'),
        PIXPROD=os.getenv('PIXPROD', 'unknown'),
    )

    # Run stats
    log.info("Running stats...")
    summ_dict = dsqa_z.summ_stats(simz_tab)
    if args.qafile is not None:
        log.info("Generating yaml file: {:s}".format(args.qafile))
        # yamlify
        # Write yaml
        with open(args.qafile, 'w') as outfile:
            outfile.write(yaml.dump(yamlify(meta), default_flow_style=False))
            outfile.write(
                yaml.dump(yamlify(summ_dict), default_flow_style=False))

    if args.qafig_root is not None:
        log.info("Generating QA files")
        # Summary for dz of all types
        outfile = args.qafig_root + '_dzsumm.png'
        #dsqa_z.dz_summ(simz_tab, outfile=outfile)
        # Summary of individual types
        #outfile = args.qafig_root+'_summ_fig.png'
        #dsqa_z.summ_fig(simz_tab, summ_dict, meta, outfile=outfile)
        for objtype in ['BGS', 'MWS', 'ELG', 'LRG', 'QSO_T', 'QSO_L']:
            outfile = args.qafig_root + '_zfind_{:s}.png'.format(objtype)
            dsqa_z.obj_fig(simz_tab, objtype, summ_dict, outfile=outfile)
Example No. 27
def main(args):
    import os.path
    import sys
    import yaml
    import numpy as np
    import matplotlib
    matplotlib.use('agg')

    import desispec.io
    from desiutil.log import get_logger

    from desisim.spec_qa import redshifts as dsqa_z
    from desiutil.io import yamlify
    import desiutil.depend

    log = get_logger()

    # Initialize
    if args.qaprod_dir is not None:
        qaprod_dir = args.qaprod_dir
    else:
        qaprod_dir = desispec.io.meta.qaprod_root()

    if args.load_simz_table is not None:
        from astropy.table import Table
        log.info("Loading simz info from {:s}".format(args.load_simz_table))
        simz_tab = Table.read(args.load_simz_table)
    else:
        # Grab list of fibermap files
        fibermap_files = []
        zbest_files = []
        nights = desispec.io.get_nights()
        for night in nights:
            for exposure in desispec.io.get_exposures(night,
                                                      raw=True,
                                                      rawdata_dir=args.rawdir):
                # Ignore exposures with no fibermap, assuming they are calibration data.
                fibermap_path = desispec.io.findfile(filetype='fibermap',
                                                     night=night,
                                                     expid=exposure,
                                                     rawdata_dir=args.rawdir)
                if not os.path.exists(fibermap_path):
                    log.debug('Skipping exposure %08d with no fibermap.' %
                              exposure)
                    continue
                # Load data
                fibermap_data = desispec.io.read_fibermap(fibermap_path)
                # Skip calib
                objtype = fibermap_data['OBJTYPE'][0]
                if objtype in ['FLAT', 'FLT', 'ARC', 'BIAS', 'BIA']:
                    continue
                elif objtype in ['SKY', 'TGT', 'BAD']:
                    pass
                else:
                    err_message = 'Unrecognized OBJTYPE {}'.format(objtype)
                    log.critical(err_message)
                    raise ValueError(err_message)

                # Append fibermap file
                fibermap_files.append(fibermap_path)
                # Slurp the zbest_files
                zbest_files += dsqa_z.find_zbest_files(fibermap_data)

        # Cut down zbest_files to unique ones
        zbest_files = list(set(zbest_files))

        if len(zbest_files) == 0:
            log.fatal('No zbest files found')
            sys.exit(1)

        # Load the table; optionally write it to disk
        simz_tab, zbtab = dsqa_z.load_z(fibermap_files,
                                        zbest_files=zbest_files)
        dsqa_z.match_truth_z(simz_tab, zbtab)
        if args.write_simz_table is not None:
            simz_tab.write(args.write_simz_table, overwrite=True)

    # Meta data
    meta = dict(
        DESISIM=desiutil.depend.getdep(simz_tab.meta, 'desisim'),
        SPECPROD=os.getenv('SPECPROD', 'unknown'),
        PIXPROD=os.getenv('PIXPROD', 'unknown'),
    )
    # Include specter version if it was used to generate input files
    # (it isn't used for specsim inputs so that dependency may not exist)
    try:
        meta['SPECTER'] = desiutil.depend.getdep(simz_tab.meta, 'specter')
    except KeyError:
        pass

    # Run stats
    log.info("Running stats...")
    summ_dict = dsqa_z.summ_stats(simz_tab)
    if args.yaml_file is not None:
        log.info("Generating yaml file of stats: {:s}".format(args.yaml_file))
        # yamlify
        # Write yaml
        desispec.io.util.makepath(args.yaml_file)
        with open(args.yaml_file, 'w') as outfile:
            outfile.write(yaml.dump(yamlify(meta), default_flow_style=False))
            outfile.write(
                yaml.dump(yamlify(summ_dict), default_flow_style=False))

    log.info("Generating QA files")
    # Summary for dz of all types
    outfile = qaprod_dir + '/QA_dzsumm.png'
    desispec.io.util.makepath(outfile)
    dsqa_z.dz_summ(simz_tab, outfile=outfile)
    # Summary of individual types
    #outfile = args.qafig_root+'_summ_fig.png'
    #dsqa_z.summ_fig(simz_tab, summ_dict, meta, outfile=outfile)
    for objtype in ['BGS', 'MWS', 'ELG', 'LRG', 'QSO_T', 'QSO_L']:
        outfile = qaprod_dir + '/QA_zfind_{:s}.png'.format(objtype)
        desispec.io.util.makepath(outfile)
        dsqa_z.obj_fig(simz_tab, objtype, summ_dict, outfile=outfile)
Example No. 28
def runpipeline(pl, convdict, conf):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: a list of [pa, qas] pairs, where pa is a pipeline step and qas is
            the list of corresponding QAs for that pa
        convdict: converted dictionary, e.g. conf["IMAGE"] is the real image file
            on disk whereas convdict["IMAGE"] is a desispec.image.Image object,
            and so on. See the setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file,
            e.g. conf = configdict = yaml.load(open('configfile.yaml', 'rb'))
    """

    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    singqa = conf["singleqa"]
    paconf = conf["PipeLine"]
    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    passqadict = None  #- pass this dict to QAs downstream
    schemaMerger = QL_QAMerger(conf['Night'], conf['Expid'], conf['Flavor'],
                               conf['Camera'], conf['Program'], convdict)
    QAresults = []
    if singqa is None:
        for s, step in enumerate(pl):
            log.info("Starting to run step {}".format(paconf[s]["StepName"]))
            pa = step[0]
            pargs = mapkeywords(step[0].config["kwargs"], convdict)
            schemaStep = schemaMerger.addPipelineStep(paconf[s]["StepName"])
            try:
                hb.start("Running {}".format(step[0].name))
                oldinp = inp  #-  copy for QAs that need to see earlier input
                inp = pa(inp, **pargs)
                if step[0].name == 'Initialize':
                    schemaStep.addMetrics(inp[1])
            except Exception as e:
                log.critical("Failed to run PA {} error was {}".format(
                    step[0].name, e),
                             exc_info=True)
                sys.exit("Failed to run PA {}".format(step[0].name))
            qaresult = {}
            for qa in step[1]:
                try:
                    qargs = mapkeywords(qa.config["kwargs"], convdict)
                    hb.start("Running {}".format(qa.name))
                    qargs[
                        "dict_countbins"] = passqadict  #- pass this to all QA downstream

                    if qa.name == "RESIDUAL" or qa.name == "Sky_Residual":
                        res = qa(inp[0], inp[1], **qargs)
                    else:
                        if isinstance(inp, tuple):
                            res = qa(inp[0], **qargs)
                        else:
                            res = qa(inp, **qargs)

                    if qa.name == "COUNTBINS" or qa.name == "CountSpectralBins":
                        passqadict = res
                    if "qafile" in qargs:
                        qawriter.write_qa_ql(qargs["qafile"], res)
                    log.debug("{} {}".format(qa.name, inp))
                    qaresult[qa.name] = res
                    schemaStep.addParams(res['PARAMS'])
                    schemaStep.addMetrics(res['METRICS'])
                except Exception as e:
                    log.warning("Failed to run QA {}. Got Exception {}".format(
                        qa.name, e),
                                exc_info=True)
            hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
            QAresults.append([pa.name, qaresult])
        hb.stop("Pipeline processing finished. Serializing result")
    else:
        import numpy as np
        qa = None
        qas = [[],
               [
                   'Bias_From_Overscan', 'Get_RMS', 'Count_Pixels',
                   'Calc_XWSigma'
               ], 'Trace_Shifts', 'CountSpectralBins',
               ['Sky_Continuum', 'Sky_Peaks'], ['Calculate_SNR'],
               ['Sky_Rband', 'Integrate_Spec']]

        singleqaperpa = [
            'Bias_From_Overscan', 'Check_HDUs', 'Trace_Shifts',
            'CountSpectralBins'
        ]
        for palg in range(len(qas)):
            if singqa in qas[palg]:
                pa = pl[palg][0]
                pac = paconf[palg]
                if singqa in singleqaperpa:
                    qa = pl[palg][1][0]
                else:
                    for qalg in range(len(qas[palg])):
                        if qas[palg][qalg] == singqa:
                            qa = pl[palg][1][qalg]
        if qa is None:
            log.critical("Unknown input QA... Valid QAs are: {}".format(qas))
            sys.exit()

        log.info("Starting to run step {}".format(pac["StepName"]))
        pargs = mapkeywords(pa.config["kwargs"], convdict)
        schemaStep = schemaMerger.addPipelineStep(pac["StepName"])
        qaresult = {}
        try:
            qargs = mapkeywords(qa.config["kwargs"], convdict)
            hb.start("Running {}".format(qa.name))
            if singqa == "Sky_Residual":
                res = qa(inp[0], inp[1], **qargs)
            else:
                if isinstance(inp, tuple):
                    res = qa(inp[0], **qargs)
                else:
                    res = qa(inp, **qargs)
            if singqa == "CountSpectralBins":
                passqadict = res
            if "qafile" in qargs:
                qawriter.write_qa_ql(qargs["qafile"], res)
            log.debug("{} {}".format(qa.name, inp))
            schemaStep.addMetrics(res['METRICS'])
        except Exception as e:
            log.warning("Failed to run QA {}. Got Exception {}".format(
                qa.name, e),
                        exc_info=True)
        if len(qaresult):
            if conf["DumpIntermediates"]:
                with open(pac["OutputFile"], "w") as f:
                    f.write(yaml.dump(yamlify(qaresult)))
                log.info("{} finished".format(qa.name))

    #- merge QAs for this pipeline execution
    #- RS: don't write merged file if running single QA
    if singqa is None:
        log.debug("Dumping mergedQAs")
        from desispec.io import findfile
        specprod_dir = os.environ[
            'QL_SPEC_REDUX'] if 'QL_SPEC_REDUX' in os.environ else ""
        destFile = findfile('ql_mergedQA_file',
                            night=conf['Night'],
                            expid=conf['Expid'],
                            camera=conf['Camera'],
                            specprod_dir=specprod_dir)

        schemaMerger.writeTojsonFile(destFile)
        log.info("Wrote merged QA file {}".format(destFile))
        if isinstance(inp, tuple):
            return inp[0]
        else:
            return inp
Example No. 29
def runpipeline(pl,convdict,conf,mergeQA=False):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: a list of [pa, qas] pairs, where pa is a pipeline step and qas is
            the list of corresponding QAs for that pa
        convdict: converted dictionary, e.g. conf["IMAGE"] is the real image file
            on disk whereas convdict["IMAGE"] is a desispec.image.Image object,
            and so on. See the setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file,
            e.g. conf = configdict = yaml.load(open('configfile.yaml', 'rb'))
        mergeQA: if True, output the merged QA after the pipeline executes.
            Perhaps this should always be True, but it is left as an option
            until configuration and IO settle.
    """

    qlog=qllogger.QLLogger("QuickLook",20)
    log=qlog.getlog()
    hb=QLHB.QLHeartbeat(log,conf["Period"],conf["Timeout"])

    inp=convdict["rawimage"]
    paconf=conf["PipeLine"]
    qlog=qllogger.QLLogger("QuickLook",0)
    log=qlog.getlog()
    passqadict=None #- pass this dict to QAs downstream

    QAresults=[] #- merged QA list for the whole pipeline. This will be reorganized for databasing after the pipeline executes
    for s,step in enumerate(pl):
        log.info("Starting to run step {}".format(paconf[s]["StepName"]))
        pa=step[0]
        pargs=mapkeywords(step[0].config["kwargs"],convdict)
        try:
            hb.start("Running {}".format(step[0].name))
            oldinp=inp #-  copy for QAs that need to see earlier input
            inp=pa(inp,**pargs)
        except Exception as e:
            log.critical("Failed to run PA {} error was {}".format(step[0].name,e))
            sys.exit("Failed to run PA {}".format(step[0].name))
        qaresult={}
        for qa in step[1]:
            try:
                qargs=mapkeywords(qa.config["kwargs"],convdict)
                hb.start("Running {}".format(qa.name))
                qargs["dict_countbins"]=passqadict #- pass this to all QA downstream

                if qa.name=="RESIDUAL" or qa.name=="Sky_Residual":
                    res=qa(inp[0],inp[1],**qargs)
                else:
                    if isinstance(inp,tuple):
                        res=qa(inp[0],**qargs)
                    else:
                        res=qa(inp,**qargs)

                if qa.name=="COUNTBINS" or qa.name=="CountSpectralBins":         #TODO -must run this QA for now. change this later.
                    passqadict=res
                log.debug("{} {}".format(qa.name,inp))
                qaresult[qa.name]=res

            except Exception as e:
                log.warning("Failed to run QA {} error was {}".format(qa.name,e))
        if len(qaresult):
            if conf["DumpIntermediates"]:
                with open(paconf[s]["OutputFile"],"w") as f:
                    f.write(yaml.dump(yamlify(qaresult)))
                hb.stop("Step {} finished. Output is in {} ".format(paconf[s]["StepName"],paconf[s]["OutputFile"]))
        else:
            hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
        QAresults.append([pa.name,qaresult])
    hb.stop("Pipeline processing finished. Serializing result")

    #- merge QAs for this pipeline execution
    if mergeQA is True:
        from desispec.quicklook.util import merge_QAs
        log.info("Merging all the QAs for this pipeline execution")
        merge_QAs(QAresults)

    if isinstance(inp,tuple):
        return inp[0]
    else:
        return inp
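
Finally, a sketch of the conf keys that the runpipeline() variants above actually read; the values are illustrative, since real configurations come from the QuickLook configuration yaml:

conf = {
    'Period': 5.0, 'Timeout': 120.0,   # heartbeat settings
    'Night': '20200101', 'Expid': 42,
    'Flavor': 'science', 'Camera': 'b0', 'Program': 'dark',
    'singleqa': None,                  # or a single QA name, e.g. 'Get_RMS'
    'DumpIntermediates': True,
    'PipeLine': [{'StepName': 'Preproc', 'OutputFile': 'qa-preproc.yaml'}],
}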