Example #1
def write_qa_prod(outroot, qaprod, indent=True):
    """Write QA for a given production

    Args:
        outroot : str
          filename without format extension
        qaprod : QA_Prod object

    Returns:
        outfile: str
          output filename
    """
    from lvmutil.io import combine_dicts
    log = get_logger()
    outfile = outroot+'.json'
    outfile = makepath(outfile, 'qa')

    # Loop on exposures
    odict = {}
    for qaexp in qaprod.qa_exps:
        # Get the exposure dict; ret_dict=True skips the file write,
        # so the outroot argument ('foo') is a throwaway placeholder
        idict = write_qa_exposure('foo', qaexp, ret_dict=True)
        odict = combine_dicts(odict, idict)
    ydict = yamlify(odict)  # This works well for JSON too
    # Simple json
    with open(outfile, 'wt') as fh:
        json.dump(ydict, fh, indent=indent)
    log.info('Wrote QA_Prod file: {:s}'.format(outfile))

    return outfile
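
The loop above relies on combine_dicts to deep-merge the per-exposure dicts keyed by night and exposure id. The real helper lives in lvmutil.io; the sketch below is a hypothetical re-implementation, with its merge semantics assumed from how it is used here.

def combine_dicts_sketch(d1, d2):
    # Recursively merge d2 into a copy of d1; nested dicts are merged,
    # values from d2 win on conflicts.
    out = dict(d1)
    for key, val in d2.items():
        if key in out and isinstance(out[key], dict) and isinstance(val, dict):
            out[key] = combine_dicts_sketch(out[key], val)
        else:
            out[key] = val
    return out

a = {'20200101': {1: {'flavor': 'science'}}}
b = {'20200101': {2: {'flavor': 'arc'}}}
print(combine_dicts_sketch(a, b))
# {'20200101': {1: {'flavor': 'science'}, 2: {'flavor': 'arc'}}}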
Example #2
def write_qa_exposure(outroot, qaexp, ret_dict=False):
    """Write QA for a given exposure

    Args:
        outroot : str
          filename without format extension
        qaexp : QA_Exposure object
        ret_dict : bool, optional
          If True, return the dict instead of writing a file
          (used mainly by write_qa_prod)

    Returns:
        outfile or odict : str or dict
    """
    # Generate the dict
    odict = {qaexp.night: {qaexp.expid: {}}}
    odict[qaexp.night][qaexp.expid]['flavor'] = qaexp.flavor
    odict[qaexp.night][qaexp.expid]['meta'] = qaexp.meta
    cameras = list(qaexp.data['frames'].keys())
    for camera in cameras:
        odict[qaexp.night][qaexp.expid][camera] = qaexp.data['frames'][camera]
    # Return dict only?
    if ret_dict:
        return odict
    # Simple yaml
    ydict = yamlify(odict)
    outfile = outroot+'.yaml'
    outfile = makepath(outfile, 'qa')
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
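
Because ret_dict=True returns the dict before any file is written, the outroot argument becomes a placeholder, which is why write_qa_prod above passes 'foo'. A minimal usage sketch with a hypothetical stand-in for QA_Exposure, mocking only the attributes the function reads:

class FakeExposure:
    # Hypothetical stand-in for QA_Exposure; only these attributes are read.
    night = '20200101'
    expid = 42
    flavor = 'science'
    meta = {}
    data = {'frames': {'b1': {'SNR': 3.2}}}

odict = write_qa_exposure('unused', FakeExposure(), ret_dict=True)
# {'20200101': {42: {'flavor': 'science', 'meta': {}, 'b1': {'SNR': 3.2}}}}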
Example #3
def write_qa_brick(outfile, qabrick):
    """Write QA for a given exposure

    Args:
        outfile : filename
        qabrick : QA_Brick object
            _data: dict of QA info
    """
    outfile = makepath(outfile, 'qa')

    # Simple yaml
    ydict = yamlify(qabrick.data)
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))

    return outfile
Example #4
def write_qa_ql(outfile, qaresult):
    """Write QL output files

       Args:
           outfile : str
             filename to be written (yaml)
           qaresult : dict
             QAresults from run_qa()

       Returns:
           outfile : str
    """
    import yaml
    from lvmutil.io import yamlify
    # Take in QL input and output to yaml
    qadict = yamlify(qaresult)
    f=open(outfile,"w")
    f.write(yaml.dump(qadict))
    return outfile
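
Every writer above funnels its dict through yamlify because QA results are typically full of numpy scalars and arrays, which PyYAML would otherwise serialize as python/object tags. A minimal sketch of that round trip; the exact conversions performed by lvmutil.io.yamlify are assumed from its usage here:

import numpy as np
import yaml
from lvmutil.io import yamlify

qaresult = {'METRICS': {'rms': np.float32(1.5), 'counts': np.arange(3)}}
# yamlify is assumed to coerce numpy scalars/arrays to native floats,
# ints, and lists so yaml.dump emits clean YAML.
print(yaml.dump(yamlify(qaresult), default_flow_style=False))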
Example #5
def write_qa_frame(outfile, qaframe, verbose=False):
    """Write QA for a given frame

    Args:
        outfile : str
          filename
        qaframe : QA_Frame object, with the following attributes
            qa_data: dict of QA info
        verbose : bool, optional
          if True, log the output filename

    Returns:
        outfile : str
    """
    log = get_logger()
    outfile = makepath(outfile, 'qa')

    # Generate the dict
    odict = {qaframe.night: {qaframe.expid: {qaframe.camera: {}, 'flavor': qaframe.flavor}}}
    odict[qaframe.night][qaframe.expid][qaframe.camera] = qaframe.qa_data
    ydict = yamlify(odict)
    # Simple yaml
    with open(outfile, 'w') as yamlf:
        yamlf.write(yaml.dump(ydict))
    if verbose:
        log.info("Wrote QA frame file: {:s}".format(outfile))

    return outfile
Example #6
def main(args):
    import os.path
    import sys
    import yaml
    import numpy as np
    import matplotlib
    matplotlib.use('agg')

    import lvmspec.io
    from lvmutil.log import get_logger

    from lvmsim.spec_qa import redshifts as dsqa_z
    from lvmutil.io import yamlify
    import lvmutil.depend
    from lvmmodel.footprint import radec2pix

    log = get_logger()

    if args.load_simz_table is not None:
        from astropy.table import Table
        log.info("Loading simz info from {:s}".format(args.load_simz_table))
        simz_tab = Table.read(args.load_simz_table)
    else:
        # Grab list of fibermap files
        fibermap_files = []
        zbest_files = []
        nights = lvmspec.io.get_nights()
        for night in nights:
            for exposure in lvmspec.io.get_exposures(night,
                                                     raw=True,
                                                     rawdata_dir=args.rawdir):
                # Ignore exposures with no fibermap, assuming they are calibration data.
                fibermap_path = lvmspec.io.findfile(filetype='fibermap',
                                                    night=night,
                                                    expid=exposure,
                                                    rawdata_dir=args.rawdir)
                if not os.path.exists(fibermap_path):
                    log.debug('Skipping exposure %08d with no fibermap.' %
                              exposure)
                    continue
                # Load data
                fibermap_data = lvmspec.io.read_fibermap(fibermap_path)
                # Skip calib
                if fibermap_data['OBJTYPE'][0] in ['FLAT', 'ARC', 'BIAS']:
                    continue
                elif fibermap_data['OBJTYPE'][0] in [
                        'SKY', 'STD', 'SCIENCE', 'BGS', 'MWS_STAR', 'ELG',
                        'LRG', 'QSO'
                ]:
                    pass
                else:
                    raise ValueError('Unexpected OBJTYPE: {}'.format(
                        fibermap_data['OBJTYPE'][0]))
                # Append fibermap file
                fibermap_files.append(fibermap_path)
                # Search for zbest files with healpy
                ra_targ = fibermap_data['RA_TARGET'].data
                dec_targ = fibermap_data['DEC_TARGET'].data
                # Some RA/DEC values can be NaN; keep only finite coordinates
                good = np.isfinite(ra_targ) & np.isfinite(dec_targ)
                pixels = radec2pix(64, ra_targ[good], dec_targ[good])
                uni_pixels = np.unique(pixels)
                for uni_pix in uni_pixels:
                    zbest_files.append(
                        lvmspec.io.findfile('zbest',
                                            groupname=uni_pix,
                                            nside=64))

        # Cut down zbest_files to unique ones
        zbest_files = list(set(zbest_files))

        if len(zbest_files) == 0:
            log.fatal('No zbest files found')
            sys.exit(1)

        # Build the simz table; optionally write it to disk
        simz_tab = dsqa_z.load_z(fibermap_files, zbest_files)
        if args.write_simz_table is not None:
            simz_tab.write(args.write_simz_table, overwrite=True)

    # Meta data
    meta = dict(
        DESISIM=lvmutil.depend.getdep(simz_tab.meta, 'lvmsim'),
        SPECPROD=os.getenv('SPECPROD', 'unknown'),
        PIXPROD=os.getenv('PIXPROD', 'unknown'),
    )
    # Include specter version if it was used to generate input files
    # (it isn't used for specsim inputs so that dependency may not exist)
    try:
        meta['SPECTER'] = lvmutil.depend.getdep(simz_tab.meta, 'specter')
    except KeyError:
        pass

    # Run stats
    log.info("Running stats..")
    summ_dict = dsqa_z.summ_stats(simz_tab)
    if args.qafile is not None:
        log.info("Generating yaml file: {:s}".format(args.qafile))
        # yamlify and write the meta and summary dicts
        with open(args.qafile, 'w') as outfile:
            outfile.write(yaml.dump(yamlify(meta), default_flow_style=False))
            outfile.write(
                yaml.dump(yamlify(summ_dict), default_flow_style=False))

    if args.qafig_root is not None:
        log.info("Generating QA files")
        # Summary for dz of all types
        outfile = args.qafig_root + '_dzsumm.png'
        #dsqa_z.dz_summ(simz_tab, outfile=outfile)
        # Summary of individual types
        #outfile = args.qafig_root+'_summ_fig.png'
        #dsqa_z.summ_fig(simz_tab, summ_dict, meta, outfile=outfile)
        for objtype in ['BGS', 'MWS', 'ELG', 'LRG', 'QSO_T', 'QSO_L']:
            outfile = args.qafig_root + '_zfind_{:s}.png'.format(objtype)
            dsqa_z.obj_fig(simz_tab, objtype, summ_dict, outfile=outfile)
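
The zbest search above groups targets by HEALPix pixel via radec2pix(64, ra, dec). A sketch of what that mapping is assumed to do, mirroring the desimodel implementation this code descends from, which wraps healpy's ang2pix in the NESTED scheme:

import numpy as np
import healpy as hp

def radec2pix_sketch(nside, ra, dec):
    # NESTED-scheme pixel indices from RA/Dec in degrees; assumed to match
    # lvmmodel.footprint.radec2pix.
    return hp.ang2pix(nside, ra, dec, nest=True, lonlat=True)

ra = np.array([150.1, 150.2])
dec = np.array([2.2, 2.3])
print(np.unique(radec2pix_sketch(64, ra, dec)))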
Example #7
def getJson(self):
    import json
    return json.dumps(yamlify(self.__schema))
Example #8
def getYaml(self):
    yres = yamlify(self.__schema)
    return yaml.dump(yres)
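
These two accessors work from the same yamlify output because it reduces the schema to plain dicts, lists, and scalars, which both serializers accept. A standalone sketch with a hypothetical schema dict:

import json
import yaml
from lvmutil.io import yamlify

schema = {'night': '20200101', 'expid': 42, 'METRICS': {'snr': 3.2}}
plain = yamlify(schema)       # plain Python types only
print(json.dumps(plain))      # JSON view
print(yaml.dump(plain))       # YAML view of the same object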
Example #9
def runpipeline(pl, convdict, conf, mergeQA=False):
    """
    Runs the quicklook pipeline as configured

    Args:
        pl: list of [pa, qas] pairs, where pa is a pipeline step and qas is the
            list of QAs for that pa
        convdict: converted dictionary, e.g. conf["IMAGE"] is the raw psf file
            on disk while convdict["IMAGE"] is an lvmspec.image.Image object, and
            so on; see the setup_pipeline method below for examples.
        conf: a configured dictionary, read from the configuration yaml file,
            e.g. conf=configdict=yaml.load(open('configfile.yaml','rb'))
        mergeQA: if True, output the merged QA after the pipeline executes. Perhaps
            this should always be True, but it is left as an option until
            configuration and IO settle.
    """

    qlog = qllogger.QLLogger()
    log = qlog.getlog()
    hb = QLHB.QLHeartbeat(log, conf["Period"], conf["Timeout"])

    inp = convdict["rawimage"]
    paconf = conf["PipeLine"]
    passqadict = None  #- pass this dict to QAs downstream
    schemaMerger = QL_QAMerger(conf['Night'], conf['Expid'], conf['Flavor'],
                               conf['Camera'])
    #- merged QA list for the whole pipeline; reorganized for databasing after the pipeline executes
    QAresults = []
    for s, step in enumerate(pl):
        log.info("Starting to run step {}".format(paconf[s]["StepName"]))
        pa = step[0]
        pargs = mapkeywords(step[0].config["kwargs"], convdict)
        schemaStep = schemaMerger.addPipelineStep(paconf[s]["StepName"])
        try:
            hb.start("Running {}".format(step[0].name))
            oldinp = inp  #-  copy for QAs that need to see earlier input
            inp = pa(inp, **pargs)
        except Exception as e:
            log.critical("Failed to run PA {} error was {}".format(
                step[0].name, e),
                         exc_info=True)
            sys.exit("Failed to run PA {}".format(step[0].name))
        qaresult = {}
        for qa in step[1]:
            try:
                qargs = mapkeywords(qa.config["kwargs"], convdict)
                hb.start("Running {}".format(qa.name))
                qargs["dict_countbins"] = passqadict  #- pass this to all QA downstream

                if qa.name == "RESIDUAL" or qa.name == "Sky_Residual":
                    res = qa(inp[0], inp[1], **qargs)

                else:
                    if isinstance(inp, tuple):
                        res = qa(inp[0], **qargs)
                    else:
                        res = qa(inp, **qargs)

                if qa.name == "COUNTBINS" or qa.name == "CountSpectralBins":  #TODO -must run this QA for now. change this later.
                    passqadict = res
                if "qafile" in qargs:
                    qawriter.write_qa_ql(qargs["qafile"], res)
                log.debug("{} {}".format(qa.name, inp))
                qaresult[qa.name] = res
                schemaStep.addParams(res['PARAMS'])
                schemaStep.addMetrics(res['METRICS'])
            except Exception as e:
                log.warning("Failed to run QA {}. Got Exception {}".format(
                    qa.name, e),
                            exc_info=True)
        if len(qaresult):
            if conf["DumpIntermediates"]:
                with open(paconf[s]["OutputFile"], "w") as f:
                    f.write(yaml.dump(yamlify(qaresult)))
                hb.stop("Step {} finished. Output is in {} ".format(
                    paconf[s]["StepName"], paconf[s]["OutputFile"]))
        else:
            hb.stop("Step {} finished.".format(paconf[s]["StepName"]))
        QAresults.append([pa.name, qaresult])
    hb.stop("Pipeline processing finished. Serializing result")

    #- merge QAs for this pipeline execution
    if mergeQA:
        # from lvmspec.quicklook.util import merge_QAs
        # log.info("Merging all the QAs for this pipeline execution")
        # merge_QAs(QAresults,conf)
        log.debug("Dumping mergedQAs")
        from lvmspec.io import findfile
        ftype = 'ql_mergedQA_file'
        specprod_dir = os.environ.get('QL_SPEC_REDUX', "")
        if conf['Flavor'] == 'arcs':
            ftype = 'ql_mergedQAarc_file'
        destFile = findfile(ftype,
                            night=conf['Night'],
                            expid=conf['Expid'],
                            camera=conf['Camera'],
                            specprod_dir=specprod_dir)
        # findfile returns the same name for different QL executions, so this
        # will overwrite the file and earlier results will be erased.
        schemaMerger.writeToFile(destFile)
        log.info("Wrote merged QA file {}".format(destFile))
    if isinstance(inp, tuple):
        return inp[0]
    else:
        return inp
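
The pl argument pairs each pipeline step with its QAs: both PAs and QAs are callables that carry a name and a config['kwargs'] dict. A hedged structural sketch with hypothetical stand-ins (the real classes live in the lvmspec quicklook modules):

class DummyPA:
    # Hypothetical pipeline step: transforms the input and passes it on.
    name = 'Preproc'
    config = {'kwargs': {}}
    def __call__(self, inp, **kwargs):
        return inp

class DummyQA:
    # Hypothetical QA: returns a dict with PARAMS and METRICS, as
    # runpipeline expects when it calls schemaStep.addParams/addMetrics.
    name = 'CountSpectralBins'
    config = {'kwargs': {}}
    def __call__(self, inp, **kwargs):
        return {'PARAMS': {}, 'METRICS': {}}

pl = [[DummyPA(), [DummyQA()]]]  # one step with one QA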