Example #1
0
def spline_fit(output_wave,input_wave,input_flux,required_resolution,input_ivar=None,order=3,max_resolution=None):
    """Performs spline fit of input_flux vs. input_wave and resamples at output_wave

    Args:
        output_wave : 1D array of output wavelength samples
        input_wave : 1D array of input wavelengths
        input_flux : 1D array of input flux density
        required_resolution (float) : resolution for spline knot placement (same unit as wavelength)

    Options:
        input_ivar : 1D array of weights for input_flux
        order (int) : spline order
        max_resolution (float) : if not None and first fit fails, retry once with this resolution

    Returns:
        output_flux : 1D array of flux sampled at output_wave

    Raises:
        ValueError : if fewer than two samples have ivar>0, or if the spline
            fit fails (even after the optional max_resolution retry)
    """
    if input_ivar is not None :
        selection=np.where(input_ivar>0)[0]
        if selection.size < 2 :
            log=get_logger()
            message="cannot do spline fit because only {0:d} values with ivar>0".format(selection.size)
            log.error(message)
            raise ValueError(message)
        w1=input_wave[selection[0]]
        w2=input_wave[selection[-1]]
    else :
        w1=input_wave[0]
        w2=input_wave[-1]

    # place interior knots evenly between the first and last valid wavelengths
    res=required_resolution
    n=int((w2-w1)/res)
    res=(w2-w1)/(n+1)
    knots=w1+res*(0.5+np.arange(n))

    ## only keep knots that lie close to at least one input sample,
    ## otherwise splrep can fail its knot-placement conditions
    dknots = abs(knots[:,None]-input_wave)
    mins = np.amin(dknots,axis=1)
    w=mins<res
    knots = knots[w]
    try :
        toto=scipy.interpolate.splrep(input_wave,input_flux,w=input_ivar,k=order,task=-1,t=knots)
        output_flux = scipy.interpolate.splev(output_wave,toto)
    except ValueError :
        log=get_logger()
        if max_resolution is not None  and required_resolution < max_resolution :
            log.warning("spline fit failed with resolution={}, retrying with {}".format(required_resolution,max_resolution))
            # bug fix: propagate the caller's spline order instead of hardcoding order=3
            return spline_fit(output_wave,input_wave,input_flux,max_resolution,input_ivar=input_ivar,order=order,max_resolution=None)
        else :
            log.error("spline fit failed")
            raise ValueError("spline fit failed")
    return output_flux
Example #2
0
def load_qa_frame(filename, frame=None, flavor=None):
    """Load an existing QA_Frame from disk, or generate a new one.

    Args:
        filename: str
        frame: Frame object, optional
        flavor: str, optional
            Type of QA_Frame

    Returns:
        qa_frame: QA_Frame object
    """
    from lvmspec.qa.qa_frame import QA_Frame
    log = get_logger()
    if not os.path.isfile(filename):
        # No QA file on disk yet; build a fresh object from the input frame
        if frame is None:
            log.error("QA file {:s} does not exist.  Expecting frame input".format(filename))
        qaframe = QA_Frame(frame)
    else:
        # Read the existing QA file
        qaframe = read_qa_frame(filename)
        log.info("Loaded QA file {:s}".format(filename))
        if frame is not None:
            # Sanity check: QA metadata must match the frame header
            for key in ('camera', 'expid', 'night', 'flavor'):
                assert getattr(qaframe, key) == frame.meta[key.upper()]
    # Override the flavor if one was requested
    if flavor is not None:
        qaframe.flavor = flavor
    return qaframe
Example #3
0
def write_qa_prod(outroot, qaprod, indent=True):
    """Write QA for a given production

    Args:
        outroot : str
          filename without format extension
        qaprod : QA_Prod object
          production QA holder; its qa_exps list is serialized
        indent : bool, optional
          passed through to json.dump (True serializes as indent=1)

    Returns:
        outfile: str
          output filename
    """
    from lvmutil.io import combine_dicts
    log=get_logger()
    outfile = outroot+'.json'
    outfile = makepath(outfile, 'qa')

    # Loop on exposures, merging each exposure's QA dict into one output dict
    odict = {}
    for qaexp in qaprod.qa_exps:
        # Get the exposure dict
        idict = write_qa_exposure('foo', qaexp, ret_dict=True)
        odict = combine_dicts(odict, idict)
    ydict = yamlify(odict)  # This works well for JSON too
    # Simple json
    with open(outfile, 'wt') as fh:
        json.dump(ydict, fh, indent=indent)
    log.info('Wrote QA_Prod file: {:s}'.format(outfile))

    return outfile
Example #4
0
def main(args):
    """Build observing conditions from the parsed args and create a new simulated exposure."""
    log = get_logger()

    #- Generate obsconditions with args.program, then override as needed
    args.program = args.program.upper()
    if args.program in ('ARC', 'FLAT'):
        # Calibration exposures carry no observing conditions
        obsconditions = None
    else:
        obsconditions = lvmsim.simexp.reference_conditions[args.program]
        # Override any condition explicitly supplied on the command line
        overrides = (('airmass', 'AIRMASS'), ('seeing', 'SEEING'),
                     ('exptime', 'EXPTIME'), ('moonfrac', 'MOONFRAC'),
                     ('moonalt', 'MOONALT'), ('moonsep', 'MOONSEP'))
        for attr, key in overrides:
            value = getattr(args, attr)
            if value is not None:
                obsconditions[key] = value

    sim, fibermap, meta, obs = lvmsim.obs.new_exposure(
        args.program,
        nspec=args.nspec,
        night=args.night,
        expid=args.expid,
        tileid=args.tileid,
        nproc=args.nproc,
        seed=args.seed,
        obsconditions=obsconditions,
        outdir=args.outdir)
Example #5
0
def sim(night, nspec=25, clobber=False):
    """
    Simulate data as part of the integration test.

    Args:
        night (str): YEARMMDD
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails
    """
    log = logging.get_logger()
    # (removed unused local `output_dir`, which was assigned but never read)

    # Create input fibermaps, spectra, and quickgen data
    for expid, program in zip([0, 1, 2], ['flat', 'arc', 'dark']):
        cmd = "newexp-random --program {program} --nspec {nspec} --night {night} --expid {expid}".format(
            expid=expid, program=program, nspec=nspec, night=night)

        simspec = lvmsim.io.findfile('simspec', night, expid)
        fibermap = '{}/fibermap-{:08d}.fits'.format(os.path.dirname(simspec),
                                                    expid)
        # Generate the simulated exposure inputs
        if runcmd(cmd, clobber=clobber) != 0:
            raise RuntimeError('newexp failed for {} exposure {}'.format(
                program, expid))

        # Run quickgen on the simulated inputs
        cmd = "quickgen --simspec {} --fibermap {}".format(simspec, fibermap)
        if runcmd(cmd, clobber=clobber) != 0:
            raise RuntimeError('quickgen failed for {} exposure {}'.format(
                program, expid))

    return
Example #6
0
File: task.py Project: sdss/lvmspec
def default_options(extra=None):
    """
    Get the default options for all workers.

    Args:
        extra (dict): optional extra options to add to the
            default options for each worker class.

    Returns (dict):
        the default options dictionary, suitable for writing
        to the default options.yaml file.
    """
    # bug fix: avoid a mutable default argument ({} shared across calls);
    # a fresh empty dict is created per call instead.
    if extra is None:
        extra = {}

    log = get_logger()

    allopts = {}

    for step in step_types:
        defwork = default_workers[step]
        allopts["{}_worker".format(step)] = defwork
        if defwork in extra:
            allopts["{}_worker_opts".format(step)] = extra[defwork]
        else:
            allopts["{}_worker_opts".format(step)] = {}
        worker = get_worker(step, None, {})
        allopts[step] = worker.default_options()

    return allopts
Example #7
0
def main():
    """Entry point for :command:`desi_dts_delivery`.

    Returns
    -------
    :class:`int`
        An integer suitable for passing to :func:`sys.exit`.
    """
    from os import environ
    from os.path import dirname, join
    from subprocess import Popen
    from lvmutil.log import get_logger
    log = get_logger()
    options = parse_delivery()
    # Remote command to run on the NERSC host at start/end of night.
    remote_command = ['desi_{0.nightStatus}_night {0.night}'.format(options)]
    if options.prefix is not None:
        # NOTE(review): options.prefix is presumably a list of command strings
        # (list concatenation would fail otherwise) -- confirm in parse_delivery().
        remote_command = options.prefix + remote_command
    # Join the commands into one detached shell invocation ("( ... &)")
    # with all output discarded.
    remote_command = ('(' +
                      '; '.join([c + ' &> /dev/null'
                                 for c in remote_command]) + ' &)')
    command = ['ssh', '-n', '-q', options.nersc_host, remote_command]
    log.info("Received file {0.filename} with exposure number {0.exposure:d}.".
             format(options))
    dst = join(environ['LVM_SPECTRO_DATA'], options.night)
    log.info("Using {0} as raw data directory.".format(dst))
    move_file(options.filename, dst)
    exposure_arrived = check_exposure(dst, options.exposure)
    # Trigger remote processing at night start/end, or once the full exposure
    # has arrived.  The ssh child is intentionally not waited on (fire-and-forget).
    if options.nightStatus in ('start', 'end') or exposure_arrived:
        log.info("Calling: {0}.".format(' '.join(command)))
        proc = Popen(command)
    return 0
Example #8
0
    def make_frameqa(self, make_plots=False, clobber=True):
        """ Work through the Production and make QA for all frames

        Parameters:
            make_plots: bool, optional
              Remake the plots too?
            clobber: bool, optional
              not used in the visible body -- TODO confirm intent
        Returns:

        """
        # imports
        from lvmspec.io import meta
        from lvmspec.io.qa import load_qa_frame, write_qa_frame
        from lvmspec.io.fiberflat import read_fiberflat
        from lvmspec.io.sky import read_sky
        from lvmspec.io.fluxcalibration import read_flux_calibration
        from lvmspec.qa import qa_plots
        from lvmspec.qa.qa_frame import qaframe_from_frame
        from lvmspec.io.fluxcalibration import read_stdstar_models
        log = get_logger()

        # Loop on nights
        path_nights = glob.glob(self.specprod_dir+'/exposures/*')
        # Night names are the final path component of each exposures/ subdirectory
        nights = [ipathn[ipathn.rfind('/')+1:] for ipathn in path_nights]
        for night in nights:
            for exposure in get_exposures(night, specprod_dir = self.specprod_dir):
                # Object only??
                frames_dict = get_files(filetype = str('frame'), night = night,
                        expid = exposure, specprod_dir = self.specprod_dir)
                for camera,frame_fil in frames_dict.items():
                    # Load frame
                    qaframe_from_frame(frame_fil, make_plots=make_plots)
                    # NOTE(review): the triple-quote below opens a string /
                    # commented-out section that continues beyond this excerpt.
                    '''
Example #9
0
File: task.py Project: sdss/lvmspec
    def run(self, grph, task, opts, comm=None):
        """
        Run the PSF combining.

        This is a serial call to libspecex to combine the PSF files.

        Args:
            grph (dict): pruned graph with this task and dependencies.
            task (str): the name of this task.
            opts (dict): options to use for this task.
            comm (mpi4py.MPI.Comm): optional MPI communicator.

        Raises:
            RuntimeError: if called with an MPI communicator of size > 1.
        """
        if comm is not None:
            if comm.size > 1:
                raise RuntimeError(
                    "PSFCombine worker should only be called with one process")

        log = get_logger()

        node = grph[task]

        outfile = graph_path(task)
        # renamed loop variable so it no longer shadows the builtin input()
        infiles = [graph_path(name) for name in node["in"]]

        specex.mean_psf(infiles, outfile)

        return
Example #10
0
    def init_fluxcalib(self, re_init=False):
        """ Initialize parameters for FLUXCALIB QA
        Args:
            re_init: bool, (optional)
              Re-initialize  parameter dict

        Returns:

        """
        log = get_logger()
        assert self.flavor == 'science'

        # Standard FLUXCALIB input parameters
        flux_dict = dict(
            ZP_WAVE=0.,  # Wavelength for ZP evaluation (camera dependent)
            MAX_ZP_OFF=0.2,  # Max offset in ZP for individual star
        )

        # Zero-point wavelength (Angstroms) per camera arm
        zp_wave_by_arm = {'b': 4800., 'r': 6500., 'z': 8250.}
        arm = self.camera[0]
        if arm in zp_wave_by_arm:
            flux_dict['ZP_WAVE'] = zp_wave_by_arm[arm]
        else:
            log.error("Not ready for camera {}!".format(self.camera))

        # Init
        self.init_qatype('FLUXCALIB', flux_dict, re_init=re_init)
Example #11
0
def search_for_framefile(frame_file):
    """ Search for an input frame_file in the lvmspec redux hierarchy
    Args:
        frame_file:  str

    Returns:
        mfile: str,  full path to frame_file if found

    Raises:
        IOError: if the file is not found in any night/exposure
    """
    log = get_logger()
    # Parse frame file name of the form root-camera-expid.ext
    path, ifile = os.path.split(frame_file)
    splits = ifile.split('-')
    root = splits[0]
    camera = splits[1]
    fexposure = int(splits[2].split('.')[0])

    # Loop on nights
    nights = get_nights()
    for night in nights:
        for exposure in get_exposures(night):
            if exposure == fexposure:
                mfile = findfile(root,
                                 camera=camera,
                                 night=night,
                                 expid=exposure)
                if os.path.isfile(mfile):
                    return mfile
                else:
                    log.error("Expected file {:s} not found..".format(mfile))
    # bug fix: previously fell through and silently returned None; the
    # docstring promises an error when the file cannot be located.
    raise IOError("Unable to find {:s} in the redux hierarchy".format(frame_file))
Example #12
0
def move_file(filename, dst):
    """Move delivered file from the DTS spool to the final raw data area.

    This function will ensure that the destination directory exists.

    Parameters
    ----------
    filename : :class:`str`
        The name, including full path, of the file to move.
    dst : :class:`str`
        The destination *directory*.

    Returns
    -------
    :class:`str`
        The value returned by :func:`shutil.move`.
    """
    from os import mkdir
    from os.path import exists  # removed unused `isdir` import
    from shutil import move
    from lvmutil.log import get_logger
    log = get_logger()
    if not exists(dst):
        # Group-writable, setgid directory so the whole group can access it
        log.info("mkdir('{0}', 0o2770)".format(dst))
        mkdir(dst, 0o2770)
    log.info("move('{0}', '{1}')".format(filename, dst))
    return move(filename, dst)
Example #13
0
def update_logger(options):
    """Reconfigure the default logging object.

    Replaces all existing handlers with a single FileHandler pointing at the
    per-night log file, preserving the first existing handler's formatter.

    Parameters
    ----------
    options : :class:`argparse.Namespace`
        The parsed command-line options.

    Returns
    -------
    :class:`logging.Logger`
        The updated object.
    """
    from os import environ
    from os.path import join
    from logging import FileHandler
    from lvmutil.log import get_logger
    log = get_logger()
    fmt = None
    try:
        filename = join(environ['LVM_SPECTRO_DATA'], options.night,
                        'desi_{0.stage}_night_{0.night}.log'.format(options))
    except (AttributeError, KeyError):
        # This can happen during tests.
        return log
    if log.hasHandlers():
        # bug fix: iterate over a copy of the handler list -- removing
        # handlers while iterating log.handlers skips every other handler.
        for h in list(log.handlers):
            if fmt is None:
                fmt = h.formatter  # keep the first handler's formatter
            log.removeHandler(h)
    h = FileHandler(filename)
    h.setFormatter(fmt)
    log.addHandler(h)
    return log
Example #14
0
def write_targets(filename,
                  data,
                  indir=None,
                  qso_selection=None,
                  sandboxcuts=False,
                  nside=None):
    """Write a target catalogue.

    Parameters
    ----------
    filename : :class:`str`
        Output target selection file.
    data : :class:`numpy.ndarray`
        Numpy structured array of targets to save.  Must contain at least
        the RELEASE, RA and DEC columns read below.
    indir : :class:`str`, optional
        If passed, recorded in the header as the tractor-files input path.
    qso_selection : :class:`str`, optional
        QSO selection method recorded in the header; 'unknown' if omitted.
    sandboxcuts : :class:`bool`, optional
        Recorded in the header under the 'sandboxcuts' keyword.
    nside : :class:`int`
        If passed, add a column to the targets array populated
        with HEALPix pixels at resolution nside.
    """
    # FIXME: assert data and tsbits schema

    #ADM set up the default logger
    from lvmutil.log import get_logger
    log = get_logger()

    #ADM use RELEASE to determine the release string for the input targets
    if len(data) == 0:
        #ADM if there are no targets, then we don't know the Data Release
        drstring = 'unknowndr'
    else:
        drint = np.max(data['RELEASE'] // 1000)
        drstring = 'dr' + str(drint)

    #- Create header to include versions, etc.
    hdr = fitsio.FITSHDR()
    depend.setdep(hdr, 'lvmtarget', lvmtarget_version)
    depend.setdep(hdr, 'lvmtarget-git', gitversion())
    depend.setdep(hdr, 'sandboxcuts', sandboxcuts)
    depend.setdep(hdr, 'photcat', drstring)

    if indir is not None:
        depend.setdep(hdr, 'tractor-files', indir)

    if qso_selection is None:
        log.warning('qso_selection method not specified for output file')
        depend.setdep(hdr, 'qso-selection', 'unknown')
    else:
        depend.setdep(hdr, 'qso-selection', qso_selection)

    #ADM add HEALPix column, if requested by input
    if nside is not None:
        #ADM convert RA/DEC (degrees) to the colatitude/longitude HEALPix expects
        theta, phi = np.radians(90 - data["DEC"]), np.radians(data["RA"])
        hppix = hp.ang2pix(nside, theta, phi, nest=True)
        data = rfn.append_fields(data, 'HPXPIXEL', hppix, usemask=False)
        hdr['HPXNSIDE'] = nside
        hdr['HPXNEST'] = True

    #ADM add PHOTSYS column, mapped from RELEASE
    photsys = release_to_photsys(data["RELEASE"])
    data = rfn.append_fields(data, 'PHOTSYS', photsys, usemask=False)

    # clobber=True: silently overwrite any existing file at this path
    fitsio.write(filename, data, extname='TARGETS', header=hdr, clobber=True)
Example #15
0
def encode_mtl_targetid(targets):
    """
    Sets targetid used in MTL, which encodes both the target class and
    arbitrary traceability data propagated from individual input sources.

    Allows rows in final MTL (and hence fibre map) to be mapped to input
    sources.

    Args:
        targets: structured array with TARGETID, LVM_TARGET, BGS_TARGET
            and MWS_TARGET columns.

    Returns:
        encoded_targetid: copy of targets['TARGETID'] with survey and
            source bits added via encode_survey_source().
    """
    #ADM set up the default logger
    from lvmutil.log import get_logger
    log = get_logger()

    encoded_targetid = targets['TARGETID'].copy()

    # Validate incoming target ids
    if not np.all(encoded_targetid <= ENCODE_MTL_USER_MASK):
        log.error('Invalid range of user-specfied targetid: cannot exceed {}'
                    .format(ENCODE_MTL_USER_MASK))

    # Membership flags: a row belongs to a survey if any of its bits are set
    desi_target = targets['LVM_TARGET'] != 0
    bgs_target  = targets['BGS_TARGET']  != 0
    mws_target  = targets['MWS_TARGET']  != 0

    # Assumes surveys are mutually exclusive.
    assert(np.max(np.sum([desi_target,bgs_target,mws_target],axis=0)) == 1)

    # Set the survey bits
    #encoded_targetid[desi_target] += TARGETID_SURVEY_INDEX['desi'] << SOURCE_END
    #encoded_targetid[bgs_target ] += TARGETID_SURVEY_INDEX['bgs']  << SOURCE_END
    #encoded_targetid[mws_target]  += TARGETID_SURVEY_INDEX['mws']  << SOURCE_END

    encoded_targetid[desi_target] += encode_survey_source(TARGETID_SURVEY_INDEX['desi'],0,0)
    encoded_targetid[bgs_target ] += encode_survey_source(TARGETID_SURVEY_INDEX['bgs'],0,0)
    encoded_targetid[mws_target]  += encode_survey_source(TARGETID_SURVEY_INDEX['mws'],0,0)

    # Set the source bits. Will be different for each survey.
    desi_sources = ['ELG','LRG','QSO']
    bgs_sources  = ['BGS_FAINT','BGS_BRIGHT']
    mws_sources  = ['MWS_MAIN','MWS_WD','MWS_NEARBY']

    # NOTE: the additions below accumulate one source contribution per
    # matching mask bit; a target with several source bits set gets several
    # additions.
    for name in desi_sources:
        ii  = (targets['LVM_TARGET'] & desi_mask[name]) != 0
        assert(desi_mask[name] <= SOURCE_MAX)
        encoded_targetid[ii] += encode_survey_source(0,desi_mask[name],0)

    for name in bgs_sources:
        ii  = (targets['BGS_TARGET'] & bgs_mask[name]) != 0
        assert(bgs_mask[name] <= SOURCE_MAX)
        encoded_targetid[ii] += encode_survey_source(0,bgs_mask[name],0)

    for name in mws_sources:
        ii  = (targets['MWS_TARGET'] & mws_mask[name]) != 0
        assert(mws_mask[name] <= SOURCE_MAX)
        encoded_targetid[ii] += encode_survey_source(0,mws_mask[name],0)

    # FIXME (APC): expensive...
    # Encoded ids must remain unique for the MTL-to-source mapping to work.
    assert(len(np.unique(encoded_targetid)) == len(encoded_targetid))
    return encoded_targetid
Example #16
0
File: task.py Project: sdss/lvmspec
    def run(self, grph, task, opts, comm=None):
        """
        Run Redrock on a spectral group.

        Args:
            grph (dict): pruned graph with this task and dependencies.
            task (str): the name of this task.
            opts (dict): options to use for this task.
            comm (mpi4py.MPI.Comm): optional MPI communicator.

        Raises:
            RuntimeError: if the redshift fit failed on any process.
        """
        nproc = 1
        rank = 0
        if comm is not None:
            nproc = comm.size
            rank = comm.rank

        log = get_logger()

        node = grph[task]

        spectra = "spectra-{}-{}".format(node["nside"], node["pixel"])
        specfile = graph_path(spectra)

        outfile = graph_path(task)
        outdir = os.path.dirname(outfile)
        details = os.path.join(outdir, "rrdetails_{}.h5".format(task))

        options = {}
        options["output"] = details
        options["zbest"] = outfile
        options.update(opts)
        optarray = option_list(options)
        optarray.append(specfile)

        # write out the equivalent commandline
        if rank == 0:
            com = ["RUN", "rrdesi_mpi"]
            com.extend(optarray)
            log.info(" ".join(com))

        failcount = 0
        try:
            rrdesi(options=optarray, comm=comm)
        except Exception:
            # bug fix: the original bare "except:" also swallowed
            # KeyboardInterrupt/SystemExit; count the failure so all ranks
            # can agree on it below.
            failcount += 1
            sys.stdout.flush()

        if comm is not None:
            failcount = comm.allreduce(failcount)

        if failcount > 0:
            # all processes throw
            raise RuntimeError("some redshifts failed for task "
                               "{}".format(task))

        return
Example #17
0
File: task.py Project: sdss/lvmspec
    def run(self, grph, task, opts, comm=None):
        """
        Compute the sky model.

        Args:
            grph (dict): pruned graph with this task and dependencies.
            task (str): the name of this task.
            opts (dict): options to use for this task.
            comm (mpi4py.MPI.Comm): optional MPI communicator.

        Raises:
            RuntimeError: if called with more than one MPI process, or if
                the task does not have exactly one frame and one fiberflat
                input.
        """
        if comm is not None:
            if comm.size > 1:
                raise RuntimeError(
                    "Sky worker should only be called with one process")

        log = get_logger()

        node = grph[task]

        # Sort the task inputs by type; the loop variable is renamed so it
        # no longer shadows the builtin input().
        frm = []
        flat = []
        for inname in node["in"]:
            inode = grph[inname]
            if inode["type"] == "frame":
                frm.append(inname)
            elif inode["type"] == "fiberflat":
                flat.append(inname)
        if len(frm) != 1:
            raise RuntimeError("sky needs exactly one frame file")
        if len(flat) != 1:
            raise RuntimeError("sky needs exactly one fiberflat file")

        framefile = graph_path(frm[0])
        flatfile = graph_path(flat[0])
        outfile = graph_path(task)

        #qafile, qafig = qa_path(outfile)

        options = {}
        options["infile"] = framefile
        options["fiberflat"] = flatfile
        #options["qafile"] = qafile
        #options["qafig"] = qafig
        options["outfile"] = outfile
        options.update(opts)
        optarray = option_list(options)

        # at debug level, write out the equivalent commandline
        com = ["RUN", "desi_compute_sky"]
        com.extend(optarray)
        log.info(" ".join(com))

        args = skypkg.parse(optarray)
        skypkg.main(args)

        return
Example #18
0
def compatible(head1, head2):
    """Return True when two PSF headers agree on all structural keywords.

    Logs a warning for the first keyword that differs and returns False.
    """
    log = get_logger()
    keys = ("PSFTYPE", "NPIX_X", "NPIX_Y", "HSIZEX", "HSIZEY", "FIBERMIN",
            "FIBERMAX", "NPARAMS", "LEGDEG", "GHDEGX", "GHDEGY")
    for key in keys:
        if head1[key] != head2[key]:
            log.warning("different {} : {}, {}".format(key, head1[key], head2[key]))
            return False
    return True
Example #19
0
def get_source_types(fibermap):
    '''
    Return a list of specsim source types based upon fibermap['LVM_TARGET']

    Args:
        fibermap: fibermap Table including LVM_TARGET column

    Returns array of source_types 'sky', 'elg', 'lrg', 'qso', 'star'

    Unassigned fibers fibermap['TARGETID'] == -1 will be treated as 'sky'

    If fibermap.meta['FLAVOR'] = 'arc' or 'flat', returned source types will
    match that flavor, though specsim doesn't use those as source_types

    TODO: specsim/lvmmodel doesn't have a fiber input loss model for BGS yet,
    so BGS targets get source_type = 'lrg' (!)
    '''
    from lvmutil.log import get_logger
    log = get_logger('DEBUG')
    # Fallback when the targeting column is absent entirely
    if 'LVM_TARGET' not in fibermap.dtype.names:
        log.warning(
            "LVM_TARGET not in fibermap table; using source_type='star' for everything"
        )
        return np.array([
            'star',
        ] * len(fibermap))

    # 4-character strings; '' marks fibers not yet classified
    source_type = np.zeros(len(fibermap), dtype='U4')
    assert np.all(source_type == '')

    tm = lvmtarget.desi_mask
    if 'TARGETID' in fibermap.dtype.names:
        unassigned = fibermap['TARGETID'] == -1
        source_type[unassigned] = 'sky'

    # NOTE: assignment order matters below -- a fiber matching several
    # categories keeps the LAST matching assignment (e.g. 'star' overrides
    # 'bgs' which overrides 'qso', etc.).
    source_type[(fibermap['OBJTYPE'] == 'FLAT')] = 'FLAT'
    source_type[(fibermap['OBJTYPE'] == 'ARC')] = 'ARC'
    source_type[(fibermap['LVM_TARGET'] & tm.SKY) != 0] = 'sky'
    source_type[(fibermap['LVM_TARGET'] & tm.ELG) != 0] = 'elg'
    source_type[(fibermap['LVM_TARGET'] & tm.LRG) != 0] = 'lrg'
    source_type[(fibermap['LVM_TARGET'] & tm.QSO) != 0] = 'qso'
    source_type[(fibermap['LVM_TARGET'] & tm.BGS_ANY) != 0] = 'bgs'
    # Any standard star or Milky Way Survey bit maps to 'star'
    starmask = tm.STD_FSTAR | tm.STD_WD | tm.STD_BRIGHT | tm.MWS_ANY
    source_type[(fibermap['LVM_TARGET'] & starmask) != 0] = 'star'

    # Every fiber must have been classified by this point
    assert not np.any(source_type == '')

    for name in sorted(np.unique(source_type)):
        n = np.count_nonzero(source_type == name)
        log.debug('{} {} targets'.format(name, n))

    return source_type
Example #20
0
def list_tractorfiles(root):
    """Return a list of tractor files found under `root` directory.
    """
    from lvmutil.log import get_logger
    log = get_logger(timestamp=True)

    # Collect the listing once; the original called iter_tractorfiles(root)
    # twice and so walked the directory tree twice.
    files = list(iter_tractorfiles(root))

    #ADM check for duplicate files in case the listing was run
    #ADM at too low a level in the directory structure
    check = [os.path.basename(x) for x in files]
    if len(check) != len(set(check)):
        log.error("Duplicate Tractor files in root directory!")

    return files
Example #21
0
File: task.py Project: sdss/lvmspec
 def run(self, grph, task, opts, comm=None):
     """Create a placeholder output file for this task (no-op worker).

     Removed dead locals `nproc`, `rank` and `node`, which were assigned
     but never used.
     """
     log = get_logger()
     p = graph_path(task)
     log.info("NOOP Worker creating {}".format(p))
     with open(p, "w") as f:
         f.write("NOOP\n")
     return
Example #22
0
def main(args):
    """Generate a QA frame (and optional figures) for args.frame_file."""
    from lvmspec.io import meta
    from lvmspec.qa.qa_frame import qaframe_from_frame
    log = get_logger()

    log.info("starting")
    # Use the requested redux directory, defaulting to the production root
    specprod_dir = meta.specprod_root() if args.reduxdir is None else args.reduxdir

    # Generate qaframe (and figures?)
    _ = qaframe_from_frame(args.frame_file, specprod_dir=specprod_dir, make_plots=args.make_plots,
                           output_dir=args.output_dir)
Example #23
0
File: task.py Project: sdss/lvmspec
 def default_options(self):
     """Return the default options dict with the star templates path resolved.

     Preference order: explicit self.starmodels, then $LVM_ROOT, then the
     hard-coded NERSC default location.
     """
     log = get_logger()
     opts = {}
     if self.starmodels is not None:
         opts["starmodels"] = self.starmodels
     elif "LVM_ROOT" in os.environ:
         opts["starmodels"] = os.environ[
             "LVM_ROOT"] + "/spectro/templates/star_templates/v2.1/star_templates_v2.1.fits"
     else:
         log.warning(
             "$LVM_ROOT not set; using NERSC default /project/projectdirs/desi"
         )
         opts[
             "starmodels"] = "/project/projectdirs/desi/spectro/templates/star_templates/v2.1/star_templates_v2.1.fits"
     return opts
Example #24
0
File: task.py Project: sdss/lvmspec
    def run(self, grph, task, opts, comm=None):
        """
        Compute the fiberflat.

        Args:
            grph (dict): pruned graph with this task and dependencies.
            task (str): the name of this task.
            opts (dict): options to use for this task.
            comm (mpi4py.MPI.Comm): optional MPI communicator.
        """
        # This worker is strictly serial.
        if comm is not None and comm.size > 1:
            raise RuntimeError(
                "Fiberflat worker should only be called with one process")

        log = get_logger()

        node = grph[task]
        night, obj = graph_night_split(task)
        (temp, band, spec, expid) = graph_name_split(obj)
        cam = "{}{}".format(band, spec)

        if len(node["in"]) != 1:
            raise RuntimeError("fiberflat should have only one input frame")
        framefile = graph_path(node["in"][0])
        outfile = graph_path(task)

        #qafile, qafig = qa_path(outfile)

        # Assemble options for desi_compute_fiberflat; task-specific opts
        # may override the defaults.
        options = {"infile": framefile, "outfile": outfile}
        options.update(opts)
        optarray = option_list(options)

        # at debug level, write out the equivalent commandline
        com = ["RUN", "desi_compute_fiberflat"]
        com.extend(optarray)
        log.info(" ".join(com))

        fiberflat.main(fiberflat.parse(optarray))

        return
Example #25
0
def load_qa_brick(filename):
    """ Load an existing QA_Brick or generate one, as needed
    Args:
        filename: str

    Returns:
        qa_brick: QA_Brick object
    """
    from lvmspec.qa.qa_brick import QA_Brick
    log = get_logger()
    if not os.path.isfile(filename):
        # No QA file on disk yet; start from a fresh object
        qabrick = QA_Brick()
    else:
        # Read from the existing file
        qabrick = read_qa_brick(filename)
        log.info("Loaded QA file {:s}".format(filename))
    return qabrick
Example #26
0
def apply_fiberflat(frame, fiberflat):
    """Apply fiberflat to frame.  Modifies frame.flux and frame.ivar

    Args:
        frame : `lvmspec.Frame` object
        fiberflat : `lvmspec.FiberFlat` object

    The frame is divided by the fiberflat, except where the fiberflat=0.

    frame.mask gets bit specmask.BADFIBERFLAT set where
      * fiberflat.fiberflat == 0
      * fiberflat.ivar == 0
      * fiberflat.mask != 0

    Raises:
        ValueError : if frame and fiberflat wavelength grids differ
    """
    log=get_logger()
    log.info("starting")

    # check same wavelength, die if not the case
    if not np.allclose(frame.wave, fiberflat.wave):
        message = "frame and fiberflat do not have the same wavelength arrays"
        log.critical(message)
        raise ValueError(message)

    """
     F'=F/C
     Var(F') = Var(F)/C**2 + F**2*(  d(1/C)/dC )**2*Var(C)
             = 1/(ivar(F)*C**2) + F**2*(1/C**2)**2*Var(C)
             = 1/(ivar(F)*C**2) + F**2*Var(C)/C**4
             = 1/(ivar(F)*C**2) + F**2/(ivar(C)*C**4)
    """
    #- shorthand
    ff = fiberflat
    sp = frame  #- sp=spectra for this frame

    #- update sp.ivar first since it depends upon the original sp.flux
    #- The leading boolean factors zero out ivar wherever flux ivar, flat
    #- ivar, or the flat itself is non-positive.  The "+(x==0)" terms guard
    #- the divisions against zero denominators (they add 1 exactly where the
    #- denominator would otherwise be 0, and the result there is zeroed by
    #- the leading factors anyway).
    sp.ivar=(sp.ivar>0)*(ff.ivar>0)*(ff.fiberflat>0)/( 1./((sp.ivar+(sp.ivar==0))*(ff.fiberflat**2+(ff.fiberflat==0))) + sp.flux**2/(ff.ivar*ff.fiberflat**4+(ff.ivar*ff.fiberflat==0)) )

    #- Then update sp.flux, taking care not to divide by 0
    ii = np.where(ff.fiberflat > 0)
    sp.flux[ii] = sp.flux[ii] / ff.fiberflat[ii]

    #- Flag fibers/pixels where the flat is unusable
    badff = (ff.fiberflat == 0.0) | (ff.ivar == 0) | (ff.mask != 0)
    sp.mask[badff] |= specmask.BADFIBERFLAT

    log.info("done")
Example #27
0
    def vet(self):
        """ Perform very basic checks on the frame
        Generally run before writing to disk (or when read)

        Returns:
            diagnosis: int  (bitwise flag)
              0: Pass
              2**0: Improper data shapes
              2**1: Improper meta data

        """
        log = get_logger()

        # Shape check: flux must be (nspec, nwave)
        bad_shape = self.flux.shape != (self.nspec, self.nwave)
        if bad_shape:
            log.error(
                'Frame nspec {} nwave {} inconsistent with flux.shape {}'.
                format(self.nspec, self.nwave, self.flux.shape))

        # Meta data checks: must exist, carry FLAVOR, and use a known flavor
        bad_meta = False
        if self.meta is None:
            log.error('Frame.meta missing')
            bad_meta = True
        elif 'FLAVOR' not in self.meta.keys():
            log.error('Frame.meta missing FLAVOR keyword')
            bad_meta = True
        else:
            from lvmspec.io.params import read_params
            desi_params = read_params()
            if self.meta['FLAVOR'] not in desi_params['frame_types']:
                log.error("Frame.meta['FLAVOR'] = '{}' not in {}".format(
                    self.meta['FLAVOR'], desi_params['frame_types']))
                bad_meta = True

        # Combine both checks into the bitwise diagnosis flag
        return 2**0 * bad_shape + 2**1 * bad_meta
Example #28
0
def check_env():
    """
    Check required environment variables; log every one that is missing,
    then exit with status 1 via sys.exit() if any were absent.
    """
    log = logging.get_logger()
    #- template locations
    missing_env = False
    if 'LVM_BASIS_TEMPLATES' not in os.environ:
        # NOTE: the original called .format(name) here on a string with no
        # placeholders, where `name` was not yet defined -> NameError.
        log.warning(
            'missing $LVM_BASIS_TEMPLATES needed for simulating spectra')
        missing_env = True
    elif not os.path.isdir(os.getenv('LVM_BASIS_TEMPLATES')):
        # Variable is set but does not point at an existing directory.
        # (Guarded by elif: os.getenv() returns None when unset, and
        # os.path.isdir(None) would raise TypeError.)
        log.warning('missing $LVM_BASIS_TEMPLATES directory')
        log.warning(
            'e.g. see NERSC:/project/projectdirs/desi/spectro/templates/basis_templates/v2.2'
        )
        missing_env = True

    for name in ('LVM_SPECTRO_SIM', 'LVM_SPECTRO_REDUX', 'PIXPROD', 'SPECPROD',
                 'LVMMODEL'):
        if name not in os.environ:
            log.warning("missing ${0}".format(name))
            missing_env = True

    if missing_env:
        log.warning("Why are these needed?")
        log.warning("    Simulations written to $LVM_SPECTRO_SIM/$PIXPROD/")
        log.warning("    Raw data read from $LVM_SPECTRO_DATA/")
        log.warning(
            "    Spectro pipeline output written to $LVM_SPECTRO_REDUX/$SPECPROD/"
        )
        log.warning("    Templates are read from $LVM_BASIS_TEMPLATES")

    #- Wait until end to raise exception so that we report everything that
    #- is missing before actually failing
    if missing_env:
        log.critical("missing env vars; exiting without running pipeline")
        sys.exit(1)

    #- Override $LVM_SPECTRO_DATA to match $LVM_SPECTRO_SIM/$PIXPROD
    os.environ['LVM_SPECTRO_DATA'] = os.path.join(os.getenv('LVM_SPECTRO_SIM'),
                                                  os.getenv('PIXPROD'))
Example #29
0
def load_simulated_data(session, obs_pass=0):
    """Load simulated frame and brick data.

    Parameters
    ----------
    session : :class:`sqlalchemy.orm.session.Session`
        Database connection.
    obs_pass : :class:`int`, optional
        If set, only simulate one pass.
    """
    log = get_logger()
    status = 'succeeded'
    for tile in get_all_tiles(session, obs_pass=obs_pass):
        for band in 'brz':
            for spectro in range(10):
                frame, bricks = tile.simulate_frame(session, band, spectro)
                # Insert supporting Night / ExposureFlavor rows only when
                # they do not already exist.
                try:
                    session.query(Night).filter_by(night=frame.night).one()
                except NoResultFound:
                    session.add(Night(night=frame.night))
                try:
                    session.query(ExposureFlavor).filter_by(
                        flavor=frame.flavor).one()
                except NoResultFound:
                    session.add(ExposureFlavor(flavor=frame.flavor))
                # Record the frame itself plus status rows for the frame
                # and every brick it touches, stamped with the frame time.
                session.add(frame)
                session.add(FrameStatus(frame_id=frame.id,
                                        status=status,
                                        stamp=frame.dateobs))
                for brick in bricks:
                    session.add(BrickStatus(brick_id=brick.id,
                                            status=status,
                                            stamp=frame.dateobs))
                frame.bricks = bricks
        # One commit per tile keeps transactions bounded.
        session.commit()
        log.info("Completed insert of tileid = {0:d}.".format(tile.id))
    return
Example #30
0
def q3c_index(table):
    """Create a q3c index on a table.

    Parameters
    ----------
    table : :class:`str`
        Name of the table to index.
    """
    from lvmutil.log import get_logger
    log = get_logger()
    log.info("Creating q3c index on %s.%s.", schemaname, table)
    # Build the index on q3c_ang2ipix(ra, dec), then cluster the table on
    # it and refresh planner statistics, all in one execute.
    sql = """CREATE INDEX ix_{table}_q3c_ang2ipix ON {schema}.{table} (q3c_ang2ipix(ra, dec));
    CLUSTER {schema}.{table} USING ix_{table}_q3c_ang2ipix;
    ANALYZE {schema}.{table};
    """.format(schema=schemaname, table=table)
    dbSession.execute(sql)
    log.info("Finished q3c index on %s.%s.", schemaname, table)
    dbSession.commit()
    return