Example #1
def main(args, comm=None):
    t1 = datetime.datetime.now()

    log = get_logger()

    rank = 0
    nproc = 1

    if comm is not None:
        rank = comm.rank
        nproc = comm.size

    # raw and production locations

    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    if rank == 0:
        log.info("starting at {}".format(time.asctime()))
        log.info("using raw dir {}".format(rawdir))
        log.info("using spectro production dir {}".format(proddir))

    # run it!

    pipe.run_steps(args.first,
                   args.last,
                   spectrographs=args.spectrographs,
                   nightstr=args.nights,
                   comm=comm)
    t2 = datetime.datetime.now()

    if rank == 0:
        if "STARTTIME" in os.environ:
            try:
                t0 = datetime.datetime.strptime(os.getenv("STARTTIME"),
                                                "%Y%m%d-%H:%M:%S")
                dt = t1 - t0
                minutes, seconds = dt.seconds // 60, dt.seconds % 60
                log.info("Python startup time: {} min {} sec".format(
                    minutes, seconds))
            except ValueError:
                log.error("unable to parse $STARTTIME={}".format(
                    os.getenv("STARTTIME")))
        else:
            log.info("python startup time unknown since $STARTTIME not set")

        dt = t2 - t1
        minutes, seconds = dt.seconds // 60, dt.seconds % 60
        log.info("Run time: {} min {} sec".format(minutes, seconds))
Example #2
def main(args, comm=None):
    if args.verbose:
        import logging
        log.setLevel(logging.DEBUG)
    #we do this so we can use operator.itemgetter
    import operator
    #we do this so our print statements can have timestamps
    import time

    rank = 0
    nproc = 1
    if comm is not None:
        import mpi4py
        rank = comm.rank
        nproc = comm.size

    if rank == 0:
        log.info('Starting pixsim at {}'.format(asctime()))

    #no preflight check here, too complicated.   
    #we'll assume the user knows what he or she is doing...

    # Determine which nights we are using
    nights = None
    if args.nights is not None:
        nights = args.nights.split(",")
    else:
        rawdir = os.path.abspath(specio.rawdata_root())
        nights = []
        nightpat = re.compile(r"\d{8}")
        for root, dirs, files in os.walk(rawdir, topdown=True):
            for d in dirs:
                nightmat = nightpat.match(d)
                if nightmat is not None:
                    nights.append(d)

    # Get the list of exposures for each night
    night_expid = {}
    all_expid = []
    exp_to_night = {}
    for nt in nights:
        night_expid[nt] = specio.get_exposures(nt, raw=True)

    #get a list of tuples of (night,expid) that we can evenly divide between communicators
    night_exposure_list=list()
    if comm is None or comm.rank == 0:
        for nt in nights:
            for exp in night_expid[nt]:
                rawfile = desispec.io.findfile('raw', nt, exp)
                if not os.path.exists(rawfile):
                    night_exposure_list.append([nt,exp])
                elif args.overwrite:
                    log.warning('Overwriting pre-existing {}'.format(os.path.basename(rawfile)))
                    os.remove(rawfile)
                    night_exposure_list.append([nt,exp])
                else:
                    log.info('Skipping pre-existing {}'.format(os.path.basename(rawfile)))

        if args.nexp is not None:
            night_exposure_list = night_exposure_list[0:args.nexp]

    if comm is not None:
        night_exposure_list = comm.bcast(night_exposure_list, root=0)

    if len(night_exposure_list) == 0:
        if comm is None or comm.rank == 0:
            log.error('No exposures to process')
        sys.exit(1)

    # Get the list of cameras and make sure it's in the right format
    cams = []
    if args.cameras is not None:
        entry = args.cameras.split(',')
        for i in entry:
            cams.append(i)
    else:
        #do this with band first so we can avoid re-broadcasting cosmics
        for band in ['b', 'r', 'z']:
            for spec in range(10):
                cams.append('{}{}'.format(band, spec)) 

    #are we using cosmics?
    if args.cosmics is not None:
        addcosmics = True
    else:
        addcosmics = False

    ncameras = len(cams)
    nexposures = len(night_exposure_list)
    #call utility function to figure out how many nodes we have
    nnodes = mpi_count_nodes(comm)

    #call utility functions to divide our workload
    if args.nodes_per_exp is not None:
        user_specified_nodes = args.nodes_per_exp
    else:
        user_specified_nodes = None

    nodes_per_comm_exp = get_nodes_per_exp(nnodes, nexposures, ncameras, user_specified_nodes)
    #also figure out how many exposure communicators we have
    num_exp_comm = nnodes // nodes_per_comm_exp
 
    #split the communicator into exposure communicators
    comm_exp, node_index_exp, num_nodes_exp = mpi_split_by_node(comm, nodes_per_comm_exp)
    #further splitting will happen automatically in simulate_exposure

    #based on this, figure out which simspecfiles and rawfiles are assigned to each communicator
    #find all specfiles
    #find all rawfiles
    rawfile_list=[]
    simspecfile_list=[]
    night_list=[]
    expid_list=[]
    for i in range(len(night_exposure_list)):
        night_list.append(night_exposure_list[i][0])
        expid_list.append(night_exposure_list[i][1])
        rawfile_list.append(desispec.io.findfile('raw', night_list[i], expid_list[i]))
        simspecfile_list.append(io.findfile('simspec', night_list[i], expid_list[i]))
 
    #now divvy the rawfiles and simspec files between exposure communicators
    #there is one rawfile and one simspec file for each exposure
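    #exposure communicator i takes every num_exp_comm-th exposure (round-robin),
    #so the per-communicator workloads stay roughly balanced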
    rawfile_comm_exp=[]
    simspecfile_comm_exp=[]
    for i in range(num_exp_comm):
        if node_index_exp == i: #assign rawfile, simspec file to one communicator at a time
            rawfile_comm_exp=rawfile_list[i::num_exp_comm]
            simspecfile_comm_exp=simspecfile_list[i::num_exp_comm] 
            night_comm_exp=night_list[i::num_exp_comm]
            expid_comm_exp=expid_list[i::num_exp_comm]
    
    if comm is not None:
        comm.Barrier()

    #now wrap pixsim.simulate_exposure for each exposure (in desisim.pixsim)
    if comm_exp.rank == 0:
        log.info("Starting simulate_exposure for night {} expid {}".format(night_comm_exp, expid_comm_exp))
    for i in range(len(rawfile_comm_exp)):
        simulate_exposure(simspecfile_comm_exp[i], rawfile_comm_exp[i], cameras=cams,
            ccdshape=None, simpixfile=None, addcosmics=addcosmics, comm=comm_exp)


    if comm is not None:
        comm.Barrier()

    if rank == 0:
        log.info('Finished pixsim for nights {} at {}'.format(args.nights, asctime()))
Example #3
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails

    """

    import argparse
    parser = argparse.ArgumentParser(usage="%(prog)s [options]")
    # parser.add_argument("-i", "--input", type=str,  help="input data")
    # parser.add_argument("-o", "--output", type=str,  help="output data")
    parser.add_argument("--skip-psf",
                        action="store_true",
                        help="Skip PSF fitting step")
    args = parser.parse_args()

    log = logging.get_logger()

    # YEARMMDD string, rolls over at noon not midnight
    if night is None:
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # raw and production locations

    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # create production

    if clobber and os.path.isdir(proddir):
        shutil.rmtree(proddir)

    dbfile = io.get_pipe_database()
    if not os.path.exists(dbfile):
        com = "desi_pipe create --db-sqlite"
        log.info('Running {}'.format(com))
        sp.check_call(com, shell=True)
    else:
        log.info("Using pre-existing production database {}".format(dbfile))

    # Modify options file to restrict the spectral range

    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.prod.yaml_read(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    opts['psf']['specmin'] = 0
    opts['psf']['nspec'] = nspec
    opts['traceshift']['nfibers'] = nspec
    pipe.prod.yaml_write(optpath, opts)

    if args.skip_psf:
        #- Copy desimodel psf into this production instead of fitting psf
        import shutil
        for channel in ['b', 'r', 'z']:
            refpsf = '{}/data/specpsf/psf-{}.fits'.format(
                os.getenv('DESIMODEL'), channel)
            nightpsf = io.findfile('psfnight', night, camera=channel + '0')
            shutil.copy(refpsf, nightpsf)
            for expid in [0, 1, 2]:
                exppsf = io.findfile('psf', night, expid, camera=channel + '0')
                shutil.copy(refpsf, exppsf)

        #- Resync database to current state
        dbpath = io.get_pipe_database()
        db = pipe.load_db(dbpath, mode="w")
        db.sync(night)

    # Run the pipeline tasks in order
    from desispec.pipeline.tasks.base import default_task_chain
    for tasktype in default_task_chain:
        #- if we skip psf/psfnight/traceshift, update state prior to extractions
        if tasktype == 'traceshift' and args.skip_psf:
            db.getready()
        run_pipeline_step(tasktype)

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'TRUTH')
    try:
        elginfo = fits.getdata(simspec, 'TRUTH_ELG')
    except Exception:
        elginfo = None

    from desimodel.footprint import radec2pix
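    # zbest outputs are grouped by HEALPix pixel; nside=64 is the grouping
    # resolution assumed for this production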
    nside = 64
    pixels = np.unique(
        radec2pix(nside, fibermap['TARGET_RA'], fibermap['TARGET_DEC']))

    num_missing = 0
    for pix in pixels:
        zfile = io.findfile('zbest', groupname=pix)
        if not os.path.exists(zfile):
            log.error('Missing {}'.format(zfile))
            num_missing += 1

    if num_missing > 0:
        log.critical('{} zbest files missing'.format(num_missing))
        sys.exit(1)

    print()
    print("--------------------------------------------------")
    print("Pixel     True  z        ->  Class  z        zwarn")
    # print("3338p190  SKY   0.00000  ->  QSO    1.60853   12   - ok")
    for pix in pixels:
        zfile = io.findfile('zbest', groupname=pix)
        if not os.path.exists(zfile):
            log.error('Missing {}'.format(zfile))
            continue

        zfx = fits.open(zfile, memmap=False)
        zbest = zfx['ZBEST'].data
        for i in range(len(zbest['Z'])):
            objtype = zbest['SPECTYPE'][i]
            z, zwarn = zbest['Z'][i], zbest['ZWARN'][i]

            j = np.where(fibermap['TARGETID'] == zbest['TARGETID'][i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            oiiflux = 0.0
            if truetype == 'ELG':
                k = np.where(elginfo['TARGETID'] == zbest['TARGETID'][i])[0][0]
                oiiflux = elginfo['OIIFLUX'][k]

            truez = siminfo['REDSHIFT'][j]
            dv = C_LIGHT * (z - truez) / (1 + truez)
            status = None
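            # A classification counts as "ok" when the velocity error dv is
            # within 150 km/s for galaxies (750 km/s for QSOs), or when a
            # failure is excusable: sky fibers, or ELGs fainter than the
            # ~8e-17 [OII] flux limit.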
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17:
                status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GALAXY' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GALAXY':
                    if abs(dv) < 150:
                        status = 'ok'
                    elif oiiflux < 8e-17:
                        status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
                    else:
                        status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux)
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype in ('STD', 'FSTD') and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'OOPS'
            else:
                status = 'OOPS'
            print('{0:<8d}  {1:4s} {2:8.5f}  -> {3:5s} {4:8.5f} {5:4d}  - {6}'.
                  format(pix, truetype, truez, objtype, z, zwarn, status))

    print("--------------------------------------------------")
Example #4
def main(args, comm=None):
    rank = 0
    nproc = 1
    if comm is not None:
        rank = comm.rank
        nproc = comm.size

    # Determine which nights we are using
    nights = None
    if args.nights is not None:
        nights = args.nights.split(",")
    else:
        if rank == 0:
            rawdir = os.path.abspath(specio.rawdata_root())
            nights = []
            nightpat = re.compile(r"\d{8}")
            for root, dirs, files in os.walk(rawdir, topdown=True):
                for d in dirs:
                    nightmat = nightpat.match(d)
                    if nightmat is not None:
                        nights.append(d)
                break
        if comm is not None:
            nights = comm.bcast(nights, root=0)

    # Get the list of exposures for each night
    night_expid = {}
    all_expid = []
    exp_to_night = {}
    if rank == 0:
        for nt in nights:
            night_expid[nt] = specio.get_exposures(nt, raw=True)
            all_expid.extend(night_expid[nt])
            for ex in night_expid[nt]:
                exp_to_night[ex] = nt
    if comm is not None:
        night_expid = comm.bcast(night_expid, root=0)
        all_expid = comm.bcast(all_expid, root=0)
        exp_to_night = comm.bcast(exp_to_night, root=0)

    expids = np.array(all_expid, dtype=np.int32)
    nexp = len(expids)

    # Get the list of cameras
    cams = None
    if args.cameras is not None:
        cams = args.cameras.split(",")
    else:
        cams = []
        for band in ['b', 'r', 'z']:
            for spec in range(10):
                cams.append('{}{}'.format(band, spec))

    # number of cameras
    ncamera = len(cams)

    # check that our communicator is an appropriate size

    if comm is not None:
        if ncamera * args.camera_procs > comm.size:
            if comm.rank == 0:
                print("Communicator size ({}) too small for {} cameras each with {} procs".format(comm.size, ncamera, args.camera_procs), flush=True)
                comm.Abort()

    # create a set of reproducible seeds for each exposure
    np.random.seed(args.seed)
    maxexp = np.max(expids)
    allseeds = np.random.randint(2**32, size=(maxexp+1))
    seeds = allseeds[-nexp:]

    taskproc = ncamera * args.camera_procs

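    # Split the world communicator: comm_group ties together the taskproc ranks
    # that cooperate on one exposure, while comm_rank ties together ranks that
    # hold the same position across the different groups.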
    comm_group = comm
    comm_rank = None
    group = 0
    ngroup = 1
    group_rank = 0
    if comm is not None:
        from mpi4py import MPI
        group = comm.rank
        ngroup = comm.size
        if taskproc > 1:
            ngroup = int(comm.size / taskproc)
            group = int(comm.rank / taskproc)
            group_rank = comm.rank % taskproc
            comm_group = comm.Split(color=group, key=group_rank)
            comm_rank = comm.Split(color=group_rank, key=group)
        else:
            comm_group = MPI.COMM_SELF
            comm_rank = comm

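    # np.array_split gives each group a contiguous block of the exposure IDs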
    myexpids = np.array_split(expids, ngroup)[group]

    for ex in myexpids:
        nt = exp_to_night[ex]
        
        # path to raw file
        simspecfile = simio.findfile('simspec', nt, ex)
        rawfile = specio.findfile('raw', nt, ex)
        rawfile = os.path.join(os.path.dirname(simspecfile), rawfile)
        
        # Is this exposure already finished?
        done = True
        if group_rank == 0:
            if not os.path.isfile(rawfile):
                done = False
            if args.preproc:
                for c in cams:
                    pixfile = specio.findfile('pix', night=nt,
                        expid=ex, camera=c)
                    if not os.path.isfile(pixfile):
                        done = False
        if comm_group is not None:
            done = comm_group.bcast(done, root=0)
        if done and not args.overwrite:
            if group_rank == 0:
                print("Skipping completed exposure {:08d} on night {}".format(ex, nt))
            continue

        # Write per-process logs to a separate directory,
        # since there are so many of them.
        logdir = "{}_logs".format(rawfile)
        if group_rank == 0:
            if not os.path.isdir(logdir):
                os.makedirs(logdir)
        if comm_group is not None:
            comm_group.barrier()
        tasklog = os.path.join(logdir, "pixsim")

        with stdouterr_redirected(to=tasklog, comm=comm_group):
            try:
                options = {}
                options["night"] = nt
                options["expid"] = int(ex)
                options["cosmics"] = args.cosmics
                options["seed"] = seeds[ex]
                options["cameras"] = ",".join(cams)
                options["mpi_camera"] = args.camera_procs
                options["verbose"] = args.verbose
                options["preproc"] = args.preproc
                optarray = option_list(options)
                pixargs = pixsim.parse(optarray)
                pixsim.main(pixargs, comm_group)
            except:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
                print("".join(lines), flush=True)
Example #6
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts

    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist

    Raises:
        RuntimeError if any script fails

    """
    log = logging.get_logger()
    log.setLevel(logging.DEBUG)

    # YEARMMDD string, rolls over at noon not midnight
    # TODO: fix usage of night to be something other than today
    if night is None:
        #night = time.strftime('%Y%m%d', time.localtime(time.time()-12*3600))
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # create production

    # FIXME:  someday run PSF estimation too...
    ### com = "desi_pipe --spectrographs 0 --fakeboot --fakepsf"
    com = "desi_pipe --spectrographs 0 --fakeboot --fakepsf"
    sp.check_call(com, shell=True)

    # raw and production locations

    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # Modify options file to restrict the spectral range

    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.yaml_read(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    pipe.yaml_write(optpath, opts)

    # run the generated shell scripts

    # FIXME:  someday run PSF estimation too...

    # print("Running bootcalib script...")
    # com = os.path.join(proddir, "run", "scripts", "bootcalib_all.sh")
    # sp.check_call(["bash", com])

    # print("Running specex script...")
    # com = os.path.join(proddir, "run", "scripts", "specex_all.sh")
    # sp.check_call(["bash", com])

    # print("Running psfcombine script...")
    # com = os.path.join(proddir, "run", "scripts", "psfcombine_all.sh")
    # sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "run_shell.sh")
    print("Running extraction through calibration: " + com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "spectra.sh")
    print("Running spectral regrouping: " + com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "redshift.sh")
    print("Running redshift script " + com)
    sp.check_call(["bash", com])

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'METADATA')

    brickdirs = glob.glob(os.path.join(proddir, "bricks", "*"))
    bricks = [os.path.basename(x) for x in brickdirs]

    print()
    print("--------------------------------------------------")
    print("Brick     True  z        ->  Class  z        zwarn")
    # print("3338p190  SKY   0.00000  ->  QSO    1.60853   12   - ok")
    for b in bricks:
        zbest = io.read_zbest(io.findfile('zbest', brickname=b))
        for i in range(len(zbest.z)):
            if zbest.spectype[i] == 'ssp_em_galaxy':
                objtype = 'GAL'
            elif zbest.spectype[i] == 'spEigenStar':
                objtype = 'STAR'
            else:
                objtype = zbest.spectype[i]

            z, zwarn = zbest.z[i], zbest.zwarn[i]

            j = np.where(fibermap['TARGETID'] == zbest.targetid[i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            oiiflux = siminfo['OIIFLUX'][j]
            truez = siminfo['REDSHIFT'][j]
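            # 3e5 approximates the speed of light in km/s, so dv is the
            # redshift error expressed as a velocity in km/s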
            dv = 3e5 * (z - truez) / (1 + truez)
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17:
                status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GAL' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GAL':
                    if abs(dv) < 150 or oiiflux < 8e-17:
                        status = 'ok ([OII] flux {:.2g})'.format(oiiflux)
                    else:
                        status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux)
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype in ('STD', 'FSTD') and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'OOPS'
            else:
                status = 'OOPS'
            print('{0}  {1:4s} {2:8.5f}  -> {3:5s} {4:8.5f} {5:4d}  - {6}'.
                  format(b, truetype, truez, objtype, z, zwarn, status))

    print("--------------------------------------------------")
Example #7
def main(args, comm=None):
    t1 = datetime.datetime.now()

    log = get_logger()

    rank = 0
    nproc = 1
    if comm is not None:
        rank = comm.rank
        nproc = comm.size

    # Check start up time.
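    # $STARTTIME, when set, is assumed to hold the job launch time in
    # %Y%m%d-%H%M%S form, exported by whatever wrapper started this process.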

    if rank == 0:
        if "STARTTIME" in os.environ:
            try:
                t0 = datetime.datetime.strptime(os.getenv("STARTTIME"), "%Y%m%d-%H%M%S")
                dt = t1 - t0
                minutes, seconds = dt.seconds//60, dt.seconds%60
                log.info("Python startup time: {} min {} sec".format(minutes, seconds))
            except ValueError:
                log.error("unable to parse $STARTTIME={}".format(os.getenv("STARTTIME")))
        else:
            log.info("Python startup time unknown since $STARTTIME not set")
        sys.stdout.flush()

    # raw and production locations

    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    if rank == 0:
        log.info("Starting at {}".format(time.asctime()))
        log.info("  Using raw dir {}".format(rawdir))
        log.info("  Using spectro production dir {}".format(proddir))
        sys.stdout.flush()

    # Get task list from disk or from STDIN

    tasklist = None
    if args.taskfile is not None:
        # One process reads the file and broadcasts
        if rank == 0:
            tasklist = pipe.prod.task_read(args.taskfile)
        if comm is not None:
            tasklist = comm.bcast(tasklist, root=0)
    else:
        # Every process has the same STDIN contents.
        tasklist = list()
        for line in sys.stdin:
            tasklist.append(line.rstrip())

    # Do we actually have any tasks?
    if len(tasklist) == 0:
        warnings.warn("Task list is empty", RuntimeWarning)

    # run it!

    (db, opts) = pipe.load_prod("w")

    ntask = len(tasklist)
    ready = None
    done = None
    failed = None
    if args.nodb:
        ready, done, failed = pipe.run_task_list(args.tasktype, tasklist, opts,
                                           comm=comm, db=None)
    else:
        ready, done, failed = pipe.run_task_list(args.tasktype, tasklist, opts,
                                           comm=comm, db=db)

    t2 = datetime.datetime.now()

    if rank == 0:
        log.info("  {} tasks already done, {} tasks were ready, and {} failed".format(done, ready, failed))
        dt = t2 - t1
        minutes, seconds = dt.seconds//60, dt.seconds%60
        log.info("Run time: {} min {} sec".format(minutes, seconds))
        sys.stdout.flush()

    if comm is not None:
        comm.barrier()

    # Did we have any ready tasks that were not already done?
    # Note: if there were no ready tasks, but some were already
    # done, then we want to exit with a "0" error code.  This will
    # allow the calling script to continue with other pipeline steps
    # and / or allow other dependent jobs to run.
    if done == 0:
        # nothing is done
        if ready == 0:
            if rank == 0:
                warnings.warn("No tasks were ready or done", RuntimeWarning)
            sys.exit(1)
        if (failed == ready) and (failed > 1):
            # all tasks failed (and there are more than one)
            if rank == 0:
                warnings.warn("All tasks that were run failed", RuntimeWarning)
            sys.exit(1)
    else:
        # At least some tasks were done- we return zero so that future
        # jobs can run.
        if (ready > 0) and (failed == ready):
            if rank == 0:
                warnings.warn("All tasks that were run failed", RuntimeWarning)
            sys.exit(1)

    return
Example #8
def main(args, comm=None):
    t1 = datetime.datetime.now()

    log = get_logger()

    rank = 0
    nproc = 1
    if comm is not None:
        rank = comm.rank
        nproc = comm.size

    # Check start up time.

    if rank == 0:
        if "STARTTIME" in os.environ:
            try:
                t0 = datetime.datetime.strptime(os.getenv("STARTTIME"),
                                                "%Y%m%d-%H%M%S")
                dt = t1 - t0
                minutes, seconds = dt.seconds // 60, dt.seconds % 60
                log.info("Python startup time: {} min {} sec".format(
                    minutes, seconds))
            except ValueError:
                log.error("unable to parse $STARTTIME={}".format(
                    os.getenv("STARTTIME")))
        else:
            log.info("Python startup time unknown since $STARTTIME not set")
        sys.stdout.flush()

    # raw and production locations

    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    if rank == 0:
        log.info("Starting at {}".format(time.asctime()))
        log.info("  Using raw dir {}".format(rawdir))
        log.info("  Using spectro production dir {}".format(proddir))
        sys.stdout.flush()

    # Get task list from disk or from STDIN

    tasklist = None
    if args.taskfile is not None:
        # One process reads the file and broadcasts
        if rank == 0:
            tasklist = pipe.prod.task_read(args.taskfile)
        if comm is not None:
            tasklist = comm.bcast(tasklist, root=0)
    else:
        # Every process has the same STDIN contents.
        tasklist = list()
        for line in sys.stdin:
            tasklist.append(line.rstrip())

    # Do we actually have any tasks?
    if len(tasklist) == 0:
        warnings.warn("Task list is empty", RuntimeWarning)

    # run it!

    (db, opts) = pipe.load_prod("w")

    ntask = len(tasklist)
    ready = None
    done = None
    failed = None
    if args.nodb:
        ready, done, failed = pipe.run_task_list(args.tasktype,
                                                 tasklist,
                                                 opts,
                                                 comm=comm,
                                                 db=None)
    else:
        ready, done, failed = pipe.run_task_list(args.tasktype,
                                                 tasklist,
                                                 opts,
                                                 comm=comm,
                                                 db=db)

    t2 = datetime.datetime.now()

    if rank == 0:
        log.info("  {} tasks already done, {} tasks were ready, and {} failed".
                 format(done, ready, failed))
        dt = t2 - t1
        minutes, seconds = dt.seconds // 60, dt.seconds % 60
        log.info("Run time: {} min {} sec".format(minutes, seconds))
        sys.stdout.flush()

    if comm is not None:
        comm.barrier()

    # Did we have any ready tasks that were not already done?
    # Note: if there were no ready tasks, but some were already
    # done, then we want to exit with a "0" error code.  This will
    # allow the calling script to continue with other pipeline steps
    # and / or allow other dependent jobs to run.
    if done == 0:
        # nothing is done
        if ready == 0:
            if rank == 0:
                warnings.warn("No tasks were ready or done", RuntimeWarning)
            sys.exit(1)
        if failed == ready:
            # all tasks failed
            if rank == 0:
                warnings.warn("All tasks that were run failed", RuntimeWarning)
            sys.exit(1)
    else:
        # At least some tasks were done- we return zero so that future
        # jobs can run.
        if (ready > 0) and (failed == ready):
            if rank == 0:
                warnings.warn("All tasks that were run failed", RuntimeWarning)
            sys.exit(1)

    return
Example #9
def integration_test(night=None, nspec=5, clobber=False):
    """Run an integration test from raw data simulations through redshifts
    
    Args:
        night (str, optional): YEARMMDD, defaults to current night
        nspec (int, optional): number of spectra to include
        clobber (bool, optional): rerun steps even if outputs already exist
        
    Raises:
        RuntimeError if any script fails
      
    """
    log = logging.get_logger()
    log.setLevel(logging.DEBUG)

    # YEARMMDD string, rolls over at noon not midnight
    # TODO: fix usage of night to be something other than today
    if night is None:
        #night = time.strftime('%Y%m%d', time.localtime(time.time()-12*3600))
        night = "20160726"

    # check for required environment variables
    check_env()

    # simulate inputs
    sim(night, nspec=nspec, clobber=clobber)

    # create production

    # FIXME:  someday run PSF estimation too...
    ### com = "desi_pipe --env env.txt --spectrographs 0 --fakeboot --fakepsf"
    rawdir = os.path.join(os.getenv('DESI_SPECTRO_SIM'), os.getenv('PIXPROD'))    
    com = "desi_pipe --spectrographs 0 --fakeboot --fakepsf --raw {}".format(rawdir)
    sp.check_call(com, shell=True)

    # raw and production locations

    rawdir = os.path.abspath(io.rawdata_root())
    proddir = os.path.abspath(io.specprod_root())

    # Modify options file to restrict the spectral range

    optpath = os.path.join(proddir, "run", "options.yaml")
    opts = pipe.read_options(optpath)
    opts['extract']['specmin'] = 0
    opts['extract']['nspec'] = nspec
    pipe.write_options(optpath, opts)

    # run the generated shell scripts

    # FIXME:  someday run PSF estimation too...

    # print("Running bootcalib script...")
    # com = os.path.join(proddir, "run", "scripts", "bootcalib_all.sh")
    # sp.check_call(["bash", com])

    # print("Running specex script...")
    # com = os.path.join(proddir, "run", "scripts", "specex_all.sh")
    # sp.check_call(["bash", com])

    # print("Running psfcombine script...")
    # com = os.path.join(proddir, "run", "scripts", "psfcombine_all.sh")
    # sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "extract_all.sh")
    print("Running extraction script "+com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "fiberflat-procexp_all.sh")
    print("Running calibration script "+com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "bricks.sh")
    print("Running makebricks script "+com)
    sp.check_call(["bash", com])

    com = os.path.join(proddir, "run", "scripts", "zfind_all.sh")
    print("Running zfind script "+com)
    sp.check_call(["bash", com])

    # #-----
    # #- Did it work?
    # #- (this combination of fibermap, simspec, and zbest is a pain)
    expid = 2
    fmfile = io.findfile('fibermap', night=night, expid=expid)
    fibermap = io.read_fibermap(fmfile)
    simdir = os.path.dirname(fmfile)
    simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid)
    siminfo = fits.getdata(simspec, 'METADATA')

    brickdirs = glob.glob(os.path.join(proddir, "bricks", "*"))
    bricks = [ os.path.basename(x) for x in brickdirs ]

    print()
    print("--------------------------------------------------")
    print("Brick     True  z        ->  Class  z        zwarn")
    # print("3338p190  SKY   0.00000  ->  QSO    1.60853   12   - ok")
    for b in bricks:
        zbest = io.read_zbest(io.findfile('zbest', brickname=b))        
        for i in range(len(zbest.z)):
            if zbest.spectype[i] == 'ssp_em_galaxy':
                objtype = 'GAL'
            elif zbest.spectype[i] == 'spEigenStar':
                objtype = 'STAR'
            else:
                objtype = zbest.spectype[i]

            z, zwarn = zbest.z[i], zbest.zwarn[i]

            j = np.where(fibermap['TARGETID'] == zbest.targetid[i])[0][0]
            truetype = siminfo['OBJTYPE'][j]
            truez = siminfo['REDSHIFT'][j]
            dv = 3e5*(z-truez)/(1+truez)
            if truetype == 'SKY' and zwarn > 0:
                status = 'ok'
            elif zwarn == 0:
                if truetype == 'LRG' and objtype == 'GAL' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'ELG' and objtype == 'GAL' and abs(dv) < 150:
                    status = 'ok'
                elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750:
                    status = 'ok'
                elif truetype == 'STD' and objtype == 'STAR':
                    status = 'ok'
                else:
                    status = 'OOPS'
            else:
                status = 'OOPS'
            print('{0}  {1:4s} {2:8.5f}  -> {3:5s} {4:8.5f} {5:4d}  - {6}'.format(
                b, truetype, truez, objtype, z, zwarn, status))

    print("--------------------------------------------------")