Example #1
    def run(self):
        # Run the IDL program!
        cmd = "ap1dvisit,'"+self.input().path+"'"
        ret = subprocess.call(["idl","-e",cmd],shell=False)

        # Load the plan file
        # (Note: I'd suggest moving all yanny files to YAML format and/or just supply the plan file
        # inputs as variables to the task.)
        plan = yanny.yanny(self.input().path,np=True)
        exposures = plan['APEXP']
        visitdir = os.path.dirname(self.input().path)

        # Check that all of the apCframe files exist
        cframe_counter = 0
        for exp in exposures['name']:
            if not isinstance(exp, str): exp = exp.decode()
            exists = [os.path.exists(visitdir+"/apCframe-"+ch+"-"+str(exp)+".fits") for ch in ['a','b','c']]
            if np.sum(exists) == 3: cframe_counter += 1

        # Check if some apVisits have been made
        visitfiles = glob.glob(visitdir+"/"+self.prefix+"Visit-"+self.apred+"-"+str(self.plate)+"-"+str(self.mjd)+"-???.fits")

        # Check apVisitSum file
        sdss_path = path.Path()
        apvisitsum = sdss_path.full('apVisitSum',apred=self.apred,telescope=self.telescope,instrument=self.instrument,
                                    field=self.field,plate=self.plate,mjd=self.mjd,prefix=self.prefix)

        # Create "done" file if apVisits exist
        if (cframe_counter == len(exposures)) and (len(visitfiles) > 50) and os.path.exists(apvisitsum):
            with open(self.output().path, "w") as fp:
                fp.write(" ")
Example #2
    def run(self):
        # Run the IDL program!
        cmd = "ap3d,'"+self.input().path+"'"
        ret = subprocess.call(["idl","-e",cmd],shell=False)

        # Load the plan file
        # (Note: I'd suggest moving all yanny files to YAML format and/or just supply the plan file
        # inputs as variables to the task.)
        plan = yanny.yanny(self.input().path,np=True)
        exposures = plan['APEXP']

        # Check if three ap2D files per exposure were created
        # Get the exposures directory
        sdss_path = path.Path()
        expdir = os.path.dirname(sdss_path.full('ap2D',apred=self.apred,telescope=self.telescope,instrument=self.instrument,
                                                plate=self.plate,mjd=self.mjd,prefix=self.prefix,num=0,chip='a'))
        counter = 0
        for exp in exposures['name']:
            if not isinstance(exp, str): exp = exp.decode()
            exists = [os.path.exists(expdir+"/ap2D-"+ch+"-"+str(exp)+".fits") for ch in ['a','b','c']]
            if np.sum(exists) == 3: counter += 1

        # Create "done" file if 2D frames exist
        if counter == len(exposures):
            with open(self.output().path, "w") as fp:
                fp.write(" ")
Example #3
 def __init__(self,
              dr=None,
              apred='r8',
              apstar='stars',
              aspcap='l31c',
              results='l31c.2',
              telescope='apo25m',
              instrument=None,
              verbose=False,
              pathfile=None):
     self.apred = apred
     self.apstar = apstar
     self.aspcap = aspcap
     self.results = results
     self.settelescope(telescope)
     if instrument is not None: self.instrument = instrument
     self.verbose = verbose
     if dr == 'dr10': self.dr10()
     elif dr == 'dr12': self.dr12()
     elif dr == 'dr13': self.dr13()
     elif dr == 'dr14': self.dr14()
     elif dr == 'dr16': self.dr16()
     # set up
     self.sdss_path = path.Path()
     self.http_access = HttpAccess(verbose=verbose)
     self.http_access.remote()
Example #4
 def output(self):
     # Output is similar to apStar file
     sdss_path = path.Path()
     apstarfile = sdss_path.full('apStar',
                                 apred=self.apred,
                                 telescope=self.telescope,
                                 instrument=self.instrument,
                                 field=self.field,
                                 prefix=self.prefix,
                                 obj=self.star,
                                 apstar='stars')
     output_path_prefix, ext = os.path.splitext(apstarfile)
     return luigi.LocalTarget(f"{output_path_prefix}-doneRV")
Example #5
    def run(self):
        # Run doppler_rv_star()
        rv.doppler_rv_star(self.star, self.apred, self.instrument, self.field)

        # Check that the apStar file was created
        sdss_path = path.Path()
        apstarfile = sdss_path.full('apStar',
                                    apred=self.apred,
                                    telescope=self.telescope,
                                    instrument=self.instrument,
                                    field=self.field,
                                    prefix=self.prefix,
                                    obj=self.star,
                                    apstar='stars')

        # Create "done" file if apStar file exists
        if os.path.exists(apstarfile):
            with open(self.output().path, "w") as fp:
                fp.write(" ")
Example #6
def mkplan(ims,
           plate,
           mjd,
           psfid,
           fluxid,
           apred=None,
           telescope=None,
           cal=False,
           dark=False,
           sky=False,
           plugid=None,
           fixfiberid=None,
           stars=None,
           names=None,
           onem=False,
           hmags=None,
           mapper_data=None,
           suffix=None,
           ignore=False,
           test=False,
           logger=None):
    """
    Makes plan files given input image numbers, MJD, psfid, fluxid
    includes options for dark frames, calibration frames, sky frames,
    ASDAF frames. This is called from the manually prepared MJD5.pro 
    procedures

    Parameters
    ----------
    ims : numpy int array
        List or array of exposure numbers to include in the plan file.
    plate : int
        Plate number for this observation.
    mjd : int
        MJD number for this observation.
    psfid : int
        PSF cal exposure number.
    fluxid : int
        Flux cal frame exposure number.
    apred : str
        APOGEE reduction version.
    telescope : str
        APOGEE telescope.
    cal : bool, optional
        This is a calibration plan file.
    dark : bool, optional
        This is a dark sequence plan file.
    sky : bool, optional
        This is a sky flat sequence plan file.
    plugid : str, optional
        Base name of the plugmap filename.
    fixfiberid : int, optional
        Fiber fixing needed (1 or 2).
    stars : numpy int array, optional
        FiberID for apo1m or ASDAF observations.
    names : numpy int array, optional
        Name of the star for apo1m or ASDAF observations.
    onem : bool, optional
        This is for an apo1m observation.
    hmags : numpy float array, optional
        2MASS H-magnitude for star in apo1m observation.
    mapper_data : str, optional
        Directory for the mapper data.
    suffix : str, optional
        Extra suffix to use (before the extension) on the planfile name.
    ignore : bool, optional
        Ignore warnings and continue.
    test : bool, optional
        Just a test.

    Returns
    -------
    planfile : str
        The name of the plan file created.

    This creates a plan file with the given inputs and places it in
    the appropriate place in the SDSS/APOGEE directory tree.  For
    science visits this will be in $APOGEE_REDUX/{apred}/visit/{telescope}/{field}/{plate}/{mjd}/.
    Calibration, dark and sky plan files live in
    $APOGEE_REDUX/{apred}/cal/{instrument}/plan/{mjd}/.

    Examples
    --------
    mkplan.mkplan(ims,plate,mjd,psfid,fluxid,apred=apred,telescope=telescope)

    By J. Holtzman, 2011?
    Translated to Python by D. Nidever, Oct 2020.
    """

    # Logger
    if logger is None: logger = dln.basiclogger()

    if apred is None:
        raise ValueError('apred must be input')
    if telescope is None:
        raise ValueError('telescope must be input')

    logger.info('Making plan for MJD: ' + str(mjd))

    # Set up directories, plate, and MJD variables
    load = apload.ApLoad(apred=apred, telescope=telescope)
    caldir = os.environ['APOGEE_DRP_DIR'] + '/data/cal/'
    calfile = caldir + load.instrument + '.par'

    # Mapper directory
    if mapper_data is None:
        if load.instrument == 'apogee-n':
            mapper_data = os.environ['MAPPER_DATA_N']
        else:
            mapper_data = os.environ['MAPPER_DATA_S']

    # Planfile name and directory
    if cal:
        planfile = load.filename('CalPlan', mjd=mjd)
    elif dark:
        planfile = load.filename('DarkPlan', mjd=mjd)
    elif onem:
        planfile = load.filename('Plan',
                                 plate=plate,
                                 reduction=names[0],
                                 mjd=mjd)
        if suffix is not None:
            # Insert the extra suffix just before the .yaml extension
            planfile = (os.path.dirname(planfile) + '/' +
                        os.path.splitext(os.path.basename(planfile))[0] +
                        suffix + '.yaml')
    else:
        planfile = load.filename('Plan', plate=plate, mjd=mjd)
    # Make sure the file ends with .yaml
    if not planfile.endswith('.yaml'):
        planfile = planfile.replace(os.path.splitext(planfile)[-1],
                                    '.yaml')  # TEMPORARY KLUDGE!
    outdir = os.path.dirname(planfile) + '/'
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # Get calibration files for this date
    caldata = mkcal.getcal(calfile, mjd)
    if fixfiberid is not None:
        caldata['fixfiber'] = fixfiberid

    # outplan plan file name
    if (stars is not None) & (onem == False):
        planfile = os.path.dirname(planfile) + '/' + os.path.basename(
            planfile, '.yaml') + 'star.yaml'
    else:
        if sky == True:
            planfile = os.path.dirname(planfile) + '/' + os.path.basename(
                planfile, '.yaml') + 'sky.yaml'

    if sky:
        logger.info('apdailycals')
        dailycals(lsfs=ims, psf=psfid, apred=apred, telescope=telescope)
    logger.info(planfile)

    # open plan file and write header
    if os.path.exists(planfile): os.remove(planfile)
    out = {}
    out['apogee_drp_ver'] = os.environ['APOGEE_DRP_VER']
    out['telescope'] = telescope
    out['instrument'] = load.instrument
    out['plateid'] = plate
    out['mjd'] = mjd
    out['planfile'] = os.path.basename(planfile)
    out['logfile'] = 'apDiag-' + str(plate) + '-' + str(mjd) + '.log'
    out['plotfile'] = 'apDiag-' + str(plate) + '-' + str(mjd) + '.ps'

    # apred_vers keyword will override strict versioning using the plan file!
    out['apred_vers'] = apred

    # apo1m
    if onem:
        # Note: datadir and spectro_dir are assumed to be defined elsewhere
        # for the apo1m case; they are not set in this function.
        out['data_dir'] = datadir + '/'
        out['raw_dir'] = datadir + str(mjd) + '/'
        out['plate_dir'] = outdir
        out['star_dir'] = spectro_dir + '/fields/apo1m/'
        out['survey'] = 'apo1m'
        out['name'] = str(names[0]).strip()
        out['fiber'] = stars[0]
        if hmags is not None:
            out['hmag'] = hmags[0]
        out['telliter'] = 1
        if suffix:
            out['mjdfrac'] = 1

    # platetype
    if stars is not None:
        out['platetype'] = 'single'
    elif cal:
        out['platetype'] = 'cal'
    elif sky:
        out['platetype'] = 'sky'
    elif dark:
        out['platetype'] = 'dark'
    elif test:
        out['platetype'] = 'test'
    else:
        out['platetype'] = 'normal'

    # Note that q3fix is now done in ap3d.pro, not here!!
    if (mjd > 56930) & (mjd < 57600):
        out['q3fix'] = 1

    sdss_path = path.Path()
    rawfile = sdss_path.full('apR', num=ims[0], chip='a', mjd=mjd)
    #rawfile = load.filename('R',chip='a',num=ims[0])
    if not os.path.exists(rawfile):
        raise ValueError('Cannot find file ' + rawfile)
    head = fits.getheader(rawfile, 1)
    plateid = head['PLATEID']
    if not ignore:
        if (plate != 0) and (plate != plateid):
            raise ValueError('plateid in header does not match plate!')

    # plugmap
    logger.info(str(plugid))
    if plugid is None:
        rawfile = sdss_path.full('apR', num=ims[0], chip='a', mjd=mjd)
        #rawfile = load.filename('R',chip='a',num=ims[0])
        if os.path.exists(rawfile):
            head = fits.getheader(rawfile, 1)
            plugid = head['NAME']
            if type(plugid) is not str:
                plugid = 'header'
        else:
            plugid = 'header'
    logger.info(str(ims[0]))
    logger.info(str(plugid))
    if (cal == False) & (dark == False) & (onem == False):
        tmp = plugid.split('-')
        if os.path.exists(mapper_data + '/' + tmp[1] + '/plPlugMapM-' +
                          plugid + '.par') == False:
            logger.info('Cannot find plugmap file ' + str(plugid))
            #spawn,'"ls" '+mapper_data+'/'+tmp[1]+'/plPlugMapA*'
            if ignore is False:
                raise Exception
        if sky == False:
            logger.info('getting plate data')
            plug = platedata.getdata(plate,
                                     mjd,
                                     plugid=plugid,
                                     noobject=True,
                                     mapper_data=mapper_data,
                                     apred=apred,
                                     telescope=telescope)
            loc = plug['locationid']
            spectro_dir = os.environ['APOGEE_REDUX'] + '/' + apred + '/'
            if not os.path.exists(spectro_dir + 'fields/' + telescope + '/' +
                                  str(loc)):
                os.makedirs(spectro_dir + 'fields/' + telescope + '/' +
                            str(loc))
            field, survey, program = apload.apfield(plate, plug['locationid'])
            out['survey'] = survey
            out['field'] = field
            with open(
                    spectro_dir + 'fields/' + telescope + '/' + str(loc) +
                    '/plan-' + str(loc) + '.lis', 'w+') as file:
                file.write(telescope + '/' + str(plate) + '/' + str(mjd) +
                           '/' + os.path.basename(planfile))
    out['plugmap'] = plugid

    # Calibration frames to use
    calnames = [
        'det', 'bpm', 'littrow', 'persist', 'persistmodel', 'dark', 'flat',
        'sparse', 'fiber', 'badfiber', 'fixfiber', 'response', 'wave', 'lsf'
    ]
    for c in calnames:
        val = caldata[c]
        if str(val).isdigit(): val = int(val)
        out[c + 'id'] = val
    # We use multiwaveid for waveid
    waveid = caldata['multiwave']
    if str(waveid).isdigit(): waveid = int(waveid)
    out['waveid'] = waveid
    # Input PSFID and FLUXID
    out['psfid'] = psfid
    out['fluxid'] = fluxid

    # object frames
    aplist = []
    for i in range(len(ims)):
        # Default to no single-star information for this exposure
        star = -1
        name = 'none'
        if (ims[i] > 0) and (stars is not None):
            star = stars[i]
            name = names[i]
        aplist1 = {
            'plateid': plate,
            'mjd': mjd,
            'flavor': 'object',
            'name': ims[i],
            'single': star,
            'singlename': name
        }
        aplist.append(aplist1)
    out['APEXP'] = aplist

    # Write to yaml file
    with open(planfile, 'w') as ofile:
        yaml.dump(out, ofile, default_flow_style=False, sort_keys=False)
    os.chmod(planfile, 0o664)

    return planfile
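
Because the plan is written with yaml.dump, downstream code can read it back as a plain dictionary. A minimal usage sketch, assuming planfile is the path returned by mkplan() above (the print statements are illustrative only):

import yaml

with open(planfile, 'r') as fp:
    plan = yaml.safe_load(fp)

print(plan['plateid'], plan['mjd'], plan['platetype'])
for exp in plan['APEXP']:
    print(exp['name'], exp['flavor'], exp['single'], exp['singlename'])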
Example #7
def getfiles(apred, apstar, aspcap, results, cannon, field, aspcapStar=False):

    # construct list of apStar files to normalize and corresponding output files
    root = os.environ['APOGEE_REDUX'] + '/' + apred + '/' + apstar
    sdss_path = path.Path()
    paths = []

    apfieldfile = root + '/' + field + '/apField-' + os.path.basename(
        field) + '.fits'
    try:
        apfield = fits.open(apfieldfile)[1].data
    except Exception:
        print('ERROR reading file', apfieldfile)
        raise
    if aspcapStar:
        root = 'aspcapStar'
    else:
        root = 'apStar'

    for star in apfield['APOGEE_ID']:
        if apfield['TELESCOPE'][0] == 'apo1m':
            infile = sdss_path.full(root + '-1m',
                                    apred=apred,
                                    apstar=apstar,
                                    aspcap=aspcap,
                                    results=results,
                                    prefix='ap',
                                    field=apfield['FIELD'][0],
                                    reduction=star,
                                    telescope=apfield['TELESCOPE'][0])
            outfile = sdss_path.full('cannonStar-1m',
                                     apred=apred,
                                     apstar=apstar,
                                     aspcap=aspcap,
                                     results=results,
                                     cannon=cannon,
                                     field=apfield['FIELD'][0],
                                     reduction=star,
                                     telescope=apfield['TELESCOPE'][0])
        else:
            infile = sdss_path.full(root,
                                    apred=apred,
                                    apstar=apstar,
                                    aspcap=aspcap,
                                    results=results,
                                    prefix='ap',
                                    field=apfield['FIELD'][0],
                                    obj=star,
                                    telescope=apfield['TELESCOPE'][0])
            outfile = sdss_path.full('cannonStar',
                                     apred=apred,
                                     apstar=apstar,
                                     aspcap=aspcap,
                                     results=results,
                                     cannon=cannon,
                                     field=apfield['FIELD'][0],
                                     obj=star,
                                     telescope=apfield['TELESCOPE'][0])
        paths.append((star, infile, outfile))
    return paths
Example #8
def train(planfile,
          skip=1,
          threads=8,
          xh=None,
          model_name=None,
          censor=None,
          sim=False,
          gb=None,
          mh=None):
    '''
    Define training set and train Cannon
    '''

    p = yanny.yanny(planfile, np=True)
    apred = p['apred_vers'].strip("'")
    apstar = getval(p, 'apstar_vers', 'stars').strip("'")
    aspcap_vers = getval(p, 'aspcap_vers', 'aspcap').strip("'")
    results = getval(p, 'results_vers', 'results').strip("'")
    cannon = getval(p, 'cannon_vers', 'cannon_aspcap')
    if model_name is None:
        model_name = getval(p, 'model_name', 'apogee-dr14-giants')
    model_order = int(getval(p, 'model_order', '2'))
    model_scale_factor = float(getval(p, 'model_scale_factor', '1.0'))
    model_regularization = float(getval(p, 'model_regularization', '0.0'))
    threads = int(getval(p, 'ncpus', threads))
    if xh is None: xh = int(getval(p, 'xh', False))
    if censor is None: censor = int(getval(p, 'censor', False))
    logg = getrange(getval(p, 'logg', '-1 3.9'))
    teff = getrange(getval(p, 'teff', '3500 5500'))
    if gb is None: gb = getval(p, 'gb', 0)
    if mh is None: mh = getrange(getval(p, 'mh', '-3. 1.'))
    alpha = getrange(getval(p, 'alpha', '-0.5 1.'))

    # label names
    elems = aspcap.elems()[0]
    #model_labels = ['TEFF','LOGG','M_H']
    model_labels = ['TEFF', 'LOGG', 'M_H', 'ALPHA_M', 'FE_H']
    input_labels = ['TEFF', 'LOGG', 'M_H', 'ALPHA_M', 'FE_H']
    for el in elems:
        d = elem.dr14cal(el)
        if el != 'Fe' and d['elemfit'] >= 0:
            if xh:
                model_labels.append(el.upper() + '_H')
            else:
                model_labels.append(el.upper() + '_FE')
            input_labels.append(el.upper() + '_FE')

    apl = apload.ApLoad(apred=apred,
                        apstar=apstar,
                        aspcap=aspcap_vers,
                        results=results)

    if sim:
        allstar = fits.open('allStar.fits')[1].data
        gd = apselect.select(allstar,
                             logg=logg,
                             teff=teff,
                             mh=mh,
                             alpha=alpha,
                             sn=[100, 10000])
        model_labels = ['TEFF', 'LOGG', 'M_H']
        input_labels = ['TEFF', 'LOGG', 'M_H']
        model_labels = sim
        input_labels = sim
        if gb:
            gd2 = np.where(
                np.abs((allstar['TEFF'][gd] - 3500) * 4 / 2000. -
                       allstar['LOGG'][gd]) < float(gb))[0]
            gd = gd[gd2]
    else:
        allstar = apl.allStar()[1].data
        gd = apselect.select(
            allstar,
            badval=['STAR_BAD'],
            sn=[100, 10000],
            logg=logg,
            teff=teff,
            mh=mh,
            alpha=alpha,
            badstar=['PERSIST_HIGH', 'PERSIST_MED', 'PERSIST_LOW'],
            gb=gb)

        gcstars = ascii.read(os.environ['IDLWRAP_DIR'] +
                             '/data/gc_szabolcs.dat')
        bd = np.where(gcstars['pop'] != 1)[0]
        jc = [
            x for x in gd if allstar[x]['APOGEE_ID'] not in gcstars['id'][bd]
        ]
        gd = jc

        # down select stars using HR+[M/H] sampling
        i1, i2 = cal.hrsample(allstar, allstar[gd], raw=False)
        # make sure all labels are good
        gd = []
        for i in i1:
            good = True
            for label in input_labels:
                # special handling for NA in DR14
                if label == 'NA_FE' and allstar[label][i] < -5 and allstar[
                        'FE_H'][i] < -1:
                    allstar[label][i] = 0.
                if allstar[label][i] < -5:
                    good = False
                    print('reject', allstar['APOGEE_ID'][i], label,
                          allstar[label][i])
                    break
            if good: gd.append(i)

    print('selected ', len(gd), ' training set stars')
    root = os.environ['APOGEE_ASPCAP'] + '/' + apred + '/' + cannon + '/'
    training_set = os.path.join(root,
                                "{}-training-set.fits".format(model_name))
    if not os.path.exists(os.path.dirname(training_set)):
        os.makedirs(os.path.dirname(training_set))
    struct.wrfits(np.array(allstar[gd]), training_set)

    # The label names to use in the model.

    model_filename = os.path.join(root, "{}.model".format(model_name))
    initial_filename = os.path.join(root, "{}.initial".format(model_name))

    clobber_model = True
    labelled_set = Table.read(training_set)[0:-1:skip]
    N_labelled = len(labelled_set)
    if xh:
        for el in elems:
            d = elem.dr14cal(el)
            if el != 'Fe' and d['elemfit'] >= 0:
                labelled_set[el.upper() +
                             '_H'] = labelled_set[el.upper() +
                                                  '_FE'] + labelled_set['FE_H']

    # TODO: something's wrong with our dispersion that we extracted.
    #with open(os.path.join(CANNON_DATA_DIR, "dispersion.pkl"), "rb") as fp:
    #    dispersion = pickle.load(fp)
    #P = dispersion.size
    dispersion = None
    P = 8575  # MAGIC

    # These defaults (flux = 1, ivar = 0) will mean that even if we don't find a
    # spectrum for a single star in the training set, then that star will just have
    # no influence on the training (since ivar = 0 implies infinite error on flux).

    normalized_flux = np.ones((N_labelled, P), dtype=float)
    normalized_ivar = np.zeros((N_labelled, P), dtype=float)

    # Enable logging.
    logger = logging.getLogger("apogee.dr14.tc")
    logger.setLevel(logging.INFO)

    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
    logger.addHandler(handler)

    sdss_path = path.Path()
    ngd = 0
    for i, row in enumerate(labelled_set):

        logger.info("Reading labelled set spectra ({}/{})".format(
            i + 1, N_labelled))
        if row['TELESCOPE'] == 'apo1m':
            filename = sdss_path.full('cannonStar-1m',
                                      apred=apred,
                                      apstar=apstar,
                                      aspcap=aspcap_vers,
                                      results=results,
                                      cannon=cannon,
                                      field=row['FIELD'],
                                      reduction=row['REDUCTION_ID'],
                                      telescope=row['TELESCOPE'])
        else:
            filename = sdss_path.full('cannonStar',
                                      apred=apred,
                                      apstar=apstar,
                                      aspcap=aspcap_vers,
                                      results=results,
                                      cannon=cannon,
                                      field=row['FIELD'],
                                      obj=row['APOGEE_ID'],
                                      telescope=row['TELESCOPE'])

        if not os.path.exists(filename):
            logger.warning("Could not find filename for labelled set star {}: {}"\
                .format(row["APOGEE_ID"], filename))
            continue

        with open(filename, "rb") as fp:
            #flux, ivar = pickle.load(fp)
            metadata, data = pickle.load(fp)
            flux, ivar = data

        if (np.isfinite(flux).all()) & (np.isfinite(ivar).all()):
            normalized_flux[i, :] = flux
            normalized_ivar[i, :] = ivar
        else:
            print('non-finite values in', row['APOGEE_ID'])
            normalized_flux[i, :] = 0.
            normalized_ivar[i, :] = 0.
            #pdb.set_trace()

    # TODO: Cache the normalized_flux and normalized_ivar into a single file so that
    #       it is faster to read in next time?
    assert  np.isfinite(normalized_flux).all(), \
            "Non-finite values in normalized_flux!"
    assert  np.isfinite(normalized_ivar).all(), \
            "Non-finite values in normalized_ivar!"

    # Exclude labelled set stars where there is no spectrum, only because it
    # will get annoying later on when we are doing 1-to-1 and cross-validation
    keep = np.any(normalized_ivar > 0, axis=1)
    if not np.all(keep):
        logger.info(
            "Excluding {} labelled set stars where there was no information in "
            "the spectrum".format(np.sum(~keep)))
        labelled_set = labelled_set[keep]
        normalized_flux = normalized_flux[keep]
        normalized_ivar = normalized_ivar[keep]

    # Construct and train a model. #
    model = tc.L1RegularizedCannonModel(labelled_set,
                                        normalized_flux,
                                        normalized_ivar,
                                        dispersion,
                                        threads=threads)

    model.vectorizer = tc.vectorizer.NormalizedPolynomialVectorizer(
        labelled_set,
        tc.vectorizer.polynomial.terminator(model_labels, model_order),
        scale_factor=model_scale_factor)

    if censor:
        for label in model_labels:
            for el in elems:
                d = elem.dr14cal(el)
                if xh:
                    lab = el.upper() + '_H'
                else:
                    lab = el.upper() + '_FE'
                if lab == label:
                    model.censors[label] = getcensor(
                        el,
                        maskdir=os.environ['SPECLIB_DIR'] +
                        '/lib/filters_26042016/',
                        length=P)
                    print(
                        label,
                        getcensor(el,
                                  maskdir=os.environ['SPECLIB_DIR'] +
                                  '/lib/filters_26042016/'))

    model.s2 = 0
    model.regularization = model_regularization

    model.train()
    model._set_s2_by_hogg_heuristic()

    model.save(model_filename,
               include_training_data=False,
               overwrite=clobber_model)
    model.save(model_filename + '.full',
               include_training_data=True,
               overwrite=clobber_model)

    # Make some 1-to-1 plots just to show sensible behaviour.
    #X = model.labels_array()
    X = model.labels_array
    Y = model.fit(model.normalized_flux, model.normalized_ivar)
    out = Table(
        np.transpose([
            np.mean(model.labels_array, axis=0),
            np.nanmean(Y - X, axis=0),
            np.nanstd(Y - X, axis=0),
            np.array(model_labels)
        ]))
    out.write(initial_filename, overwrite=True, format='ascii')
    #np.savetxt(initial_filename, [np.mean(model.labels_array, axis=0).reshape(-1, 1), np.nanmean(Y-X,axis=0), np.nanstd(Y-X,axis=0), model_labels] )

    os.makedirs(os.path.join(root, 'plots'), exist_ok=True)
    it = model_labels.index('TEFF')
    ig = model_labels.index('LOGG')
    iz = model_labels.index('M_H')

    def plotit(ax, x, y, z, label):
        plots.plotc(ax, x, y - x, z, xt=label, yt='inferred-labelled')
        lims = ax.get_xlim()
        ax.plot(lims, [0., 0.], c="#666666", zorder=-1, linestyle=":")
        mean, rms = np.nanmean(y - x), np.nanstd(y - x)
        title = "{}: ({:.2f}, {:.2f})".format(label, mean, rms)
        ax.set_title(title)

    fig, ax = plots.multi(2, 3)
    plotit(ax[0, 0], X[:, it], Y[:, it], X[:, iz], 'TEFF')
    plotit(ax[0, 1], X[:, ig], Y[:, ig], X[:, it], 'LOGG')
    plotit(ax[1, 0], X[:, iz], Y[:, iz], X[:, it], 'M_H')
    plots.plotc(ax[1, 1],
                X[:, it],
                X[:, ig],
                X[:, iz],
                xr=[6000, 3500],
                yr=[5, -0.5],
                xt='TEFF',
                yt='LOGG')
    gd = np.where(model.normalized_ivar.flatten() > 1)[0]
    diff = np.abs(model.normalized_flux - model.predict(Y))
    ax[2, 0].hist(diff.flatten()[gd],
                  cumulative=True,
                  density=True,
                  bins=10.**np.arange(-8, 0, 0.05),
                  histtype='step')
    ax[2, 0].set_xlabel('|Model-true|')
    ax[2, 0].set_ylim(0., 1.)
    ax[2, 0].set_xscale('log')

    figure_path = os.path.join(root, "plots/{}-1to1.png".format(model_name))
    fig.tight_layout()
    fig.savefig(figure_path, dpi=300)
    plt.close()

    for i, label_name in enumerate(model_labels):

        x = X[:, i]
        y = Y[:, i]

        fig, ax = plt.subplots()
        if label_name == 'TEFF':
            ax.scatter(x, y, c=X[:, iz], alpha=0.5)
        else:
            ax.scatter(x, y, c=X[:, it], alpha=0.5)

        lims = np.array([ax.get_xlim(), ax.get_ylim()])
        lims = (lims.min(), lims.max())
        ax.plot(lims, lims, c="#666666", zorder=-1, linestyle=":")
        ax.set_xlim(lims)
        ax.set_ylim(lims)

        ax.set_xlabel("Labelled")
        ax.set_ylabel("Inferred")

        mean, rms = np.nanmean(y - x), np.nanstd(y - x)
        title = "{}: ({:.2f}, {:.2f})".format(label_name, mean, rms)
        ax.set_title(title)
        logger.info("Mean and RMS for {}".format(title))

        figure_path = os.path.join(
            root, "plots/{}-{}-1to1.png".format(model_name, label_name))
        fig.tight_layout()
        fig.savefig(figure_path, dpi=300)
        plt.close()

        logger.info("Created 1-to-1 figure for {} at {}".format(
            label_name, figure_path))