Example #1
    def _on_select(self, xmin, xmax):
        wvln = self._ui['textbox'].text().strip()

        if wvln == '':
            self._ui['textbox'].setText('Error: please enter a wavelength value before selecting')
            return

        try:
            wave_val = float(wvln)
        except ValueError:
            self._ui['textbox'].setText('Error: could not parse "{}" as a '
                                        'wavelength'.format(wvln))
            return

        # if line_list specified, find closest line from list:
        if self.line_list is not None:
            absdiff = np.abs(self.line_list - wave_val)
            idx = absdiff.argmin()
            if absdiff[idx] > 1.:  # no catalog line within 1 Å of the input
                logger.error("Couldn't find precise line corresponding to "
                             "input {:.3f}".format(wave_val))
                return

            logger.info("Snapping input wavelength {:.3f} to line list "
                        "value {:.3f}".format(wave_val, self.line_list[idx]))
            wave_val = self.line_list[idx]
            self._done_wavel_idx.append(idx)

        line_props, _ = self.get_line_props(xmin, xmax)
        if line_props is None:
            return

        self.draw_line_marker(line_props, wave_val, xmin, xmax)

        self.fig.suptitle('')
        self.fig.canvas.draw()

        self._map_dict['wavel'].append(wave_val)
        self._map_dict['pixel'].append(line_props['x0'])
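
The line-list snapping above reduces to a nearest-neighbor lookup with a tolerance. A minimal standalone sketch of that logic, assuming only numpy (the function name and the 1 Å default are illustrative):

import numpy as np

def snap_to_line_list(wave_val, line_list, tol=1.0):
    """Return the catalog wavelength nearest `wave_val`, or None if the
    nearest catalog line is more than `tol` angstroms away."""
    line_list = np.asarray(line_list)
    absdiff = np.abs(line_list - wave_val)
    idx = absdiff.argmin()
    return None if absdiff[idx] > tol else line_list[idx]

# snap_to_line_list(6563.1, [5577.34, 6300.30, 6562.80]) -> 6562.8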
Example #2
def main(proc_path, overwrite=False):
    """ """

    proc_path = path.realpath(path.expanduser(proc_path))
    if not path.exists(proc_path):
        raise IOError("Path '{}' doesn't exist".format(proc_path))

    if path.isdir(proc_path):
        data_file = None
        logger.info("Reading data from path: {}".format(proc_path))

    elif path.isfile(proc_path):
        data_file = proc_path
        base_path, name = path.split(proc_path)
        proc_path = base_path
        logger.info("Reading file: {}".format(data_file))

    else:
        raise RuntimeError("how?!")

    # file to keep track of RV measurements
    rv_file = path.join(proc_path, 'rvs.ecsv')
    if path.exists(rv_file):
        rv_tbl = Table.read(rv_file, format='ecsv')
    else:
        rv_tbl = None

    # ========================
    # Compute wavelength grids
    # ========================

    if data_file is not None:  # filename passed - only operate on that
        solve_radial_velocity(data_file)

    else:  # a path was passed - operate on all 1D extracted files
        proc_ic = GlobImageFileCollection(proc_path, glob_include='1d_proc_*')
        logger.info("{} 1D extracted spectra found".format(len(proc_ic.files)))

        logger.info("Beginning wavelength calibration...")
        for base_fname in proc_ic.files_filtered(imagetyp='OBJECT'):
            fname = path.join(proc_ic.location, base_fname)
            solve_radial_velocity(fname)
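
A hedged sketch of how this main() might be wired up from the command line, mirroring the argparse pattern in Example #5; the flag names here are assumptions, not taken from the original script:

if __name__ == '__main__':
    from argparse import ArgumentParser

    parser = ArgumentParser(description='Measure radial velocities from '
                                        '1D extracted spectra.')
    parser.add_argument('proc_path', help='a 1d_proc_* file, or a directory '
                                          'containing them')
    parser.add_argument('-o', '--overwrite', action='store_true',
                        help='re-measure and overwrite existing results')
    args = parser.parse_args()

    main(proc_path=args.proc_path, overwrite=args.overwrite)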
Example #3
def main(db_path, run_root_path, drop_all=False, overwrite=False, **kwargs):

    # Make sure the specified paths actually exist
    db_path = path.abspath(db_path)
    run_root_path = path.abspath(run_root_path)
    for path_ in [path.dirname(db_path), run_root_path]:
        if not path.exists(path_):
            raise ValueError("Path '{0}' doesn't exist!".format(path_))

    # --------------------------------------------------------------------------
    # These are relative paths, so the script needs to be run from the
    #   scripts path...

    # ID table for mapping group index to TGAS row
    ID_tbl = Table.read('../data/star_identifier.csv')

    # TGAS table
    logger.debug("Loading TGAS data...")
    tgas = Table.read('../../gaia-comoving-stars/data/stacked_tgas.fits')

    # Catalog of velocities for Bensby's HIP stars:
    bensby = Table.read('../data/bensbyrv_bestunique.csv')

    # --------------------------------------------------------------------------

    # connect to the database
    engine = db_connect(db_path, ensure_db_exists=True)
    # engine.echo = True
    logger.debug("Connected to database at '{}'".format(db_path))

    if drop_all: # remove all tables and replace
        Base.metadata.drop_all()
        Base.metadata.create_all()

    # create a new session for interacting with the database
    session = Session()

    logger.debug("Loading SpectralLineInfo table")

    line_info = OrderedDict()
    # air wavelength of Halpha -- wavelength calibration from comp lamp is done
    #   at air wavelengths, so this is where Halpha should be, right?
    line_info['Halpha'] = 6562.8*u.angstrom

    # [OI] emission lines -- wavelengths from:
    #   http://physics.nist.gov/PhysRefData/ASD/lines_form.html
    line_info['[OI] 5577'] = 5577.3387*u.angstrom
    line_info['[OI] 6300'] = 6300.304*u.angstrom
    line_info['[OI] 6364'] = 6363.776*u.angstrom

    for name, wvln in line_info.items():
        n = session.query(SpectralLineInfo).filter(SpectralLineInfo.name == name).count()
        if n == 0:
            logger.debug('Loading line {0} at {1}'.format(name, wvln))
            line = SpectralLineInfo(name=name, wavelength=wvln)
            session.add(line)
            session.commit()
        else:
            logger.debug('Line {0} already loaded'.format(name))

    # Create an entry for this observing run
    data_path, run_name = path.split(run_root_path)
    logger.info("Path to night paths: {0}".format(data_path))
    n = session.query(Run).filter(Run.name == run_name).count()
    if n == 0:
        logger.debug('Adding run {0} to database'.format(run_name))
        run = Run(name=run_name)
        session.add(run)
        session.commit()

    elif n == 1:
        logger.debug('Loading run {0} from database'.format(run_name))
        run = session.query(Run).filter(Run.name == run_name).limit(1).one()

    else:
        raise RuntimeError("F**k.")

    # Now we need to go through each processed night of data and load all of the
    # relevant observations of sources.

    # First we get the column names for the Observation and TGASSource tables
    obs_columns = [str(c).split('.')[1] for c in Observation.__table__.columns]
    tgassource_columns = [str(c).split('.')[1]
                          for c in TGASSource.__table__.columns]

    # Here's where there's a bit of hard-coded bewitchery - the nights (within
    # each run) have to be labeled 'n1', 'n2', etc. Sorry.
    glob_pattr_proc = path.join(data_path, 'processed', run_name, 'n?')
    for proc_night_path in glob.glob(glob_pattr_proc):
        night = path.basename(proc_night_path)
        night_id = int(night[1])
        logger.debug('Loading night {0}...'.format(night_id))

        observations = []
        tgas_sources = []
        prior_rvs = []

        glob_pattr_1d = path.join(proc_night_path, '1d_*.fit')
        for path_1d in ProgressBar(glob.glob(glob_pattr_1d)):
            hdr = fits.getheader(path_1d)

            # skip all except OBJECT observations
            if hdr['IMAGETYP'] != 'OBJECT':
                continue

            basename = path.basename(path_1d)[3:]  # strip the '1d_' prefix
            logger.log(1, 'loading row for {0}'.format(basename))

            kw = dict()

            # construct filenames using the hard-coded naming convention
            kw['filename_raw'] = basename
            kw['filename_p'] = 'p_' + basename
            kw['filename_1d'] = '1d_' + basename

            # check if this filename is already in the database, if so, drop it
            base_query = session.query(Observation)\
                                .filter(Observation.filename_raw == kw['filename_raw'])
            already_loaded = base_query.count() > 0

            if already_loaded and overwrite:
                base_query.delete()
                session.commit()

            elif already_loaded:
                logger.debug('Object {0} [{1}] already loaded'
                             .format(hdr['OBJECT'],
                                     path.basename(kw['filename_raw'])))
                continue

            # read in header of 1d file and store keywords that exist as columns
            kw.update(fits_header_to_cols(hdr, obs_columns))

            # HACK: skip empty object name
            if len(str(hdr['OBJECT'])) == 0:
                logger.warning('SKIPPING - empty OBJECT key')
                continue

            # get group id from object name
            if '-' in str(hdr['OBJECT']):
                # Per APW and SMOH's convention

                split_name = hdr['OBJECT'].split('-')
                kw['group_id'] = int(split_name[0])

                # special case: group 10 names encode the TGAS row index
                #   directly
                if kw['group_id'] == 10:
                    tgas_row_idx = int(split_name[1])
                else:
                    smoh_idx = int(split_name[1])
                    tgas_row_idx = ID_tbl[smoh_idx]['tgas_row']
                tgas_row = tgas[tgas_row_idx]

                # query Simbad to get all possible names for this target
                if tgas_row['hip'] > 0:
                    object_name = 'HIP{0}'.format(tgas_row['hip'])
                else:
                    object_name = 'TYC {0}'.format(tgas_row['tycho2_id'])
                logger.log(1, 'common name: {0}'.format(object_name))

                try:
                    all_ids = Simbad.query_objectids(object_name)['ID'].astype(str)
                except Exception as e:
                    logger.warning('Simbad query_objectids failed for "{0}" '
                                   'with error: {1}'
                                   .format(object_name, str(e)))
                    all_ids = []

                logger.log(1, 'this is a group object')

                if len(all_ids) > 0:
                    logger.log(1, 'other names for this object: {0}'
                               .format(', '.join(all_ids)))
                else:
                    logger.log(1, 'simbad names for this object could not be '
                               'retrieved')

            elif (isinstance(hdr['OBJECT'], int) or
                    str(hdr['OBJECT']).startswith('k') or
                    hdr['OBJECT'][0].isdigit()):
                # Assume it's a KIC number - per Ruth and Dan's convention

                if isinstance(hdr['OBJECT'], int):
                    object_name = 'KIC {0:d}'.format(hdr['OBJECT'])

                elif hdr['OBJECT'].startswith('k'):
                    object_name = 'KIC {0}'.format(hdr['OBJECT'][1:])

                else:
                    object_name = 'KIC {0}'.format(hdr['OBJECT'])

                # query Simbad to get all possible names for this target
                logger.log(1, 'common name: {0}'.format(object_name))

                try:
                    all_ids = Simbad.query_objectids(object_name)['ID'].astype(str)
                except Exception as e:
                    logger.warning('Simbad query_objectids failed for "{0}" '
                                   'with error: {1}'
                                   .format(object_name, str(e)))
                    all_ids = []

                logger.log(1, 'this is a KIC object')

                if len(all_ids) > 0:
                    logger.log(1, 'other names for this object: {0}'
                               .format(', '.join(all_ids)))
                else:
                    logger.log(1, 'simbad names for this object could not be '
                               'retrieved')

                # get the Tycho 2 ID, if it has one
                hip_id = [id_ for id_ in all_ids if 'HIP' in id_]
                tyc_id = [id_ for id_ in all_ids if 'TYC' in id_]
                if hip_id:
                    hip_id = int(hip_id[0].replace('HIP', '').strip())
                    logger.log(1, 'source has HIP id: {0}'.format(hip_id))
                    tgas_row_idx = np.where(tgas['hip'] == hip_id)[0]

                    if len(tgas_row_idx) == 0:
                        tgas_row_idx = None
                    else:
                        tgas_row = tgas[tgas_row_idx]

                elif tyc_id:
                    tyc_id = tyc_id[0].replace('TYC', '').strip()
                    logger.log(1, 'source has tycho 2 id: {0}'.format(tyc_id))
                    tgas_row_idx = np.where(tgas['tycho2_id'] == tyc_id)[0]

                    if len(tgas_row_idx) == 0:
                        tgas_row_idx = None
                    else:
                        tgas_row = tgas[tgas_row_idx]

                else:
                    logger.log(1, 'source has no HIP or TYC id.')
                    tgas_row_idx = None

            else:
                object_name = hdr['OBJECT']
                logger.log(1, 'common name: {0}'.format(object_name))
                logger.log(1, 'this is not a group object')

                # query Simbad to get all possible names for this target
                try:
                    all_ids = Simbad.query_objectids(object_name)['ID'].astype(str)
                except Exception as e:
                    logger.warning('SKIPPING: Simbad query_objectids failed for '
                                   '"{0}" with error: {1}'
                                   .format(object_name, str(e)))
                    continue

                # get the Tycho 2 ID, if it has one
                hip_id = [id_ for id_ in all_ids if 'HIP' in id_]
                tyc_id = [id_ for id_ in all_ids if 'TYC' in id_]
                if hip_id:
                    hip_id = int(hip_id[0].replace('HIP', '').strip())
                    logger.log(1, 'source has HIP id: {0}'.format(hip_id))
                    tgas_row_idx = np.where(tgas['hip'] == hip_id)[0]

                    if len(tgas_row_idx) == 0:
                        tgas_row_idx = None
                    else:
                        tgas_row = tgas[tgas_row_idx]

                elif tyc_id:
                    tyc_id = tyc_id[0].replace('TYC', '').strip()
                    logger.log(1, 'source has tycho 2 id: {0}'.format(tyc_id))
                    tgas_row_idx = np.where(tgas['tycho2_id'] == tyc_id)[0]

                    if len(tgas_row_idx) == 0:
                        tgas_row_idx = None
                    else:
                        tgas_row = tgas[tgas_row_idx]

                else:
                    logger.log(1, 'source has no tycho 2 id.')
                    tgas_row_idx = None

            # store relevant names / IDs
            simbad_info_kw = dict()
            for id_ in all_ids:
                if id_.lower().startswith('hd'):
                    simbad_info_kw['hd_id'] = id_[2:]

                elif id_.lower().startswith('hip'):
                    simbad_info_kw['hip_id'] = id_[3:]

                elif id_.lower().startswith('tyc'):
                    simbad_info_kw['tyc_id'] = id_[3:]

                elif id_.lower().startswith('2mass'):
                    simbad_info_kw['twomass_id'] = id_[5:]

            for k, v in simbad_info_kw.items():
                simbad_info_kw[k] = v.strip()

            simbad_info = SimbadInfo(**simbad_info_kw)

            # Compute barycenter velocity given coordinates of where the
            # telescope was pointing and observation time
            t = Time(hdr['JD'], format='jd', scale='utc')
            sc = coord.SkyCoord(ra=hdr['RA'], dec=hdr['DEC'],
                                unit=(u.hourangle, u.degree))
            kw['v_bary'] = bary_vel_corr(t, sc, location=kitt_peak)

            obs = Observation(night=night_id, **kw)
            obs.run = run

            # Get the TGAS data if the source is in TGAS
            if tgas_row_idx is not None:
                logger.log(1, 'TGAS row: {0}'.format(tgas_row_idx))

                tgas_kw = dict()
                tgas_kw['row_index'] = tgas_row_idx
                for name in tgas.colnames:
                    if name in tgassource_columns:
                        tgas_kw[name] = tgas_row[name]

                job = Gaia.launch_job(gaia_query.format(tgas_kw['source_id'][0]),
                                      dump_to_file=False)
                res = job.get_results()

                if len(res) == 0:
                    logger.warning("No 2MASS data found for: {0}"
                                   .format(tgas_kw['source_id']))

                elif len(res) == 1:
                    tgas_kw['J'] = res['j_m'][0]
                    tgas_kw['J_err'] = res['j_msigcom'][0]
                    tgas_kw['H'] = res['h_m'][0]
                    tgas_kw['H_err'] = res['h_msigcom'][0]
                    tgas_kw['Ks'] = res['ks_m'][0]
                    tgas_kw['Ks_err'] = res['ks_msigcom'][0]

                tgas_source = TGASSource(**tgas_kw)
                tgas_sources.append(tgas_source)

                obs.tgas_source = tgas_source

            else:
                logger.log(1, 'TGAS row could not be found.')

            obs.simbad_info = simbad_info
            observations.append(obs)

            # retrieve a previous measurement from the literature
            result = get_best_rv(obs)
            if result is not None:
                rv, rv_err, rv_qual, rv_bibcode, rv_source = result

                prv = PriorRV(rv=rv*u.km/u.s, err=rv_err*u.km/u.s,
                              qual=rv_qual, bibcode=rv_bibcode,
                              source=rv_source)
                obs.prior_rv = prv
                prior_rvs.append(prv)

            logger.log(1, '-'*68)

        session.add_all(observations)
        session.add_all(tgas_sources)
        session.add_all(prior_rvs)
        session.commit()

    # Last thing to do is cross-match with the Bensby catalog to
    #   replace velocities when they are better
    for sim_info in session.query(SimbadInfo)\
                           .filter(SimbadInfo.hip_id != None).all():
        hip_id = 'HIP' + str(sim_info.hip_id)
        row = bensby[bensby['OBJECT'] == hip_id]
        if len(row) > 0:
            sim_info.rv = row['velValue'][0]
            sim_info.rv_qual = row['quality'][0]
            sim_info.rv_bibcode = row['bibcode'][0]
            session.flush()

    session.close()
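
The HIP/TYC-to-TGAS resolution block above appears three times nearly verbatim; a sketch of how it could be factored into a single helper (the function name is hypothetical):

def resolve_tgas_row(all_ids, tgas):
    """Return (tgas_row_idx, tgas_row) for the first HIP or TYC identifier
    in `all_ids`, or (None, None) if the source is not in TGAS."""
    hip_ids = [id_ for id_ in all_ids if 'HIP' in id_]
    tyc_ids = [id_ for id_ in all_ids if 'TYC' in id_]

    if hip_ids:
        hip_id = int(hip_ids[0].replace('HIP', '').strip())
        tgas_row_idx = np.where(tgas['hip'] == hip_id)[0]
    elif tyc_ids:
        tyc_id = tyc_ids[0].replace('TYC', '').strip()
        tgas_row_idx = np.where(tgas['tycho2_id'] == tyc_id)[0]
    else:
        return None, None

    if len(tgas_row_idx) == 0:
        return None, None
    return tgas_row_idx, tgas[tgas_row_idx]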
Example #4
def main(db_path,
         run_name,
         data_root_path=None,
         filename=None,
         overwrite=False,
         pool=None):

    if pool is None:
        pool = schwimmbad.SerialPool()

    # connect to the database
    engine = db_connect(db_path)
    # engine.echo = True
    logger.debug("Connected to database at '{}'".format(db_path))

    # create a new session for interacting with the database
    session = Session()

    root_path, _ = path.split(db_path)
    if data_root_path is None:
        data_root_path = root_path

    plot_path = path.join(root_path, 'plots', run_name)
    if not path.exists(plot_path):
        os.makedirs(plot_path, exist_ok=True)

    # TODO: there might be some bugs here...
    n_lines = session.query(SpectralLineInfo).count()
    Halpha = session.query(SpectralLineInfo)\
                    .filter(SpectralLineInfo.name == 'Halpha').one()
    OI_lines = session.query(SpectralLineInfo)\
                      .filter(SpectralLineInfo.name.contains('[OI]')).all()

    if filename is None:  # grab all unfinished sources
        observations = session.query(Observation).join(Run)\
                              .filter(Run.name == run_name).all()

    else:  # only process the observation corresponding to this filename
        observations = session.query(Observation).join(Run)\
                              .filter(Run.name == run_name)\
                              .filter(Observation.filename_raw == filename).all()

    for obs in observations:
        measurements = session.query(SpectralLineMeasurement)\
                              .join(Observation)\
                              .filter(Observation.id == obs.id).all()

        if len(measurements) == n_lines and not overwrite:
            logger.debug('All line measurements already complete for object '
                         '{0} in file {1}'.format(obs.object,
                                                  obs.filename_raw))
            continue

        # Read the spectrum data and get wavelength solution
        filebase, _ = path.splitext(obs.filename_1d)
        filename_1d = obs.path_1d(data_root_path)
        spec = Table.read(filename_1d)
        logger.debug('Loaded 1D spectrum for object {0} from file {1}'.format(
            obs.object, filename_1d))

        # Extract region around Halpha
        x, (flux, ivar) = extract_region(
            spec['wavelength'],
            center=Halpha.wavelength.value,
            width=100,
            arrs=[spec['source_flux'], spec['source_ivar']])

        # We start by doing maximum likelihood estimation to fit the line, then
        # use the best-fit parameters to initialize an MCMC run.
        # TODO: need to figure out if it's emission or absorption...for now just
        #   assume absorption
        absorp_emiss = -1.
        lf = VoigtLineFitter(x, flux, ivar, absorp_emiss=absorp_emiss)
        lf.fit()
        fit_pars = lf.get_gp_mean_pars()

        if (not lf.success
                or abs(fit_pars['x0'] - Halpha.wavelength.value) > 16.  # ~700 km/s
                or abs(fit_pars['amp']) < 10):  # minimum amplitude - MAGIC NUMBER
            # TODO: should try again with emission line
            logger.error('absorption line has tiny amplitude! did '
                         'auto-determination of absorption/emission fail?')
            # TODO: what now?
            continue

        fig = lf.plot_fit()
        fig.savefig(path.join(plot_path, '{}_maxlike.png'.format(filebase)),
                    dpi=256)
        plt.close(fig)

        # ----------------------------------------------------------------------

        # Run `emcee` instead to sample over GP model parameters:
        if fit_pars['std_G'] < 1E-2:
            lf.gp.freeze_parameter('mean:ln_std_G')

        initial = np.array(lf.gp.get_parameter_vector())
        if initial[4] < -10:  # TODO: ???
            initial[4] = -8.
        if initial[5] < -10:  # TODO: ???
            initial[5] = -8.
        ndim, nwalkers = len(initial), 64

        sampler = emcee.EnsembleSampler(nwalkers,
                                        ndim,
                                        log_probability,
                                        pool=pool,
                                        args=(lf.gp, flux))

        logger.debug("Running burn-in...")
        p0 = initial + 1e-6 * np.random.randn(nwalkers, ndim)
        p0, lp, _ = sampler.run_mcmc(p0, 128)

        logger.debug("Running 2nd burn-in...")
        sampler.reset()
        p0 = p0[lp.argmax()] + 1e-3 * np.random.randn(nwalkers, ndim)
        p0, lp, _ = sampler.run_mcmc(p0, 512)

        logger.debug("Running production...")
        sampler.reset()
        pos, lp, _ = sampler.run_mcmc(p0, 1024)

        fit_kw = dict()
        for i, par_name in enumerate(lf.gp.get_parameter_names()):
            if 'kernel' in par_name: continue

            # remove 'mean:'
            par_name = par_name[5:]

            # skip bg
            if par_name.startswith('bg'): continue

            samples = sampler.flatchain[:, i]

            if par_name.startswith('ln_'):
                par_name = par_name[3:]
                samples = np.exp(samples)

            MAD = np.median(np.abs(samples - np.median(samples)))
            fit_kw[par_name] = np.median(samples)
            fit_kw[par_name + '_error'] = 1.5 * MAD  # convert to ~stddev

        # remove all previous line measurements
        q = session.query(SpectralLineMeasurement).join(Observation)\
                   .filter(Observation.id == obs.id)
        if q.count() > 0:
            for meas in q.all():
                session.delete(meas)
            session.commit()

        slm = SpectralLineMeasurement(**fit_kw)
        slm.info = Halpha
        slm.observation = obs
        session.add(slm)
        session.commit()

        # --------------------------------------------------------------------
        # plot MCMC traces
        fig, axes = plt.subplots(2, 4, figsize=(18, 6))
        for i in range(sampler.dim):
            for walker in sampler.chain[..., i]:
                axes.flat[i].plot(walker,
                                  marker='',
                                  drawstyle='steps-mid',
                                  alpha=0.2)
            axes.flat[i].set_title(lf.gp.get_parameter_names()[i], fontsize=12)
        fig.tight_layout()
        fig.savefig(path.join(plot_path, '{}_mcmc_trace.png'.format(filebase)),
                    dpi=256)
        plt.close(fig)
        # --------------------------------------------------------------------

        # --------------------------------------------------------------------
        # plot samples
        fig, axes = plt.subplots(3, 1, figsize=(10, 10), sharex=True)

        samples = sampler.flatchain
        for s in samples[np.random.randint(len(samples), size=32)]:
            lf.gp.set_parameter_vector(s)
            lf.plot_fit(axes=axes, fit_alpha=0.2)

        fig.tight_layout()
        fig.savefig(path.join(plot_path, '{}_mcmc_fits.png'.format(filebase)),
                    dpi=256)
        plt.close(fig)
        # --------------------------------------------------------------------

        # --------------------------------------------------------------------
        # corner plot
        fig = corner.corner(
            sampler.flatchain[::10, :],
            labels=[x.split(':')[1] for x in lf.gp.get_parameter_names()])
        fig.savefig(path.join(plot_path, '{}_corner.png'.format(filebase)),
                    dpi=256)
        plt.close(fig)
        # --------------------------------------------------------------------

        # compute centroids for sky lines
        sky_centroids = []
        for j, sky_line in enumerate(OI_lines):
            wvln = sky_line.wavelength.value
            x, (flux, ivar) = extract_region(
                spec['wavelength'],
                center=wvln,
                width=32.,  # angstroms
                arrs=[spec['background_flux'], spec['background_ivar']])

            lf = GaussianLineFitter(x, flux, ivar,
                                    absorp_emiss=1.)  # all emission lines

            try:
                lf.fit()
                fit_pars = lf.get_gp_mean_pars()

            except Exception as e:
                logger.warn("Failed to fit sky line {0}:\n{1}".format(
                    sky_line, e))
                lf.success = False
                fit_pars = lf.get_init()
                # OMG this is the biggest effing hack
                fit_pars['amp'] = 0.
                fit_pars['bg_coef'] = None
                fit_pars['x0'] = 0.

            # HACK: hackish signal-to-noise
            max_ = fit_pars['amp'] / np.sqrt(2 * np.pi * fit_pars['std']**2)
            SNR = max_ / np.median(1 / np.sqrt(ivar))

            if (not lf.success or abs(fit_pars['x0'] - wvln) > 4
                    or fit_pars['amp'] < 10 or fit_pars['std'] > 4
                    or SNR < 2.5):
                # failed
                x0 = np.nan * u.angstrom
                title = 'failed'
                fit_pars['amp'] = 0.

            else:
                x0 = fit_pars['x0'] * u.angstrom
                title = '{:.2f}'.format(fit_pars['amp'])

            if lf.success:
                fig = lf.plot_fit()
                fig.suptitle(title, y=0.95)
                fig.subplots_adjust(top=0.8)
                fig.savefig(path.join(
                    plot_path,
                    '{}_maxlike_sky_{:.0f}.png'.format(filebase, wvln)),
                            dpi=256)
                plt.close(fig)

            # store the sky line measurements
            fit_pars['std_G'] = fit_pars.pop('std')  # HACK
            fit_pars.pop('bg_coef')  # HACK
            slm = SpectralLineMeasurement(**fit_pars)
            slm.info = sky_line
            slm.observation = obs
            session.add(slm)
            session.commit()

            sky_centroids.append(x0)
        sky_centroids = u.Quantity(sky_centroids)

        logger.info('{} [{}]: x0={x0:.3f} σ={err:.3f}\n--------'.format(
            obs.object, filebase, x0=fit_kw['x0'], err=fit_kw['x0_error']))

        session.commit()

    pool.close()
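
The per-parameter error above uses the median absolute deviation (MAD) as a robust stand-in for the standard deviation; for Gaussian samples the exact consistency factor is about 1.4826, which the script rounds to 1.5. A standalone sketch:

import numpy as np

def robust_std(samples):
    """MAD scaled to approximate the stddev of Gaussian samples."""
    mad = np.median(np.abs(samples - np.median(samples)))
    return 1.5 * mad  # ~1.4826 * MAD for normally distributed data

# robust_std(np.random.normal(0., 2., 100000)) is close to 2.0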
Example #5
    args = parser.parse_args()

    # Set logger level based on verbose flags
    if args.verbosity != 0:
        if args.verbosity == 1:
            logger.setLevel(logging.DEBUG)
        else:  # anything >= 2
            logger.setLevel(1)

    elif args.quietness != 0:
        if args.quietness == 1:
            logger.setLevel(logging.WARNING)
        else:  # anything >= 2
            logger.setLevel(logging.ERROR)

    else:  # default
        logger.setLevel(logging.INFO)

    if args.seed is not None:
        np.random.seed(args.seed)

    pool = choose_pool(mpi=args.mpi, processes=args.n_cores)
    logger.info("Using pool: {}".format(pool.__class__))

    main(db_path=args.db_path,
         data_root_path=args.data_root_path,
         run_name=args.run_name,
         filename=args.filename,
         overwrite=args.overwrite,
         pool=pool)
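
A hedged sketch of the parser definitions this block assumes -- counted -v/-q flags feeding args.verbosity and args.quietness; the exact option strings in the original script may differ:

from argparse import ArgumentParser

parser = ArgumentParser(description='...')

vq_group = parser.add_mutually_exclusive_group()
vq_group.add_argument('-v', dest='verbosity', action='count', default=0,
                      help='more -v flags = noisier logging')
vq_group.add_argument('-q', dest='quietness', action='count', default=0,
                      help='more -q flags = quieter logging')

parser.add_argument('-s', '--seed', type=int, default=None)
parser.add_argument('--mpi', action='store_true', default=False)
parser.add_argument('--n-cores', dest='n_cores', type=int, default=1)
# ...plus the db_path, run_name, data_root_path, filename, and overwrite
# arguments that main() expects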
Example #6
def main(night_path, skip_list_file, mask_file, overwrite=False, plot=False):
    """
    See argparse block at bottom of script for description of parameters.
    """

    night_path = path.realpath(path.expanduser(night_path))
    if not path.exists(night_path):
        raise IOError("Path '{}' doesn't exist".format(night_path))
    logger.info("Reading data from path: {}".format(night_path))

    base_path, night_name = path.split(night_path)
    data_path, run_name = path.split(base_path)
    output_path = path.realpath(
        path.join(data_path, 'processed', run_name, night_name))
    os.makedirs(output_path, exist_ok=True)
    logger.info("Saving processed files to path: {}".format(output_path))

    if plot:  # if we're making plots
        plot_path = path.realpath(path.join(output_path, 'plots'))
        logger.debug("Will make and save plots to: {}".format(plot_path))
        os.makedirs(plot_path, exist_ok=True)
    else:
        plot_path = None

    # check for files to skip (e.g., saturated or errored exposures)
    if skip_list_file is not None:  # a file containing a list of filenames to skip
        with open(skip_list_file, 'r') as f:
            skip_list = [x.strip() for x in f if x.strip()]
    else:
        skip_list = None

    # look for pixel mask file
    if mask_file is not None:
        # load YAML file specifying pixel masks for nearby sources
        with open(mask_file, 'r') as f:
            pixel_mask_spec = yaml.safe_load(f)
    else:
        pixel_mask_spec = None

    # generate the raw image file collection to process
    ic = GlobImageFileCollection(night_path, skip_filenames=skip_list)
    logger.info("Frames to process:")
    logger.info("- Bias frames: {}".format(
        len(ic.files_filtered(imagetyp='BIAS'))))
    logger.info("- Flat frames: {}".format(
        len(ic.files_filtered(imagetyp='FLAT'))))
    logger.info("- Comparison lamp frames: {}".format(
        len(ic.files_filtered(imagetyp='COMP'))))
    logger.info("- Object frames: {}".format(
        len(ic.files_filtered(imagetyp='OBJECT'))))

    # HACK: re-create the file collection so later iteration starts fresh
    ic = GlobImageFileCollection(night_path, skip_filenames=skip_list)

    # ============================
    # Create the master bias frame
    # ============================

    # overscan region of the CCD, using FITS index notation
    oscan_fits_section = "[{}:{},:]".format(oscan_idx, oscan_idx + oscan_size)

    master_bias_file = path.join(output_path, 'master_bias.fits')

    if not os.path.exists(master_bias_file) or overwrite:
        # get list of overscan-subtracted bias frames as 2D image arrays
        bias_list = []
        for hdu, fname in ic.hdus(return_fname=True, imagetyp='BIAS'):
            logger.debug('Processing Bias frame: {0}'.format(fname))
            ccd = CCDData.read(path.join(ic.location, fname), unit='adu')
            ccd = ccdproc.gain_correct(ccd, gain=ccd_gain)
            ccd = ccdproc.subtract_overscan(ccd, overscan=ccd[:, oscan_idx:])
            ccd = ccdproc.trim_image(ccd,
                                     fits_section="[1:{},:]".format(oscan_idx))
            bias_list.append(ccd)

        # combine all bias frames into a master bias frame
        logger.info("Creating master bias frame")
        master_bias = ccdproc.combine(bias_list,
                                      method='average',
                                      clip_extrema=True,
                                      nlow=1,
                                      nhigh=1,
                                      error=True)
        master_bias.write(master_bias_file, overwrite=True)

    else:
        logger.info("Master bias frame file already exists: {}".format(
            master_bias_file))
        master_bias = CCDData.read(master_bias_file)

    if plot:
        # TODO: this assumes vertical CCD
        assert master_bias.shape[0] > master_bias.shape[1]
        aspect_ratio = master_bias.shape[1] / master_bias.shape[0]

        fig, ax = plt.subplots(1, 1, figsize=(10, 12 * aspect_ratio))
        vmin, vmax = zscaler.get_limits(master_bias.data)
        cs = ax.imshow(master_bias.data.T,
                       origin='lower',
                       cmap=cmap,
                       vmin=max(0, vmin),
                       vmax=vmax)
        ax.set_title('master bias frame [zscale]')

        fig.colorbar(cs)
        fig.tight_layout()
        fig.savefig(path.join(plot_path, 'master_bias.png'))
        plt.close(fig)

    # ============================
    # Create the master flat field
    # ============================
    # HACK:
    ic = GlobImageFileCollection(night_path, skip_filenames=skip_list)

    master_flat_file = path.join(output_path, 'master_flat.fits')

    if not os.path.exists(master_flat_file) or overwrite:
        # create a list of flat frames
        flat_list = []
        for hdu, fname in ic.hdus(return_fname=True, imagetyp='FLAT'):
            logger.debug('Processing Flat frame: {0}'.format(fname))
            ccd = CCDData.read(path.join(ic.location, fname), unit='adu')
            ccd = ccdproc.gain_correct(ccd, gain=ccd_gain)
            ccd = ccdproc.ccd_process(ccd,
                                      oscan=oscan_fits_section,
                                      trim="[1:{},:]".format(oscan_idx),
                                      master_bias=master_bias)
            flat_list.append(ccd)

        # combine into a single master flat - use 3*sigma sigma-clipping
        logger.info("Creating master flat frame")
        master_flat = ccdproc.combine(flat_list,
                                      method='average',
                                      sigma_clip=True,
                                      low_thresh=3,
                                      high_thresh=3)
        master_flat.write(master_flat_file, overwrite=True)

        # TODO: make plot if requested?

    else:
        logger.info("Master flat frame file already exists: {}".format(
            master_flat_file))
        master_flat = CCDData.read(master_flat_file)

    if plot:
        # TODO: this assumes vertical CCD
        assert master_flat.shape[0] > master_flat.shape[1]
        aspect_ratio = master_flat.shape[1] / master_flat.shape[0]

        fig, ax = plt.subplots(1, 1, figsize=(10, 12 * aspect_ratio))
        vmin, vmax = zscaler.get_limits(master_flat.data)
        cs = ax.imshow(master_flat.data.T,
                       origin='lower',
                       cmap=cmap,
                       vmin=max(0, vmin),
                       vmax=vmax)
        ax.set_title('master flat frame [zscale]')

        fig.colorbar(cs)
        fig.tight_layout()
        fig.savefig(path.join(plot_path, 'master_flat.png'))
        plt.close(fig)

    # =====================
    # Process object frames
    # =====================
    # HACK:
    ic = GlobImageFileCollection(night_path, skip_filenames=skip_list)

    logger.info("Beginning object frame processing...")
    for hdu, fname in ic.hdus(return_fname=True, imagetyp='OBJECT'):
        new_fname = path.join(output_path, 'p_{}'.format(fname))

        # -------------------------------------------
        # First do the simple processing of the frame
        # -------------------------------------------

        logger.debug("Processing '{}' [{}]".format(hdu.header['OBJECT'],
                                                   fname))
        if path.exists(new_fname) and not overwrite:
            logger.log(1, "\tAlready processed! {}".format(new_fname))
            ext = SourceCCDExtractor(filename=new_fname,  # already a full path
                                     plot_path=plot_path,
                                     zscaler=zscaler,
                                     cmap=cmap,
                                     **ccd_props)
            nccd = ext.ccd

            # HACK: strip the 'p_' prefix from the cached filename base
            ext._filename_base = ext._filename_base[2:]

        else:
            # process the frame!
            ext = SourceCCDExtractor(filename=path.join(ic.location, fname),
                                     plot_path=plot_path,
                                     zscaler=zscaler,
                                     cmap=cmap,
                                     unit='adu',
                                     **ccd_props)

            _pix_mask = pixel_mask_spec.get(
                fname, None) if pixel_mask_spec is not None else None
            nccd = ext.process_raw_frame(pixel_mask_spec=_pix_mask,
                                         master_bias=master_bias,
                                         master_flat=master_flat)
            nccd.write(new_fname, overwrite=overwrite)

        # -------------------------------------------
        # Now do the 1D extraction
        # -------------------------------------------

        fname_1d = path.join(output_path, '1d_{0}'.format(fname))
        if path.exists(fname_1d) and not overwrite:
            logger.log(1, "\tAlready extracted! {}".format(fname_1d))
            continue

        else:
            logger.debug("\tExtracting to 1D")

            # first step is to fit a voigt profile to a middle-ish row to determine LSF
            lsf_p = ext.get_lsf_pars()  # MAGIC NUMBER

            try:
                tbl = ext.extract_1d(lsf_p)
            except Exception as e:
                logger.error('Failed! {}: {}'.format(e.__class__.__name__,
                                                     str(e)))
                continue

            hdu0 = fits.PrimaryHDU(header=nccd.header)
            hdu1 = fits.table_to_hdu(tbl)
            hdulist = fits.HDUList([hdu0, hdu1])

            hdulist.writeto(fname_1d, overwrite=overwrite)

        del ext

    # ==============================
    # Process comparison lamp frames
    # ==============================
    # HACK:
    ic = GlobImageFileCollection(night_path, skip_filenames=skip_list)

    logger.info("Beginning comp. lamp frame processing...")
    for hdu, fname in ic.hdus(return_fname=True, imagetyp='COMP'):
        new_fname = path.join(output_path, 'p_{}'.format(fname))

        logger.debug("\tProcessing '{}'".format(hdu.header['OBJECT']))

        if path.exists(new_fname) and not overwrite:
            logger.log(1, "\tAlready processed! {}".format(new_fname))
            ext = CompCCDExtractor(filename=new_fname,  # already a full path
                                   plot_path=plot_path,
                                   zscaler=zscaler,
                                   cmap=cmap,
                                   **ccd_props)
            nccd = ext.ccd

            # HACK: strip the 'p_' prefix from the cached filename base
            ext._filename_base = ext._filename_base[2:]

        else:
            # process the frame!
            ext = CompCCDExtractor(filename=path.join(ic.location, fname),
                                   plot_path=plot_path,
                                   unit='adu',
                                   **ccd_props)

            _pix_mask = pixel_mask_spec.get(
                fname, None) if pixel_mask_spec is not None else None
            nccd = ext.process_raw_frame(
                pixel_mask_spec=_pix_mask,
                master_bias=master_bias,
                master_flat=master_flat,
            )
            nccd.write(new_fname, overwrite=overwrite)

        # -------------------------------------------
        # Now do the 1D extraction
        # -------------------------------------------

        fname_1d = path.join(output_path, '1d_{0}'.format(fname))
        if path.exists(fname_1d) and not overwrite:
            logger.log(1, "\tAlready extracted! {}".format(fname_1d))
            continue

        else:
            logger.debug("\tExtracting to 1D")

            try:
                tbl = ext.extract_1d()
            except Exception as e:
                logger.error('Failed! {}: {}'.format(e.__class__.__name__,
                                                     str(e)))
                continue

            hdu0 = fits.PrimaryHDU(header=nccd.header)
            hdu1 = fits.table_to_hdu(tbl)
            hdulist = fits.HDUList([hdu0, hdu1])

            hdulist.writeto(fname_1d, overwrite=overwrite)
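
The fits_section strings built above follow the FITS convention -- 1-indexed, inclusive on both ends, and (x, y) ordered, unlike numpy slices. A small illustration with placeholder values for oscan_idx and oscan_size:

oscan_idx, oscan_size = 300, 64  # placeholder values

# overscan region: columns oscan_idx..oscan_idx+oscan_size, all rows
oscan_fits_section = "[{}:{},:]".format(oscan_idx, oscan_idx + oscan_size)

# science region kept by trim_image: columns 1..oscan_idx
trim_fits_section = "[1:{},:]".format(oscan_idx)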
Example #7
def main(proc_path, init_file, linelist_file, overwrite=False):
    """ """

    proc_path = path.realpath(path.expanduser(proc_path))
    if not path.exists(proc_path):
        raise IOError("Path '{}' doesn't exist".format(proc_path))

    # read linelist if specified
    if linelist_file is not None:
        line_list = np.genfromtxt(linelist_file, usecols=[0], dtype=float)

    else:
        line_list = None

    if path.isdir(proc_path):
        wavelength_data_file = None
        output_path = path.abspath(path.join(proc_path, '..'))
        logger.info("Reading data from path: {}".format(proc_path))

    elif path.isfile(proc_path):
        wavelength_data_file = proc_path
        base_path, name = path.split(proc_path)
        output_path = path.abspath(path.join(base_path, '..'))
        logger.info("Reading from file: {}".format(proc_path))

    else:
        raise RuntimeError("how?!")

    logger.info("Saving processed files to path: {}".format(output_path))

    if wavelength_data_file is None: # find a COMP lamp:
        ic = GlobImageFileCollection(proc_path, glob_include='1d_*')

        hdu = None
        for hdu, wavelength_data_file in ic.hdus(return_fname=True,
                                                 imagetyp='COMP'):
            break
        else:
            raise IOError("No COMP lamp file found in {}".format(proc_path))

        wavelength_data_file = path.join(ic.location, wavelength_data_file)

    logger.info("Using file: {}".format(wavelength_data_file))

    if init_file is not None:
        logger.info("Using initial guess at pixel-to-wavelength mapping from "
                    "initialization file: {}".format(init_file))

        d = np.genfromtxt(init_file, names=True, delimiter=',')

        init_map = dict()
        init_map['pixel'] = d['pixel']
        init_map['wavelength'] = d['wavelength']

    else:
        init_map = None

    # read 1D extracted comp lamp spectrum
    tbl = Table.read(wavelength_data_file)
    gui = GUIWavelengthSolver(tbl['pix'], tbl['flux'], flux_ivar=tbl['ivar'],
                              line_list=line_list, init_map=init_map)

    wav = gui.solution['wavelength']
    pix = gui.solution['pixel']

    # write the pixel-wavelength nodes out to file; leave the header row
    # un-commented so np.genfromtxt(..., names=True) can read it back in
    with open(path.join(output_path, 'wavelength_guess.csv'), 'w') as f:
        txt = ["wavelength,pixel"]
        for row in zip(wav, pix):
            txt.append("{:.5f},{:.5f}".format(*row))
        f.write("\n".join(txt) + "\n")
Example #8
def main():
    # TODO: bad, hard-coded...
    # base_path = '/Volumes/ProjectData/gaia-comoving-followup/'
    base_path = '../../data/'
    db_path = path.join(base_path, 'db.sqlite')
    engine = db_connect(db_path)
    session = Session()

    chain_path = path.abspath('./isochrone_chains')
    os.makedirs(chain_path, exist_ok=True)

    # Check out the bottom of "Color-magnitude diagram.ipynb":
    interesting_group_ids = [1500, 1229, 1515]

    all_photometry = OrderedDict([
        ('1500-8455',
         OrderedDict([('J', (6.8379998, 0.021)),
                      ('H', (6.4640002, 0.017000001)),
                      ('K', (6.3369999, 0.017999999)),
                      ('W1', (6.2950001, 0.093000002)),
                      ('W2', (6.2490001, 0.026000001)),
                      ('W3', (6.3330002, 0.015)), ('B', (9.5950003, 0.022)),
                      ('V', (8.5120001, 0.014))])),
        ('1500-1804',
         OrderedDict([('J', (6.9039998, 0.041000001)),
                      ('H', (6.8559999, 0.027000001)),
                      ('K', (6.7989998, 0.017000001)),
                      ('W1', (6.803, 0.064999998)),
                      ('W2', (6.7600002, 0.018999999)),
                      ('W3', (6.8270001, 0.016000001)),
                      ('B', (7.4980001, 0.015)), ('V', (7.289, 0.011))])),
        ('1229-1366',
         OrderedDict([('J', (6.7290001, 0.024)), ('H', (6.2449999, 0.02)),
                      ('K', (6.1529999, 0.023)),
                      ('W1', (6.1799998, 0.096000001)), ('W2', (6.04, 0.035)),
                      ('W3', (6.132, 0.016000001)), ('B', (9.5539999, 0.021)),
                      ('V', (8.4619999, 0.014))])),
        ('1229-7470',
         OrderedDict([
             ('J', (9.1709995, 0.024)), ('H', (8.7959995, 0.026000001)),
             ('K', (8.7299995, 0.022)), ('W1', (8.6669998, 0.023)),
             ('W2', (8.7189999, 0.02)), ('W3', (8.6680002, 0.025)),
             ('B', (11.428, 0.054000001)), ('V', (10.614, 0.039999999))
         ])),
        ('1515-3584',
         OrderedDict([('J', (5.363999843597412, 0.024000000208616257)),
                      ('H', (4.965000152587891, 0.035999998450279236)),
                      ('K', (4.815999984741211, 0.032999999821186066)),
                      ('W1', (4.758, 0.215)), ('W2', (4.565, 0.115)),
                      ('W3', (4.771, 0.015)),
                      ('B', (8.347999572753906, 0.01600000075995922)),
                      ('V', (7.182000160217285, 0.009999999776482582))])),
        ('1515-1834',
         OrderedDict([('J', (8.855999946594238, 0.024000000208616257)),
                      ('H', (8.29699993133545, 0.020999999716877937)),
                      ('K', (8.178999900817871, 0.017999999225139618)),
                      ('W1', (8.117, 0.022)), ('W2', (8.15, 0.019)),
                      ('W3', (8.065, 0.02)),
                      ('B', (12.309000015258789, 0.11999999731779099)),
                      ('V', (11.069999694824219, 0.054999999701976776))]))
    ])

    for k in all_photometry:
        samples_file = path.join(chain_path, '{0}.hdf5'.format(k))

        if path.exists(samples_file):
            logger.info("skipping {0} - samples exist at {1}".format(
                k, samples_file))
            continue

        phot = all_photometry[k]
        obs = session.query(Observation).filter(Observation.object == k).one()
        plx = (obs.tgas_source.parallax, obs.tgas_source.parallax_error)

        # fit an isochrone
        model = StarModel(iso, use_emcee=True, parallax=plx, **phot)
        model.set_bounds(mass=(0.01, 20),
                         feh=(-1, 1),
                         distance=(0, 300),
                         AV=(0, 1))

        # initial conditions for emcee walkers
        nwalkers = 128

        p0 = []
        m0, age0, feh0 = model.ic.random_points(nwalkers,
                                                minmass=0.01,
                                                maxmass=10.,
                                                minfeh=-1,
                                                maxfeh=1)
        _, max_distance = model.bounds('distance')
        _, max_AV = model.bounds('AV')
        d0 = 10**(np.random.uniform(0, np.log10(max_distance), size=nwalkers))
        AV0 = np.random.uniform(0, max_AV, size=nwalkers)
        p0 += [m0, age0, feh0, d0, AV0]

        p0 = np.array(p0).T
        npars = p0.shape[1]

        # run emcee
        ninit = 256
        nburn = 1024
        niter = 4096

        logger.debug('Running emcee - initial sampling...')
        sampler = emcee.EnsembleSampler(nwalkers, npars, model.lnpost)
        # pos, prob, state = sampler.run_mcmc(p0, ninit)

        for pos, prob, state in tqdm(sampler.sample(p0, iterations=ninit),
                                     total=ninit):
            pass

        # cull the weak walkers
        best_ix = sampler.flatlnprobability.argmax()
        best_p0 = (sampler.flatchain[best_ix][None] +
                   np.random.normal(0, 1E-5, size=(nwalkers, npars)))

        sampler.reset()
        logger.debug('burn-in...')
        for pos, prob, state in tqdm(sampler.sample(best_p0, iterations=nburn),
                                     total=nburn):
            pass
        # pos,_,_ = sampler.run_mcmc(best_p0, nburn)

        sampler.reset()
        logger.debug('sampling...')
        # _ = sampler.run_mcmc(pos, niter)
        for pos, prob, state in tqdm(sampler.sample(pos, iterations=niter),
                                     total=niter):
            pass

        model._sampler = sampler
        model._make_samples(0.08)

        model.samples.to_hdf(samples_file, key='samples')
        # np.save('isochrone_chains/chain.npy', sampler.chain)
        logger.debug('...done and saved!')
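
The "cull the weak walkers" step above is a common emcee (v2) pattern: restart every walker in a tiny Gaussian ball around the single best sample seen so far. As a reusable helper (the name is hypothetical):

import numpy as np

def cull_walkers(sampler, nwalkers, npars, eps=1e-5):
    """Return new walker positions clustered around the best sample."""
    best_ix = sampler.flatlnprobability.argmax()
    return (sampler.flatchain[best_ix][None]
            + np.random.normal(0, eps, size=(nwalkers, npars)))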
Example #9
def main(night_path,
         wavelength_gp_path=None,
         comp_lamp_path=None,
         overwrite=False):

    night_path = path.realpath(path.expanduser(night_path))
    if not path.exists(night_path):
        raise IOError("Path '{}' doesn't exist".format(night_path))

    if path.isdir(night_path):
        data_file = None
        logger.info("Reading data from path: {}".format(night_path))

    elif path.isfile(night_path):
        data_file = night_path
        base_path, name = path.split(night_path)
        night_path = base_path
        logger.info("Reading file: {}".format(data_file))

    else:
        raise RuntimeError("how?!")

    plot_path = path.join(night_path, 'plots')

    # ===========================
    # GP model does not exist yet
    # ===========================
    if wavelength_gp_path is None:

        # filename to save the GP model
        wavelength_gp_path = path.join(night_path,
                                       'wavelength_GP_model.pickle')

        # see if a wavelength GP model file already exists
        if path.exists(wavelength_gp_path) and not overwrite:
            logger.info('Loading wavelength GP model from {}'.format(
                wavelength_gp_path))

            # GP model already exists -- just load it
            with open(wavelength_gp_path, 'rb') as f:
                model = pickle.load(f)

        else:
            logger.info('Generating wavelength GP model, saving to {}'.format(
                wavelength_gp_path))

            if comp_lamp_path is None:
                ic = GlobImageFileCollection(night_path, glob_include='1d_*')

                hdu = None
                for hdu, wavelength_data_file in ic.hdus(return_fname=True,
                                                         imagetyp='COMP'):
                    break
                else:
                    raise IOError(
                        "No COMP lamp file found in {}".format(night_path))

                comp_lamp_path = path.join(ic.location, wavelength_data_file)
                logger.info(
                    "No comp. lamp spectrum file specified - using: {}".format(
                        comp_lamp_path))

            model = generate_wavelength_model(comp_lamp_path, night_path,
                                              plot_path)

        # pickle the model
        with open(wavelength_gp_path, 'wb') as f:
            pickle.dump(model, f)

    # =======================================
    # GP model already exists -- just load it
    # =======================================
    else:
        logger.info(
            'Loading wavelength GP model from {}'.format(wavelength_gp_path))
        with open(wavelength_gp_path, 'rb') as f:
            model = pickle.load(f)

    # ========================
    # Compute wavelength grids
    # ========================

    # set the wavelength grid to NaN when the root-variance of the prediction is
    #   larger than this tolerance
    std_tol = 1. * u.angstrom

    if data_file is not None:  # filename passed - only operate on that
        add_wavelength(data_file,
                       model,
                       overwrite=overwrite,
                       std_tol=std_tol,
                       plot_path=plot_path)

    else:  # a path was passed - operate on all 1D extracted files
        proc_ic = GlobImageFileCollection(night_path, glob_include='1d_*')
        logger.info("{} 1D extracted spectra found".format(len(proc_ic.files)))

        logger.info("Beginning wavelength calibration...")
        for base_fname in proc_ic.files_filtered(imagetyp='OBJECT'):
            fname = path.join(proc_ic.location, base_fname)
            add_wavelength(fname,
                           model,
                           overwrite=overwrite,
                           std_tol=std_tol,
                           plot_path=plot_path)
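
The COMP-lamp search here (and in Example #7) relies on Python's for/else: the else clause runs only when the loop finishes without hitting break. A minimal illustration of the idiom with placeholder data:

def first_comp(frames):
    for frame in frames:
        if frame.get('imagetyp') == 'COMP':
            break
    else:  # no break -> no COMP frame found
        raise IOError('No COMP lamp frame found')
    return frame

# first_comp([{'imagetyp': 'OBJECT'}, {'imagetyp': 'COMP'}]) returns the COMP dict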