Example #1
def flat_cubes(date, lbda_min=7000, lbda_max=9000, ref="dome"):
    """ """
    baseroot = io.CUBE_PROD_ROOTS["cube"]["root"]
    newroot = io.CUBE_PROD_ROOTS["flat"]["root"]

    # -------------- #
    # The Reference  #
    # -------------- #
    reffile = io.get_night_cubes(date, kind="cube", target=ref)

    if len(reffile) == 0:
        raise ValueError("No cube reference for target %s in night %s" %
                         (ref, date))

    refcube = get_sedmcube(reffile[0])
    flatfield = refcube.get_slice(lbda_min, lbda_max, usemean=True)
    print(flatfield.mean())

    # ----------------- #
    # Build flat cubes  #
    # ----------------- #
    def build_flat_cube(cubefile):
        cube_ = get_sedmcube(cubefile)
        print(cubefile)
        cube_.scale_by(flatfield)
        cube_.writeto(cube_.filename.replace(baseroot, newroot))

    from astropy.utils.console import ProgressBar
    cubefiles = io.get_night_cubes(date, kind="cube")
    print(cubefiles)
    ProgressBar.map(build_flat_cube, cubefiles)
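A hypothetical invocation for one night (the date string is illustrative):

flat_cubes("20180914", lbda_min=7000, lbda_max=9000, ref="dome")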
Example #2
 def _map(self, func, items):
     # FIXME: ProgressBar.map(..., multiprocess=True) uses imap_unordered,
     # but we want the result to come back in order. This should be fixed,
     # or at least correctly documented, in Astropy.
     if self.multiprocess:
         _, result = zip(*sorted(ProgressBar.map(_mapfunc(func),
                                                 list(enumerate(items)),
                                                 multiprocess=True)))
         return list(result)
     else:
         return ProgressBar.map(func, items, multiprocess=False)
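The method above references a `_mapfunc` helper that is not shown in the snippet. A minimal sketch of what it is assumed to look like: a picklable wrapper that tags each result with the index of its input item, so the unordered output of `imap_unordered` can be sorted back into input order.

class _mapfunc(object):
    # Hypothetical helper, not part of the original snippet. Defined at
    # module level (as a class, not a closure) so multiprocessing can
    # pickle it.
    def __init__(self, func):
        self.func = func

    def __call__(self, i_item):
        i, item = i_item           # (index, item) pair from enumerate(items)
        return i, self.func(item)  # keep the index so results can be re-sorted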
Example #3
 def _find_and_fit_peaks(self):
     """Find and fit peaks in each PSD. This can be done in parallell if requested.
     
     Configuration Items:
     
     - ``FMTS.fitting`` The dictionary of parameters used for :func:`find_and_fit_peaks`.
     - ``FMTS.multiprocess`` `(bool)` whether to parallelize.
     
     Peaks are stored in an object array, and the number of peaks at each mode is stored in a separate, parallel array.
     
     """
     from astropy.utils.console import ProgressBar
     
     kwargs = dict(self.config["FMTS.fitting"])
     psd = self.psd
     template = self.template_ft
     omega = self.omega
     
     args = [ ((k,l),psd[:,k,l],template,omega,kwargs) for k,l in itertools.product(range(self.psd.shape[1]),range(self.psd.shape[2])) ]
     peaks = ProgressBar.map(pool_find_and_fit_peaks_in_modes,args,multiprocess=self.config["FMTS.multiprocess"])
     for peak_mode,ident in peaks:
         k,l = ident
         self.peaks[k,l] = peak_mode
         self.npeaks[k,l] = len(peak_mode)
     self.log.info("Found %d peaks",np.sum(self.npeaks))
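The worker `pool_find_and_fit_peaks_in_modes` is not shown above. Given how the argument tuples are built and how the results are consumed, it is assumed to unpack one tuple, run the per-mode fit, and return the peaks together with their (k, l) identifier; a sketch under that assumption:

def pool_find_and_fit_peaks_in_modes(arg):
    # Hypothetical worker matching the ((k, l), psd, template, omega, kwargs)
    # tuples built above; returns (peaks, ident) as consumed by the loop.
    ident, psd, template, omega, kwargs = arg
    peaks = find_and_fit_peaks(psd, template, omega, **kwargs)
    return peaks, ident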
Example #4
def create_modelcube(self, njobs=1, verbose=True):
    """
    Generates a "clean" datacube from the scousepy decomposition and
    returns the clean cube.

    Parameters
    ----------
    self : instance of the scousepy class
    njobs : int
        number of CPUs to use
    verbose : bool
        verbose output

    """

    # Time it
    starttime = time.time()

    cube = self.cube
    x = np.array(cube.world[:, 0, 0][0])
    if (self.ppv_vol[0] is not None) & (self.ppv_vol[1] is not None):
        trimids = np.where((x > self.ppv_vol[0]) & (x < self.ppv_vol[1]))[0]
    else:
        # without velocity limits, keep the full spectral axis
        # (otherwise `trimids` would be undefined below)
        trimids = np.arange(x.size)

    _cube = cube[min(trimids):max(trimids) + 1, :, :]
    _modelcube = np.full_like(_cube, np.nan)

    if verbose:
        print("")
        print("Generating models:")
        print("")

    args = [self]
    inputs = [[key] + args for key in self.indiv_dict.keys()]
    if njobs == 1:
        mods = ProgressBar.map(genmodel, inputs)
    else:
        mods = parallel_map(genmodel, inputs, numcores=njobs)
    mergedmods = [mod for mod in mods]
    mergedmods = np.asarray(mergedmods)

    if verbose:
        print("")
        print("Creating model cube:")
        print("")
        progress_bar = ProgressBar(self.indiv_dict.keys())

    for i, key in enumerate(self.indiv_dict.keys()):
        _modelcube[:, self.indiv_dict[key].coordinates[0],
                   self.indiv_dict[key].coordinates[1]] = mergedmods[i]
        if verbose:
            progress_bar.update()

    endtime = time.time()
    if verbose:
        print("")
        print('Process completed in: {0} minutes'.format(
            (endtime - starttime) / 60.))
        print("")

    return SpectralCube(data=_modelcube, wcs=_cube.wcs)
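A hypothetical call, assuming `s` is a scousepy instance with a completed decomposition:

modelcube = create_modelcube(s, njobs=4, verbose=True)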
Example #5
def calibrate_cubes(cubefiles,
                    date=None,
                    calibrated_reference=None,
                    multiprocess=True):
    """ """

    # internal routine
    def _build_cal_cubes_(cubefile):
        build_calibrated_sedmcube(cubefile,
                                  date=date,
                                  calibration_ref=calibrated_reference)

    # - The build
    if len(cubefiles) == 1:
        _build_cal_cubes_(cubefiles[0])
    else:
        from astropy.utils.console import ProgressBar
        ProgressBar.map(_build_cal_cubes_,
                        cubefiles,
                        multiprocess=multiprocess,
                        step=2)
Example #6
def update_mass_table(drpall, mass_table_old=None, limit=None, mlband='i'):
    '''Aggregate per-galaxy stellar-mass results into a single mass table.'''
    
    # what galaxies are available to aggregate?
    res_fnames = glob(os.path.join(basedir, 'results/*-*/*-*_res.fits'))[:limit]

    # keep only the results that have not been aggregated yet
    if mass_table_old is None:
        already_aggregated = [False for _ in range(len(res_fnames))]
    else:
        already_aggregated = [os.path.split(fn)[1].split('_')[0] in mass_table_old['plateifu']
                              for fn in res_fnames]
    res_fnames = [fn for done, fn in zip(already_aggregated, res_fnames) if not done]

    # aggregate individual galaxies, and stack them 
    mass_tables_new = list(ProgressBar.map(
        partial(mass_agg_onegal, mlband=mlband), res_fnames, multiprocess=False, step=5))
    mass_table_new = t.vstack(mass_tables_new)

    # if there was an old mass table, stack it with the new one
    if mass_table_old is None:
        mass_table = mass_table_new
    else:
        mass_table = t.vstack([mass_table_old, mass_table_new], join_type='inner')

    cmlr = totalmass.cmlr_kwargs
    missing_flux =  (mass_table['nsa_absmag'].to(m.Mgy) - \
                     mass_table['ifu_absmag'].to(m.Mgy)).clip(
                        a_min=0.*m.Mgy, a_max=np.inf*m.Mgy)
    mag_missing_flux = missing_flux.to(u.ABmag)
    cb1, cb2 = cmlr['cb1'], cmlr['cb2']
    color_missing_flux = mag_missing_flux[:, totalmass.StellarMass.bands_ixs[cb1]] - \
                         mag_missing_flux[:, totalmass.StellarMass.bands_ixs[cb2]]
    color_missing_flux[~np.isfinite(color_missing_flux)] = np.inf
    mass_table['outer_ml_cmlr'] = np.polyval(cmlr['cmlr_poly'], color_missing_flux.value) * \
                                  u.dex(m.m_to_l_unit)
    mass_table['outer_lum'] = mag_missing_flux.to(
        u.dex(m.bandpass_sol_l_unit),
        totalmass.bandpass_flux_to_solarunits(totalmass.StellarMass.absmag_sun))

    mass_table['outer_mass_ring'] = \
        (mass_table['outer_lum'][:, totalmass.StellarMass.bands_ixs['i']] + \
         mass_table['outer_ml_ring']).to(u.Msun)
    mass_table['outer_mass_cmlr'] = \
        (mass_table['outer_lum'][:, totalmass.StellarMass.bands_ixs['i']] + \
         mass_table['outer_ml_cmlr']).to(u.Msun)

    return mass_table
Example #7
def update_mass_table(res_fnames, mass_table_old=None, mlband='i'):
    '''Aggregate per-galaxy stellar-mass results into a single mass table.'''

    # keep only the results that have not been aggregated yet
    if mass_table_old is None:
        already_aggregated = [False for _ in range(len(res_fnames))]
    else:
        already_aggregated = [
            os.path.split(fn)[1].split('_')[0] in mass_table_old['plateifu']
            for fn in res_fnames
        ]
    res_fnames = [fn for done, fn in zip(already_aggregated, res_fnames) if not done]

    # aggregate individual galaxies, and stack them
    mass_tables_new = list(
        ProgressBar.map(partial(mass_agg_onegal, mlband=mlband),
                        res_fnames,
                        multiprocess=False,
                        step=5))
    mass_table_new = t.vstack(mass_tables_new)

    # if there was an old mass table, stack it with the new one
    if mass_table_old is None:
        mass_table = mass_table_new
    else:
        mass_table = t.vstack([mass_table_old, mass_table_new],
                              join_type='inner')

    cmlr = cmlr_kwargs

    cb1, cb2 = cmlr['cb1'], cmlr['cb2']
    color_missing_flux = mass_table['outer_absmag_{}'.format(cb1)] - \
                         mass_table['outer_absmag_{}'.format(cb2)]

    mass_table['outer_ml_cmlr'] = np.polyval(
        cmlr['cmlr_poly'], color_missing_flux.value) * u.dex(m.m_to_l_unit)

    mass_table['outer_mass_ring'] = \
        (mass_table['outer_lum_{}'.format(mlband)] + \
         mass_table['outer_ml_ring']).to(u.Msun)
    mass_table['outer_mass_cmlr'] = \
        (mass_table['outer_lum_{}'.format(mlband)] + \
         mass_table['outer_ml_cmlr']).to(u.Msun)

    return mass_table['plateifu', 'mass_in_ifu', 'outer_mass_cmlr',
                      'outer_mass_ring']
Example #8
    def __call__(self):
        """Compute a bootstraped map

        Returns
        -------
        :class:`nikamap.NikaMap`
            a bootstraped data set
        """

        bs_array = np.concatenate(
            ProgressBar.map(
                self.shuffled_average,
                np.array_split(np.arange(self.n_bootstrap), cpu_count()),
                ipython_widget=self.ipython_widget,
                multiprocess=True,
            ))

        data = np.mean(bs_array, axis=0)
        e_data = np.std(bs_array, axis=0, ddof=1)

        # Mask unobserved regions
        unobserved = self.time == 0
        data[unobserved] = np.nan
        e_data[unobserved] = np.nan

        data = NikaMap(
            data,
            mask=unobserved,
            uncertainty=StdDevUncertainty(e_data),
            unit=self.unit,
            wcs=WCS(self.header),
            meta={
                "header": self.header,
                "primary_header": self.primary_header
            },
            time=self.time,
        )

        return data
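The `np.array_split` idiom above hands one chunk of bootstrap indices to each core, so every worker call produces a batch of maps and `np.concatenate` restores a single stack. A minimal standalone illustration:

import numpy as np
from multiprocessing import cpu_count

n_bootstrap = 100
chunks = np.array_split(np.arange(n_bootstrap), cpu_count())
# e.g. on 4 cores: 4 arrays of ~25 indices each; each worker returns one
# array of bootstrap maps, and the chunks concatenate back to 100 maps.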
Example #9
k_ma = np.array(['k' in f for f in files])
j_ma = np.array(['j' in f for f in files])
z_ma = np.array(['z' in f for f in files])
h_ma = np.array(['h' in f for f in files])
y_ma = np.array(['y' in f for f in files])

sk,sj,sz,sh,sy = seeing[k_ma],seeing[j_ma],seeing[z_ma],seeing[h_ma],seeing[y_ma]
k_file = files[k_ma][np.argmin(sk)]
j_file = files[j_ma][np.argmin(sj)]
z_file = files[z_ma][np.argmin(sz)]
h_file = files[h_ma][np.argmin(sh)]
y_file = files[y_ma][np.argmin(sy)]

#RADEC
go_files = [k_file,j_file,z_file,h_file,y_file]
ProgressBar.map(XYtoRADEC,go_files,multiprocess=True)

#Match
ejec = 'java -jar %s/stilts.jar tmatchn multimode=pairs nin=5 matcher=sky ' % stilts_folder
for i,f in enumerate(go_files):
    f = f.replace('.fits','.dat')
    ejec += 'in%d=%s ifmt%d=ascii values%d="RA DEC" ' % (i+1, f, i+1, i+1)
ejec += 'join1=match out=./CMD/match.dat ofmt=ascii'
os.system(ejec)

#Indices
data = np.genfromtxt('./CMD/match.dat',unpack=True)
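# K, J, Z, H, Y below are assumed to be magnitude columns taken from `data`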

m1 = K - 1.08 * (Z - Y)
m2 = H - 1.13 * (J - K)
m3 = J - 1.03 * (Y - K)
Example #10
def make_validation_report(urls=None,
                           destdir='astropy.io.votable.validator.results',
                           multiprocess=True,
                           stilts=None):
    """
    Validates a large collection of web-accessible VOTable files.

    Generates a report as a directory tree of HTML files.

    Parameters
    ----------
    urls : list of strings, optional
        If provided, is a list of HTTP urls to download VOTable files
        from.  If not provided, a built-in set of ~22,000 urls
        compiled by HEASARC will be used.

    destdir : path, optional
        The directory to write the report to.  By default, this is a
        directory called ``'results'`` in the current directory. If the
        directory does not exist, it will be created.

    multiprocess : bool, optional
        If `True` (default), perform validations in parallel using all
        of the cores on this machine.

    stilts : path, optional
        To perform validation with ``votlint`` from the Java-based
        `STILTS <http://www.star.bris.ac.uk/~mbt/stilts/>`_ VOTable
        parser, in addition to `astropy.io.votable`, set this to the
        path of the ``'stilts.jar'`` file.  ``java`` on the system shell
        path will be used to run it.

    Notes
    -----
    Downloads of each given URL will be performed only once and cached
    locally in *destdir*.  To refresh the cache, remove *destdir*
    first.
    """
    from astropy.utils.console import (color_print, ProgressBar, Spinner)

    if stilts is not None:
        if not os.path.exists(stilts):
            raise ValueError('{0} does not exist.'.format(stilts))

    destdir = os.path.abspath(destdir)

    if urls is None:
        with Spinner('Loading URLs', 'green') as s:
            urls = get_urls(destdir, s)
    else:
        color_print('Marking URLs', 'green')
        for url in ProgressBar.iterate(urls):
            with result.Result(url, root=destdir) as r:
                r['expected'] = type

    args = [(url, destdir) for url in urls]

    color_print('Downloading VO files', 'green')
    ProgressBar.map(download, args, multiprocess=multiprocess)

    color_print('Validating VO files', 'green')
    ProgressBar.map(validate_vo, args, multiprocess=multiprocess)

    if stilts is not None:
        color_print('Validating with votlint', 'green')
        votlint_args = [(stilts, x, destdir) for x in urls]
        ProgressBar.map(votlint_validate,
                        votlint_args,
                        multiprocess=multiprocess)

    color_print('Generating HTML files', 'green')
    ProgressBar.map(write_html_result, args, multiprocess=multiprocess)

    with Spinner('Grouping results', 'green') as s:
        subsets = result.get_result_subsets(urls, destdir, s)

    color_print('Generating index', 'green')
    html.write_index(subsets, urls, destdir)

    color_print('Generating subindices', 'green')
    subindex_args = [(subset, destdir, len(urls)) for subset in subsets]
    ProgressBar.map(write_subindex, subindex_args, multiprocess=multiprocess)
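A hypothetical invocation, assuming the function is exposed under `astropy.io.votable.validator`:

from astropy.io.votable import validator

validator.make_validation_report(
    urls=['http://example.com/votable.xml'],  # illustrative URL
    destdir='results',
    multiprocess=True)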
Example #11
name     = np.genfromtxt(folder+'zinfo_img',unpack=True,usecols=(0,),dtype=str)
k_mask   = np.array(['k' in f for f in name])
sek,elk,yrk = np.transpose([se,el,yr])[k_mask].T
yrk = (yrk-yrk[0])/365.242199

color_print('Collecting epoch files...','cyan')
epochs = glob.glob('./%s/*.*' % match_folder)

color_print('Matching the MF with the epochs','cyan')
ejecuta = 'java -jar %s/stilts.jar tmatch2 in1=./%s values1="ID" ifmt1=ascii ' % (stilts_folder, master)

def mf_match(ep):
    ej2 = 'in2=%s values2="ID_1" ifmt2=ascii icmd2=\'keepcols "ID_1 X Y"\' matcher=exact find=best join=1and2 out=./%s/%s ofmt=ascii progress=none ocmd="delcols ID_1"' % (ep, match_master, ep.split('/')[-1].replace('.match','.mfma'))
    os.system(ejecuta + ej2)

ProgressBar.map(mf_match,epochs,multiprocess=True)

color_print('Performing linear transformations','cyan')
matches = glob.glob('./%s/*.*' % match_master)
bid     = np.genfromtxt(locales,unpack=True,usecols=(0,))

def shift(ep):
    ids,x1,y1,mk,mj,x2,y2 = np.genfromtxt(ep,unpack=True)

    local_mask = np.in1d(ids,bid)
    lid,lx1,ly1,lx2,ly2  = np.transpose([ids,x1,y1,x2,y2])[local_mask].T

    loc_xy = np.transpose([lx2,ly2])
    nbrs   = NN(n_neighbors=vecinos, algorithm='auto').fit(loc_xy)

    coo_xy = np.transpose([x2,y2])
Example #12
keps = eps[info_k]
jeps = eps[info_j]

kse,kel,kyr = np.transpose([se,el,yr])[info_k].T
jse,jel,jyr = np.transpose([se,el,yr])[info_j].T

msk_idx = np.argmin(kse)
msj_idx = np.argmin(jse)

print('\tMinimum seeing in Ks found in %s (%f)' % (keps[msk_idx],kse[msk_idx]))
print('\tMinimum seeing in J found in %s (%f)' % (jeps[msj_idx],jse[msj_idx]))

#Get the RA-DEC
color_print('Converting XY to RADEC...','cyan')
print('\tConverting Ks')
ProgressBar.map(XYtoRADEC,k_files,multiprocess=True)
print('\tConverting J')
ProgressBar.map(XYtoRADEC,j_files,multiprocess=True)

#Create the CMD
color_print('Creating CMD...','cyan')
K_ref = './%s/%s' % (radec_folder,k_files[msk_idx].split('/')[-1].replace('.dao','.dat'))
J_ref = './%s/%s' % (radec_folder,j_files[msj_idx].split('/')[-1].replace('.dao','.dat'))

com_cmd = 'java -jar %s/stilts.jar tmatch2 ifmt1=ascii ifmt2=ascii matcher=sky ofmt=ascii values1="RA DEC" values2="RA DEC" \
        in1=%s in2=%s out=%s params=%.1f progress=none join=all1' % (stilts_folder,K_ref,J_ref,cmd_out,match_tol)
os.system(com_cmd)

#Match epochs against the reference epoch (CMD)
makedir(match_folder)
Example #13
    # white dwarf temp
    twdVals = np.random.normal(loc=args.twd,scale=args.e_twd,size=chainLength)*units.K
    # period
    pVals = np.random.normal(loc=args.p,scale=args.e_p,size=chainLength)*units.d

    # loop over the MCMC chain, calculating system parameters as we go
    
    # table for results
    results = Table(names=('q','Mw','Rw','Mr','Rr','a','Kw','Kr','incl'))
    # need to be a little careful about astropy versions here, since only
    # versions >=1.0 allow quantities in tables
    # function below extracts value from quantity and floats alike
    getval = lambda el: getattr(el,'value',el) 
            
    psolve = partial(solve,baseDir=baseDir)
    data = list(zip(qVals,dphiVals,rwVals,twdVals,pVals))
    solvedParams = PB.map(psolve,data,multiprocess=True)
    
    print('Writing out results...')
    # loop over these results and put all the solutions in our results table
    iStep = 0    
    bar = ProgressBar()
    for thisResult in solvedParams:
        bar.render(int(100*iStep/(len(solvedParams))),'Combining data')
        iStep += 1
        if thisResult is not None:
            results.add_row(thisResult)      

print('Found solutions for %d percent of samples in MCMC chain' % (100*float(len(results))/float(chainLength)))
    results.write('physicalparams.log',format='ascii.commented_header')
Example #14
 def _map(self, func, items):
     return ProgressBar.map(func, items, multiprocess=self.multiprocess)
Example #15
 def write_all_tpfs(self):
     """Produce TPF files for all targets in the cadence data."""
     target_ids = list(self.pixel_mapping.targets.keys())
     log.info("Writing {} Target Pixel Files.".format(len(target_ids)))
     ProgressBar.map(self.write_tpf, target_ids, multiprocess=True)
Example #16
    data   = np.genfromtxt(f, unpack=True)
    nchip  = int(f.split('_')[-1].split('-')[0])

    x1, y1 = data[3], data[4]

    ma     = (x1 < 2048)*(y1 < 2048)
    x1, y1 = x1[ma], y1[ma]
    data   = data[:,ma]

    header = 'ID RA DEC X Y MAG MAG_ERR'
    fmt    = '%.0f %.7f %.7f %.3f %.3f %.3f %.3f'

    x_dc_1 = np.empty_like(x1)
    y_dc_1 = np.empty_like(y1)

    for i in range(len(x1)):
        x_dc_1[i], y_dc_1[i] = VIRCAM_gc_J(x1[i], y1[i], nchip)

    data[3] = x_dc_1
    data[4] = y_dc_1

    out_file = f.replace('.dat','.datgc')
    np.savetxt(out_file, data.T, fmt=fmt, header=header)

if files.shape:
    ProgressBar.map(ejecuta, files, multiprocess=True)
else:
    print('One image in the input')
    ejecuta(files.tolist())
    print('Done!')
Example #17
color_print('Collecting epoch files...', 'cyan')
epochs = glob.glob('./%s/*.*' % match_folder)

color_print('Matching the MF with the epochs', 'cyan')
ejecuta = 'java -jar %s/stilts.jar tmatch2 in1=./%s values1="ID" ifmt1=ascii ' % (
    stilts_folder, master)


def mf_match(ep):
    ej2 = 'in2=%s values2="ID_1" ifmt2=ascii icmd2=\'keepcols "ID_1 X Y"\' matcher=exact find=best join=1and2 out=./%s/%s ofmt=ascii progress=none ocmd="delcols ID_1"' % (
        ep, match_master, ep.split('/')[-1].replace('.match', '.mfma'))
    os.system(ejecuta + ej2)


ProgressBar.map(mf_match, epochs, multiprocess=True)

color_print('Performing linear transformations', 'cyan')
matches = glob.glob('./%s/*.*' % match_master)
bid = np.genfromtxt(locales, unpack=True, usecols=(0, ))


def shift(ep):
    ids, x1, y1, mk, mj, x2, y2 = np.genfromtxt(ep, unpack=True)

    local_mask = np.in1d(ids, bid)
    lid, lx1, ly1, lx2, ly2 = np.transpose([ids, x1, y1, x2, y2])[local_mask].T

    loc_xy = np.transpose([lx2, ly2])
    nbrs = NN(n_neighbors=vecinos, algorithm='auto').fit(loc_xy)
Example #18
    chainLength = len(qVals)

    # white dwarf temp
    twdVals = np.random.normal(
        loc=args.twd, scale=args.e_twd, size=chainLength) * units.K
    # period
    pVals = np.random.normal(loc=args.p, scale=args.e_p,
                             size=chainLength) * units.d

    # loop over the MCMC chain, calculating system parameters as we go

    # table for results
    results = Table(names=('q', 'Mw', 'Rw', 'Mr', 'Rr', 'a', 'Kw', 'Kr',
                           'incl'))
    # need to be a little careful about astropy versions here, since only
    # versions >=1.0 allow quantities in tables
    # function below extracts value from quantity and floats alike
    getval = lambda el: getattr(el, 'value', el)

    psolve = partial(solve, baseDir=baseDir)
    data = list(zip(qVals, dphiVals, rwVals, twdVals, pVals))
    solvedParams = ProgressBar.map(psolve, data, multiprocess=True)

    # loop over these results and put all the solutions in our results table
    for thisResult in solvedParams:
        if thisResult is not None:
            results.add_row(thisResult)

    print('Found solutions for %d percent of samples in MCMC chain' % (
        100 * float(len(results)) / float(chainLength)))
    results.write('physicalparams.log', format='ascii.commented_header')
Example #19
    # Clean up
    outfile.close()
    infile.close()


def convert_to_dmc(input_fn, output_fn):
    """Convert from csv to pipe-delimited.
    """
    syscall("""sed "s/,/|/g" {} > {}""".format(input_fn, output_fn))


if __name__ == "__main__":
    input_filenames = glob.glob(os.path.join(CROPPED_CATS_DIR, "*-cropped"))
    log.info("Converting cropped catalogues into csv format")
    ProgressBar.map(convert_to_csv,
                    input_filenames,
                    multiprocess=True, step=1)

    log.info("Removing objects appearing twice in the cropped tiles")
    ProgressBar.map(remove_duplicates_from_tiles,
                    input_filenames,
                    multiprocess=True, step=1)

    log.info("Removing objects already in EPIC with stilts "
             "using a {} arcsec matching radius".format(MATCHING_RADIUS))
    ProgressBar.map(remove_duplicates,
                    input_filenames,
                    multiprocess=True, step=1)

    merge_fn = os.path.join(FINAL_CAT_DIR, "merged.dmc.csv")
    log.info("Merging the tiles into {}".format(merge_fn))
Example #20

if __name__ == '__main__':
    mlband = 'i'

    aggman = MassAggregationManager(
        cspbase='/usr/data/minhas2/zpace/sdss/sas/mangawork/manga/sandbox/mangapca/zachpace/CSPs_CKC14_MaNGA_20190215-1/',
        globstring='**/*-*/*-*_zpres.fits',
        masstable_fname_base='v2_5_3/2.3.0/masstables/{}.ecsv')
    res_fnames, res_plateifus = aggman.find(redo=False)
    nres_to_agg = len(res_fnames)

    if nres_to_agg > 0:
        print('aggregating {}'.format(nres_to_agg))
        ProgressBar.map(
            _aggregate_into_table_file,
            list(zip(res_fnames, [mlband, ] * nres_to_agg, [aggman.cspbase, ] * nres_to_agg,
                     res_plateifus)))

    mass_table = t.vstack(
        [t.QTable.read(fn, format='ascii.ecsv') for fn in glob(
            os.path.join(aggman.cspbase, aggman.masstable_fname_base.format('*')))])

    mass_table['distmod'] = cosmo.distmod([drpall.loc[obj]['nsa_zdist'] for obj in mass_table['plateifu']])

    for band in 'griz':
        outerfluxname = f'flux_outer_{band}'
        outermagname = f'mag_outer_{band}'
        outersollumname = f'sollum_outer_{band}'
        outerlogsollumname = f'logsollum_outer_{band}'

        for coltype in ['nsa', 'in_ifu']:
Example #21
 def write_all_tpfs(self):
     """Produce TPF files for all targets in the cadence data."""
     target_ids = list(self.pixel_mapping.targets.keys())
     log.info("Writing {} Target Pixel Files.".format(len(target_ids)))
     ProgressBar.map(self.write_tpf, target_ids, multiprocess=True)
Example #22
    x_bins = np.arange(0, n_bins_x)
    mu_bins = np.linspace(mu_min,
                          mu_max,
                          int(mu_max / step_width_mu) + 1,
                          endpoint=True)

    print("Generating FC confidence belt for %s values of mu." % len(mu_bins))

    partial_func = partial(
        fc_find_acceptance_interval_poisson,
        background=background,
        x_bins=x_bins,
        alpha=cl,
    )

    results = ProgressBar.map(partial_func, mu_bins, multiprocess=True)

    LowerLimitAna, UpperLimitAna = zip(*results)

    LowerLimitAna = np.asarray(LowerLimitAna)
    UpperLimitAna = np.asarray(UpperLimitAna)

    fc_fix_limits(LowerLimitAna, UpperLimitAna)

    fig = plt.figure()
    ax = fig.add_subplot(111)

    plt.plot(LowerLimitAna, mu_bins, ls="-", color="red")
    plt.plot(UpperLimitAna, mu_bins, ls="-", color="red")

    plt.grid(True)
Example #23
    n_bins_x = 100
    step_width_mu = 0.005
    mu_min = 0
    mu_max = 50
    cl = 0.90

    x_bins = np.arange(0, n_bins_x)
    mu_bins = np.linspace(mu_min, mu_max, int(mu_max / step_width_mu) + 1, endpoint=True)

    print("Generating FC confidence belt for %s values of mu." % len(mu_bins))

    partial_func = partial(
        fc_find_acceptance_interval_poisson, background=background, x_bins=x_bins, alpha=cl
    )

    results = ProgressBar.map(partial_func, mu_bins, multiprocess=True)

    LowerLimitAna, UpperLimitAna = zip(*results)

    LowerLimitAna = np.asarray(LowerLimitAna)
    UpperLimitAna = np.asarray(UpperLimitAna)

    fc_fix_limits(LowerLimitAna, UpperLimitAna)

    fig = plt.figure()
    ax = fig.add_subplot(111)

    plt.plot(LowerLimitAna, mu_bins, ls="-", color="red")
    plt.plot(UpperLimitAna, mu_bins, ls="-", color="red")

    plt.grid(True)
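In both snippets above, `functools.partial` freezes the fixed keyword arguments so that `ProgressBar.map` only has to pass the single varying `mu` value. A minimal standalone illustration of the idiom:

from functools import partial

def f(x, scale, offset):
    return scale * x + offset

g = partial(f, scale=2.0, offset=1.0)  # freeze keywords; g(x) == f(x, 2.0, 1.0)
print([g(x) for x in range(3)])        # [1.0, 3.0, 5.0]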
Example #24
            output_fn = "../moc/k2-footprint-c{:02d}-proposed.moc".format(campaign)
        else:
            output_fn = "../moc/k2-footprint-c{:02d}.moc".format(campaign)
    # Obtain the footprint corners in polar coordinates
    log.info("Preparing footprint polygons for C{}".format(campaign))
    polygons = []
    for _, channel in FOOTPRINT["c{}".format(campaign)]["channels"].items():
        polygon = [np.pi/2. - np.radians(channel["corners_dec"]),
                   np.radians(channel["corners_ra"])]
        polygons.append(polygon)
    # Obtain the healpix diamonds that cover the polygons entirely
    # and add these to a `MOC` object
    log.info("Converting polygons into healpix format")
    moc = mocpy.MOC(moc_order=norder_moc)
    for p in polygons:
        pix_list = hp.query_polygon(2**norder_moc,
                                    hp.ang2vec(p[0], p[1]),
                                    inclusive=True, nest=True)
        for pix in pix_list:
            moc.add_pix(norder_moc, pix)
    # Finally, write the resulting MOC file to disk
    log.info("Writing {}".format(output_fn))
    moc.plot()  # IMPORTANT! moc.write is corrupt if plot is not called first
    moc.write(output_fn)


if __name__ == "__main__":
    # Write MOC files for all campaigns in parallel
    campaigns = getFieldNumbers()
    ProgressBar.map(write_k2_moc, campaigns, multiprocess=True)
Example #25
catalog_k = np.sort([f for f in catalog if "k" in f])
catalog_j = np.sort([f for f in catalog if "j" in f])

#Get the seeing
color_print('-Getting lowest seeing...','cyan')
ks,ksi = lowest_seeing(fits_k)
js,jsi = lowest_seeing(fits_j)
cmd_fits = fits_j[jsi], fits_k[ksi]
cmd_cata = catalog_j[jsi], catalog_k[ksi]
print('\tLowest seeing in K: %s (%f)' % (cmd_fits[1],ks))
print('\tLowest seeing in J: %s (%f)' % (cmd_fits[0],js))

#Get RA and DEC with WCS
color_print('-Getting RADEC...','cyan')
if get_RADEC: 
	ProgressBar.map(XYtoRADEC,np.transpose([fits_k,catalog_k]),multiprocess=True)
	ProgressBar.map(XYtoRADEC,np.transpose([fits_j,catalog_j]),multiprocess=True)
cmd_dat = cmd_fits[0].replace('fits','dat'), cmd_fits[1].replace('fits','dat')

#CMD (match and creation)
color_print('-Match CMD...','cyan')
execute  = 'sh stilts tmatch2 ifmt1=ascii ifmt2=ascii matcher=sky ofmt=ascii values1="RA DEC" values2="RA DEC" '
if match_CMD:
	exec_CMD = 'in1=%s in2=%s out=%s params=%.1f progress=none join=all1' % (folder+cmd_dat[1],folder+cmd_dat[0],results+'CMD.dat',match_tolerance)

	os.system(execute+exec_CMD)

#Match all epochs with the reference epoch (lowest seeing)
color_print('-Matching epochs...','cyan')
execute = execute.replace('values1="RA DEC"','values1="RA_1 DEC_1"')
if match_epo:
Example #26
def build_wavesolution(date,
                       verbose=False,
                       ntest=None,
                       idxrange=None,
                       use_fine_tuned_traces=False,
                       wavedegree=5,
                       contdegree=3,
                       lamps=["Hg", "Cd", "Xe"],
                       savefig=True,
                       saveindividuals=False,
                       xybounds=None,
                       rebuild=True):
    """ Create the wavelength solution for the given night.
    The core of the solution fitting is made in pysedm.wavesolution.

    Parameters
    ----------
    
    Returns
    -------

    """
    timedir = io.get_datapath(date)

    if verbose:
        print("Directory affected by Wavelength Calibration: %s" % timedir)

    if not rebuild and len(glob(timedir + "%s_WaveSolution.pkl" % (date))) > 0:
        warnings.warn(
            "WaveSolution already exists for %s. rebuild is False, so nothing is happening"
            % date)
        return

    # ----------------
    # - Load the Data
    # - SpectralMatch using domes
    #   Built by build_spectmatcher
    smap = io.load_nightly_tracematch(date, withmask=True)

    if use_fine_tuned_traces:
        raise ValueError("use_fine_tuned_traces is not supported anymore")

    fileccd_lamps = io.get_night_files(date,
                                       "ccd.lamp",
                                       target="|".join(lamps))
    lamps = [get_ccd(f_, tracematch=smap) for f_ in fileccd_lamps]

    # - The CubeSolution
    csolution = get_wavesolution(*lamps)

    # ----------------
    # - Spaxel Selection
    if xybounds is None:
        xybounds = INDEX_CCD_CONTOURS

    idxall = smap.get_traces_within_polygon(xybounds)
    if idxrange is not None:
        idxall = [l for l in idxall if l >= idxrange[0] and l < idxrange[1]]
    idx = idxall if ntest is None else np.random.choice(
        idxall, ntest, replace=False)

    # - Do The loop and map it thanks to astropy
    from astropy.utils.console import ProgressBar

    def fitsolution(idx_):
        if saveindividuals:
            saveplot = timedir + "%s_wavesolution_trace%d.pdf" % (date, idx_)
        else:
            saveplot = None
        csolution.fit_wavelesolution(traceindex=idx_,
                                     saveplot=None,
                                     contdegree=contdegree,
                                     wavedegree=wavedegree,
                                     plotprop={"show_guesses": True})
        if saveplot is not None:
            csolution._wsol.show(show_guesses=True, savefile=saveplot)
            mpl.close("all")

    ProgressBar.map(fitsolution, idx)

    # - output - #
    outfile = "%s_WaveSolution" % date
    if idxrange is not None:
        outfile += "_range_%d_%d" % (idxrange[0], idxrange[1])
    if ntest is not None:
        outfile += "_ntest%d" % (ntest)

    dump_pkl(csolution.wavesolutions, timedir + "%s.pkl" % outfile)

    if savefig:
        if ntest is not None or idxrange is not None:
            print("No WaveSolution plot is saved when ntest or idxrange are set.")
        else:
            print("Saving Wavesolution plot")
            wsol = io.load_nightly_wavesolution(date)
            hexagrid = io.load_nightly_hexagonalgrid(date)
            pl = wsol.show_dispersion_map(hexagrid,
                                          vmin="0.5",
                                          vmax="99.5",
                                          outlier_highlight=5,
                                          show=False)
            pl['fig'].savefig(timedir +
                              "%s_wavesolution_dispersionmap.pdf" % date)
Example #27
def make_validation_report(
    urls=None, destdir='astropy.io.votable.validator.results',
    multiprocess=True, stilts=None):
    """
    Validates a large collection of web-accessible VOTable files.

    Generates a report as a directory tree of HTML files.

    Parameters
    ----------
    urls : list of strings, optional
        If provided, is a list of HTTP urls to download VOTable files
        from.  If not provided, a built-in set of ~22,000 urls
        compiled by HEASARC will be used.

    destdir : path, optional
        The directory to write the report to.  By default, this is a
        directory called ``'results'`` in the current directory. If the
        directory does not exist, it will be created.

    multiprocess : bool, optional
        If `True` (default), perform validations in parallel using all
        of the cores on this machine.

    stilts : path, optional
        To perform validation with ``votlint`` from the Java-based
        `STILTS <http://www.star.bris.ac.uk/~mbt/stilts/>`_ VOTable
        parser, in addition to `astropy.io.votable`, set this to the
        path of the ``'stilts.jar'`` file.  ``java`` on the system shell
        path will be used to run it.

    Notes
    -----
    Downloads of each given URL will be performed only once and cached
    locally in *destdir*.  To refresh the cache, remove *destdir*
    first.
    """
    from astropy.utils.console import (color_print, ProgressBar, Spinner)

    if stilts is not None:
        if not os.path.exists(stilts):
            raise ValueError(
                '{0} does not exist.'.format(stilts))

    destdir = os.path.abspath(destdir)

    if urls is None:
        with Spinner('Loading URLs', 'green') as s:
            urls = get_urls(destdir, s)
    else:
        color_print('Marking URLs', 'green')
        for url in ProgressBar.iterate(urls):
            with result.Result(url, root=destdir) as r:
                r['expected'] = type

    args = [(url, destdir) for url in urls]

    color_print('Downloading VO files', 'green')
    ProgressBar.map(
        download, args, multiprocess=multiprocess)

    color_print('Validating VO files', 'green')
    ProgressBar.map(
        validate_vo, args, multiprocess=multiprocess)

    if stilts is not None:
        color_print('Validating with votlint', 'green')
        votlint_args = [(stilts, x, destdir) for x in urls]
        ProgressBar.map(
            votlint_validate, votlint_args, multiprocess=multiprocess)

    color_print('Generating HTML files', 'green')
    ProgressBar.map(
        write_html_result, args, multiprocess=multiprocess)

    with Spinner('Grouping results', 'green') as s:
        subsets = result.get_result_subsets(urls, destdir, s)

    color_print('Generating index', 'green')
    html.write_index(subsets, urls, destdir)

    color_print('Generating subindices', 'green')
    subindex_args = [(subset, destdir, len(urls)) for subset in subsets]
    ProgressBar.map(
        write_subindex, subindex_args, multiprocess=multiprocess)
Example #28
def build_cubes(ccdfiles,
                date,
                lbda=None,
                tracematch=None,
                wavesolution=None,
                hexagrid=None,
                flatfielded=True,
                flatfield=None,
                traceflexure_corrected=True,
                atmcorrected=True,
                flexure_corrected=True,
                build_calibrated_cube=True,
                calibration_ref=None,
                savefig=True,
                verbose=True,
                notebook=False):
    """ Build a cube from the an IFU ccd image. This image 
    should be bias corrected and cosmic ray corrected.

    The standard created cube will be 3Dflat field correct 
    (see flatfielded) and corrected for atmosphere extinction 
    (see atmcorrected). A second cube will be flux calibrated 
    if possible (see build_calibrated_cube).

    Parameters
    ----------
    ccdfiles: [list of strings]
        The ccd files from which the cubes will be extracted.
        
    date: [string]
        date in usual YYYYMMDD format
    
    lbda: [None/array] -optional-
        wavelength array used to build the cube. 
        If not given the default sedm wavelength range will be used. 
        See pysedm.sedm.SEDM_LBDA.

    // Cube Calibrator //
    
    wavesolution: [WaveSolution] -optional-
        The wavelength solution containing the pixel<->wavelength conversion.
        If None, this will be loaded using `date`.

    hexagrid: [HexaGrid] -optional-
        The Hexagonal Grid tools containing the index<->qr<->xy conversions.
        If None, this will be loaded using `date`.

    flatfield: [Slice] -optional-
        Object containing the relative transmission of the IFU spaxels.
        If None, this will be loaded using `date`.

    // Action Selection //
    flexure_corrected: [bool] -optional-
        Shall the cube be flexure corrected?
        - Remark: this means the cube will be built twice:
            - once to estimate the flexure
            - once with the flexure correction applied.

    flatfielded: [bool] -optional-
        Shall the cube be flatfielded?
        (This information will be saved in the header.)

    atmcorrected: [bool] -optional-
        Shall the cube be corrected for atmospheric extinction?
        (This information will be saved in the header.)

    // Additional outcome: Flux calibrated cube //

    build_calibrated_cube: [bool] -optional-
        Shall this method build an additional flux calibrated cube?
        
    calibration_ref: [None/string] -optional-
        If you want to build a calibrated cube, you can provide the filename
        of the spectrum containing the inverse-sensitivity (fluxcal*)
        If None, this will load the latest fluxcal object of the night.
        If Nothing found, no flux calibrated cube will be created. 

    Returns
    -------
    Void
    """
    if traceflexure_corrected:
        from ..flexure import TraceFlexure
        from ..mapping import Mapper
    # ------------------ #
    # Loading the Inputs #
    # ------------------ #
    if tracematch is None:
        tracematch = io.load_nightly_tracematch(
            date, withmask=False if traceflexure_corrected else True)

    if hexagrid is None:
        hexagrid = io.load_nightly_hexagonalgrid(date)

    if wavesolution is None:
        wavesolution = io.load_nightly_wavesolution(date)
        wavesolution._load_full_solutions_()

    if lbda is None:
        lbda = SEDM_LBDA

    if flatfielded and flatfield is None:
        flatfield = io.load_nightly_flat(date)

    # - In Summary, a Mapper
    if traceflexure_corrected:
        indexes = tracematch.get_traces_within_polygon(INDEX_CCD_CONTOURS)
        mapper = Mapper(tracematch=tracematch.copy(),
                        hexagrid=hexagrid,
                        wavesolution=wavesolution)
        mapper.derive_spaxel_mapping(list(wavesolution.wavesolutions.keys()))

    # ---------------- #
    # Loading the CCDS #
    # ---------------- #
    ccds = []
    for ccdfile in ccdfiles:
        ccd_ = get_ccd(ccdfile, tracematch=tracematch.copy(), background=0)
        if traceflexure_corrected:
            flex = TraceFlexure(ccd_, mapper=mapper)
            flex.derive_j_offset(verbose=verbose)
            ccd_.tracematch.add_trace_offset(0, flex.j_offset)
            if savefig:
                flex.show_j_flexure_ccd(show=False,
                                        savefile=ccd_.filename.replace(
                                            "crr", "flexuretrace_crr").replace(
                                                ".fits", ".pdf"))
            ccd_.header["FLXTRACE"] = (
                True, "Is TraceMatch corrected for j flexure?")
            ccd_.header["FLXTRVAL"] = (
                flex.j_offset,
                "amplitude in pixel of the  j flexure Trace correction")
            if verbose:
                print("Loading the %d traces" % len(mapper.traceindexes))
            load_trace_masks(ccd_.tracematch,
                             mapper.traceindexes,
                             notebook=notebook)
        else:
            ccd_.header["FLXTRACE"] = (
                False, "Is TraceMatch corrected for j flexure?")
            ccd_.header["FLXTRVAL"] = (
                0, "amplitude in pixel of the  j flexure Trace correction")

        ccd_.fetch_background(set_it=True, build_if_needed=True)
        # - Variance
        if not ccd_.has_var():
            ccd_.set_default_variance()
        ccds.append(ccd_)

    # ---------------- #
    # Build the Cubes  #
    # ---------------- #
    # internal routine
    def _build_cubes_(ccdin):
        print(ccdin.filename)
        prop = dict(lbda=lbda,
                    wavesolution=wavesolution,
                    hexagrid=hexagrid,
                    flexure_corrected=flexure_corrected,
                    flatfielded=flatfielded,
                    flatfield=flatfield,
                    atmcorrected=atmcorrected,
                    build_calibrated_cube=build_calibrated_cube,
                    calibration_ref=calibration_ref,
                    savefig=savefig)
        #try:
        build_sedmcube(ccdin, date, **prop)
        #except:
        #    warnings.warn("FAILED building cube for ccd: %s"%ccdin.filename.split("/")[-1])

    # The actual build
    if len(ccds) > 1:
        from astropy.utils.console import ProgressBar
        ProgressBar.map(_build_cubes_, ccds)
    else:
        _build_cubes_(ccds[0])
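A hypothetical call, assuming `files` lists the bias- and cosmic-ray-corrected science frames of one night:

build_cubes(files, "20180914", flatfielded=True, atmcorrected=True,
            build_calibrated_cube=True)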
Example #29
	samp=sorted(n.random.randint(len(idx),size=25))
	f,axes=pl.subplots(5,5,sharex=True,sharey=True)
	for ax,s in zip(axes.flat,samp):
		im=ax.imshow(vignets2[s])
	axes[0,0].set_xlim(0,36)
	axes[0,0].set_ylim(0,36)
	axes[0,0].axis('equal')
	f.subplots_adjust(right=.8)
	cbar_ax=f.add_axes([0.85,.15,.05,.7])
	f.colorbar(im,cax=cbar_ax)
	pl.show()
	samp=sorted(n.random.randint(nobj1,size=100))
	f,axes=pl.subplots(10,10,sharex=True,sharey=True)
	for i,s in zip(range(len(axes.flat)),samp):
		im=axes.flat[i].imshow(vignets1[s])
	axes[0,0].set_xlim(0,36)
	axes[0,0].set_ylim(0,36)
	axes[0,0].axis('equal')
	f.subplots_adjust(right=.8)
	cbar_ax=f.add_axes([0.85,.15,.05,.7])
	f.colorbar(im,cax=cbar_ax)
	pl.show()
	'''
dire = '/Users/simon/Documents/'
direc = [
    dire + 'CFHTLS-WIRDS/D2/R/Step 2/Proc/4PSFEx.fits',
    dire + 'NGVS/NGVS/NGVS+0+1/Ks/Phase2/test.cat',
    dire + 'NGVS/NGVS/NGVS+0+1/r/NGVS+0+1R.cat'
]
ProgressBar.map(convert_catalog, direc, multiprocess=True)
Example #30
	os.chdir(path)
	'''
	vignets2=vignets1[idx]
	print len(vignets1), len(vignets2)
	samp=sorted(n.random.randint(len(idx),size=25))
	f,axes=pl.subplots(5,5,sharex=True,sharey=True)
	for ax,s in zip(axes.flat,samp):
		im=ax.imshow(vignets2[s])
	axes[0,0].set_xlim(0,36)
	axes[0,0].set_ylim(0,36)
	axes[0,0].axis('equal')
	f.subplots_adjust(right=.8)
	cbar_ax=f.add_axes([0.85,.15,.05,.7])
	f.colorbar(im,cax=cbar_ax)
	pl.show()
	samp=sorted(n.random.randint(nobj1,size=100))
	f,axes=pl.subplots(10,10,sharex=True,sharey=True)
	for i,s in zip(range(len(axes.flat)),samp):
		im=axes.flat[i].imshow(vignets1[s])
	axes[0,0].set_xlim(0,36)
	axes[0,0].set_ylim(0,36)
	axes[0,0].axis('equal')
	f.subplots_adjust(right=.8)
	cbar_ax=f.add_axes([0.85,.15,.05,.7])
	f.colorbar(im,cax=cbar_ax)
	pl.show()
	'''
dire='/Users/simon/Documents/'
direc=[dire+'CFHTLS-WIRDS/D2/R/Step 2/Proc/4PSFEx.fits',dire+'NGVS/NGVS/NGVS+0+1/Ks/Phase2/test.cat',dire+'NGVS/NGVS/NGVS+0+1/r/NGVS+0+1R.cat']
ProgressBar.map(convert_catalog,direc,multiprocess=True)