Example no. 1
import os
import csh

# define data set
datadir = os.getenv('CSH_DATA')
filenames = [datadir + '/1342185454_blue_PreparedFrames.fits[5954:67614]',
             datadir + '/1342185455_blue_PreparedFrames.fits[5954:67615]']
# output directory
output_path = os.path.join(os.getenv('HOME'), 'data', 'csh', 'output')
# compression modes
compressions = ["", "ca", "cs"]
#compressions = ["ca"]
# processing options
deglitch = True
covariance = True
filtering = True
# median filter length
filter_length = 10000
# regularization hyperparameters
hypers = (1e9, 1e9)
ext = ".fits"
pre = "ngc6946_rls_cov_"
# to store results
sol = []
# define same header for all maps
tod, projection, header, obs = csh.load_data(filenames[0])
del tod, projection, obs
# find a map for each compression and save it
for comp in compressions:
    sol.append(csh.rls(filenames, compression=comp, hypers=hypers, 
                       header=header,
                       deglitch=deglitch, covariance=covariance,
                       filtering=filtering, filter_length=filter_length))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
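These scripts write their maps under output_path and assume that directory already exists. As a small, generic precaution (plain standard-library Python, not part of csh), the directory could be created first:

import os

output_path = os.path.join(os.getenv('HOME'), 'data', 'csh', 'output')
if not os.path.isdir(output_path):
    os.makedirs(output_path)  # create the output tree if it is missing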
Example no. 2
#!/usr/bin/env python
import os
import csh

# define data set
datadir = os.getenv('CSH_DATA')
filenames = [datadir + '/1342185454_blue_PreparedFrames.fits[5954:67614]',
             datadir + '/1342185455_blue_PreparedFrames.fits[5954:67615]']
# output directory
output_path = os.path.join(os.getenv('HOME'), 'data', 'csh', 'output')
# compression modes
compressions = ["", "ca", "cs"]
# median filtering options
filtering = True
filter_length = 10000
# regularization hyperparameters
hypers = (1e8, 1e8)
ext = ".fits"
pre = "ngc6946_rls_cov_"
# to store results
sol = []
# find a map for each compression and save it
for comp in compressions:
    sol.append(csh.rls(filenames, compression=comp, hypers=hypers,
                       deglitch=False,
                       filtering=filtering, filter_length=filter_length,
                       algo=csh.lo.acg))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
Example no. 3
# (fragment: filenames, output_path, compressions and the remaining solver
# options are assumed to be defined earlier in the script)
wavelet = None
ext = ".fits"
pre = "ngc6946_huber_"
# to store results
sol = []
# define same header for all maps
tod, projection, header, obs = csh.load_data(filenames)
# get the weight map
weights = projection.transpose(tod.ones(tod.shape))
weights.writefits(os.path.join(output_path, pre + 'weights' + ext))
del tod, projection, obs
# find a map for each compression and save it
for comp in compressions:
    sol.append(
        csh.rls(filenames,
                compression=comp,
                hypers=hypers,
                header=header,
                factor=factor,
                algo=algo,
                deltas=deltas,
                wavelet=wavelet,
                tol=tol,
                maxiter=maxiter,
                deglitch=deglitch,
                covariance=covariance,
                filtering=filtering,
                filter_length=filter_length))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
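The weight (coverage) map written above is the transpose of the projection applied to a TOD of ones, i.e. the number of samples hitting each map pixel. A minimal, self-contained NumPy illustration of that idea, with a made-up pointing matrix standing in for the csh projection operator, could look like this:

import numpy as np

# hypothetical pointing matrix: 4 time samples x 3 map pixels
P = np.array([[1, 0, 0],
              [1, 0, 0],
              [0, 1, 0],
              [0, 0, 1]], dtype=float)

tod_ones = np.ones(P.shape[0])    # a TOD filled with ones
weights = P.T.dot(tod_ones)       # coverage: hits per map pixel
print(weights)                    # -> [2. 1. 1.]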
Example no. 4
# compression modes
#compressions = ["", "ca", "cs"]
compressions = ["ca"]
# solver and processing options
algo = lo.acg
deglitch = True
covariance = True
decompression = True
filtering = True
# median filter length
filter_length = 10000
# regularization hyperparameters
hypers = (1e8, 1e8)
ext = ".fits"
pre = "ngc6946_madmap_acg_"
# to store results
sol = []
# find a map for each compression and save it
for comp in compressions:
    sol.append(
        csh.rls(filenames,
                compression=comp,
                hypers=hypers,
                deglitch=deglitch,
                covariance=covariance,
                decompression=decompression,
                filtering=filtering,
                filter_length=filter_length,
                algo=algo,
                tol=1e-8))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
Example no. 5
# output directory
output_path = os.path.join(
    os.getenv('HOME'),
    'data',
    'csh',
    'output',
)
# compression modes
compression = ""
# processing options
deglitch = True
covariance = True
decompression = True
filtering = True
# median filter length
filter_length = 10000
# regularization hyperparameters
hypers = (1e9, 1e9)
ext = ".fits"
pre = "abell2218_madmap1_"
# find the map and save it
sol = csh.rls(filenames,
              compression=compression,
              hypers=hypers,
              deglitch=deglitch,
              covariance=covariance,
              decompression=decompression,
              filtering=filtering,
              filter_length=filter_length)
fname = os.path.join(output_path, pre + compression + ext)
sol.writefits(fname)
Example no. 6
# (fragment: bpj, the back-projected map, is assumed to have been computed
# earlier in the script)
bpj.writefits(os.path.join(output_path, pre + 'bpj' + ext))
# get the weight map
weights = projection.transpose(tod.ones(tod.shape))
weights.writefits(os.path.join(output_path, pre + 'weights' + ext))
naive = bpj / weights
naive[np.isnan(naive)] = 0.
naive.writefits(os.path.join(output_path, pre + 'naive' + ext))
del tod, projection, obs
# find a map for each compression and save it
# (bpj is reused here as a flag asking csh.rls to also return the back-projection)
bpj = True
# to store results
sol = []
bpjs = []
for comp in compressions:
    print("Inversion with " + comp + " compression")
    if comp == "":
        hypers = (1/8., 1/8.)
    else:
        hypers = (1e0, 1e0)
    s, b = csh.rls(filenames, compression=comp, hypers=hypers,
                   header=header, deltas=deltas,
                   deglitch=deglitch, covariance=covariance,
                   filtering=filtering, filter_length=filter_length,
                   algo=lo.hacg, tol=1e-8, wavelet=wavelet, bpj=bpj)
    sol.append(s)
    bpjs.append(b)
    fname = os.path.join(output_path, pre + comp)
    bpjs[-1].writefits(fname + '_bpj' + ext)
    sol[-1].writefits(fname + ext)
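The naive map above is simply the back-projection divided by the coverage weights, with unobserved pixels set to zero. A tiny self-contained illustration of that step (made-up one-dimensional maps, not the csh objects):

import numpy as np

# hypothetical back-projection and coverage maps (3 pixels, the last one unobserved)
bpj = np.array([4.0, 2.0, 0.0])
weights = np.array([2.0, 1.0, 0.0])

with np.errstate(invalid='ignore'):
    naive = bpj / weights        # mean signal per pixel
naive[np.isnan(naive)] = 0.      # zero out unobserved (0/0) pixels
print(naive)                     # -> [2. 2. 0.]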
Example no. 7
filter_length = 100
#hypers = (1e9, 1e9)
hypers = (1e0, 1e0)
ext = ".fits"
pre = "abell2218_high_red_rls_"
# to store results
sol = []
# define same header for all maps
tod, projection, header, obs = csh.load_data(filenames)
# get the weight map
weights = projection.transpose(tod.ones(tod.shape))
weights.writefits(os.path.join(output_path, pre + 'weights' + ext))
del tod, projection, obs
# find a map for each compression and save it
for comp in compressions:
    if comp == "":
        hypers = (1 / 8., 1 / 8.)
    else:
        hypers = (1e0, 1e0)
    sol.append(
        csh.rls(filenames,
                compression=comp,
                hypers=hypers,
                header=header,
                deglitch=deglitch,
                covariance=covariance,
                filtering=filtering,
                filter_length=filter_length))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
Example no. 8
]  # (fragment: the beginning of the filenames list is not shown)
# output directory
output_path = os.path.join(
    os.getenv('HOME'),
    'data',
    'csh',
    'output',
)
# compression modes
compressions = ["", "ca", "cs"]
# median filtering options
filtering = True
filter_length = 10000
# regularization hyperparameters
hypers = (1e8, 1e8)
ext = ".fits"
pre = "ngc6946_rls_cov_"
# to store results
sol = []
# find a map for each compression and save it
for comp in compressions:
    sol.append(
        csh.rls(filenames,
                compression=comp,
                hypers=hypers,
                deglitch=False,
                filtering=filtering,
                filter_length=filter_length,
                algo=csh.lo.acg))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
Example no. 9
import os
import csh

# define data set
datadir = os.getenv('CSH_DATA')
ids = ['1342184518', '1342184519', '1342184596', '1342184597', 
       '1342184598', '1342184599']
filenames = [os.path.join(datadir, id_str + '_blue_PreparedFrames.fits')
             for id_str in ids]
# output directory
output_path = os.path.join(os.getenv('HOME'), 'data', 'csh', 'output')
# compression modes
compression = ""
# processing options
deglitch = True
covariance = True
decompression = True
filtering = True
# median filter length
filter_length = 10000
# regularization hyperparameters
hypers = (1e9, 1e9)
ext = ".fits"
pre = "abell2218_madmap1_"
# find the map and save it
sol = csh.rls(filenames, compression=compression, hypers=hypers,
              deglitch=deglitch, covariance=covariance,
              decompression=decompression,
              filtering=filtering, filter_length=filter_length)
fname = os.path.join(output_path, pre + compression + ext)
sol.writefits(fname)
Example no. 10
covariance = False
filtering = True
filter_length = 1000
hypers = (1e7, 1e7)
#hypers = (1e2, 1e2)
deltas = (None, 1e-8, 1e-8)
algo = lo.hacg
tol = 1e-5
maxiter = 30
#wavelet = 'haar'
wavelet = None
ext = ".fits"
pre = "ngc6946_huber_"
# to store results
sol = []
# define same header for all maps
tod, projection, header, obs = csh.load_data(filenames)
# get the weight map
weights = projection.transpose(tod.ones(tod.shape))
weights.writefits(os.path.join(output_path, pre + 'weights' + ext))
del tod, projection, obs
# find a map for each compression and save it
for comp in compressions:
    sol.append(csh.rls(filenames, compression=comp, hypers=hypers, 
                       header=header, factor=factor, algo=algo,
                       deltas=deltas, wavelet=wavelet, tol=tol, maxiter=maxiter,
                       deglitch=deglitch, covariance=covariance,
                       filtering=filtering, filter_length=filter_length))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
Example no. 11
filtering = True
filter_length = 100
#hypers = (1e9, 1e9)
hypers = (1e0, 1e0)
wavelet = 'haar'
deltas = (None, 1e-8, 1e-8)
ext = ".fits"
pre = "abell2218_high_red_huber_"
# to store results
sol = []
# define same header for all maps
tod, projection, header, obs = csh.load_data(filenames)
# get the weight map
weights = projection.transpose(tod.ones(tod.shape))
weights.writefits(os.path.join(output_path, pre + 'weights' + ext))
del tod, projection, obs
# find a map for each compression and save it
for comp in compressions:
    if comp == "":
        hypers = (1/8., 1/8.)
    else:
        hypers = (1e0, 1e0)
    sol.append(csh.rls(filenames, compression=comp, hypers=hypers,
                       header=header, deltas=deltas,
                       deglitch=deglitch, covariance=covariance,
                       filtering=filtering, filter_length=filter_length,
                       algo=lo.hacg, tol=1e-8, wavelet=wavelet))
    fname = os.path.join(output_path, pre + comp + ext)
    sol[-1].writefits(fname)
Example no. 12
import os
import numpy as np
import csh
import tamasis

# define data set
datadir = os.path.join(tamasis.tamasis_dir, 'tests',)
filenames = os.path.join(datadir, 'frames_blue.fits[0:16]')
# compression modes (no compression only)
compressions = [""]
# median filtering options
filtering = True
filter_length = 10000
# regularization hyperparameters
hypers = (1e15, 1e15)
ext = ".fits"
pre = "ngc6946_rls_cov_"
# to store results
models = []
# return only the model operator, without performing the inversion
model_only = True
for comp in compressions:
    models.append(csh.rls(filenames, compression=comp, hypers=hypers,
                          deglitch=False,
                          filtering=filtering, filter_length=filter_length,
                          model_only=model_only
                          ))
# check that the dense matrix of the transposed model equals the transpose
# of the dense model matrix
for model in models:
    M = model.todense()
    MT = model.T.todense()
    assert np.all(M.T == MT)
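The final assertion checks that a matrix-free model and its declared transpose are mutually consistent once both are densified. The same idea can be shown with plain NumPy, using a hypothetical matrix-free operator in place of the csh model (every name below is illustrative, not part of the csh API):

import numpy as np

def apply_op(x):
    # hypothetical forward operator: 2-tap smoothing, length 4 -> length 3
    return x[:-1] + x[1:]

def apply_op_T(y):
    # its hand-written transpose, length 3 -> length 4
    x = np.zeros(y.size + 1)
    x[:-1] += y
    x[1:] += y
    return x

def todense(op, n_in, n_out):
    # build the dense matrix of a matrix-free operator column by column
    M = np.empty((n_out, n_in))
    for j in range(n_in):
        e = np.zeros(n_in)
        e[j] = 1.0
        M[:, j] = op(e)
    return M

M = todense(apply_op, 4, 3)
MT = todense(apply_op_T, 3, 4)
assert np.all(M.T == MT)  # the transpose operator matches the forward operator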