Example no. 1
def coaddDiffBundle(mapfiles, output_name, pad_to=None, overwrite=True):
    coadd = files.read(mapfiles[0])
    for i, mfile in enumerate(mapfiles[1:]):
        m = files.read(mfile)
        if i % 2 == 0:
            coadd += -1 * m  # subtraction works only on weighted regions
        else:
            coadd += m
    # pad the map
    if pad_to is not None:
        args = coadd.initargs(noshape=True)
        weight_full = np.zeros(pad_to)
        map_full = np.zeros(pad_to)
        pad = [(pt - ms) / 2 for pt, ms in zip(pad_to, coadd.shape)]
        # center the coadd in the padded arrays (assumes the map object
        # exposes its data as .map and its weights as .weight)
        weight_full[pad[0]:pad[0] + coadd.shape[0],
                    pad[1]:pad[1] + coadd.shape[1]] = coadd.weight
        map_full[pad[0]:pad[0] + coadd.shape[0],
                 pad[1]:pad[1] + coadd.shape[1]] = coadd.map
        # initargs presumably supplies the remaining constructor arguments
        coadd = ObservedSky(map=map_full, weight=weight_full, **args)
    coadd.writeToHDF5(output_name, overwrite=overwrite)

    info = open(output_name + '.info', 'w')
    info.write('%d\n' % len(mapfiles))
    info.write('\n'.join(mapfiles))
    info.close()
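A minimal usage sketch of coaddDiffBundle. It builds a sign-alternating (jackknife-style) coadd, so most of the sky signal cancels between pairs of maps and what survives estimates the noise. The paths and pad size below are hypothetical:

import glob

# hypothetical, time-ordered bundle list; keep an even count so the signs pair up
mapfiles = sorted(glob.glob('/path/to/bundles/bundle*150ghz.h5'))
if len(mapfiles) % 2 != 0:
    mapfiles = mapfiles[1:]
coaddDiffBundle(mapfiles, '/path/to/bundles/diff_coadd.h5', pad_to=(4096, 4096))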
Example no. 2
def coadd(mapfiles, pol=True):
    coadd = None
    for mf in mapfiles:
        if coadd is None:
            coadd = files.read(mf)
        else:
            coadd += files.read(mf)
    return coadd
Example no. 3
def quickCheck(auxfiles):
    for f in auxfiles:
        aux = files.read(f)
        try:
            loadDataForAuxFile(aux)
        except KeyError:
            print '\t\t\t' + f
Example no. 4
def loadDataForAuxFile(aux):
    if isinstance(aux, str):
        aux = files.read(aux)
    data = SPTDataReader(aux.header.start_date,
                         aux.header.stop_date,
                         quiet=True)
    data.readData()
    return data
Example no. 5
def plotCmap(mapfile, plot_dir):
    m = files.read(mapfile)
    f = pl.figure()
    pl.imshow(m.Map.real_map_T, vmin=-5e-3, vmax=5e-3, cmap=matplotlib.cm.gray)
    pl.colorbar()
    name = '.'.join(path.basename(mapfile).split('.')[:-1])
    pl.savefig(path.join(plot_dir, name + '.png'))
    pl.close('all')
Example no. 6
def getMap(fname):
    dirname, bname = os.path.split(fname)
    mapname = bname[:-14] + '.h5'
    if dirname.endswith('maps') or 'bundle' in dirname:
        map_dir = dirname
    else:
        map_dir = os.path.join(os.path.dirname(dirname), 'maps')
    return files.read(os.path.join(map_dir, mapname))
Example no. 7
def getNoise(mapfiles, noise_ell=[5500, 6500]):
    noise = np.zeros(np.shape(mapfiles))
    for i, mf in enumerate(mapfiles):
        m = files.read(mf)
        noise[i] = calculateNoise(m.removeWeight(),
                                  ell_range=noise_ell,
                                  return_noise=True,
                                  quiet=True)[0]
        del m
    return noise
Example no. 8
def coadd(directory, band):
    mapfiles = glob.glob(os.path.join(directory, '*%sghz.h5' %band))
    coadd = None
    for mf in mapfiles:
        _map = files.read(mf)
        if coadd is None:
            coadd = _map
        else:
            coadd += _map
    return coadd
Example no. 9
def idf_to_map_half(outdir):
    try:
        os.makedirs(os.path.join(outdir, 'left'))
        os.makedirs(os.path.join(outdir, 'right'))
    except OSError:
        pass
    map_args = {
        'good_bolos': [
            'optical_bolometers',
            'no_c4',
            'bolometer_flags',
            'has_pointing',
            'has_polcal',
            'timestream_flags',
            # 'elnod',
            # 'calibrator',
            'good_timestream_weight',
        ],
        'reso_arcmin': 30,
        'proj': 1,
        'map_shape': (45, 170),
        'map_center': [9 * 15, -51],
        'timestream_filtering': {
            'poly_order': 0,
        }
    }
    bands = [90, 150]
    dirs = ['half1', 'half2']
    for b in bands:
        idfs = []
        for d in dirs:
            # read the first IDF matching this band in each half's directory
            idfs.append(
                files.read(
                    glob.glob(os.path.join(outdir, d,
                                           '*{:03d}*.h5'.format(b)))[0]))
        maps_left = [
            cMapping(i, doreal=True, use_leftgoing=True, **map_args)[str(b)]
            for i in idfs
        ]
        maps_right = [
            cMapping(i, doreal=True, use_leftgoing=False, **map_args)[str(b)]
            for i in idfs
        ]
        left = np.sum(maps_left)
        right = np.sum(maps_right)
        left.writeToHDF5(os.path.join(outdir, 'left',
                                      'sunop_map_{:03d}ghz.h5'.format(b)),
                         overwrite=True)
        right.writeToHDF5(os.path.join(outdir, 'right',
                                       'sunop_map_{:03d}ghz.h5'.format(b)),
                          overwrite=True)
    return left, right
Example no. 10
def plot(mapfile, plot_dir):
    print mapfile
    m = files.read(mapfile)
    m.removeWeight()
    f = pl.figure()
    # m.plot('T', bw = True, vmin = -1e-3, vmax = 1e-3, figure = 1)
    m.plot('T', bw=True, weight=True, figure=1)
    name = '.'.join(path.basename(mapfile).split('.')[:-1])
    f.set_size_inches(12.15, 6.)
    pl.savefig(path.join(plot_dir, name + '.png'))
    pl.close('all')
Example no. 11
def map_from_many_idfs(idf_list, outdir):
    map_args = {
        'good_bolos': [
            'optical_bolometers',
            'no_c4',
            'bolometer_flags',
            'has_pointing',
            'has_polcal',
            'timestream_flags',
            # 'elnod',
            # 'calibrator',
            'good_timestream_weight',
        ],
        'reso_arcmin': 30,
        'proj': 1,
        'map_shape': (45, 170),
        'map_center': [9 * 15, -51],
        'timestream_filtering': {
            'poly_order': 4,
            'ell_lowpass': 360
        }
    }
    glitchfinder_args = {
        'timestream_ids_to_check': None,
        'ps_list': "",
        'verbose': True
    }
    cutter = cuts.Cutter()
    idfs = []
    for i_file in idf_list:
        idf = files.read(i_file)
        cutter.flagTimestreamJumps(idf, **glitchfinder_args)
        idfs.append(idf)
    maps_left = [
        cMapping(i, doreal=True, use_leftgoing=True,
                 **map_args)[str(idfs[0].band)] for i in idfs
    ]
    maps_right = [
        cMapping(i, doreal=True, use_leftgoing=False,
                 **map_args)[str(idfs[0].band)] for i in idfs
    ]
    left = np.sum(maps_left)
    right = np.sum(maps_right)
    left.writeToHDF5(os.path.join(
        outdir, 'left', 'sunop_map_{:03d}ghz.h5'.format(idfs[0].band)),
                     overwrite=True)
    right.writeToHDF5(os.path.join(
        outdir, 'right', 'sunop_map_{:03d}ghz.h5'.format(idfs[0].band)),
                      overwrite=True)
    return left, right
Example no. 12
def coaddBundle(mapfiles, output_name, pad_to=None, overwrite=True):
    coadd = files.read(mapfiles[0])
    for mfile in mapfiles[1:]:
        m = files.read(mfile)
        coadd += m
    # pad the map
    if pad_to is not None:
        args = coadd.initargs(noshape=True)
        weight_full = np.zeros(pad_to)
        map_full = np.zeros(pad_to)
        pad = [(pt - ms) / 2 for pt, ms in zip(pad_to, coadd.shape)]
        # center the coadd in the padded arrays (assumes the map object
        # exposes its data as .map and its weights as .weight)
        weight_full[pad[0]:pad[0] + coadd.shape[0],
                    pad[1]:pad[1] + coadd.shape[1]] = coadd.weight
        map_full[pad[0]:pad[0] + coadd.shape[0],
                 pad[1]:pad[1] + coadd.shape[1]] = coadd.map
        # initargs presumably supplies the remaining constructor arguments
        coadd = ObservedSky(map=map_full, weight=weight_full, **args)
    coadd.writeToHDF5(output_name, overwrite=overwrite)

    info = open(output_name + '.info', 'w')
    info.write('%d\n' % len(mapfiles))
    info.write('\n'.join(mapfiles))
    info.close()
Example no. 13
def getLTFromRA(mapfile, thresh=.3, min_offset=100):
    # look at the mean x-position of good pixels
    try:
        map = files.read(mapfile)
    except IOError:
        return None
    xpix = map.shape[1]
    goodpix = np.where(map.weight > thresh * np.median(map.weight))
    mean_x = np.mean(goodpix[1])
    if mean_x < xpix / 2 - min_offset:
        return 'L'
    if mean_x > xpix / 2 + min_offset:
        return 'T'
    else:
        return 'F'
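getLTFromRA classifies a map by where its well-weighted pixels sit along the x (RA) axis; 'L' and 'T' presumably tag the leading and trailing halves, and 'F' a full map. A self-contained sketch of the same decision rule on a synthetic weight map (all numbers hypothetical):

import numpy as np

weight = np.zeros((100, 400))
weight[:, :150] = 1.0  # put all the weight on the left of the map

thresh, min_offset = .3, 100
xpix = weight.shape[1]
goodpix = np.where(weight > thresh * np.median(weight))
mean_x = np.mean(goodpix[1])  # mean x-position of the good pixels
if mean_x < xpix / 2 - min_offset:
    print 'L'  # prints 'L': the weighted region sits well left of center
elif mean_x > xpix / 2 + min_offset:
    print 'T'
else:
    print 'F'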
Example no. 14
def checkOb(obfile, check_field, lock=None):
    # return True if the ob is bad
    try:
        ob = files.read(obfile)
    except ValueError:
        return False
    unique_vals = np.unique(ob.observation[check_field])
    unique_frac = float(len(unique_vals)) / len(ob.observation[check_field])
    if unique_frac < 1.2:
        if lock is not None:
            lock.acquire()
        res_str = ob.from_filename + ' ' + str(unique_frac)
        # for v in unique_vals:
        #     res_str += ', ' + str(v)
        print res_str
        if lock is not None:
            lock.release()
        return True
    return False
Example no. 15
def carrierCheck(auxfiles, carrier_filename, read_filename):
    carrier_out = open(carrier_filename, 'w')
    read_out = open(read_filename, 'w')
    passed_out = open('/home/ndhuang/code/auxchecker/passed_carrier.txt', 'w')
    for af in auxfiles:
        aux = files.read(af)
        try:
            data = loadDataForAuxFile(aux)
        except Exception:
            read_out.write(af + '\n')
            continue
        carrier_frac = singleCarrierCheck(data)
        carrier_out.write(af + ' %f\n' % carrier_frac)
    carrier_out.close()
    read_out.close()
    passed_out.close()
Example no. 16
def noise_diff(band):
    directory = '/mnt/rbfa/ndhuang/fast_500d_map/run3/map_ra0hec-57.5/ra0hdec-57.5/coadds/'
    mapfiles = glob.glob(os.path.join(directory, '*%sghz.h5' % band))
    # ensure an even number of maps
    if len(mapfiles) % 2 != 0:
        mapfiles = mapfiles[1:]
    coadd = None
    for i, mf in enumerate(mapfiles):
        _map = files.read(mf)
        if coadd is None:
            coadd = _map
        else:
            try:
                if i % 2 == 0:
                    coadd += _map
                else:
                    coadd -= _map
            except ValueError:
                print mf
    return coadd
Example no. 17
import matplotlib
matplotlib.use('Agg')
import glob
from matplotlib import pyplot as pl, cm
from sptpol_software.util import files
import numpy as np
bundle_glob = '/data39/ndhuang/clusters/ra23h30dec-55/run2/bundles2/150/bundle*.fits'
mask = '/data39/ndhuang/clusters/ra23h30dec-55/run2/bundles1/150/mask.fits'
mask = files.read(mask)
mapfiles = glob.glob(bundle_glob)
fig = pl.figure()
for mf in mapfiles:
    map = files.read(mf)
    pl.imshow(map.map.map * mask.masks.apod_mask, cmap = cm.gray, vmin = -5e-4, vmax = 5e-4)
    pl.colorbar()
    # str.strip would trim characters, not the suffix; swap the extension instead
    pl.savefig(mf.replace('.fits', '.png'))
    fig.clf()
Example no. 18
# plot a map with ringing in it
import glob, os
import numpy as np
from sptpol_software.util import files

directory = '/data22/acrites/coadds_cuts_test/test11_20130503/'
mapfiles = glob.glob(os.path.join(directory, 'smap*.h5'))
m = files.read(mapfiles[0])
weight_map = m.getTOnly().weight
tcoadd_map = m.getTOnly().map
for mf in mapfiles[1:]:
    m = files.read(mf)
    t_map = m.getTOnly()
    tcoadd_map += t_map.map
    weight_map += t_map.weight

inds = np.nonzero(weight_map)
tcoadd_map[inds] /= weight_map[inds]
inds = np.nonzero(weight_map == 0)
tcoadd_map[inds] = 0
np.save('ringy_coadd', tcoadd_map)
np.save('ringy_weight', weight_map)
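The divide-by-weight step above is the usual way to normalize a weighted coadd while leaving unobserved (zero-weight) pixels at zero. The same pattern on tiny synthetic arrays:

import numpy as np

sum_map = np.array([[2., 0.], [6., 3.]])  # accumulated weight * map
weight = np.array([[1., 0.], [2., 3.]])   # accumulated weights

coadd = np.zeros_like(sum_map)
inds = np.nonzero(weight)                 # only divide where we observed
coadd[inds] = sum_map[inds] / weight[inds]
print coadd                               # [[2. 0.] [3. 1.]]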
Example no. 19
def makeBundle(mapfiles):
    bundle = files.read(mapfiles[0])
    for mf in mapfiles[1:]:
        bundle += files.read(mf)
    return bundle
Example no. 20
import sys, glob, os
from sptpol_software.util import files

if __name__ == '__main__':
    mapdir = sys.argv[1]
    mapfiles_left = glob.glob(os.path.join(mapdir, 'left', '*.h5'))
    coadd_dir = os.path.join(mapdir, 'coadds')
    if not os.path.exists(coadd_dir):
        os.makedirs(coadd_dir)

    for mf_l in mapfiles_left:
        mapname = os.path.basename(mf_l)
        mf_r = os.path.join(mapdir, 'right', mapname)
        if not os.path.exists(mf_r):
            continue
        map_l = files.read(mf_l)
        map_r = files.read(mf_r)
        try:
            full_map = map_l + map_r
        except ValueError, err:
            print mapname, err
            import IPython
            IPython.embed()
            continue  # don't write a coadd if the addition failed
        full_map.writeToHDF5(os.path.join(coadd_dir, mapname))
Example no. 21
from sptpol_software.util import files
import glob
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as pl

mapfiles = glob.glob(
    '/data/sptdat/auxdata/map/autoprocessed_maps/ra5hdec-35/coadds/left/*150ghz.h5'
)
mapfiles = sorted(mapfiles)

for i, mf in enumerate(mapfiles):
    print mf
    mapp = files.read(mf)
    mapp.removeWeight()
    fig = pl.figure()
    mapp.plot('T', bw=True, vmin=-5e-4, vmax=5e-4, figure=1)
    dpi = fig.get_dpi()
    fig.set_size_inches((1920. / dpi, 1200. / dpi))
    fig.tight_layout()
    pl.savefig('/home/ndhuang/plots/pretty_maps/%d.png' % i)
    pl.close('all')
Example no. 22
def analyzeData(time_interval, idf_band=None):
    """
    Nearly the generic function, but we return the result of the analysis function instead of 
    the data object that we read out.
    """
    # This is a bit of a kludge to let us analyze both 150 GHz and 90 GHz IDFs with the same script.
    # When the autoprocessor calls analyzeData, we'll get the default argument idf_band=None.
    # This bit of code will separately run the 150 GHz and 90 GHz mapmaking, then combine the
    # results and return them.
    if read_from_idf and idf_band is None:
        if do_left_right:
            # Do mapmaking for each of the bands in turn.
            analysis_result150, analysis_result150_left, analysis_result150_right = analyzeData(time_interval, idf_band=150)
            analysis_result90, analysis_result90_left, analysis_result90_right = analyzeData(time_interval, idf_band=90)
        
            # Combine the dictionaries of maps.
            analysis_result150.update(analysis_result90)
            analysis_result150_left.update(analysis_result90_left)
            analysis_result150_right.update(analysis_result90_right)
        
            # Return the combined results.
            return analysis_result150, analysis_result150_left, analysis_result150_right
                   
        else:
            # Do mapmaking for each of the bands in turn.
            analysis_result150 = analyzeData(time_interval, idf_band=150)
            analysis_result90 = analyzeData(time_interval, idf_band=90)
        
            # Combine the dictionaries of maps.
            analysis_result150.update(analysis_result90)
        
            # Return the combined results.
            return analysis_result150 


    # Check if we're reading from an IDF or going directly from archive files.
    if read_from_idf:
        if my_name == 'ra0hdec-57.5':
            print '2013 field...'
            idf_filename = os.path.join(directories['idf_dir'],'data','%s_idf_%s_%03ighz.h5' % (my_name, time_interval[0].file, idf_band))
        else:
            idf_filename = os.path.join(directories['idf_dir'],my_name,'data','%s_idf_%s_%03ighz.h5' % (my_name, time_interval[0].file, idf_band))
        data = read(idf_filename, timeit=True)
        if not data:
            raise ValueError("I couldn't find an IDF for the %s observation taken at %s. (No file at %s .)"
                             % (my_name, time_interval[0].archive, idf_filename))
        # test if data is in ra range you want
        if ra_cut:
            if (data.observation.mean_ra < ra_cut[0]) or (data.observation.mean_ra > ra_cut[1]):   
                raise ValueError("Skipping this IDF b/c wrong RA range: %s observation taken at %s. (file at %s .)"
                             % (my_name, time_interval[0].archive, idf_filename))

        data._readConfigs() # We need the auxdata_keys information.
        
        # The IDF generator may have flagged some scans in some timestreams as bad. 
        # With such a huge source, the RMS flagger's output is suspect (in fact, very 
        # bad over the source), so remove all timestream flags.
        for scan in data.scan:
            scan.is_bad_channel[:]=False

        #Correct pointing after the fact.
        if correct_pointing:
            op.applyOfflinePointing(data, model='SPT', overwrite_global=True,
                                flags=flags, overwrite=True)

        #Grab the thermometry and metrology data.
        thermo_data = {'tracker.encoder_off': np.median(data.antenna.track_enc_off, axis=1),
                       'tracker.horiz_mount': np.median(data.antenna.track_hor_mnt, axis=1),
                       'tracker.horiz_off': np.median(data.antenna.track_hor_off, axis=1),
                       # 'tracker.tilt_xy_avg': np.median(data.antenna.track_tilt_xy_avg, axis=1),
                       'tracker.linear_sensor_avg': np.median(data.antenna.track_linsens_avg, axis=1),
                       'scu.temp': np.median(data.antenna.scu_temp, axis=1),
                       'scu.benchoff': data.antenna.scu_benchoff[:, 0],
                       'observation.temp_avg': data.observation.temp_avg,
                       'observation.pressure_avg': data.observation.pressure_avg,
                       'observation.mean_az': data.observation.mean_az,
                       'observation.mean_el': data.observation.mean_el,
                       'observation.wind_dir_avg': data.observation.wind_dir_avg,
                       'observation.wind_speed_avg': data.observation.wind_speed_avg,
                       }

        this_filename = "%s_%s_thermolinear.pkl" % (my_name, time_interval[0].file)
        filename_out_dir = os.path.join(directories['output_dir'], my_name, subdirectory_name)
        filename = os.path.join(filename_out_dir, this_filename)

        pk.dump(thermo_data, open(filename, 'w'))

    else:
        data = SPTDataReader(time_interval[0], time_interval[1], 
                             experiment=experiment,
                             master_configfile=master_config)
        data.readData(obstype=my_name, **readout_kwargs)

        #Correct pointing after the fact.
        if correct_pointing:
            op.applyOfflinePointing(data, model='SPT', overwrite_global=True,
                                flags=flags, overwrite=True)

        #Grab the thermometry and metrology data.
        thermo_data = {'tracker.encoder_off': np.median(data.antenna.track_enc_off, axis=1),
                       'tracker.horiz_mount': np.median(data.antenna.track_hor_mnt, axis=1),
                       'tracker.horiz_off': np.median(data.antenna.track_hor_off, axis=1),
                       # 'tracker.tilt_xy_avg': np.median(data.antenna.track_tilt_xy_avg, axis=1),
                       'tracker.linear_sensor_avg': np.median(data.antenna.track_linsens_avg, axis=1),
                       'scu.temp': np.median(data.antenna.scu_temp, axis=1),
                       'scu.benchoff': data.antenna.scu_benchoff[:, 0],
                       'observation.temp_avg': data.observation.temp_avg,
                       'observation.pressure_avg': data.observation.pressure_avg,
                       'observation.mean_az': data.observation.mean_az,
                       'observation.mean_el': data.observation.mean_el,
                       'observation.wind_dir_avg': data.observation.wind_dir_avg,
                       'observation.wind_speed_avg': data.observation.wind_speed_avg,
                       }

        this_filename = "%s_%s_thermolinear.pkl" % (my_name, time_interval[0].file)
        filename_out_dir = os.path.join(directories['output_dir'], my_name, subdirectory_name)
        filename = os.path.join(filename_out_dir, this_filename)

        pk.dump(thermo_data, open(filename, 'w'))

    
    if mask_radius:
        if force_source_location:
            ra, dec = force_source_location[0], force_source_location[1]
        else:
            # Find the location of the source. Do a linear fit subtraction now. Make an IDF
            # for the sourcefinding, as a quick and easy way to be able to do light filtering
            # on the sourcefinding map without filtering the real data.
            temp_idf = (data.copy() if read_from_idf else mapidf.MapIDF(data))
            finder_map = quicklook.quickmap(temp_idf, good_bolos=['flagged','pointing'],
                                            reso_arcmin=0.25,
                                            proj=5,
                                            map_shape=[0.8,1.],
                                            map_center=map_center,
                                            t_only=True,
                                            inverse_noise_weighted_map=True,
                                            timestream_filtering={'poly_order':1, 
                                                                  'dynamic_source_mask':True,
                                                                  'outlier_mask_nsigma':3.0})
            temp_idf = None # Discard the temporary IDF now that we're done with it.
            # Discard the map we're not creating. We only need one of them.
            finder_map = finder_map[str(idf_band)] if idf_band is not None else finder_map['150'] 
        
            # Find the RA and dec of the maximum point in the map.
            ra, dec = finder_map.pix2Ang(np.unravel_index(np.argmax(np.abs(finder_map.map)), finder_map.shape))
    

        # Make a temporary pointsource config file, and write it in the arguments
        # to be passed to quickmap.
        ptsrc_configfile = tempfile.NamedTemporaryFile(mode='w', suffix='.txt')
        ptsrc_configfile.write('1 %f %f %f' % (ra, dec, mask_radius))
        ptsrc_configfile.flush()
        print "  Writing temporary pointsource config file, %s . Contents:" % ptsrc_configfile.name
        print '1 %f %f %f\n' % (ra, dec, mask_radius)
        analysis_function_kwargs['timestream_filtering']['pointsource_file'] = ptsrc_configfile.name 
 
        
    for func, kwargs in zip(preprocessing_function, preprocessing_function_kwargs):
        func(data, **kwargs)
        
    print "  Processing sum map.\n"   
    print "  analysis_function_kwargs : %s" % str(analysis_function_kwargs)
    analysis_result = analysis_function(data, **analysis_function_kwargs)
    
    if do_left_right:
        # Now we've done filtering once, no need to do it again.
        lr_analysis_function_kwargs = analysis_function_kwargs.copy() # Don't alter the original kwargs!
        lr_analysis_function_kwargs['timestream_filtering'] = {}
        
        print "  Processing leftgoing map.\n"   
        analysis_result_left = analysis_function(data, use_leftgoing=True, **lr_analysis_function_kwargs)
        
        print "  Processing rightgoing map.\n"   
        analysis_result_right = analysis_function(data, use_leftgoing=False, **lr_analysis_function_kwargs)
    
    if mask_radius:
        ptsrc_configfile.close()
    
    if read_from_idf:
        # If this is an IDF, it's only got one band. Delete maps from the other(s) so we
        # don't save empty maps.
        if str(data.band) not in analysis_result:
            raise RuntimeError("I was expecting to see %s in the output maps, but it's not there!" % str(data.band))
        for band in analysis_result.keys():
            if band!=str(data.band):
                del analysis_result[band]
                if do_left_right:
                    del analysis_result_left[band]
                    del analysis_result_right[band]
    
    if do_left_right:
        return analysis_result, analysis_result_left, analysis_result_right
    else:
        return analysis_result
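The band-combination kludge at the top of analyzeData works because each recursive call returns a dict keyed by band name, so merging bands is a plain dict.update. A toy illustration (keys and values are stand-ins):

result150 = {'150': 'map150'}   # stand-in for the 150 GHz map dict
result90 = {'90': 'map90'}
result150.update(result90)      # merge the per-band dictionaries in place
print sorted(result150.keys())  # ['150', '90']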
Example no. 23
import os
import cPickle as pickle
import numpy as np
from sptpol_software.util import files

def readRunlist(runlist):
    runlist = open(runlist, 'r')
    mapfiles = runlist.readlines()
    i = 0
    while i < len(mapfiles):
        mapfiles[i] = mapfiles[i].strip()
        if not os.path.exists(mapfiles[i]):
            mapfiles.pop(i)
        else:
            i += 1
    return mapfiles

if __name__ == '__main__':
    mapfiles = readRunlist('/mnt/rbfa/ndhuang/maps/clusters/ra1hdec-35/150ghz_runlist.txt')
    idf_dir = '/data/ndhuang/auxdata/idf/ra1hdec-35/data/'
    T = np.zeros(len(mapfiles))
    for i, mf in enumerate(mapfiles):
        filename = os.path.basename(mf)
        idf_name = filename.replace('clustermap_', '').replace('5_2', '5_idf_2', 1)
        idf = files.read(os.path.join(idf_dir, idf_name))
        T[i] = idf.observation.temp_avg
    
    f = open('/mnt/rbfa/ndhuang/maps/clusters/ra1hdec-35/t_avg', 'w')
    pickle.dump([T, mapfiles], f)
    f.close()
Example no. 24
    def __init__(self,
                 field,
                 band,
                 plotname,
                 clusterdir='/mnt/rbfa/ndhuang/maps/clusters',
                 v=1e-3,
                 maskfile=None,
                 radec0=None):
        self.proj = 0
        self.reso = .25

        # parse field center from field name
        if radec0 is not None:
            self.radec0 = radec0
        elif field in centers.keys():
            self.radec0 = centers[field]
        else:
            match = re.match(r'ra(\d+)h(\d*)dec([0-9\-]+)', field)
            ra = 15 * float(match.group(1))
            if len(match.group(2)) > 0:
                ra += float(match.group(2)) / 60 * 15
            dec = float(match.group(3))
            self.radec0 = [ra, dec]

        # grab the coadd and expand it
        try:
            coadd = files.read(
                os.path.join(clusterdir, field, '%03d_coadd.fits' % band))
        except IOError:
            coadd = files.read(
                os.path.join(clusterdir, field, '%03dghz_coadd.fits' % band))
        self.map = coadd.coadd.map
        self.map_shape = np.shape(self.map)
        # print self.map_shape
        # self.resizeMap(2)

        self.contour = np.zeros(self.map_shape, dtype=int)

        self.fig = pl.figure(figsize=(9, 9), dpi=3000)
        pl.imshow(self.map,
                  cmap=cm.gray,
                  vmin=-v,
                  vmax=v,
                  interpolation='None',
                  figure=self.fig)
        sig, ra, dec, rad = \
            np.loadtxt(os.path.join(clusterdir, field, 'cluster_out',
                                    'new', field + '_3sigma_clusters.txt'),
                              dtype = float, skiprows = 1, unpack = True)

        i = 0
        while i < len(sig) and sig[i] >= 4.5:
            # self.circleCluster([ra[i], dec[i]], rad[i])
            if sig[i] > 5:
                color = 'red'
            else:
                color = 'blue'
            self.addClusterToContour([ra[i], dec[i]], rad[i], color)
            i += 1
        # self.fig.set_size_inches(9, 9)
        # pl.contour(self.contour, colors = 'red', levels = [0], linewidth = .001, aa = False)
        # hot = np.argwhere(self.contour)
        # for points in hot:
        #     pl.plot(points[1], points[0], 'r.', markersize = .1, mew = 0, mec = 'r', mfc = 'r', aa = True)
        if maskfile is not None:
            mask = masks.srcMaskFromfile(maskfile, self.reso, self.map_shape,
                                         self.proj, self.radec0)
            pl.contour(mask, levels=[.9], colors='yellow', linewidths=.15)
        pl.savefig(os.path.join('/home/ndhuang/plots/clusters/',
                                plotname + '.png'),
                   bbox_inches='tight',
                   pad_inches=0,
                   dpi=1000)
        print i
Example no. 25
import cPickle as pickle
import IPython
import glob
from sptpol_software.util import files
import numpy as np

mask = files.read(
    '/data39/ndhuang/clusters/ra23h30dec-55/run2/bundles1/150/mask.fits')
mask = mask.masks.apod_mask
mask = mask[348:3748, 348:3748]
bundle_dir = '/data39/ndhuang/clusters/ra23h30dec-55/run1/bundles/abbys_bundles/bundle*.fits'
# bundle_dir = 'bundles/150/bundle*.fits'
bundles = glob.glob(bundle_dir)
noise = np.zeros(50)
weight = np.zeros(50)
bundle_pairs = ['' for b in noise]
i = 0
while len(bundles) > 1:  # need a full pair; drop any odd bundle left over
    bf1 = bundles.pop(np.random.randint(0, len(bundles)))
    bf2 = bundles.pop(np.random.randint(0, len(bundles)))
    bundle_pairs[i] = bf1 + ', ' + bf2
    b1 = files.read(bf1)
    b2 = files.read(bf2)
    diff = (b1.map.map - b2.map.map) * mask
    good = np.nonzero(mask)
    noise[i] = np.std(diff[good])
    weight[i] = np.std((b1.weight.weight + b2.weight.weight)[good])
    print bundle_pairs[i], noise[i], weight[i]
    i += 1

f = open('bundle_pairs.pkl', 'w')
# save the pairings and their noise/weight estimates
pickle.dump([bundle_pairs, noise, weight], f)
f.close()
Example no. 26
import numpy as np
from sptpol_software.observation.receiver import Receiver
from sptpol_software.util import files

boards = ['120', '122', '144', '150']
rec = Receiver()
bolo_inds = np.zeros(24 * len(boards) * 2, dtype=int)
print np.shape(bolo_inds)
i = 0
for b in boards:
    board = rec.devices.mux_boards[b]
    for squid in board.getSQUIDs():
        squid = board[squid]
        for bolo in squid.getBolosOrderedByFrequency():
            bolo_inds[i] = bolo.index
            i += 1
bolo_inds = bolo_inds[np.nonzero(bolo_inds)[0]]

source_dir = '/data/sptdat/auxdata/source/'
pre_source = 'source_rcw38_20131111_011216.fits'
post_source = 'source_rcw38_20131213_102250.fits'

pre = files.read(source_dir + pre_source)
post = files.read(source_dir + post_source)

deltax = pre.observation.bolo_x_offset - post.observation.bolo_x_offset
deltay = pre.observation.bolo_y_offset - post.observation.bolo_y_offset
Example no. 27
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('psddir', type=str)
    parser.add_argument('--band', default=None, type=int)
    parser.add_argument('--plot-dir', default='', dest='plotdir')
    parser.add_argument('--tex-file', default=None, type=str)
    args = parser.parse_args()
    if args.band is None:
        args.band = [90, 150]
    else:
        args.band = [args.band]
    if args.tex_file is not None:
        texfile = open(args.tex_file, 'a')
        texfile.write('\\clearpage\n')
    else:
        texfile = None

    for band in args.band:
        band_name = '%03dghz' % band
        psd = files.read(os.path.join(args.psddir, band_name + '_psd.fits'))
        ell = np.fft.fftfreq(np.shape(psd.psd.psd)[0], np.pi /
                             (4 * 60 * 180)) * 2 * np.pi
        psd = properPSD(psd.psd.psd, ell)
        tex_2d = plot2d(psd, ell, band, args.plotdir)
        tex_1d = plot1d(psd, ell, band, args.plotdir)
        if texfile is not None:
            texfile.write(tex_2d + '\n')
            texfile.write(tex_1d + '\n')

    if texfile is not None:
        texfile.close()
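The ell line above turns FFT bin frequencies into angular multipoles: np.fft.fftfreq with the pixel size in radians (np.pi / (4 * 60 * 180) is 0.25 arcmin) gives cycles per radian, and the 2 * np.pi factor converts that to ell. A standalone sketch with a hypothetical map size:

import numpy as np

n = 3360                           # pixels per map side (hypothetical)
reso_rad = np.pi / (4 * 60 * 180)  # 0.25 arcmin in radians
ell = 2 * np.pi * np.fft.fftfreq(n, reso_rad)
print ell[1]                       # bin spacing: 2*pi / (n * reso_rad)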
Example no. 28
def consolidate_source_scan(start_date=None, stop_date=None, fitsdir='/data/sptdat/auxdata/', \
                            sources_to_use=['rcw38','mat5a'], filename=None, savename=None, doall=True, dosave=True):

    """
    Translated from RK's original consolidate_source_scan.pro.
    
    The purpose of this program is to consolidate the information contained in the
    source_scan fits files which is relevant to pointing.  The relevant info is stuffed
    into a dictionary and saved as a pkl file.

    ===Things that need to be saved in this pickle file===
    XDEG, YDEG, AMP, DC, ID
    STARTTIME,STARTTIME_MJD, STOPTIME, STOPTIME_MJD, MEAN_MJD
    SOURCE
    FILENAME
    TEMP_AVG, WIND_SPEED_AVG, WID_DIR_AVG, PRESSURE_AVG, TIPPER_TAU_MEAN
    AZ_ACTUAL, EL_ACTUAL
    MEAN_AZ, MEAN_EL
    WNOISE, ELNOD_RESPONSE, ELNOD_SIGMA, CAL_RESPONSE, CAL_SIGMA
    LINEAR_SENSOR data (DET, DEL, DAZ, L1, L2, R1, R2)
    NSCANS, NFRAMES, NSAMPLES
    STRUCTURE THERMOMETRY
    ===

    Originally created by RK, 24 Sep 2008 in IDL.
    Translated to python by JWH, Oct 2012.
    """

    nbolo = 1599

    # Grab the filenames. Convert name strings to dates, and sort the filenames by date.
    filelist = np.array(tools.flatten([[glob(os.path.join(fitsdir, 'source/*'+srcname+'*.fits')) for srcname in sources_to_use], \
                                      [glob(os.path.join(fitsdir, 'source_vfast/*'+srcname+'*.fits')) for srcname in sources_to_use]]))
    filelist_times = time.filenameToTime(filelist)
    time_argsort = np.argsort(filelist_times)
    filelist = filelist[time_argsort]
    filelist_times = filelist_times[time_argsort]
    
    if start_date is None:
        start_date = filelist_times[0]
    else:
        start_date = time.SptDatetime(start_date)
    if stop_date is None:
        stop_date = filelist_times[-1]
    else:
        stop_date = time.SptDatetime(stop_date)
    
    #Figure out which filenames to read given the start and stop dates.
    times_to_use = np.array(map(lambda x: start_date <= x <= stop_date, filelist_times))
    print 'Consolidating source scans between ', start_date, ' and ', stop_date, '.'

    if not times_to_use.any():
        print 'There are no source scans in that date range...'
        return
    else:
        print 'There are %d source observations in that date range.' % np.sum(times_to_use)
        filelist = filelist[times_to_use]
    nlist = len(filelist)
    
    #Record the system time
    clocka = comptime()

    #Restore the old dictionary if you gave filename to the function.
    if filename is None:
        filename = '/home/jhenning/sptpol_code/sptpol_software/analysis/pointing/source_scan_structure.pkl'
    if savename is None:
        savename = filename

    if not doall:
        sold = pk.load(open(filename, 'r'))

    
    #Create an empty list of dictionaries to fill with the data that we need.
    s = []
    for i in range(nlist):
        
        s.append({'xdeg':np.zeros(nbolo), 'ydeg':np.zeros(nbolo),
         'amp':np.zeros(nbolo), 'dc':np.zeros(nbolo), 'id':np.zeros(nbolo, dtype=np.int16),
         'starttime':'', 'starttime_mjd':0.0, 
         'stoptime':'', 'stoptime_mjd':0.0, 'mean_mjd':0.0, 
         'source':'', 'filename':'',
         'temp_avg':0.0, 'wind_speed_avg':0.0, 'wind_dir_avg':0.0, 'pressure_avg':0.0,
         'tipper_tau_mean':0.0,
         'az_actual':np.zeros(nbolo), 'el_actual':np.zeros(nbolo),
         'mean_az':0.0, 'mean_el':0.0,
         'focus_position':np.zeros(6),
         'wnoise':np.zeros(nbolo), 'elnod_response':np.zeros(nbolo), 'elnod_sigma':np.zeros(nbolo),
         'cal_response':np.zeros(nbolo), 'cal_sigma':np.zeros(nbolo),
         'med_daz':0.0, 'med_del':0.0, 'med_det':0.0,
         'med_l1':0.0, 'med_l2':0.0, 'med_r1':0.0, 'med_r2':0.0,
         'nscans':0.0, 'nframes':0.0, 'nsamples':0.0,
         'med_scu_temp':np.zeros(60), 'mean_scu_temp':np.zeros(60)})

    #Now loop over each source scan and grab each of these data.
    nnew=0
    for i in range(nlist):
        timeb = comptime()
        print str(i), '/', str(nlist-1)
        print '...checking ', filelist[i]

        #Check to see if the old dictionary list has this file's information.
        if not doall:
            wh_already = np.where(filelist[i] == np.array([item['filename'] for item in sold]))[0]
            if len(wh_already) > 1:
                print 'This file is in the source scan summary more than once!... '
                print 'Pausing so you can do something about it.'
                raw_input('Press ENTER to continue...')

            if len(wh_already)==1:
                print 'This file has already been incorporated into the source scan summary... '
                print 'Packing old results and moving to next source scan.'
                #Stuff the new dictionary with the old information.
                s[i] = sold[wh_already[0]]
                continue


        #Okay, let's start grabbing information from new source scans.
        nnew += 1

        data = 0
        print '...reading FITS file...'
        print '...loading ', filelist[i]
        data = files.read(filelist[i])
        #nframes = data.observation.n_frames
        #nscans = data.observation.n_scans
        #nsamples = data.observation.n_samples

        #For now, load in the data from the archive for the observation time to grab
        #raw pointing information etc.  Eventually, these should be saved with the source fits.
        #extra = SPTDataReader(start_date=time.SptDatetime(data.header.start_date), \
        #                      stop_date = time.SptDatetime(data.header.stop_date))
        #extra.readData(verbose=False, timestream_units='watts', correct_global_pointing=False)

        #Double-check that the data has as observation structure.
        if not hasattr(data, 'observation'):
            #print "   Missing an observation structure! Skipping observation %s." % data.header.start_date.arc
            print "   Missing an observation structure! Skipping observation %s." % data.from_filename
            nnew -= 1
            continue

        #Fill in auxiliary data for this scan.
        #Creat an SPTDataReader object to get ancillary information about the observation.
        da = SPTDataReader(start_date=data.header.start_date,
                           stop_date=data.header.stop_date, quiet=True)
        data.auxdata_keys = da.auxdata_keys
        data.rec = da.rec
        data.cuts = da.cuts
        data.telescope = da.telescope
        data.config = da.config
        try:
            #Let's not read in noise data.
            #files.fillAuxiliaryData(data, obs_types=['calibrator','elnod','noise'],
            files.fillAuxiliaryData(data, obs_types=['calibrator','elnod'],
                                    same_fridge_cycle=True, overwrite=False)
        except Exception, err:
            print "   Couldn't copy in auxiliary data: %s" % str(err)
            nnew -= 1
            continue
        
        #Finally, let's start pulling out some of the values we're looking for.
        xdeg = data.observation.bolo_x_offset
        ydeg = data.observation.bolo_y_offset
        bolo_id = data.observation.bolo_id_index_in_arc_file
        starttime = data.observation.start_time
        stoptime = data.observation.stop_time
        starttime_mjd = time.SptDatetime(starttime).mjd
        stoptime_mjd = time.SptDatetime(stoptime).mjd
        mean_mjd = np.mean([starttime_mjd, stoptime_mjd])

        #Is this a valid source scan?
        # xdeg == xdeg is False for NaN entries, so this selects valid offsets
        wh = np.where(xdeg == xdeg)
        nwh = len(wh[0])
        if nwh == 0:
            print "   Not a valid source scan. Skipping observation %s." % data.header.start_date.arc
            nnew -= 1
            continue
        
        #Make sure calibrator and noise auxiliary info is present.
        #Be sure to skip nan's otherwise np.std() will return nan no matter how many 
        #good bolo data there are.
        try:
            #if ( np.std(data.observation.bolo_cal_response[np.isfinite(data.observation.bolo_cal_response)]) == 0. or
            #     np.std(data.observation.bolo_psd_white_noise_level[np.isfinite(data.observation.bolo_psd_white_noise_level)] == 0.0)):
            if ( np.std(data.observation.bolo_cal_response[np.isfinite(data.observation.bolo_cal_response)]) == 0.):
                #print "   Missing calibrator or noise data. Skipping this observation."
                print "   Missing calibrator data. Not worrying about noise... Skipping this observation."
                nnew -= 1
                continue
        except AttributeError, err:
            # If we couldn't get calibrator and/or noise data, skip this observation.
            print err
            nnew -= 1
            continue
    

Example no. 29
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('files', nargs = '+', type = str)
    parser.add_argument('--field', type = str, default = None)
    parser.add_argument('--coadd', action = 'store_true',
                        help = "Coadd maps with good pointing")
    args = parser.parse_args()
    if args.field is not None:
        import field_centers
        radec0_nom = field_centers.centers[args.field]
    pting = [[] for f in args.files]
    times = [[] for f in args.files]
    for i, f in enumerate(args.files):
        pting[i] = files.read(f)
        times[i] = fnameToTime(f)
    times = np.asarray(times)
    mddec = grabField(pting, 'mean_ddec')
    mdra = grabField(pting, 'mean_dra')
    sddec = grabField(pting, 'sigma_ddec')
    sdra = grabField(pting, 'sigma_dra')
    radec0 = grabField(pting, 'radec0_corr')
    to_arr = lambda x: np.asarray([x]).flatten()
    dra = grabField(pting, 'dra', converter = to_arr)
    nsrc = np.asarray([len(r) for r in dra])

    if args.coadd:
        good = np.where((nsrc > 1) & (abs(sdra) < .1))[0]
        coadd = getMap(args.files[good[0]])
        y0, x0 = ang2Pix(np.transpose(radec0), coadd.center, .25, coadd.shape, 
Example no. 30
import glob, sys
from os import path
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as pl
from sptpol_software.util import files

#map_dir = '/data/sptdat/auxdata/map/autoprocessed_maps/ra3hdec-25/coadds/left/'
plot_dir = '/data/ndhuang/plots/ra3hdec-25_monitoring'
if __name__ == '__main__':
    map_dir = sys.argv[1]
    mapfiles = glob.glob(path.join(map_dir, '*.h5'))
    for mf in mapfiles:
        m = files.read(mf)
        m.removeWeight()
        f = pl.figure()
        m.plot('T', bw=True, vmin=-2e-3, vmax=2e-3, figure=1)
        name = path.basename(mf).split('.')[0]
        f.set_size_inches(18, 12.15)
        pl.savefig(path.join(plot_dir, name + '.png'))
        pl.close('all')
Example no. 31
"""
bundles = [x for x in sorted(gl('/data57/jhenning/ra0hdec-57p5_V3/20160518/bundles/150/lowell_ratio_1p0/all/*.h5')) if '.ffts.' not in x]
pos = bundles[::2]
neg = bundles[1::2]
D = files.read('/data57/jhenning/ra0hdec-57p5_V3/20160518/bundles/150/lowell_ratio_1p0/all/coadd_halfASubhalfB.h5').removeWeight().flattenPol()
MAN = maps.MapAnalyzer(delta_ell=delta_ell,set_special_bb_bins=False)
mask = files.read('/data57/jhenning/ra0hdec-57p5_V3/20160518/bundles/150/lowell_ratio_1p0/all/total_mask_apod15.0arcmin_ptsrc10.0arcmin.pkl')
fft_shape = np.array([int(2*np.pi/(D['T'].reso_rad *delta_ell))] * 2)
noise_Cls = MAN.calculateCls(D, sky_window_func=mask, maps_to_cross=['T','E','B'], fft_shape=fft_shape, calculate_dls=False)
"""

#noise_cls_file = 'data/SPTpol_maps/noise_cls.pkl.gz_v_JT_20161004'
noise_cls_file = 'data/SPTpol_maps/noise_cls.pkl.gz_v_JT_20161005'

if not local:
	folderpath = '/data57/jhenning/ra0hdec-57p5_V3/20160518/bundles/150/lowell_ratio_1p0/all'
	D = files.read('%s/coadd_halfASubhalfB.h5' %(folderpath)).removeWeight().flattenPol()

	delta_ell=5.
	MAN = maps.MapAnalyzer(delta_ell=delta_ell,set_special_bb_bins=False)
	mask = files.read('%s/total_mask_apod15.0arcmin_ptsrc10.0arcmin.pkl' %(folderpath))
	fft_shape = np.array([int(2*np.pi/(D['T'].reso_rad *delta_ell))] * 2)
	
	#noise_Cls = MAN.calculateCls(D, sky_window_func=mask, maps_to_cross=['T','E','B'], fft_shape=fft_shape, calculate_dls=False)
	noise_Cls = MAN.calculateCls(D, sky_window_func=mask, maps_to_cross=['T','Q','U'], fft_shape=fft_shape, calculate_dls=False)

	#get ells
	binned_ells = noise_Cls.ell.TT
	#ell_bins = noise_Cls.ell_bins.TT
	ells_equal = np.allclose(noise_Cls.ell.TT, noise_Cls.ell.QQ) #assert statement
	if not ells_equal:
		print 'TT and QQ ells are different'