Example #1
def getTimeInterval():
    """
    Replace the generic getTimeInterval with one that calls logs.readSourceScanTimes instead.
    """
    return logs.readSourceScanTimes(autoprocess_start_date, autoprocess_stop_date, my_name,
                                    nscans_min=nscans_min, nscans_max=nscans_max,
                                    log_summary_dir=directories['log_summary_dir'])
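# The names above (autoprocess_start_date, my_name, nscans_min, ...) are supplied
# by the enclosing autoprocessing script. A purely hypothetical standalone setup
# could look like this (values are illustrative, not taken from the original code):
# my_name = 'ra0hdec-57.5'
# autoprocess_start_date, autoprocess_stop_date = '140525 05:01:19', '140525 16:57:01'
# nscans_min, nscans_max = 30, 100
# directories = {'log_summary_dir': '/path/to/log_summaries'}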
Example #2
def readInterval(interval):
    '''Just read some data'''
    interval = logs.readSourceScanTimes(interval[0],
                                        interval[1],
                                        source,
                                        nscans_min=50)[0]
    data = SPTDataReader(interval[0],
                         interval[1],
                         quiet=True,
                         config_file="sptpol_stripped_master_config")
    data.readData(interval[0],
                  interval[1],
                  correct_global_pointing=False,
                  standardize_samplerates=False)
    return data
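# Hypothetical usage sketch: the module-level `source` must already be defined
# (e.g. source = 'ra0hdec-57.5'); the dates here are borrowed from Example #3 below.
# data = readInterval(['140525 05:01:19', '140525 07:40:23'])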
Example #3
if __name__ == '__main__':
    # times = [['140326 22:30:51', '140327 01:02:21'], # dither 6
    #          ['140328 10:26:20', '140328 12:57:40'], # dither 12
    #          ['140329 22:28:40', '140330 01:02:44'], # dither 18
    #          ['140331 12:59:38', '140331 15:32:02'], # dither 0
    #          ['140403 07:49:00',
    #           SptDatetime.now().strftime('%y%m%d %H:%M:%S')]]
    times = [
        ['140525 05:01:19', '140525 07:40:23'],  # dither 0
        ['140525 14:16:48', '140525 16:57:01'],  # dither 9
    ]
    OUTPUT_DIR = '/data/ndhuang/fast_500d_map/run1/proj1'
    datas = []
    for t in times:
        realtimes = logs.readSourceScanTimes(t[0],
                                             t[1],
                                             'ra0hdec-57.5',
                                             nscans_min=0)
        for rt in realtimes:
            start = SptDatetime(rt[0])
            stop = SptDatetime(rt[1])
            data = SPTDataReader(start_date=start, stop_date=stop, quiet=True)
            data.readData(start, stop, correct_global_pointing=False)
            datas.append(data)
            break
            # make_map(start, stop)

    # coadd = sum(maps)
    # ma = MapAnalyzer(delta_ell=25, set_special_bb_bins = False)
    # cls = ma.calculateCls()
    # pl.plot(cls.ell.TT, cls.TT)
    # savefig('/home/ndhuang/plots/ra0hdec-57.5_fast_TT.png')
Example #4
# (the def line and maps_left below are inferred by symmetry with maps_right;
#  the opening of this example is truncated and map_args is defined in the
#  elided portion)
def map_from_many_idfs(idfs, outdir):
    maps_left = [
        cMapping(i, doreal=True, use_leftgoing=True,
                 **map_args)[str(idfs[0].band)] for i in idfs
    ]
    maps_right = [
        cMapping(i, doreal=True, use_leftgoing=False,
                 **map_args)[str(idfs[0].band)] for i in idfs
    ]
    left = np.sum(maps_left)
    right = np.sum(maps_right)
    left.writeToHDF5(
        os.path.join(outdir, 'left',
                     'sunop_map_{:03d}ghz.h5'.format(idfs[0].band)),
        overwrite=True)
    right.writeToHDF5(
        os.path.join(outdir, 'right',
                     'sunop_map_{:03d}ghz.h5'.format(idfs[0].band)),
        overwrite=True)
    return left, right


if __name__ == '__main__':
    N_PROCS = 4
    OUTPUT_DIR = '/data/ndhuang/sunop_sidelobe'

    realtimes = logs.readSourceScanTimes('20150201',
                                         '20150205',
                                         'opsun',
                                         nscans_min=0)
    # make_idf_pieces(realtimes[0][0], realtimes[0][1])

    idf90 = glob.glob(os.path.join(OUTPUT_DIR, 'idf*090ghz*.h5'))
    map_from_many_idfs(idf90, OUTPUT_DIR)
    idf150 = glob.glob(os.path.join(OUTPUT_DIR, 'idf*150ghz*.h5'))
    map_from_many_idfs(idf150, OUTPUT_DIR)
Example #5
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
from matplotlib import mlab, pyplot as pl
from sptpol_software.autotools import logs
from sptpol_software.data.readout import SPTDataReader
from sptpol_software.analysis.processing import getGoodBolos

if __name__ == '__main__':
    # start = '140611 06:03:43' # new fast scan
    # stop = '140611 08:42:36'
    start = '140325 02:54:31' # lead-trail
    stop = '140325 08:47:35'
    start, stop = logs.readSourceScanTimes(start, stop, 'ra0hdec-57.5',
                                           nscans_min=30)[0]
    data = SPTDataReader(start, stop, quiet=True)
    data.readData(start, stop, correct_global_pointing=False)
    scan_len = [(s.stop_index - s.start_index) for s in data.scan]
    scan_len = max(scan_len)
    coadd = [np.zeros(scan_len), np.zeros(scan_len)]
    coadd_el = [np.zeros(scan_len), np.zeros(scan_len)]
    coadd_err = [np.zeros(scan_len), np.zeros(scan_len)]
    nfft = 2048
    pxx = [np.zeros(nfft // 2 + 1), np.zeros(nfft // 2 + 1)]
    pxx_el = [np.zeros(nfft // 2 + 1), np.zeros(nfft // 2 + 1)]
    pxx_err = [np.zeros(nfft // 2 + 1), np.zeros(nfft // 2 + 1)]
    good_bolos = getGoodBolos(data, 'map_default')
    for s in data.scan:
        if s.is_leftgoing:
            ind = 0
Example #6
import numpy as np
from matplotlib import pyplot as pl
from sptpol_software.autotools import logs
import sptPyReadArc as arc

# read in data
start, stop = logs.readSourceScanTimes('20150201',
                                       '20150204',
                                       'opsun',
                                       nscans_min=10)[0]
# First, read in the entire time range with one bolo
data_small = arc.readArc([
    'receiver.bolometers.adcCountsI[0]~ double',
    'antenna0.tracker.scan_flag~ uint'
], str(start), str(stop), '/data/sptdat/arcfile_directory/20150129_000000/')
# Now, the full set (~60 GB)
data_full = arc.readArc([
    'receiver.bolometers.adcCountsI[0-1599]~ double',
    'antenna0.tracker.scan_flag~ uint'
], str(start), str(stop), '/data/sptdat/arcfile_directory/20150129_000000/')

# Upsample the scan flags
# I have tested that the scan flags are identical between data_full and
# data_small, but you can test this:
# np.nonzero(data_small['antenna0.tracker.scan_flag'][0] -
#            data_full['antenna0.tracker.scan_flag'][0])

# I am assuming that the scan flags and bolodata vectors cover exactly the
# same amount of time so I can use the index as a proxy for time.
# Since this works with the small data sample, I believe the assumptions
# are correct.
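# A minimal sketch of the upsampling described above (the helper name and the
# integer-repetition approach are mine, not part of sptpol_software): stretch the
# slow scan-flag register so its length matches a bolometer timestream, using the
# sample index as the proxy for time, exactly as the comments above assume.
def upsample_flags(flags, n_samples):
    """Repeat each slow-rate flag so the result has n_samples entries."""
    factor = int(np.ceil(float(n_samples) / len(flags)))
    return np.repeat(flags, factor)[:n_samples]

# Hypothetical usage with the arrays read above (the dict key layout is assumed
# to match the access pattern shown in the comments):
# bolo = data_small['receiver.bolometers.adcCountsI[0]'][0]
# flags = upsample_flags(data_small['antenna0.tracker.scan_flag'][0], len(bolo))
# in_scan = bolo[flags.astype(bool)]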
Example #7
from datetime import datetime

from sptpol_software.data.readout import SPTDataReader
from sptpol_software.util.time import SptDatetime
from sptpol_software.analysis import processing
from sptpol_software.autotools import logs
readout_kwargs = {
    'timestream_units': 'Watts',  # Convert to T_CMB in preprocessing functions.
    'correct_global_pointing': True,
    'downsample_bolodata': 4,
    'project_iq': True,
    'exception_on_no_iq': True
}

for times in logs.readSourceScanTimes('20-Mar-2012',
                                      'now',
                                      'ra23h30dec-55',
                                      nscans_min=1):
    start = SptDatetime(times[0])
    end = SptDatetime(times[1])

    try:
        data = SPTDataReader(start, end)
        data.readData(**readout_kwargs)
        processing.notchFilter(data, verbose=True)
    except ValueError as err:
        print(err)