Example #1
 def test_filters(self):
     ### Test general filter
     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(Filter, input='signal', output=None,
           filter_function=lambda freqs: np.ones_like(freqs))
     p.Run()
     
     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(LowPassButterworth)
     p.Run()
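The filter_function argument is just a callable mapping an array of frequencies to per-frequency weights (the all-ones lambda above leaves the data unchanged). A minimal sketch of a non-trivial filter built the same way, reusing the seeder from the test above; gaussian_lowpass and the output key name are hypothetical, not part of the module:

     def gaussian_lowpass(fc):
         # weights roll off smoothly above the cutoff frequency fc
         return lambda freqs: np.exp(-0.5 * (freqs / fc)**2)

     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(Filter, input='signal', output='signal_filtered',
           filter_function=gaussian_lowpass(2.0))
     p.Run()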
Example #2
  def test_DataG3Module(self):
      ### Test that it works in a pipeline
      p = core.G3Pipeline()
      p.Add(data_sim.PipelineSeeder(self.frames))
      p.Add(DataG3Module, input='signal', output=None)
      p.Run()
 
      ### test that it works on individual frames
      x = DataG3Module(input='signal', output=None)
      x.apply(self.frames[0])
      
      ### Test that it works as an inline function
      p = core.G3Pipeline()
      p.Add(data_sim.PipelineSeeder(self.frames))
      p.Add(DataG3Module.from_function(lambda d:d+1, input='signal', output=None))
      p.Run()
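from_function also accepts any named callable that maps a detector timestream to a new one. A sketch reusing the seeder above; calibrate and the output key are hypothetical:

      def calibrate(d):
          # apply a fixed gain to each timestream
          return d * 1.5

      p = core.G3Pipeline()
      p.Add(data_sim.PipelineSeeder(self.frames))
      p.Add(DataG3Module.from_function(calibrate, input='signal',
                                       output='signal_cal'))
      p.Run()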
Example #3
 def test_condition(self):
     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(Detrend, input='signal', output=None)
     p.Add(Retrend, input='signal', output=None)
     p.Run()
     
     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(MeanSubtract, input='signal', output=None)
     p.Add(MedianSubtract, input='signal', output=None)
     p.Run()
     
     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(Decimate, input='signal', output=None)
     p.Run()
     
     p = core.G3Pipeline()
     p.Add(data_sim.PipelineSeeder(self.frames))
     p.Add(Resample, input='signal', output=None)
     p.Run()
Example #4
 def test_correct_processing(self):
     pipe = core.G3Pipeline()
     pipe.Add(FakeMap, test=1)
     pipe.Add(ChangeCoordSys,
              coord_ref_old=core.MapCoordReference.Equatorial,
              coord_ref_new=core.MapCoordReference.Galactic,
              alt_key_in="ra",
              alt_key_out="l",
              az_key_in="dec",
              az_key_out="b",
              pol_key_in="pol",
              pol_key_out="pol_gal")
     pipe.Add(self._correct_pol)
     pipe.Run()
Example #5
def filter_g3file(g3file, hdr, args):
    "Run the actual filtering"

    # Create pipe
    pipe = core.G3Pipeline()
    pipe.Add(core.G3Reader, filename=g3file)
    band = hdr['BAND'][0]

    logger.info(f"Filtering {g3file} band: {band}")

    if args.coadd is not None:
        # Match the band of the coadd
        for map_id, data in args.g3coadds.items():
            if map_id == 'Coadd' + band:
                logger.info(f"Adding InjectMaps for {map_id}")
                pipe.Add(
                    maps.InjectMaps,
                    map_id=map_id,
                    maps_in=data,
                    ignore_missing_weights=True,
                )

    elif args.mask is not None:
        # mask has been injected in coadd.
        # this handles the no-coadd case
        logger.info(f"Adding mask InjectMaps for {args.mask_id}")
        pipe.Add(
            maps.InjectMaps,
            map_id=args.mask_id,
            maps_in={"T": args.g3mask},
            ignore_missing_weights=True,
        )
        # del args.g3mask

    if not args.polarized:
        pipe.Add(maps.map_modules.MakeMapsUnpolarized)

    # Add the TransientMapFiltering to the pipe
    logger.info(f"Adding TransientMapFiltering for {band}")
    pipe.Add(
        transients.TransientMapFiltering,
        bands=args.band,
        subtract_coadd=args.subtract_coadd,
        mask_id=args.mask_id,
    )

    return pipe
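Note that filter_g3file only assembles the pipeline; the caller is expected to execute it. A sketch, where the filename, hdr, and the args namespace are assumptions matching the signature above:

pipe = filter_g3file('obs_band090.g3', hdr, args)
pipe.Run()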
Example #6
def scan_directory(directory, config):
    """Scan a given directory for .g3 files, adding them to the Database.

    Parameters
    ----------
    directory : str
        the top level directory to scan
    config : dict
        SQL config for the DB connection

    """
    # Establish DB connection.
    cnx = mysql.connector.connect(host=config['host'],
                                  user=config['user'],
                                  passwd=config['passwd'],
                                  db=config['db'])
    cur = cnx.cursor()
    print("SQL server connection established")

    # Gather all files we want to scan.
    walker = os.walk(directory)

    # Iterate over the file tree and run a pipeline on each .g3 file.
    for root, _dirs, files in walker:
        for g3 in files:
            if g3.endswith(".g3"):
                try:
                    p = core.G3Pipeline()
                    #print("Adding %s/%s to G3Reader"%(root, g3))
                    p.Add(core.G3Reader, filename=os.path.join(root, g3))
                    p.Add(add_files_to_feeds_table, cur=cur, r=root, f=g3)
                    p.Add(add_fields_and_times_to_db, cur=cur, r=root, f=g3)
                    p.Run()
                    # Mark feed_id as 'scanned' in feeds table.
                    cur.execute(
                        "UPDATE feeds "
                        "SET scanned=1 "
                        "WHERE filename=%s "
                        "AND path=%s", (g3, root))
                    cnx.commit()
                except RuntimeError:
                    print(
                        "Could not read {}, ".format(os.path.join(root, g3)) +
                        "file likely still being written.")

    cur.close()
    cnx.close()
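A minimal sketch of driving scan_directory; all connection values are placeholders, and the feeds table is assumed to exist already:

config = {'host': 'localhost', 'user': 'g3user',
          'passwd': 'secret', 'db': 'g3files'}
scan_directory('/data/g3archive', config)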
Example #7
def write_example_file(filename='hk_out.g3', hkagg_version=2):
    """Generate some example HK data and write to file.

    Args:
        filename (str): filename to write data to
        hkagg_version (int): which HK version to write to file

    """
    test_file = filename

    # Write a stream of HK frames.
    # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
    seeder = Seeder()
    w = core.G3Pipeline()
    w.Add(seeder)
    w.Add(HKTranslator(target_version=hkagg_version))
    w.Add(core.G3Writer(test_file))

    if hkagg_version <= 1:
        seeder.extend(get_v0_stream())
    else:
        seeder.extend(get_v2_stream())
    w.Run()
    del w
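Usage sketch: write a v2 stream, then read it back frame by frame, inspecting hkagg_type as the tests further down do:

write_example_file('hk_out.g3', hkagg_version=2)
for f in core.G3File('hk_out.g3'):
    print(f.type, f.get('hkagg_type'))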
Example #8
                    elif waf not in wafer_list:
                        wafer_list.append(waf)
                if not np.isnan(props.band):
                    if (str(int(props.band/core.G3Units.GHz))+'GHz'
                        not in bands):
                        bad_bolos.append(bolo)
            break
    if len(wafer_list) > 0:
        break
bad_bolos = list(np.unique(bad_bolos))
args.wafers_to_include = wafer_list

# =============================================================================
# Begin pipeline
# -----------------------------------------------------------------------------
pipe = core.G3Pipeline()
pipe.Add(core.G3Reader, filename=args.input_files)

if not args.sim:
    # Drop timestreams from detectors that we're not interested in
    if len(bad_bolos) > 0:
        pipe.Add(remove_present_in_list, target_key='RawTimestreams_I',
                 reference_list=bad_bolos)
    raw_ts_key = 'RawTimestreams_I'
    raw_ts_key_i = 'RawTimestreams_I'
    raw_ts_key_q = 'RawTimestreams_Q'

    # -------------------------------------------------------------------------
    # Drop certain data before further processing
    # -------------------------------------------------------------------------
    # Cut turnarounds, deduplicate metadata
Example #9
def coadd_maps(maps_in, map_id=None, output_file=None, obslist_to_cut=None,
               sign_flip=False):
    """
    Coadd `maps_in` into a single map.

    Parameters
    ----------
    maps_in: str or list of str
        A filepath, directory, or list of either that points to maps
        stored in .g3(.gz) files.
    map_id: str or list of str
        Add maps whose Id key matches the pattern, or one of the
        patterns in the list. Maps matching separate patterns are added
        to separate coadds. Understands Unix shell-style wildcards
        (e.g. *, ?).
    output_file: str
        If specified, save the output map to this path.
    obslist_to_cut: list
        If specified, drop observations in this list before coadding.
    sign_flip: bool
        If True, randomly flip the signs of input maps before coadding.

    Returns
    -------
    G3Frame containing the coadded map
    """
    if not isinstance(maps_in, list):
        maps_in = [maps_in]
    if not all(isinstance(mp, str) for mp in maps_in):
        raise TypeError("All entries in maps_in must be strings")

    if map_id is not None:
        if not isinstance(map_id, list):
            map_id = [map_id]
        if not all(isinstance(mid, str) for mid in map_id):
            raise TypeError("All entries in map_id must be strings")


    maps_to_add = []
    for pth in maps_in:
        if os.path.isdir(pth):
            maps_to_add += glob(os.path.join(pth, '*.g3*'))
        elif os.path.isfile(pth):
            maps_to_add.append(pth)
        else:
            raise OSError(
                "%s is not an existing file or directory." % pth)
    
    map_out = _GrabMapFrame()
    pipe = core.G3Pipeline()
    pipe.Add(core.G3Reader, filename=maps_to_add)
    pipe.Add(core.Dump)
    if obslist_to_cut is not None:
        pipe.Add(CutObsids, obslist=obslist_to_cut)
    pipe.Add(lambda frame: frame.type == core.G3FrameType.Map)
    pipe.Add(lambda frame: 'ScanNumber' not in frame or
             frame['ScanNumber'] < 70)
    if sign_flip:
        pipe.Add(FlipSignsForCoadd, seed=42)
    if not map_id:
        pipe.Add(util.framecombiner.MapFrameCombiner, fr_id=None)
    else:
        for mid in map_id:
            pipe.Add(util.framecombiner.MapFrameCombiner, fr_id=mid)
    pipe.Add(lambda frame: 'Id' not in frame or
             fnmatch(frame['Id'], 'combined_*'))
    if output_file is not None:
        parent = os.path.dirname(output_file)
        if parent != "" and not os.path.exists(parent):
            os.makedirs(parent)
        pipe.Add(core.G3Writer, filename=output_file)    
    pipe.Add(map_out)
    pipe.Run()
    
    return map_out.map
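Usage sketch: coadd every 150GHz map under a directory into a single output file; the paths and Id pattern here are hypothetical:

coadd_frame = coadd_maps('/path/to/obs_maps', map_id='*150GHz*',
                         output_file='coadds/combined_150GHz.g3')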
Example #10
def dump(run_number, dir_output):
    ddir = '/data/pb2/ChileData/g3compressed/20000000/Run2000{:04d}/'.format(
        run_number)
    dfiles = sorted(glob(ddir + '*.g3'))

    plotdir = '{:s}/Run{:04d}/'.format(dir_output, run_number)
    if not os.path.isdir(plotdir):
        os.makedirs(plotdir)

    # peek and get the number of detectors
    f = core.G3File(dfiles[0])
    frame = next(f)
    while frame.type != core.G3FrameType.Scan:
        frame = next(f)
    n_dets = len(frame['RawTimestreams_I'])

    # loading lots of tods is ram intensive, so do it in chunks
    jump_interval = 100
    for start in range(0, n_dets, jump_interval):
        end = start + jump_interval

        # detector TOD, in both phases
        tod_buffs_I = {}
        tod_buffs_Q = {}

        # detector sample times
        times_buff = []

        # boresight encoder data
        antennatime_buff = []
        az_buff = []
        el_buff = []

        # set up pipeline
        pipe = core.G3Pipeline()
        pipe.Add(core.G3Reader, filename=dfiles)
        pipe.Add(get_times, times_buff=times_buff)
        pipe.Add(get_TOD,
                 start=start,
                 end=end,
                 tod_buffs_I=tod_buffs_I,
                 tod_buffs_Q=tod_buffs_Q)
        pipe.Add(get_azel,
                 antennatime_buff=antennatime_buff,
                 az_buff=az_buff,
                 el_buff=el_buff)
        pipe.Run()

        # group stuff into single arrays
        times = np.concatenate(times_buff)
        antennatimes = np.concatenate(antennatime_buff)
        az = np.concatenate(az_buff)
        el = np.concatenate(el_buff)

        # nice date format for plotting
        bolodates = md.date2num([dt.datetime.fromtimestamp(t) for t in times])
        antennadates = md.date2num(
            [dt.datetime.fromtimestamp(t) for t in antennatimes])

        # decide which plots we want here
        PLOT_ADCVSTOD = False

        for det in tod_buffs_I:
            print('Concatenating {:s}...'.format(det))
            tod_I = np.concatenate(tod_buffs_I[det])
            tod_Q = np.concatenate(tod_buffs_Q[det])

            # skip problem TODs
            if np.all(np.isnan(tod_I)) or np.all(np.isnan(tod_Q)): continue
            if len(tod_I) != len(times) or len(tod_I) != len(tod_Q):
                print('skipping because tod is different length')
                continue

            # Diagnostic plotting (plot_ADCvsTOD: I/Q ADC counts vs. time
            # alongside boresight az/el; plot_dElvsdAz) is disabled here.
        print(az)
Example #11
    def test_00_basic(self):
        """Write a stream of HK frames and scan it for errors."""

        # Write a stream of HK frames.
        # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
        print('Streaming to %s' % test_file)
        w = core.G3Writer(test_file)

        # Create something to help us track the aggregator session.
        hksess = so3g.hk.HKSessionHelper(session_id=None,
                                         description="Test HK data.")

        # Register a data provider.
        prov_id = hksess.add_provider(
            description='Fake data for the real world.')

        # Start the stream -- write the initial session and status frames.
        w.Process(hksess.session_frame())
        w.Process(hksess.status_frame())

        # Add a bunch of data frames
        t_next = time.time()
        for i in range(10):
            f = hksess.data_frame(prov_id=prov_id, timestamp=t_next)
            hk = so3g.IrregBlockDouble()
            hk.prefix = 'hwp_'
            hk.data['position'] = [1, 2, 3, 4, 5]
            hk.data['speed'] = [1.2, 1.2, 1.3, 1.2, 1.3]
            hk.t = t_next + np.arange(len(hk.data['speed']))
            t_next += len(hk.data['speed'])
            f['blocks'].append(hk)
            w.Process(f)

        w.Flush()
        del w

        print('Stream closed.\n\n')

        # Now play them back...
        print('Reading back:')
        for f in core.G3File(test_file):
            ht = f.get('hkagg_type')
            if ht == so3g.HKFrameType.session:
                print('Session: %i' % f['session_id'])
            elif ht == so3g.HKFrameType.status:
                print('  Status update: %i providers' % (len(f['providers'])))
            elif ht == so3g.HKFrameType.data:
                print('  Data: %i blocks' % len(f['blocks']))
                for block in f['blocks']:
                    for k, v in block.data.items():
                        print('    %s%s' % (block.prefix, k), v)

        # Scan and validate.
        print()
        print('Running HKScanner on the test data...')
        scanner = so3g.hk.HKScanner()
        pipe = core.G3Pipeline()
        pipe.Add(core.G3Reader(test_file))
        pipe.Add(scanner)
        pipe.Run()

        print('Stats: ', scanner.stats)
        print('Providers: ', scanner.providers)

        self.assertEqual(scanner.stats['concerns']['n_error'], 0)
        self.assertEqual(scanner.stats['concerns']['n_warning'], 0)
Example #12
#!/usr/bin/env python

import os
from spt3g import core

p = core.G3Pipeline()
p.Add(core.G3InfiniteSource, type=core.G3FrameType.Timepoint, n=10)
p.Add(core.Dump)
p.Add(core.G3Writer, filename='testpi.g3')
p.Run()

# Check that pipelineinfo is added and is runnable
assert(len(list(core.G3File('testpi.g3'))) == 11)
for i in core.G3File('testpi.g3'):
	if i.type == core.G3FrameType.PipelineInfo:
		pi = i.values()[0]
		break

os.remove('testpi.g3')
print(repr(pi))
exec(repr(pi))
pipe.Run()

assert(len(list(core.G3File('testpi.g3'))) == 11)

# Check that PI frame has two entries on the second run through
p = core.G3Pipeline()
p.Add(core.G3Reader, filename='testpi.g3')
def check(fr):
	if fr.type == core.G3FrameType.PipelineInfo:
		assert(len(fr) == 2)
p.Add(check)
p.Run()
Example #13
    def test_00_basic(self):
        """Write a stream of HK frames and scan it for errors."""

        # Write a stream of HK frames.
        # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
        print('Streaming to %s' % test_file)
        w = core.G3Writer(test_file)

        # Create something to help us track the aggregator session.
        hksess = so3g.hk.HKSessionHelper(session_id=None,
                                         hkagg_version=2,
                                         description="Test HK data.")

        # Register a data provider.
        prov_id = hksess.add_provider(
            description='Fake data for the real world.')

        # Start the stream -- write the initial session and status frames.
        w.Process(hksess.session_frame())
        w.Process(hksess.status_frame())

        # Add a bunch of data frames
        t_next = time.time()
        for i in range(10):
            f = hksess.data_frame(prov_id=prov_id, timestamp=t_next)
            hk = core.G3TimesampleMap()
            speed = [1.2, 1.2, 1.3, 1.2, 1.3]
            hk.times = [
                core.G3Time(_t * core.G3Units.second)
                for _t in t_next + np.arange(len(speed))
            ]
            hk['position'] = core.G3VectorDouble(np.arange(len(speed)))
            hk['speed'] = core.G3VectorDouble(speed)
            hk['error_bits'] = core.G3VectorInt([10] * len(speed))
            hk['mode_str'] = core.G3VectorString(['ok'] * len(speed))
            t_next += len(speed)
            f['blocks'].append(hk)
            f['block_names'].append('main_block')
            w.Process(f)

        w.Flush()
        del w

        print('Stream closed.\n\n')

        # Now play them back...
        print('Reading back:')
        for f in core.G3File(test_file):
            ht = f.get('hkagg_type')
            if ht == so3g.HKFrameType.session:
                print('Session: %i' % f['session_id'])
            elif ht == so3g.HKFrameType.status:
                print('  Status update: %i providers' % (len(f['providers'])))
            elif ht == so3g.HKFrameType.data:
                print('  Data: %i blocks' % len(f['blocks']))
                for i, block in enumerate(f['blocks']):
                    print('    Block %i' % i)
                    for k, v in block.items():
                        print('    %s' % k, v)

        # Scan and validate.
        print()
        print('Running HKScanner on the test data...')
        scanner = so3g.hk.HKScanner()
        pipe = core.G3Pipeline()
        pipe.Add(core.G3Reader(test_file))
        pipe.Add(scanner)
        pipe.Run()

        print('Stats: ', scanner.stats)
        print('Providers: ', scanner.providers)

        self.assertEqual(scanner.stats['concerns']['n_error'], 0)
        self.assertEqual(scanner.stats['concerns']['n_warning'], 0)
Example #14
    class MapExtractor(object):
        # Caches the observation maps and a weight-rate statistic from the
        # frames it sees; the class skeleton is inferred from usage below,
        # since only the tail of __call__ survived in this snippet.
        def __init__(self, frequency=None):
            self.map_frequency = frequency
            self.observation_maps = {}
            self.delta_f_weights = None

        def __call__(self, frame):
            if "weight_high_freq" in frame:  # guard condition assumed
                self.delta_f_weights = (
                    frame["weight_high_freq"] -
                    frame["weight_low_freq"]) / core.G3Units.sec
            elif frame.type == core.G3FrameType.Map and \
                 (self.map_frequency is None or self.map_frequency in frame['Id']):
                self.observation_maps['T'] = frame['T']
                self.observation_maps['Q'] = frame['Q']
                self.observation_maps['U'] = frame['U']
                self.observation_maps['Wpol'] = frame['Wpol']

    if not os.path.exists(args.obsfile):
        raise FileNotFoundError('Observation file {} does not exist.'.format(
            args.obsfile))

    # individual observation
    pipe = core.G3Pipeline()
    pipe.Add(core.G3Reader, filename=args.obsfile)
    map_extractor = MapExtractor(frequency=args.frequency)
    pipe.Add(map_extractor)
    pipe.Run()
    obs_maps = map_extractor.observation_maps
    delta_f_weights = map_extractor.delta_f_weights

    # coadded observation
    pipe_coadd = core.G3Pipeline()
    pipe_coadd.Add(core.G3Reader, filename=args.coaddfile)
    map_extractor_coadd = MapExtractor()
    pipe_coadd.Add(map_extractor_coadd)
    pipe_coadd.Run()
    coadd_maps = map_extractor_coadd.observation_maps