def SimStub(fr, ts_key, valid_ids_key='valid_ids', flag_key='Flags',
            weight_key='TodWeights'):
    '''
    This function produces a separate .g3 file used later for making
    mock-observations. It records the active bolometers, detector weights,
    pointing, and flags in each scan frame. This output file is also useful
    for statistics on weights and flags.

    Arguments
    ---------
    fr : core.G3Frame
        Frame to process; only Scan frames are modified.
    ts_key : str
        Key of the timestream map whose detector ids are recorded.
    valid_ids_key, flag_key, weight_key : str
        Frame keys to preserve (and, for valid_ids_key, to write).

    Raises
    ------
    KeyError
        If ts_key is missing from a Scan frame.
    '''
    to_keep = [valid_ids_key, flag_key, weight_key,
               'RawBoresightAz', 'RawBoresightEl']
    for model in ['Offline', 'Online']:
        to_keep += [model + 'BoresightAz', model + 'BoresightEl',
                    model + 'BoresightRa', model + 'BoresightDec',
                    model + 'RaDecRotation', model + 'PointingModel']

    if fr.type == core.G3FrameType.Scan:
        if ts_key not in fr:
            raise KeyError("ts_key %s not present in scan frame!" % ts_key)
        fr[valid_ids_key] = core.G3VectorString(fr[ts_key].keys())
        # Snapshot the key list before deleting: removing entries from the
        # frame while iterating it directly invalidates the iteration.
        for k in list(fr.keys()):
            if k not in to_keep:
                del fr[k]
def create_focal_plane(n_det):
    """Build a Calibration frame describing a simple square-ish grid of
    ``n_det`` detectors, two polarization channels (A/B) per pixel.

    Returns a core.G3Frame with per-detector positions (x, y), angles
    (theta, phi, pol), boresight-relative quaternions, and names.

    NOTE(review): assumes n_det >= 1 — ``i[-1]`` raises IndexError for
    n_det == 0; confirm callers never pass 0.
    """
    # Generate a simple squarish grid.
    i, p = np.arange(n_det) // 2, np.arange(n_det) % 2
    side = max(2, int(i[-1]**.5))
    row, col = i // side, i % side
    pol_fam = (row + col) % 2
    pol = (pol_fam * 45 + p * 90) * core.G3Units.deg
    x = (col / (side - 1) - .5) * 1. * core.G3Units.deg
    y = (row / (side - 1) - .5) * 1. * core.G3Units.deg

    # Convert to quaternions in the prescribed way.
    phi = np.arctan2(y, x)
    theta = np.arcsin((x**2 + y**2)**.5 / core.G3Units.rad)
    q = (coords.q_euler(2, phi) * coords.q_euler(1, theta) *
         coords.q_euler(2, -phi) * coords.q_euler(2, pol / core.G3Units.rad))

    f = core.G3Frame(FT.Calibration)  # booo
    f['cal_type'] = 'focal_plane'
    # For now just store a vector of detector names, then a vector of
    # boresight-relative quaternion rotations for corresponding dets.
    f['signal_q'] = q
    f['signal_x'] = core.G3VectorDouble(x)
    f['signal_y'] = core.G3VectorDouble(y)
    f['signal_theta'] = core.G3VectorDouble(theta)
    f['signal_phi'] = core.G3VectorDouble(phi)
    f['signal_pol'] = core.G3VectorDouble(pol)
    # Build the name vector locally and store it once, rather than
    # repeatedly fetching f['signal_names'] back out of the frame and
    # appending — that pattern depends on the frame handing back a live
    # mutable reference on every lookup.
    names = core.G3VectorString()
    for j in range(n_det):
        names.append('det%04i%s' % (i[j], {0: 'A', 1: 'B'}[p[j]]))
    f['signal_names'] = names
    return f
def DecryptFeatureBit(f):
    '''
    Unpacks the GCP feature flags

    Reads the packed feature bitfield from f['array']['frame']['features']
    and stores the named flags as f['GCPFeatureBits'] (a G3VectorString).
    Only GcpSlow frames are processed; others pass through untouched.
    '''
    if f.type != core.G3FrameType.GcpSlow:
        return

    flag_array = core.G3VectorString()
    feature_bit = f['array']['frame']['features'].value

    flags = ['analyze', 'source_scan', 'cabin_shutter', 'elnod', 'pol_cal',
             'calibrator', 'every_pixel_on_src', 'skydip', 'optical', 'noise',
             'trail', 'el_scan', None, None, None, None, None, None, None,
             'debug']  # Sorry... NDH

    for bit, name in enumerate(flags):
        if feature_bit & (1 << bit):
            if name is None:
                # Bug fix: the original fell through and appended None to a
                # G3VectorString, which fails.  Log the stray bit and skip.
                core.log_error('Got an unused feature bit: {:d}'.format(bit))
                continue
            flag_array.append(name)

    f['GCPFeatureBits'] = flag_array
def SimStub(fr, ts_key, valid_ids_key='valid_ids', flag_key='Flags',
            weight_key='TodWeights'):
    """Strip a Scan frame down to the keys needed for mock-observations.

    Records the active bolometer ids under ``valid_ids_key`` and deletes
    everything except flags, weights, and Online/raw boresight pointing.

    Raises
    ------
    KeyError
        If ``ts_key`` is missing from a Scan frame.
    """
    to_keep = [valid_ids_key, flag_key, weight_key,
               'RawBoresightAz', 'RawBoresightEl',
               'OnlineBoresightAz', 'OnlineBoresightEl',
               'OnlineBoresightRa', 'OnlineBoresightDec',
               'OnlineRaDecRotation', 'OnlinePointingModel']
    if fr.type == core.G3FrameType.Scan:
        # Raise a real error rather than assert: asserts vanish under -O.
        if ts_key not in fr:
            raise KeyError("ts_key %s not present in scan frame!" % ts_key)
        fr[valid_ids_key] = core.G3VectorString(fr[ts_key].keys())
        # Snapshot the key list first — deleting from the frame while
        # iterating it directly invalidates the iteration.
        for k in list(fr.keys()):
            if k not in to_keep:
                del fr[k]
def to_frame(self, hksess=None, clear=False):
    """
    Returns a G3Frame based on the provider's blocks.

    Args:
        hksess (optional):
            If provided, the frame will be based off of hksession's data
            frame.  If not provided, the data will be put into a clean
            frame.
        clear (bool):
            Clears provider data if True.
    """
    if hksess is not None:
        frame = hksess.data_frame(prov_id=self.prov_id)
    else:
        frame = core.G3Frame(core.G3FrameType.Housekeeping)
        # A clean frame has no 'blocks' entry yet; create it here so the
        # append below cannot raise KeyError.
        frame['blocks'] = core.G3VectorFrameObject()

    frame['address'] = self.address
    frame['provider_session_id'] = self.sessid

    block_names = []
    for block_name, block in self.blocks.items():
        # Skip blocks that collected no samples.
        if not block.timestamps:
            continue
        try:
            m = core.G3TimesampleMap()
            m.times = g3_cast(block.timestamps, time=True)
            for key, ts in block.data.items():
                m[key] = g3_cast(ts)
        except Exception as e:
            # Best-effort: drop the offending block but keep the frame.
            self.log.warn("Error received when casting timestream! {e}",
                          e=e)
            continue
        frame['blocks'].append(m)
        block_names.append(block_name)

    if 'block_names' in frame:
        frame['block_names'].extend(block_names)
    else:
        frame['block_names'] = core.G3VectorString(block_names)

    if clear:
        self.clear()
    return frame
def data_frame(self, prov_id, timestamp=None):
    """ Return a Data frame template. The prov_id must match the prov_id
    in one of the Provider blocks in the preceding status frame.  The
    session manager should create and add IrregBlockDouble items to the
    'blocks' list. """
    ts = time.time() if timestamp is None else timestamp
    frame = core.G3Frame()
    frame.type = core.G3FrameType.Housekeeping
    # Scalar header entries, in the canonical order.
    for key, value in [('hkagg_version', self.hkagg_version),
                       ('hkagg_type', so3g.HKFrameType.data),
                       ('session_id', self.session_id),
                       ('prov_id', prov_id),
                       ('timestamp', ts)]:
        frame[key] = value
    frame['blocks'] = core.G3VectorFrameObject()
    # 'block_names' only exists in schema v2 and later.
    if self.hkagg_version >= 2:
        frame['block_names'] = core.G3VectorString()
    return frame
def get_v2_stream():
    """Generate some example HK data, in schema version 2.

    Returns a list of frames constituting a valid version 2 HK stream.
    """

    def five_ticks():
        # Five one-second time samples starting at t=0.
        return core.G3VectorTime(
            [core.G3Time(k * core.G3Units.seconds) for k in range(5)])

    # Session helper to track the aggregator session.
    session = so3g.hk.HKSessionHelper(session_id=1234,
                                      hkagg_version=2,
                                      description="Test HK data.")

    # Register a data provider.
    provider = session.add_provider(
        description='Fake data for the real world.')

    # The stream opens with the session and status frames.
    stream = [session.session_frame(), session.status_frame()]

    # Then one data frame carrying two blocks.
    data = session.data_frame(prov_id=provider)

    block = core.G3TimesampleMap()
    block.times = five_ticks()
    block['speed'] = core.G3VectorDouble([1.2] * 5)
    data['blocks'].append(block)
    data['block_names'].append('group0')

    block = core.G3TimesampleMap()
    block.times = five_ticks()
    block['position'] = core.G3VectorInt([1, 2, 3, 4, 5])
    block['mode'] = core.G3VectorString(
        ['going', 'going', 'going', 'going', 'gone/'])
    data['blocks'].append(block)
    data['block_names'].append('group1')

    stream.append(data)
    return stream
def config_frame(self):
    """ Generates a config frame for lyrebird """
    # (key, vector constructor, source attribute) in the order the
    # original assignments were made.
    fields = [
        ('x', core.G3VectorDouble, self.xs),
        ('y', core.G3VectorDouble, self.ys),
        ('cname', core.G3VectorString, self.cnames),
        ('rotation', core.G3VectorDouble, self.rots),
        ('templates', core.G3VectorString, self.templates),
        ('values', core.G3VectorString, self.value_names),
        ('color_is_dynamic', core.G3VectorBool, self.eq_color_is_dynamic),
        ('equations', core.G3VectorString, self.eqs),
        ('eq_labels', core.G3VectorString, self.eq_labels),
        ('cmaps', core.G3VectorString, self.cmaps),
    ]
    frame = core.G3Frame(core.G3FrameType.Wiring)
    for key, ctor, data in fields:
        frame[key] = ctor(data)
    return frame
def Process(self, f):
    """Translates one frame to the target schema.  Irrelevant frames are
    passed through unmodified.

    Args:
      f: a G3Frame

    Returns:
      A list containing only the translated frame.  G3Pipeline
      compatibility would permit us to return a single frame here,
      instead of a length-1 list.  But we also sometimes call Process
      outside of a G3Pipeline, where a consistent output type is
      desirable.  Returning lists is most future-compatible; consumers
      that want to assume length-1 should assert it to be true.
    """
    if f.type == core.G3FrameType.EndProcessing:
        core.log_info(str(self.stats))
        return [f]

    if f.type != core.G3FrameType.Housekeeping:
        self.stats['n_other'] += 1
        return [f]

    # It is an HK frame.  Frames without 'hkagg_version' are schema v0.
    orig_version = f.get('hkagg_version', 0)

    self.stats['n_hk'] += 1
    self.stats['versions'][orig_version] = \
        self.stats['versions'].get(orig_version, 0) + 1

    if orig_version > self.target_version and not self.future_tolerant:
        raise ValueError(
            ('Translator to v%i encountered v%i, but '
             'future_tolerant=False.')
            % (self.TARGET_VERSION, orig_version))

    if orig_version >= self.target_version:
        return [f]

    # Always update the version, even if that's our only change...
    # Preserve the first-seen original version in 'hkagg_version_orig'.
    if 'hkagg_version' in f:
        if 'hkagg_version_orig' not in f:
            f['hkagg_version_orig'] = orig_version
        del f['hkagg_version']
    f['hkagg_version'] = self.target_version

    # No difference in Session/Status for v0, v1, v2.
    if f.get('hkagg_type') != so3g.HKFrameType.data:
        return [f]

    if self.target_version == 0:
        return [f]

    if orig_version == 0:
        # v0 -> v1: convert IrregBlockDouble blocks to G3TimesampleMap.
        # Pop the data blocks out of the frame.
        orig_blocks = f.pop('blocks')
        f['blocks'] = core.G3VectorFrameObject()

        # Now process the data blocks.
        for block in orig_blocks:
            new_block = core.G3TimesampleMap()
            new_block.times = so3g.hk.util.get_g3_time(block.t)
            for k in block.data.keys():
                v = block.data[k]
                new_block[k] = core.G3VectorDouble(v)
            f['blocks'].append(new_block)

    if self.target_version == 1:
        return [f]

    if orig_version <= 1:
        # v1 -> v2: add 'block_names'.  Since we don't want to start
        # caching Block Stream information, just compute a good
        # block name based on the alphabetically first field in
        # the block.
        block_names = []
        for block in f['blocks']:
            field_names = list(sorted(block.keys()))
            block_names.append('block_for_%s' % field_names[0])
            assert(len(block_names[-1]) < 256)  # What have you done.
        # (Removed dead local 'orig_block_names', which was assigned an
        # empty list and never used.)
        f['block_names'] = core.G3VectorString(block_names)

    return [f]
def test_00_basic(self):
    """Write a stream of HK frames and scan it for errors."""

    # Write a stream of HK frames.
    # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
    print('Streaming to %s' % test_file)
    w = core.G3Writer(test_file)

    # Create something to help us track the aggregator session.
    # session_id=None lets the helper pick its own id.
    hksess = so3g.hk.HKSessionHelper(session_id=None,
                                     hkagg_version=2,
                                     description="Test HK data.")

    # Register a data provider.
    prov_id = hksess.add_provider(
        description='Fake data for the real world.')

    # Start the stream -- write the initial session and status frames.
    w.Process(hksess.session_frame())
    w.Process(hksess.status_frame())

    # Add a bunch of data frames
    t_next = time.time()
    for i in range(10):
        f = hksess.data_frame(prov_id=prov_id, timestamp=t_next)
        hk = core.G3TimesampleMap()
        speed = [1.2, 1.2, 1.3, 1.2, 1.3]
        # One sample per second, starting at t_next.
        hk.times = [
            core.G3Time(_t * core.G3Units.second)
            for _t in t_next + np.arange(len(speed))
        ]
        hk['position'] = core.G3VectorDouble(np.arange(len(speed)))
        hk['speed'] = core.G3VectorDouble(speed)
        hk['error_bits'] = core.G3VectorInt([10] * len(speed))
        hk['mode_str'] = core.G3VectorString(['ok'] * len(speed))
        # Advance by the number of samples so frames don't overlap.
        t_next += len(hk)
        f['blocks'].append(hk)
        f['block_names'].append('main_block')
        w.Process(f)

    w.Flush()
    del w
    print('Stream closed.\n\n')

    # Now play them back...
    print('Reading back:')
    for f in core.G3File(test_file):
        ht = f.get('hkagg_type')
        if ht == so3g.HKFrameType.session:
            print('Session: %i' % f['session_id'])
        elif ht == so3g.HKFrameType.status:
            print('  Status update: %i providers' % (len(f['providers'])))
        elif ht == so3g.HKFrameType.data:
            print('  Data: %i blocks' % len(f['blocks']))
            for i, block in enumerate(f['blocks']):
                print('    Block %i' % i)
                for k, v in block.items():
                    print('    %s' % k, v)

    # Scan and validate.
    print()
    print('Running HKScanner on the test data...')
    scanner = so3g.hk.HKScanner()
    pipe = core.G3Pipeline()
    pipe.Add(core.G3Reader(test_file))
    pipe.Add(scanner)
    pipe.Run()

    print('Stats: ', scanner.stats)
    print('Providers: ', scanner.providers)

    # A clean stream should produce no scanner concerns at all.
    self.assertEqual(scanner.stats['concerns']['n_error'], 0)
    self.assertEqual(scanner.stats['concerns']['n_warning'], 0)