Example No. 1
def _concat_hk_stream(blocks_in):
    """Concatenates an ordered list of compatible HK blocks into a single
    frame.  Each block should be a valid G3TimesampleMap with the same
    keys.

    Returns a single G3TimesampleMap with all fields concatenated.

    """
    blk = core.G3TimesampleMap()
    blk.times = core.G3VectorTime(blocks_in[0].times)
    fields = list(blocks_in[0].keys())
    for f in fields:
        f_short = f.split('.')[-1]
        blk[f] = blocks_in[0][f_short]
    for b in blocks_in[1:]:
        blk.times.extend(b.times)
    for f in fields:
        f_short = f.split('.')[-1]
        for _type in _SCHEMA_V1_BLOCK_TYPES:
            if isinstance(blocks_in[0][f_short], _type):
                break
        else:
            raise RuntimeError('Field "%s" is of unsupported type %s.' %
                               (f_short, type(blocks_in[0][f_short])))
        for b in blocks_in[1:]:
            blk[f].extend(b[f_short])
    return blk
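A hypothetical usage sketch (not from the original source): build two small compatible blocks and concatenate them. The _make_block helper is invented for illustration, and _SCHEMA_V1_BLOCK_TYPES is assumed to be defined in the enclosing module, e.g. as a tuple of the supported vector types.

# Hypothetical usage sketch for _concat_hk_stream.
from spt3g import core

# Assumption: the enclosing module defines something like this.
_SCHEMA_V1_BLOCK_TYPES = (core.G3VectorDouble, core.G3VectorInt,
                          core.G3VectorString, core.G3VectorBool)

def _make_block(t0, values):
    # Hypothetical helper: one block with a 'speed' field starting at t0 seconds.
    b = core.G3TimesampleMap()
    b.times = core.G3VectorTime(
        [core.G3Time((t0 + i) * core.G3Units.seconds) for i in range(len(values))])
    b['speed'] = core.G3VectorDouble(values)
    return b

blocks = [_make_block(0, [1.0, 1.1]), _make_block(2, [1.2, 1.3])]
combined = _concat_hk_stream(blocks)
print(len(combined.times), list(combined['speed']))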
Example No. 2
def get_v2_stream():
    """Generate some example HK data, in schema version 2.

    Returns a list of frames constituting a valid version 2 HK stream.

    """
    # Create something to help us track the aggregator session.
    hksess = so3g.hk.HKSessionHelper(session_id=1234,
                                     hkagg_version=2,
                                     description="Test HK data.")

    # Register a data provider.
    prov_id = hksess.add_provider(
        description='Fake data for the real world.')

    # Start the stream -- write the initial session and status frames.
    frames = [
        hksess.session_frame(),
        hksess.status_frame(),
    ]

    # Now make a data frame.
    f = hksess.data_frame(prov_id=prov_id)

    # Add some data blocks.
    hk = core.G3TimesampleMap()
    hk.times = core.G3VectorTime([core.G3Time(i*core.G3Units.seconds) for i in [0, 1, 2, 3, 4]])
    hk['speed'] = core.G3VectorDouble([1.2, 1.2, 1.2, 1.2, 1.2])
    f['blocks'].append(hk)
    f['block_names'].append('group0')

    hk = core.G3TimesampleMap()
    hk.times = core.G3VectorTime([core.G3Time(i*core.G3Units.seconds) for i in [0, 1, 2, 3, 4]])
    hk['position'] = core.G3VectorInt([1, 2, 3, 4, 5])
    hk['mode'] = core.G3VectorString(['going', 'going', 'going', 'going', 'gone/'])
    f['blocks'].append(hk)
    f['block_names'].append('group1')

    frames.append(f)
    return frames
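A short usage sketch (the file name is a placeholder): write the generated frames to disk with G3Writer, then read them back with G3File.

# Sketch: write the example v2 stream to a .g3 file, then read it back.
from spt3g import core

frames = get_v2_stream()
w = core.G3Writer('hk_example_v2.g3')   # placeholder file name
for fr in frames:
    w.Process(fr)
w.Flush()
del w

for fr in core.G3File('hk_example_v2.g3'):
    print(fr.get('hkagg_type'), len(fr.get('blocks', [])))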
Example No. 3
def get_test_block(length, keys=['a', 'b', 'c', 'd'], offset=0, ordered=True):
    type_cycle = [(core.G3VectorDouble, float), (core.G3VectorInt, int),
                  (core.G3VectorString, str), (core.G3VectorBool, bool)]
    t0 = core.G3Time('2019-01-01T12:30:00') + offset * SEC
    m = core.G3TimesampleMap()
    times = np.arange(length)
    if not ordered:
        np.random.shuffle(times)
    m.times = core.G3VectorTime(t0 + times * SEC)
    for i, k in enumerate(keys):
        y = (np.random.uniform(size=length) * 100).astype(int)
        constructor, cast_func = type_cycle[i % len(type_cycle)]
        vect = constructor(list(map(cast_func, y)))
        m[k] = vect
    return m
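The helper above assumes numpy imported as np and a module-level SEC constant; a minimal sketch of those assumptions (SEC as one second in G3 time units is a guess consistent with the other examples) plus a small call:

# Assumed context for get_test_block, plus a usage example.
import numpy as np
from spt3g import core

SEC = core.G3Units.seconds   # assumption: one second in G3 units

m = get_test_block(5, keys=['a', 'b'], offset=10)
print(list(m.times))
print({k: list(m[k]) for k in m.keys()})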
Example No. 4
    def to_frame(self, hksess=None, clear=False):
        """
        Returns a G3Frame based on the provider's blocks.

        Args:
            hksess (optional):
                If provided, the frame will be based on hksess's data frame.
                If not provided, the data will be put into a new Housekeeping frame.
            clear (bool):
                Clears provider data if True.
        """

        if hksess is not None:
            frame = hksess.data_frame(prov_id=self.prov_id)
        else:
            frame = core.G3Frame(core.G3FrameType.Housekeeping)

        frame['address'] = self.address
        frame['provider_session_id'] = self.sessid

        block_names = []
        for block_name, block in self.blocks.items():
            if not block.timestamps:
                continue
            try:
                m = core.G3TimesampleMap()
                m.times = g3_cast(block.timestamps, time=True)
                for key, ts in block.data.items():
                    m[key] = g3_cast(ts)
            except Exception as e:
                self.log.warn("Error received when casting timestream! {e}",
                              e=e)
                continue
            frame['blocks'].append(m)
            block_names.append(block_name)

        if 'block_names' in frame:
            frame['block_names'].extend(block_names)
        else:
            frame['block_names'] = core.G3VectorString(block_names)

        if clear:
            self.clear()
        return frame
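A hypothetical call site (provider, hksess, and writer are stand-in names, not from the original):

# Hypothetical usage: 'provider' is an instance of the class that defines
# to_frame, 'hksess' an so3g.hk.HKSessionHelper, 'writer' a core.G3Writer.
frame = provider.to_frame(hksess=hksess, clear=True)
writer.Process(frame)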
Example No. 5
dt = 0.001  # seconds
halfscan = 10  # degrees
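The loop below is an excerpt and relies on names defined earlier (v_az, frame_time, session, prov_id, writer); a minimal sketch of that assumed setup, with illustrative values:

# Assumed setup for the scan loop below (values are illustrative only).
import numpy as np
from spt3g import core
import so3g

v_az = 1.5                 # assumed azimuth scan speed, degrees per second
frame_time = 1600000000.   # assumed unix timestamp of the first sample

session = so3g.hk.HKSessionHelper(session_id=None, hkagg_version=2,
                                  description='Pointing demo.')
prov_id = session.add_provider(description='observatory.pointing')

writer = core.G3Writer('hk_pointing.g3')   # placeholder output file
writer.Process(session.session_frame())
writer.Process(session.status_frame())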

for i in range(10):
    # Number of samples
    n = int(halfscan / v_az / dt)
    # Vector of unix timestamps
    t = frame_time + dt * np.arange(n)
    # Vector of az and el
    az = v_az * dt * np.arange(n)
    if i % 2:
        az = -az
    el = az * 0 + 50.

    # Construct a "block", which is a named G3TimesampleMap.
    block = core.G3TimesampleMap()
    block.times = core.G3VectorTime(
        [core.G3Time(_t * core.G3Units.s) for _t in t])
    block['az'] = core.G3VectorDouble(az)
    block['el'] = core.G3VectorDouble(el)

    # Create an output data frame template associated with this
    # provider.
    frame = session.data_frame(prov_id)

    # Add the block and block name to the frame, and write it.
    frame['block_names'].append('pointing')
    frame['blocks'].append(block)
    writer.Process(frame)

    # For next iteration.
Example No. 6
    def Process(self, f):
        """Translates one frame to the target schema.  Irrelevant frames are
        passed through unmodified.

        Args:
          f: a G3Frame

        Returns:
          A list containing only the translated frame.  G3Pipeline
          compatibility would permit us to return a single frame here,
          instead of a length-1 list.  But we also sometimes call
          Process outside of a G3Pipeline, where a consistent output
          type is desirable.  Returning lists is most
          future-compatible; consumers that want to assume length-1
          should assert it to be true.

        """
        if f.type == core.G3FrameType.EndProcessing:
            core.log_info(str(self.stats))
            return [f]

        if f.type != core.G3FrameType.Housekeeping:
            self.stats['n_other'] += 1
            return [f]

        # It is an HK frame.
        orig_version = f.get('hkagg_version', 0)

        self.stats['n_hk'] += 1
        self.stats['versions'][orig_version] = self.stats['versions'].get(orig_version, 0) + 1

        if orig_version > self.target_version and not self.future_tolerant:
            raise ValueError(
                ('Translator to v%i encountered v%i, but future_tolerant=False.')
                % (self.TARGET_VERSION, orig_version))

        if orig_version >= self.target_version:
            return [f]

        # Always update the version, even if that's our only change...
        if 'hkagg_version' in f:
            if 'hkagg_version_orig' not in f:
                f['hkagg_version_orig'] = orig_version
            del f['hkagg_version']
        f['hkagg_version'] = self.target_version

        # No difference in Session/Status for v0, v1, v2.
        if f.get('hkagg_type') != so3g.HKFrameType.data:
            return [f]

        if self.target_version == 0:
            return [f]

        if orig_version == 0:
            # Pop the data blocks out of the frame.
            orig_blocks = f.pop('blocks')
            f['blocks'] = core.G3VectorFrameObject()

            # Now process the data blocks.
            for block in orig_blocks:
                new_block = core.G3TimesampleMap()
                new_block.times = so3g.hk.util.get_g3_time(block.t)
                for k in block.data.keys():
                    v = block.data[k]
                    new_block[k] = core.G3VectorDouble(v)
                f['blocks'].append(new_block)

        if self.target_version == 1:
            return [f]

        if orig_version <= 1:
            # Add 'block_names'.  Since we don't want to start
            # caching Block Stream information, just compute a good
            # block name based on the alphabetically first field in
            # the block.
            block_names = []
            for block in f['blocks']:
                field_names = list(sorted(block.keys()))
                block_names.append('block_for_%s' % field_names[0])
                assert(len(block_names[-1]) < 256)  # What have you done.
            f['block_names'] = core.G3VectorString(block_names)

        return [f]
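A sketch of running a translator like this in a pipeline (assuming the method belongs to so3g.hk.HKTranslator; file names are placeholders):

# Sketch: translate an HK file to the target schema version in a pipeline.
import so3g
from spt3g import core

pipe = core.G3Pipeline()
pipe.Add(core.G3Reader('hk_in.g3'))
pipe.Add(so3g.hk.HKTranslator())
pipe.Add(core.G3Writer('hk_translated.g3'))
pipe.Run()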
Example No. 7
    def test_00_basic(self):
        """Write a stream of HK frames and scan it for errors."""

        # Write a stream of HK frames.
        # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
        print('Streaming to %s' % test_file)
        w = core.G3Writer(test_file)

        # Create something to help us track the aggregator session.
        hksess = so3g.hk.HKSessionHelper(session_id=None,
                                         hkagg_version=2,
                                         description="Test HK data.")

        # Register a data provider.
        prov_id = hksess.add_provider(
            description='Fake data for the real world.')

        # Start the stream -- write the initial session and status frames.
        w.Process(hksess.session_frame())
        w.Process(hksess.status_frame())

        # Add a bunch of data frames
        t_next = time.time()
        for i in range(10):
            f = hksess.data_frame(prov_id=prov_id, timestamp=t_next)
            hk = core.G3TimesampleMap()
            speed = [1.2, 1.2, 1.3, 1.2, 1.3]
            hk.times = core.G3VectorTime([
                core.G3Time(_t * core.G3Units.second)
                for _t in t_next + np.arange(len(speed))
            ])
            hk['position'] = core.G3VectorDouble(np.arange(len(speed)))
            hk['speed'] = core.G3VectorDouble(speed)
            hk['error_bits'] = core.G3VectorInt([10] * len(speed))
            hk['mode_str'] = core.G3VectorString(['ok'] * len(speed))
            t_next += len(hk)
            f['blocks'].append(hk)
            f['block_names'].append('main_block')
            w.Process(f)

        w.Flush()
        del w

        print('Stream closed.\n\n')

        # Now play them back...
        print('Reading back:')
        for f in core.G3File(test_file):
            ht = f.get('hkagg_type')
            if ht == so3g.HKFrameType.session:
                print('Session: %i' % f['session_id'])
            elif ht == so3g.HKFrameType.status:
                print('  Status update: %i providers' % (len(f['providers'])))
            elif ht == so3g.HKFrameType.data:
                print('  Data: %i blocks' % len(f['blocks']))
                for i, block in enumerate(f['blocks']):
                    print('    Block %i' % i)
                    for k, v in block.items():
                        print('    %s' % k, v)

        # Scan and validate.
        print()
        print('Running HKScanner on the test data...')
        scanner = so3g.hk.HKScanner()
        pipe = core.G3Pipeline()
        pipe.Add(core.G3Reader(test_file))
        pipe.Add(scanner)
        pipe.Run()

        print('Stats: ', scanner.stats)
        print('Providers: ', scanner.providers)

        self.assertEqual(scanner.stats['concerns']['n_error'], 0)
        self.assertEqual(scanner.stats['concerns']['n_warning'], 0)