Example #1
 def nextreader(self):
     nextfilelist = self.files.pop(0)
     self.broadcast = False
     if isinstance(nextfilelist, str):
         self.broadcast = True
         if self.comm.rank == 0:
             self.cur_reader = core.G3Reader(nextfilelist)
         else:
             self.cur_reader = lambda fr: None
     else:
         toreadhere = nextfilelist[self.comm.rank::self.comm.size]
         self.cur_reader = core.G3Reader(toreadhere)
Example #2
 def test_30_serialization(self):
     m0 = get_test_block(100)
     m1 = get_test_block(200, offset=100)
     m2 = m0.concatenate(m1)
     m0.check()
     m1.check()
     m2.check()
     f = core.G3Frame()
     f['irreg0'] = m0
     f['irreg1'] = m1
     core.G3Writer('test.g3').Process(f)
     f = core.G3Reader('test.g3').Process(None)[0]
     f['irreg0'].check()
     f['irreg1'].check()
     f['irreg0'].concatenate(f['irreg1'])['b']
Example #3
    def test_serialize(self):
        """Confirm full ranges can be saved and loaded."""

        w = core.G3Writer(test_filename)
        for isize, val in bit_sizes:
            f = core.G3Frame()
            f['v'] = core.G3VectorInt([val] * 10)
            w(f)
        del w

        r = core.G3Reader(test_filename)
        for isize, val in bit_sizes:
            f = r(None)[0]
            v_in = list(f['v'])
            self.assertTrue(all([_v == val for _v in v_in]),
                            "Failed to save/load value %i" % val)
        del r
Example #4
    def start_stream(self, session, params=None):
        if params is None:
            params = {}

        time_per_file = params.get("time_per_file", 60*60) # [sec]
        data_dir = params.get("data_dir", "data/")

        self.log.info("Writing data to {}".format(data_dir))
        self.log.info("New file every {} seconds".format(time_per_file))

        reader = core.G3Reader("tcp://localhost:{}".format(self.port))
        writer = None

        last_meta = None
        self.is_streaming = True

        while self.is_streaming:
            if writer is None:
                start_time = datetime.utcnow()
                ts = start_time.timestamp()
                subdir = os.path.join(data_dir, "{:.5}".format(str(ts)))

                if not os.path.exists(subdir):
                    os.makedirs(subdir)

                filename = start_time.strftime("%Y-%m-%d-%H-%M-%S.g3")
                filepath = os.path.join(subdir, filename)
                writer = core.G3Writer(filename=filepath)
                if last_meta is not None:
                    writer(last_meta)

            frames = reader.Process(None)
            for f in frames:
                if f.type == core.G3FrameType.Housekeeping:
                    last_meta = f
                writer(f)

            if (datetime.utcnow().timestamp() - ts) > time_per_file:
                writer(core.G3Frame(core.G3FrameType.EndProcessing))
                writer = None

        if writer is not None:
            writer(core.G3Frame(core.G3FrameType.EndProcessing))

        return True, "Finished Streaming"
Example #5
    def _establish_reader_connection(self, timeout=5):
        """Establish the connection to the G3NetworkSender.

        Attempts to connect once. If the previous connection attempt was made
        very recently, waits briefly before returning, to prevent rapid
        reconnection attempts.

        Parameters
        ----------
        timeout : int
            Timeout in seconds for the G3Reader, after which the connection
            will drop and G3Reader will return empty lists on each Process
            call.

        Returns
        -------
        reader : spt3g.core.G3Reader
            The G3Reader object connected to the configured address and port.

        """
        reader = None

        try:
            reader = core.G3Reader(self.address, timeout=timeout)
            self.log.debug("G3Reader connection to {addr} established!",
                           addr=self.address)
        except RuntimeError:
            self.log.error("G3Reader could not connect.")

        # Prevent rapid connection attempts
        if self.last_connection_time is not None:
            t_diff = time.time() - self.last_connection_time
            if t_diff < 1:
                self.log.debug("Last connection was only {d} seconds ago. " +
                               "Sleeping for {t}.",
                               d=t_diff,
                               t=(1 - t_diff))
                time.sleep(1 - t_diff)

        self.last_connection_time = time.time()

        return reader
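    # Hypothetical usage sketch (not from the source file): a read loop might
    # call this helper whenever its reader is missing, relying on the throttle
    # above to avoid hammering the G3NetworkSender:
    #
    #     while self.is_running:
    #         if reader is None:
    #             reader = self._establish_reader_connection(timeout=5)
    #             if reader is None:
    #                 continue
    #         frames = reader.Process(None)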
Example #6
    def _readback_compare(self,
                          ts,
                          filename='readback_test.g3',
                          cleanup=True,
                          err_msg='(no detail)'):
        """Cache the data from ts, write ts to a file, read it back from file,
        compare to cached data.

        """
        # Cache
        fake_ts = (collections.namedtuple(
            'pseudo_ts', ['times', 'names', 'data']))(np.array(ts.times),
                                                      np.array(ts.names),
                                                      ts.data.copy())
        # Write
        f = core.G3Frame()
        f['item'] = ts
        core.G3Writer(filename).Process(f)
        # Read
        ts1 = core.G3Reader(filename).Process(None)[0]['item']
        self._check_equal(fake_ts, ts1, err_msg=err_msg)
        if cleanup:
            os.remove(filename)
Example #7
def read_spt3g_obs(file):
    reader = c3g.G3Reader(file, 1)
    f = list(reader(None))
    obframe = f[0]
    obspat = re.compile("^obs_(.*)")
    detpat = re.compile("{}_(.*)".format(STR_QUAT))

    obs = dict()
    props = dict()
    dets = dict()
    nsamp = None
    for k in obframe.keys():
        if k == "samples":
            nsamp = int(obframe[k])
        else:
            obsmat = obspat.match(k)
            detmat = detpat.match(k)
            if obsmat is not None:
                obs[obsmat.group(1)] = s3utils.from_g3_type(obframe[k])
            elif detmat is not None:
                dets[detmat.group(1)] = np.array(obframe[k], dtype=np.float64)
            else:
                props[k] = s3utils.from_g3_type(obframe[k])
    return obs, props, dets, nsamp
Example #8
    def test_40_encoding_serialized(self):
        test_file = 'test_g3super.g3'
        offsets = {
            'int32': [0, 2**25, 2**26 / 3., -1.78 * 2**27],
            'int64': [0, 2**25, 2**26 / 3., -1.78 * 2**27],
            'float32': [0],
            'float64': [0, 2**25, 2**26 / 3., -1.78 * 2**27, 1.8 * 2**35],
        }
        decimals = 2
        precision = 10**-decimals

        w = core.G3Writer(test_file)
        records = []
        for dtype in ALL_DTYPES:
            for offset in offsets[dtype]:
                f = core.G3Frame()
                ts = self._get_ts(4, 100, sigma=100, dtype=dtype)
                ts.data += int(offset)
                if dtype in FLOAT_DTYPES:
                    ts.data[:] = np.round(ts.data)
                    ts.calibrate([.01] * ts.data.shape[0])
                records.append(ts.data.copy())
                f['a'] = ts
                w.Process(f)
        del w
        # readback
        r = core.G3Reader(test_file)
        for dtype in ALL_DTYPES:
            for offset in offsets[dtype]:
                err_msg = f'Failed for dtype={dtype}, offset={offset}'
                ts2 = r.Process(None)[0]['a']
                record = records.pop(0)
                np.testing.assert_allclose(record,
                                           ts2.data,
                                           atol=precision * 1e-3,
                                           err_msg=err_msg)
Example #9
File: scanner.py Project: jit9/so3g
                for k, v in b.data.items():
                    if len(v) != len(b.t):
                        core.log_error(
                            'Field "%s" has %i samples but .t has %i samples.'
                            % (k, len(v), len(b.t)))
                        self.stats['concerns']['n_error'] += 1
            if len(t_check) and abs(min(t_check) - t_this) > 60:
                core.log_warn(
                    'data frame timestamp (%.1f) does not correspond to '
                    'data timestamp vectors (%s).' % (t_this, t_check),
                    unit='HKScanner')
                self.stats['concerns']['n_warning'] += 1

        else:
            core.log_warn('Weird hkagg_type: %i' % f['hkagg_type'],
                          unit='HKScanner')
            self.stats['concerns']['n_warning'] += 1

        return [f]


if __name__ == '__main__':
    # Run me on a G3File containing a Housekeeping stream.
    core.set_log_level(core.G3LogLevel.LOG_INFO)
    import sys
    for f in sys.argv[1:]:
        p = core.G3Pipeline()
        p.Add(core.G3Reader(f))
        p.Add(HKScanner())
        p.Run()
Example #10
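# (This snippet starts mid-script: the G3Frame f, the entry f['Five'], and the
# first vector v are created earlier in the original test file.)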
f['Vec'] = v
v = core.G3VectorInt([17, 42, 87])
f['VecInt'] = v
m = core.G3MapDouble()
m['Six'] = 6
m['GoingOnSixteen'] = 15.9
f['Map'] = m

if len(sys.argv) > 1:
    core.G3Writer(sys.argv[1])(f)
    sys.exit(0)

# For now, we test files from big-endian (PPC64) and little-endian (amd64)
# 64-bit systems. Should include some 32-bit ones.

for test in ['test-be.g3', 'test-le.g3']:
    print(test)
    testdata = core.G3Reader(os.path.join(testpath, test))(None)[0]

    assert (testdata['Five'] == f['Five'])
    assert (len(testdata['Vec']) == len(f['Vec']))
    for i in range(len(testdata['Vec'])):
        assert (testdata['Vec'][i] == f['Vec'][i])
    assert (len(testdata['VecInt']) == len(f['VecInt']))
    for i in range(len(testdata['VecInt'])):
        assert (testdata['VecInt'][i] == f['VecInt'][i])
    assert (len(testdata['Map']) == len(f['Map']))
    assert (testdata['Map'].keys() == f['Map'].keys())
    for i in testdata['Map'].keys():
        assert (testdata['Map'][i] == f['Map'][i])
Example #11
File: test_hkagg.py Project: jit9/so3g
    def test_00_basic(self):
        """Write a stream of HK frames and scan it for errors."""

        # Write a stream of HK frames.
        # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
        print('Streaming to %s' % test_file)
        w = core.G3Writer(test_file)

        # Create something to help us track the aggregator session.
        hksess = so3g.hk.HKSessionHelper(session_id=None,
                                         description="Test HK data.")

        # Register a data provider.
        prov_id = hksess.add_provider(
            description='Fake data for the real world.')

        # Start the stream -- write the initial session and status frames.
        w.Process(hksess.session_frame())
        w.Process(hksess.status_frame())

        # Add a bunch of data frames
        t_next = time.time()
        for i in range(10):
            f = hksess.data_frame(prov_id=prov_id, timestamp=t_next)
            hk = so3g.IrregBlockDouble()
            hk.prefix = 'hwp_'
            hk.data['position'] = [1, 2, 3, 4, 5]
            hk.data['speed'] = [1.2, 1.2, 1.3, 1.2, 1.3]
            hk.t = t_next + np.arange(len(hk.data['speed']))
            t_next += len(hk.data['speed'])
            f['blocks'].append(hk)
            w.Process(f)

        w.Flush()
        del w

        print('Stream closed.\n\n')

        # Now play them back...
        print('Reading back:')
        for f in core.G3File(test_file):
            ht = f.get('hkagg_type')
            if ht == so3g.HKFrameType.session:
                print('Session: %i' % f['session_id'])
            elif ht == so3g.HKFrameType.status:
                print('  Status update: %i providers' % (len(f['providers'])))
            elif ht == so3g.HKFrameType.data:
                print('  Data: %i blocks' % len(f['blocks']))
                for block in f['blocks']:
                    for k, v in block.data.items():
                        print('    %s%s' % (block.prefix, k), v)

        # Scan and validate.
        print()
        print('Running HKScanner on the test data...')
        scanner = so3g.hk.HKScanner()
        pipe = core.G3Pipeline()
        pipe.Add(core.G3Reader(test_file))
        pipe.Add(scanner)
        pipe.Run()

        print('Stats: ', scanner.stats)
        print('Providers: ', scanner.providers)

        self.assertEqual(scanner.stats['concerns']['n_error'], 0)
        self.assertEqual(scanner.stats['concerns']['n_warning'], 0)
Example #12
    print('Parent')

    send = core.G3NetworkSender(hostname='*', port=port)
    time.sleep(1)  # XXX: how to signal that the remote end is ready?
    print('Sending')

    for f in frames:
        send(f)
    send(core.G3Frame(core.G3FrameType.EndProcessing))

    pid, status = os.wait()
    print('Child Status: ', status)
    if status == 0:
        print('OK')
    sys.exit(status)
else:
    # Child
    print('Child')

    recv = core.G3Reader(filename='tcp://localhost:%d' % port)
    rframes = []
    for k in range(len(frames)):
        chunk = recv(None)
        print(chunk[0])
        rframes += chunk
    assert (len(rframes)) == len(frames)
    for i in range(len(rframes)):
        assert (rframes[i]['Sequence'] == i)

    sys.exit(0)
Example #13
        return [f]

    def __call__(self, *args, **kwargs):
        return self.Process(*args, **kwargs)


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='This program can be used to convert SO HK Frames to the '
        'latest schema version.')
    parser.add_argument('--output-file', '-o', default='out.g3')
    parser.add_argument('--target-version', type=int)
    parser.add_argument('files', nargs='+',
                        help="SO Housekeeping files to convert.")
    args = parser.parse_args()

    # Run me on a G3File containing a Housekeeping stream.
    core.set_log_level(core.G3LogLevel.LOG_INFO)

    translator_args = {}
    if args.target_version is not None:
        translator_args['target_version'] = args.target_version

    print(f'Streaming to {args.output_file}')
    p = core.G3Pipeline()
    p.Add(core.G3Reader(args.files))
    p.Add(HKTranslator(**translator_args))
    p.Add(core.G3Writer(args.output_file))
    p.Run()
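    # Example invocation (script and file names here are hypothetical):
    #
    #     python hk_translate.py -o converted.g3 --target-version 2 in1.g3 in2.g3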
Example #14
    def read(self, session, params=None):
        """read(src='tcp://localhost:4532')

        **Process** - Process for reading in G3Frames from a source or list of
        sources. If the source is an address beginning with ``tcp://``, the
        agent will attempt to connect to a G3NetworkSender at the specified
        location. The ``src`` param can also be a filepath or list of
        filepaths pointing to G3Files to be streamed. If a list of filepaths
        is passed, the files are streamed in order, each one starting once the
        previous file has finished.
        """

        self._running = True
        session.set_status('running')

        src_idx = 0
        if isinstance(params['src'], str):
            sources = [params['src']]
        else:
            sources = params['src']

        reader = None
        source = None
        source_offset = 0
        while self._running:

            if reader is None:
                try:
                    source = sources[src_idx]
                    source_is_file = not source.startswith('tcp://')
                    reader = core.G3Reader(source, timeout=5)
                except RuntimeError as e:
                    if source_is_file:
                        # Raise error if file cannot be found
                        raise e
                    else:
                        # If not a file, log error and try again
                        self.log.error(
                            "G3Reader could not connect! Retrying in 10 sec.")
                        time.sleep(10)
                        continue

            frames = reader.Process(None)
            if not frames:
                # If source is a file, start over with next file or break if
                # finished all sources. If socket, just reset reader and try to
                # reconnect
                if source_is_file:
                    src_idx += 1
                    if src_idx >= len(sources):
                        self.log.info("Finished reading all sources")
                        break
                reader = None
                continue

            frame = frames[0]

            # If this source is a file, this will shift the timestamps so that
            # data lines up with the current timestamp instead of using the
            # timestamps in the file
            if source_is_file and (not source_offset):
                source_offset = frame['time'].time / core.G3Units.s \
                                - time.time()
            elif not source_is_file:
                source_offset = 0

            if frame.type == core.G3FrameType.Wiring:
                self._process_status(frame)
                continue
            elif frame.type == core.G3FrameType.Scan:
                out = self._process_data(frame, source_offset=source_offset)
            else:
                continue

            for f in out:
                # This will block until there's a free spot in the queue.
                # This is useful if the src is a file and reader.Process does
                # not block
                self.out_queue.put(f)
        return True, "Stopped read process"
Example #15
File: reframer.py Project: tskisner/so3g
            if changes:
                output.append(f)

        elif f['hkagg_type'] == so3g.HKFrameType.data:
            fb = self.providers[f['prov_id']]
            fb.add(f)
            if fb.ready():
                output += fb.rebundle()

        else:
            raise ValueError('Invalid hkagg_type')

        return output


if __name__ == '__main__':
    from so3g.hk import HKScanner, HKReframer
    import sys

    core.set_log_level(core.G3LogLevel.LOG_INFO)

    files = sys.argv[1:]
    p = core.G3Pipeline()
    p.Add(core.G3Reader(files))
    p.Add(HKScanner())
    rf = HKReframer()
    p.Add(rf)
    p.Add(HKScanner())
    p.Add(core.G3Writer, filename='out.g3')
    p.Run()
Example #16
    def test_00_basic(self):
        """Write a stream of HK frames and scan it for errors."""

        # Write a stream of HK frames.
        # (Inspect the output with 'spt3g-dump hk_out.g3 so3g'.)
        print('Streaming to %s' % test_file)
        w = core.G3Writer(test_file)

        # Create something to help us track the aggregator session.
        hksess = so3g.hk.HKSessionHelper(session_id=None,
                                         hkagg_version=2,
                                         description="Test HK data.")

        # Register a data provider.
        prov_id = hksess.add_provider(
            description='Fake data for the real world.')

        # Start the stream -- write the initial session and status frames.
        w.Process(hksess.session_frame())
        w.Process(hksess.status_frame())

        # Add a bunch of data frames
        t_next = time.time()
        for i in range(10):
            f = hksess.data_frame(prov_id=prov_id, timestamp=t_next)
            hk = core.G3TimesampleMap()
            speed = [1.2, 1.2, 1.3, 1.2, 1.3]
            hk.times = [
                core.G3Time(_t * core.G3Units.second)
                for _t in t_next + np.arange(len(speed))
            ]
            hk['position'] = core.G3VectorDouble(np.arange(len(speed)))
            hk['speed'] = core.G3VectorDouble(speed)
            hk['error_bits'] = core.G3VectorInt([10] * len(speed))
            hk['mode_str'] = core.G3VectorString(['ok'] * len(speed))
            t_next += len(hk)
            f['blocks'].append(hk)
            f['block_names'].append('main_block')
            w.Process(f)

        w.Flush()
        del w

        print('Stream closed.\n\n')

        # Now play them back...
        print('Reading back:')
        for f in core.G3File(test_file):
            ht = f.get('hkagg_type')
            if ht == so3g.HKFrameType.session:
                print('Session: %i' % f['session_id'])
            elif ht == so3g.HKFrameType.status:
                print('  Status update: %i providers' % (len(f['providers'])))
            elif ht == so3g.HKFrameType.data:
                print('  Data: %i blocks' % len(f['blocks']))
                for i, block in enumerate(f['blocks']):
                    print('    Block %i' % i)
                    for k, v in block.items():
                        print('    %s' % k, v)

        # Scan and validate.
        print()
        print('Running HKScanner on the test data...')
        scanner = so3g.hk.HKScanner()
        pipe = core.G3Pipeline()
        pipe.Add(core.G3Reader(test_file))
        pipe.Add(scanner)
        pipe.Run()

        print('Stats: ', scanner.stats)
        print('Providers: ', scanner.providers)

        self.assertEqual(scanner.stats['concerns']['n_error'], 0)
        self.assertEqual(scanner.stats['concerns']['n_warning'], 0)