Esempio n. 1
0
    def __init__(self, group_prefix="", attributes=None):
        """Initialise receiver state and describe the expected heap items.

        Args:
            group_prefix: Prefix string stored for this group (used by the
                rest of the class; semantics not visible here).
            attributes: Optional mapping of extra attributes. A fresh empty
                dict is used when omitted.
        """
        # self.plottingQueue = queue.PriorityQueue()
        # self.__delay = delay
        self.__group_prefix = group_prefix
        # BUG FIX: the default was a mutable `{}`, which Python evaluates
        # once and shares across every instance created without an explicit
        # argument.  Use a None sentinel and allocate per instance.
        self.__attributes = {} if attributes is None else attributes

        # Description of heap items
        # ToDo: move to gated spectrometer or wherever the stream format is
        # defined
        self.ig = spead2.ItemGroup()
        # Every metadata field is transported as six big-endian unsigned
        # bytes; register them all from one table instead of nine
        # copy-pasted calls.
        for item_id, name in (
                (5632, "timestamp_count"),
                (5633, "polarization"),
                (5634, "noise_diode_status"),
                (5635, "fft_length"),
                (5636, "number_of_input_samples"),
                (5637, "sync_time"),
                (5638, "sampling_rate"),
                (5639, "naccumulate"),
                (5641, "number_of_saturated_samples"),
        ):
            self.ig.add_item(item_id, name, "", (6, ), dtype=">u1")
Esempio n. 2
0
def run_stream(stream, name, args):
    """Consume heaps from *stream* until stopped or ``args.max_heaps`` seen.

    Prints each received heap, optionally dumps its descriptors, and prints
    the items that changed with each update.
    """
    ig = spead2.ItemGroup()
    heaps_seen = 0
    while True:
        try:
            if heaps_seen == args.max_heaps:
                stream.stop()
                break
            heap = yield From(stream.get())
            print("Received heap {} on stream {}".format(heap.cnt, name))
            heaps_seen += 1
            try:
                if args.descriptors:
                    for raw in heap.get_descriptors():
                        descriptor = spead2.Descriptor.from_raw(
                            raw, heap.flavour)
                        print('''\
Descriptor for {0.name} ({0.id:#x})
  description: {0.description}
  format:      {0.format}
  dtype:       {0.dtype}
  shape:       {0.shape}'''.format(descriptor))
                updated = ig.update(heap)
                for key, item in updated.items():
                    if args.values:
                        print(key, '=', item.value)
                    else:
                        print(key)
            except ValueError as exc:
                print("Error raised processing heap: {}".format(exc))
        except spead2.Stopped:
            print("Shutting down stream {} after {} heaps".format(
                name, heaps_seen))
            break
Esempio n. 3
0
 async def transmit_item_group_async(self,
                                     item_group,
                                     memcpy,
                                     allocator,
                                     new_order='='):
     """Send *item_group* through the transport and return what is received.

     A receiver is prepared first, the heap (plus end-of-stream marker) is
     sent asynchronously, and every received heap is folded into a fresh
     item group that is returned.
     """
     pool = spead2.ThreadPool(2)
     receiver = spead2.recv.asyncio.Stream(pool, loop=self.loop)
     receiver.set_memcpy(memcpy)
     if allocator is not None:
         receiver.set_memory_allocator(allocator)
     await self.prepare_receiver(receiver)
     sender = await self.prepare_sender(pool)
     heap_gen = spead2.send.HeapGenerator(item_group)
     await sender.async_send_heap(heap_gen.get_heap())
     await sender.async_send_heap(heap_gen.get_end())
     await sender.async_flush()
     result = spead2.ItemGroup()
     while True:
         try:
             heap = await receiver.get()
         except spead2.Stopped:
             break
         result.update(heap, new_order)
     return result
Esempio n. 4
0
    def test_memory_regions(self, num_items):
        """Round-trip `num_items` arrays registered as send memory regions."""
        receiver = spead2.recv.Stream(spead2.ThreadPool(),
                                      spead2.recv.StreamConfig())
        receiver.add_udp_ibv_reader(
            spead2.recv.UdpIbvConfig(
                endpoints=[(self.MCAST_GROUP, 8876)],
                interface_address=self._interface_address()))

        ig = spead2.send.ItemGroup()
        data = [np.random.randn(50) for i in range(num_items)]
        for i, arr in enumerate(data):
            ig.add_item(id=0x2345 + i,
                        name=f'name {i}',
                        description=f'description {i}',
                        shape=arr.shape,
                        dtype=arr.dtype,
                        value=arr)
        sender = spead2.send.UdpIbvStream(
            spead2.ThreadPool(), spead2.send.StreamConfig(rate=1e7),
            spead2.send.UdpIbvConfig(
                endpoints=[(self.MCAST_GROUP, 8876)],
                interface_address=self._interface_address(),
                memory_regions=data))
        sender.send_heap(ig.get_heap())
        sender.send_heap(ig.get_end())

        # Everything sent must come back unchanged.
        recv_ig = spead2.ItemGroup()
        for heap in receiver:
            recv_ig.update(heap)
        assert_item_groups_equal(ig, recv_ig)
Esempio n. 5
0
 def test_replace_reset_value(self):
     """When a descriptor is replaced with an identical one but a new
     value, the new value must take effect and the version must be
     incremented."""
     ig = spead2.ItemGroup()
     ig.add_item(0x1000, 'item 1', 'item 1', (), np.int32,
                 value=np.int32(4))
     ig.add_item(0x1001, 'item 2', 'item 2', (), np.int32,
                 value=np.int32(5))
     item1 = ig[0x1000]
     item2 = ig[0x1001]
     old_version1 = item1.version
     old_version2 = item2.version
     # Re-add item 1 with the same descriptor but a new value.
     ig.add_item(0x1000, 'item 1', 'item 1', (), np.int32,
                 value=np.int32(6))
     # Re-add item 2 with the same descriptor and no value at all.
     ig.add_item(0x1001, 'item 2', 'item 2', (), np.int32)
     # The item objects themselves must be preserved.
     assert_is(item1, ig[0x1000])
     assert_is(item2, ig[0x1001])
     # Item 1 picked up the new value and a bumped version.
     assert_equal(np.int32(6), item1.value)
     assert_greater(item1.version, old_version1)
     # Item 2 kept its value and its version.
     assert_equal(np.int32(5), item2.value)
     assert_equal(item2.version, old_version2)
Esempio n. 6
0
 def transmit_item_group(self, item_group, memcpy, allocator):
     """Send *item_group* via legacy PySPEAD and receive it back with spead2.

     Skips the test when the legacy ``spead`` module is unavailable.
     """
     if not self.spead:
         raise SkipTest('spead module not importable')
     buffer = io.BytesIO()
     transmitter = self.spead.Transmitter(buffer)
     legacy_ig = self.spead.ItemGroup()
     for item in item_group.values():
         # PySPEAD only supports either 1D variable or fixed-size
         if item.is_variable_size():
             assert len(item.shape) == 1
             legacy_shape = -1
         else:
             legacy_shape = item.shape
         if item.format:
             fmt = self.spead.mkfmt(*item.format)
             ndarray = None
         else:
             fmt = self.spead.DEFAULT_FMT
             ndarray = np.array(item.value)
         legacy_ig.add_item(
                 id=item.id,
                 name=item.name,
                 description=item.description,
                 shape=legacy_shape,
                 fmt=fmt,
                 ndarray=ndarray)
         legacy_ig[item.name] = item.value
     transmitter.send_heap(legacy_ig.get_heap())
     transmitter.end()
     # Receive with spead2 in PySPEAD bug-compatibility mode.
     receiver = spead2.recv.Stream(spead2.ThreadPool(1),
                                   bug_compat=spead2.BUG_COMPAT_PYSPEAD_0_5_2)
     receiver.set_memcpy(memcpy)
     if allocator is not None:
         receiver.set_memory_allocator(allocator)
     receiver.add_buffer_reader(buffer.getvalue())
     result = spead2.ItemGroup()
     for heap in receiver:
         result.update(heap)
     return result
Esempio n. 7
0
    def __init__(self,
                 fits_interface,
                 number_input_streams,
                 max_age=10,
                 drop_invalid_packages=True):
        """Set up aggregation state and the SPEAD heap item description.

        Args:
            fits_interface: Downstream interface that receives packages.
            number_input_streams: Number of input streams aggregated into
                one package.
            max_age: Maximum package age before it is considered stale —
                units not visible here; presumably seconds, TODO confirm.
            drop_invalid_packages: Whether invalid packages are dropped.
        """
        self.__max_age = max_age
        self.__fits_interface = fits_interface
        self.__drop_invalid_packages = drop_invalid_packages
        self.invalidPackages = 0  # count invalid packages

        self.plottingQueue = queue.PriorityQueue()

        # Description of heap items: each metadata field is carried as six
        # big-endian unsigned bytes.
        self.ig = spead2.ItemGroup()
        for item_id, name in ((5632, "timestamp_count"),
                              (5633, "polarization"),
                              (5634, "noise_diode_status"),
                              (5635, "fft_length"),
                              (5636, "number_of_input_samples"),
                              (5637, "sync_time"),
                              (5638, "sampling_rate"),
                              (5639, "naccumulate")):
            self.ig.add_item(item_id, name, "", (6, ), dtype=">u1")

        # Queue for aggregated objects
        self.__packages_in_preparation = {}
        self.__number_of_input_streams = number_input_streams

        # Now is the latest checked package
        self.__now = 0

        self.__bandpass = None
Esempio n. 8
0
    def run(self):
        """Runs the receiver.

        Reads heaps from the SPEAD stream until it ends.  A heap containing
        'num_channels' is treated as a header heap and triggers creation of
        the Measurement Set; heaps containing 'vis' are written into it.
        """
        item_group = spead2.ItemGroup()

        # Iterate over all heaps in the stream.
        self._log.info("Waiting to receive on port {}".format(self._port))
        for heap in self._stream:
            # Extract data from the heap into a dictionary.
            data = {}
            items = item_group.update(heap)
            for item in items.values():
                data[item.name] = item.value

            # Read the header and create the Measurement Set.
            if 'num_channels' in data:
                self._header = {
                    'freq_start_hz': data['freq_start_hz'],
                    'freq_inc_hz': data['freq_inc_hz'],
                    'num_baselines': data['num_baselines'],
                    'num_channels': data['num_channels'],
                    'num_pols': data['num_pols'],
                    'num_stations': data['num_stations'],
                    'phase_centre_ra_deg': data['phase_centre_ra_deg'],
                    'phase_centre_dec_deg': data['phase_centre_dec_deg'],
                    'time_average_sec': data['time_average_sec'],
                    'time_inc_sec': data['time_inc_sec'],
                    'time_start_mjd_utc': data['time_start_mjd_utc']
                }
                self._log.info(
                    "Receiving {} channel(s) starting at {} MHz.".format(
                        data['num_channels'], data['freq_start_hz'] / 1e6))
                # Create the Measurement Set only once, on the first header.
                if self._measurement_set is None:
                    self._measurement_set = oskar.MeasurementSet.create(
                        self._file_name, data['num_stations'],
                        data['num_channels'], data['num_pols'],
                        data['freq_start_hz'], data['freq_inc_hz'])
                    self._measurement_set.set_phase_centre(
                        math.radians(data['phase_centre_ra_deg']),
                        math.radians(data['phase_centre_dec_deg']))

            # Write visibility data from the SPEAD heap.
            if 'vis' in data:
                vis = data['vis']
                time_inc_sec = self._header['time_inc_sec']
                # Coordinates (uvw/times) apply to the whole time block, so
                # write them only when the first channel arrives.
                if data['channel_index'] == 0:
                    self._measurement_set.write_coords(
                        self._header['num_baselines'] * data['time_index'],
                        self._header['num_baselines'], vis['uu'], vis['vv'],
                        vis['ww'], self._header['time_average_sec'],
                        time_inc_sec,
                        self._header['time_start_mjd_utc'] * 86400 +
                        time_inc_sec * (data['time_index'] + 0.5))
                self._measurement_set.write_vis(
                    self._header['num_baselines'] * data['time_index'],
                    data['channel_index'], 1, self._header['num_baselines'],
                    vis['amp'])

        # Stop the stream when there are no more heaps.
        self._stream.stop()
Esempio n. 9
0
 def test_replace_rename(self):
     """When a new item is added with a known ID but a different name, the
     old item must cease to exist."""
     group = spead2.ItemGroup()
     group.add_item(0x1000, 'item 1', 'item 1', (), np.int32)
     group.add_item(0x1000, 'renamed', 'renamed', (), np.int32)
     # Only the renamed item may remain, under the same ID.
     assert list(group.ids()) == [0x1000]
     assert list(group.keys()) == ['renamed']
Esempio n. 10
0
    def transmit_item_groups(self,
                             item_groups,
                             *,
                             memcpy,
                             allocator,
                             new_order='=',
                             round_robin=False):
        """Transmit `item_groups` over the chosen transport.

        Return the item groups received at the other end. Each item group will
        be transmitted over a separate substream (thus, the transport must
        support substreams if `item_groups` has more than one element).
        """
        if self.requires_ipv6:
            self.check_ipv6()
        config = spead2.recv.StreamConfig(memcpy=memcpy)
        if allocator is not None:
            config.memory_allocator = allocator
        receivers = [spead2.recv.Stream(spead2.ThreadPool(), config)
                     for _ in item_groups]
        self.prepare_receivers(receivers)
        sender = self.prepare_senders(spead2.ThreadPool(), len(item_groups))
        generators = [spead2.send.HeapGenerator(group)
                      for group in item_groups]
        if len(item_groups) == 1:
            # This is a separate code path to give coverage of the case where
            # the substream index is implicit.
            sender.send_heap(generators[0].get_heap())
            sender.send_heap(generators[0].get_end())
        else:
            # Use reversed order so that if everything is actually going
            # through the same transport it will get picked up.
            indexed = list(enumerate(generators))
            if round_robin:
                sender.send_heaps([
                    spead2.send.HeapReference(gen.get_heap(),
                                              substream_index=i)
                    for i, gen in reversed(indexed)
                ], spead2.send.GroupMode.ROUND_ROBIN)
                sender.send_heaps([
                    spead2.send.HeapReference(gen.get_end(),
                                              substream_index=i)
                    for i, gen in indexed
                ], spead2.send.GroupMode.ROUND_ROBIN)
            else:
                for i, gen in reversed(indexed):
                    sender.send_heap(gen.get_heap(), substream_index=i)
                for i, gen in indexed:
                    sender.send_heap(gen.get_end(), substream_index=i)
        # Drain every receiver into its own item group.
        results = []
        for receiver in receivers:
            group = spead2.ItemGroup()
            for heap in receiver:
                group.update(heap, new_order)
            results.append(group)
        return results
Esempio n. 11
0
 def __init__(self, stream):
     """Attach to *stream* and declare the heap items this receiver reads."""
     self._stream = stream
     self._item_group = spead2.ItemGroup()
     # Register the expected items from one table: (id, name, shape, dtype).
     for item_id, name, shape, dtype in (
             (0x1600, "timestamp", (1, ), ">I"),
             (0x4101, "feng_idx", (1, ), ">I"),
             (0x4103, "chan_idx", (1, ), ">I"),
             (0x4300, "raw_data", (1024, ), "b")):
         self._item_group.add_item(item_id, name, "", shape, dtype=dtype)
     self._counts = {}
     self._integrations = {}
    def run(self):
        """Start the visibility receiver to receive SPEAD heaps.

        Reads SPEAD heaps and writes them to pickle files.
        """
        # ms = {}
        self._log.info('Waiting to receive...')
        for stream in self._streams:
            # Each stream gets its own item group so descriptors from one
            # stream cannot leak into another.
            item_group = spead2.ItemGroup()

            # Loop over all heaps in the stream.
            for heap in stream:
                self._log.info("Received heap {}".format(heap.cnt))

                # Extract data from the heap into a dictionary.
                data = {}
                items = item_group.update(heap)
                for item in items.values():
                    data[item.name] = item.value

                # Skip if the heap does not contain visibilities.
                if 'complex_visibility' not in data:
                    continue

                # Get data dimensions.
                # NOTE(review): assumes heap.cnt starts at 1 and the first
                # heap carries no data — confirm against the sender.
                time_index = heap.cnt - 2  # Extra -1: first heap is empty.
                start_channel = data['channel_baseline_id'][0][0]
                num_channels = data['channel_baseline_count'][0][0]
                max_per_file = self._config['output']['max_times_per_file']

                # Find out which file this heap goes in.
                # Get the time and channel range for the file.
                file_start_time = (time_index // max_per_file) * max_per_file
                file_end_time = file_start_time + max_per_file - 1

                # Construct filename.
                base_name = 'vis_T%04d-%04d_C%04d-%04d' % (
                    file_start_time, file_end_time, start_channel,
                    start_channel + num_channels - 1)
                data['time_index'] = time_index

                # Write visibility data.
                # Append mode so multiple heaps accumulate in the same file.
                with open(base_name + '.p', 'ab') as f:
                    pickle.dump(data, f, protocol=2)

                # Write to Measurement Set if required.
                # ms_name = base_name + '.ms'
                # if ms_name not in ms:
                #     if len(ms) > 5:
                #         ms.popitem()  # Don't open too many files at once.
                #     ms[ms_name] = _ms_create(
                #         ms_name, config, start_channel, num_channels) \
                #         if not os.path.isdir(ms_name) else _ms_open(ms_name)
                # _ms_write(ms[ms_name], file_start_time, start_channel, data)

            # Stop the stream when there are no more heaps.
            stream.stop()
Esempio n. 13
0
 async def transmit_item_groups_async(self,
                                      item_groups,
                                      *,
                                      memcpy,
                                      allocator,
                                      new_order='=',
                                      group_mode=None):
     """Send each item group over its own substream and return what arrives.

     Async counterpart of the synchronous transmit helper: one receiver per
     item group, end-of-stream heaps after the data heaps, then a flush.
     """
     if self.requires_ipv6:
         self.check_ipv6()
     config = spead2.recv.StreamConfig(memcpy=memcpy)
     if allocator is not None:
         config.memory_allocator = allocator
     receivers = [spead2.recv.asyncio.Stream(spead2.ThreadPool(), config)
                  for _ in item_groups]
     await self.prepare_receivers(receivers)
     sender = await self.prepare_senders(spead2.ThreadPool(),
                                         len(item_groups))
     generators = [spead2.send.HeapGenerator(group)
                   for group in item_groups]
     if len(item_groups) == 1:
         # This is a separate code path to give coverage of the case where
         # the substream index is implicit.
         await sender.async_send_heap(generators[0].get_heap())
         await sender.async_send_heap(generators[0].get_end())
     else:
         # Use reversed order so that if everything is actually going
         # through the same transport it will get picked up.
         indexed = list(enumerate(generators))
         if group_mode is not None:
             await sender.async_send_heaps([
                 spead2.send.HeapReference(gen.get_heap(),
                                           substream_index=i)
                 for i, gen in reversed(indexed)
             ], group_mode)
             # Use a HeapReferenceList to test it
             hrl = spead2.send.HeapReferenceList([
                 spead2.send.HeapReference(gen.get_end(),
                                           substream_index=i)
                 for i, gen in indexed
             ])
             await sender.async_send_heaps(hrl, group_mode)
         else:
             for i, gen in reversed(indexed):
                 await sender.async_send_heap(gen.get_heap(),
                                              substream_index=i)
             for i, gen in indexed:
                 await sender.async_send_heap(gen.get_end(),
                                              substream_index=i)
     await sender.async_flush()
     # Drain every receiver into its own item group.
     results = []
     for receiver in receivers:
         group = spead2.ItemGroup()
         for heap in receiver:
             group.update(heap, new_order)
         results.append(group)
     return results
Esempio n. 14
0
 def __init__(self, stream):
     """Attach to *stream* and declare the heap items this receiver reads."""
     self._stream = stream
     self._item_group = spead2.ItemGroup()
     # Register the expected items from one table: (id, name, shape, dtype).
     for item_id, name, shape, dtype in (
             (5632, "timestamp", (1, ), ">I"),
             (21845, "beam_idx", (1, ), ">I"),
             (16643, "chan_idx", (1, ), ">I"),
             (21846, "raw_data", (8192, ), "b")):
         self._item_group.add_item(item_id, name, "", shape, dtype=dtype)
     self._counts = {}
     self._integrations = {}
Esempio n. 15
0
 def helper(header):
     """Feed a raw numpy descriptor *header* through the receiver and check
     that decoding it raises ValueError."""
     descriptor = self.flavour.make_numpy_descriptor_raw(
         0x1234, 'name', 'description', header)
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(1, [descriptor]))
     assert_equal(1, len(heaps))
     group = spead2.ItemGroup()
     assert_raises(ValueError, group.update, heaps[0])
Esempio n. 16
0
    def run(self):
        """Run the receiver. Each heap is put in a dictionary and sent to our output"""
        group = spead2.ItemGroup()

        for heap in self._stream:
            updated = group.update(heap)
            payload = {}
            for item in updated.values():
                payload[item.name] = item.value
            self.outputs[0].write(pickle.dumps(payload))

        self._stream.stop()
Esempio n. 17
0
 def test_nonascii_description(self):
     """Receiving non-ASCII characters in an item description must raise
     :py:exc:`UnicodeDecodeError`."""
     descriptor = self.flavour.make_plain_descriptor(0x1234, 'name', b'\xEF',
                                                     [('c', 8)], [None])
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(1, [descriptor]))
     group = spead2.ItemGroup()
     assert_raises(UnicodeDecodeError, group.update, heaps[0])
Esempio n. 18
0
 def helper(header):
     """Feed a raw numpy descriptor *header* through the receiver and check
     that decoding it raises ValueError."""
     descriptor = self.flavour.make_numpy_descriptor_raw(
         0x1234, 'name', 'description', header)
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(1, [descriptor]))
     assert len(heaps) == 1
     group = spead2.ItemGroup()
     with pytest.raises(ValueError):
         group.update(heaps[0])
Esempio n. 19
0
 def test_replace_clobber_name(self):
     """When a new item is added that collides with an existing name, that
     other item is deleted."""
     group = spead2.ItemGroup()
     group.add_item(0x1000, 'item 1', 'item 1', (), np.int32)
     group.add_item(0x1001, 'item 1', 'clobbered', (), np.int32)
     # Only the new ID survives, and the shared name points at it.
     assert list(group.ids()) == [0x1001]
     assert list(group.keys()) == ['item 1']
     assert group['item 1'].description == 'clobbered'
     assert group['item 1'] is group[0x1001]
Esempio n. 20
0
 def test_size_mismatch(self):
     """A heap whose payload is smaller than the descriptor demands must
     fail to decode."""
     descriptor = self.flavour.make_plain_descriptor(
         0x1234, 'bad', 'an item with insufficient data', [('u', 32)],
         (5, 5))
     # 5x5 u32 needs 100 bytes; send 99 to trigger the mismatch.
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(
         1, [descriptor, Item(0x1234, b'\0' * 99)]))
     assert_equal(1, len(heaps))
     group = spead2.ItemGroup()
     assert_raises(ValueError, group.update, heaps[0])
Esempio n. 21
0
 def test_nonascii_name(self):
     """Receiving non-ASCII characters in an item name must raise
     :py:exc:`UnicodeDecodeError`."""
     descriptor = self.flavour.make_plain_descriptor(
         0x1234, b'\xEF', 'a byte string', [('c', 8)], [None])
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(1, [descriptor]))
     group = spead2.ItemGroup()
     with pytest.raises(UnicodeDecodeError):
         group.update(heaps[0])
Esempio n. 22
0
 def test_replace_change_shape(self):
     """When a descriptor changes the shape, the old item must be discarded
     and ``None`` used in its place. The version must be bumped."""
     group = spead2.ItemGroup()
     group.add_item(0x1000, 'item 1', 'item 1', (), np.int32,
                    value=np.int32(4))
     original = group[0x1000]
     original_version = original.version
     # Same ID and name, different shape: the item must be replaced.
     group.add_item(0x1000, 'item 1', 'bigger', (3, 4), np.int32)
     replacement = group[0x1000]
     assert replacement is not original
     assert replacement.value is None
     assert replacement.version > original_version
def run_stream(stream, log):
    """Receive heaps from *stream*, printing changed items and pickling them.

    Args:
        stream: spead2 asyncio receive stream to read heaps from.
        log: Logger-like object (currently unused; kept for caller
            compatibility).

    The stream is always stopped on exit, including on cancellation.
    """
    try:
        item_group = spead2.ItemGroup()
        num_heaps = 0
        while True:
            try:
                heap = yield from (stream.get())
                print("Received heap {} on stream".format(heap.cnt))
                # BUG FIX: num_heaps was never incremented, so the shutdown
                # message below always reported 0 heaps.
                num_heaps += 1
                desp = False   # set True to dump heap descriptors
                write = True   # set False to suppress printing/pickling
                try:
                    if desp:
                        for raw_descriptor in heap.get_descriptors():
                            descriptor = spead2.Descriptor.from_raw(
                                raw_descriptor, heap.flavour)
                            print('''\
    Descriptor for {0.name} ({0.id:#x})
      description: {0.description}
      format:      {0.format}
      dtype:       {0.dtype}
      shape:       {0.shape}'''.format(descriptor))
                    changed = item_group.update(heap)
                    data = {}

                    # NOTE(review): the pickle write below runs once per
                    # changed item, re-writing partially-filled `data` each
                    # time — looks unintentional but is preserved as-is.
                    for (key, item) in changed.items():
                        if write:
                            print(key, '=', item.value)
                            data[item.name] = item.value

                        # Get data dimensions.
                        time_index = heap.cnt - 2  # Extra -1 because first heap is empty.

                        # max_times_per_file = config['output']['max_times_per_file']

                        # Construct filename.
                        base_name = 'vis_T'
                        data['time_index'] = time_index

                        # Write visibility data.
                        _pickle_write('/home/sdp/output/' + base_name + '.p',
                                      data)

                except ValueError as e:
                    print("Error raised processing heap: {}".format(e))

            except (spead2.Stopped, asyncio.CancelledError):
                print("Shutting down stream after {} heaps".format(num_heaps))
                stats = stream.stats
                for key in dir(stats):
                    if not key.startswith('_'):
                        print("{}: {}".format(key, getattr(stats, key)))
                break
    finally:
        stream.stop()
Esempio n. 24
0
 def test_allocate_id(self):
     """Automatic allocation of IDs must skip over already-allocated IDs"""
     group = spead2.ItemGroup()
     group.add_item(0x1000, 'item 1', 'item 1', (), np.int32)
     group.add_item(0x1003, 'item 2', 'item 2', (), np.int32)
     for name in ('item 3', 'item 4', 'item 5'):
         group.add_item(None, name, name, (), np.int32)
     # 0x1001 and 0x1002 are free; 0x1003 is taken, so the third
     # auto-allocated ID must land on 0x1004.
     assert group[0x1001].name == 'item 3'
     assert group[0x1002].name == 'item 4'
     assert group[0x1004].name == 'item 5'
Esempio n. 25
0
 def test_replace_clobber_both(self):
     """Adding a new item that collides with one item on name and another on
     ID causes both to be dropped."""
     group = spead2.ItemGroup()
     group.add_item(0x1000, 'item 1', 'item 1', (), np.int32)
     group.add_item(0x1001, 'item 2', 'item 2', (), np.int32)
     group.add_item(0x1000, 'item 2', 'clobber', (), np.int32)
     # A single item remains, reachable by both its ID and its name.
     assert list(group.ids()) == [0x1000]
     assert list(group.keys()) == ['item 2']
     assert group[0x1000] is group['item 2']
     assert group[0x1000].description == 'clobber'
Esempio n. 26
0
 def data_to_ig(self, data):
     """Take some data and pass it through the receiver to obtain a single heap,
     from which the items are extracted.
     """
     heaps = self.data_to_heaps(data)
     assert_equal(1, len(heaps))
     group = spead2.ItemGroup()
     group.update(heaps[0])
     # Sanity-check: each mapping key matches its item's own name.
     for name, item in group.items():
         assert_equal(name, item.name)
     return group
Esempio n. 27
0
    def run(self):
        """Runs the receiver.

        Consumes heaps from the stream: a heap containing 'num_channels'
        configures the imager's visibility metadata, and each heap
        containing 'vis' updates the image.  The image is finalised once
        the stream ends.
        """
        self._log.info("Initialising...")
        self.reset_cache()
        self.check_init()
        item_group = spead2.ItemGroup()

        # Iterate over all heaps in the stream.
        self._log.info("Waiting to receive on port {}".format(self._port))
        for heap in self._stream:
            # Extract data from the heap into a dictionary.
            data = {}
            items = item_group.update(heap)
            for item in items.values():
                data[item.name] = item.value

            # Read the header and set imager visibility meta-data.
            if 'num_channels' in data:
                self._header = {
                    'freq_start_hz': data['freq_start_hz'],
                    'freq_inc_hz': data['freq_inc_hz'],
                    'num_baselines': data['num_baselines'],
                    'num_channels': data['num_channels'],
                    'num_pols': data['num_pols'],
                    'num_stations': data['num_stations'],
                    'phase_centre_ra_deg': data['phase_centre_ra_deg'],
                    'phase_centre_dec_deg': data['phase_centre_dec_deg'],
                    'time_average_sec': data['time_average_sec'],
                    'time_inc_sec': data['time_inc_sec'],
                    'time_start_mjd_utc': data['time_start_mjd_utc']
                }
                self._log.info(
                    "Receiving {} channel(s) starting at {} MHz.".format(
                        data['num_channels'], data['freq_start_hz'] / 1e6))
                self.set_vis_frequency(data['freq_start_hz'],
                                       data['freq_inc_hz'],
                                       data['num_channels'])
                self.set_vis_phase_centre(data['phase_centre_ra_deg'],
                                          data['phase_centre_dec_deg'])

            # Update the imager with visibility data from the SPEAD heap.
            # Each heap carries a single channel (start == end channel).
            if 'vis' in data:
                vis = data['vis']
                self.update(vis['uu'],
                            vis['vv'],
                            vis['ww'],
                            vis['amp'],
                            start_channel=data['channel_index'],
                            end_channel=data['channel_index'],
                            num_pols=self._header['num_pols'])

        # Stop the stream when there are no more heaps, and finalise the image.
        self._stream.stop()
        self.finalise()
Esempio n. 28
0
 def test_nonascii_value(self):
     """Receiving non-ASCII characters in a c8 string must raise
     :py:exc:`UnicodeDecodeError`."""
     items = [
         self.flavour.make_plain_descriptor(
             0x1234, 'test_string', 'a byte string', [('c', 8)], [None]),
         Item(0x1234, '\u0200'.encode())
     ]
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(1, items))
     group = spead2.ItemGroup()
     assert_raises(UnicodeDecodeError, group.update, heaps[0])
Esempio n. 29
0
 def test_substreams(self):
     """Each substream carries an item group holding a distinct value."""
     groups = []
     for value in range(4):
         group = spead2.ItemGroup()
         group.add_item(id=0x2345,
                        name='int',
                        description='an integer',
                        shape=(),
                        format=[('i', 32)],
                        value=value)
         groups.append(group)
     self._test_item_groups(groups)
Esempio n. 30
0
 def test_numpy_zero_size(self):
     """numpy dtypes can represent zero bytes."""
     empty_dtype = np.dtype(np.str_)
     heaps = self.data_to_heaps(self.flavour.make_packet_heap(1, [
         self.flavour.make_numpy_descriptor(
             0x1234, 'empty', 'an item with zero-byte dtype', empty_dtype,
             (5, )),
         Item(0x1234, b'')
     ]))
     assert_equal(1, len(heaps))
     group = spead2.ItemGroup()
     # A zero-size dtype cannot describe real data, so update must fail.
     assert_raises(ValueError, group.update, heaps[0])