Example 1
 def setUp(self):
     self.telstate = katsdptelstate.TelescopeState()
     self.telstate.clear()
     self.n_servers = 4
     self.executor = concurrent.futures.ThreadPoolExecutor(self.n_servers)
     self.server_chans = 1024
     self.bchan = 1100
     self.echan = 1300
     self._seq = 0
     self.parameters = [
         {
             'product_names': {
                 'G': 'product_G',
                 'K': 'product_K',
                 'KCROSS': 'product_KCROSS',
                 'B': 'product_B{}'.format(i)
             },
             'channel_freqs':
             np.arange(self.server_chans)  # only length matters
         } for i in range(self.n_servers)
     ]
     self.solution_stores = {
         'K': CalSolutionStoreLatest('K'),
         'B': CalSolutionStoreLatest('B'),
         'G': CalSolutionStore('G')
     }
Example 2
 def setup_telstate(cls, namespace: str) -> katsdptelstate.TelescopeState:
     telstate = katsdptelstate.TelescopeState().view(namespace)
     n_ants = 3
     telstate['n_chans'] = 4096
     telstate['n_chans_per_substream'] = 1024
     telstate['n_bls'] = n_ants * (n_ants + 1) * 2
     return telstate
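For context, a quick check of the baseline formula used above: with n_ants = 3 it gives n_bls = 3 * 4 * 2 = 24, i.e. the 6 antenna pairs (3 cross-correlations plus 3 autocorrelations), each contributing 4 polarisation products.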
Example 3
 def __init__(self, cbid_stream_rdb_file):
     self._ts = katsdptelstate.TelescopeState()
     self._ts.load_from_file(cbid_stream_rdb_file)
     metfilename = '{}.met'.format(self._ts['capture_block_id'] + '_' +
                                   self._ts['stream_name'])
     super(MeerKATFlagProductMetExtractor, self).__init__(metfilename)
     self.product_type = 'MeerKATFlagProduct'
Example 4
 def get_gains(self):
     val = yield self.get_telstate()
     telstate_address = "{}:{}".format(*eval(val))
     last_calibration = yield self.get_last_calibration_timestamp()
     telstate = katsdptelstate.TelescopeState(telstate_address)
     corrections = get_phaseup_corrections(telstate, last_calibration, 1.0,
                                           False)
     raise Return(corrections)
Example 5
    def setup(self):
        self.tempdir = tempfile.TemporaryDirectory()
        self.store = NpyFileChunkStore(self.tempdir.name)
        self.shape = (12, 96, len(ANTENNAS) * (len(ANTENNAS) + 1) * 2)
        self.telstate = katsdptelstate.TelescopeState()
        self._populate_telstate(self.telstate)

        self._open_patcher = mock.patch('katdal.open', autospec=True, side_effect=self._katdal_open)
        self._open_patcher.start()
Example 6
    def from_url(cls, url, chunk_store='auto', **kwargs):
        """Construct TelstateDataSource from URL (RDB file / REDIS server).

        Parameters
        ----------
        url : string
            URL serving as entry point to dataset (typically RDB file or REDIS)
        chunk_store : :class:`katdal.ChunkStore` object, optional
            Chunk store for visibility data (obtained automatically by default,
            or set to None for metadata-only dataset)
        kwargs : dict, optional
            Extra keyword arguments passed to telstate view and chunk store init
        """
        url_parts = urlparse.urlparse(url, scheme='file')
        # Merge key-value pairs from URL query with keyword arguments
        # of function (the latter takes precedence)
        url_kwargs = dict(urlparse.parse_qsl(url_parts.query))
        url_kwargs.update(kwargs)
        kwargs = url_kwargs
        # Extract Redis database number if provided
        db = int(kwargs.pop('db', '0'))
        if url_parts.scheme == 'file':
            # RDB dump file
            telstate = katsdptelstate.TelescopeState()
            try:
                telstate.load_from_file(url_parts.path)
            except OSError as err:
                raise DataSourceNotFound(str(err))
        elif url_parts.scheme == 'redis':
            # Redis server
            try:
                telstate = katsdptelstate.TelescopeState(url_parts.netloc, db)
            except katsdptelstate.ConnectionError as e:
                raise DataSourceNotFound(str(e))
        else:
            # Reject unknown URL schemes so that telstate is always bound below
            raise DataSourceNotFound(
                "Unknown URL scheme '{}' - expected file or redis".format(
                    url_parts.scheme))
        telstate = view_capture_stream(telstate, **kwargs)
        if chunk_store == 'auto':
            chunk_store = _infer_chunk_store(url_parts, telstate, **kwargs)
        return cls(telstate, chunk_store, source_name=url_parts.geturl())
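A minimal usage sketch for the method above. The dump path is hypothetical, and the RDB file is assumed to carry enough metadata for view_capture_stream to pick the stream; chunk_store=None gives a metadata-only dataset, as noted in the docstring.

    # Hedged sketch: '/data/1234567890_sdp_l0.rdb' is a hypothetical dump path
    source = TelstateDataSource.from_url('file:///data/1234567890_sdp_l0.rdb',
                                         chunk_store=None)  # metadata only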
Example 7
def telstate_detection(filename):
    """File is an .rdb files. Any .rdb files passed are assumed to be
    MeerKAT telescope products, and should be either an 'sdp.vis' or
    'sdp.flags' stream.

    Parameters
    ----------
    filename: string : full path to the product

    Returns
    -------
    MetExtractor: class : A metadata extractor class to extract metadata from the rdb file.
    """
    ts = katsdptelstate.TelescopeState()
    ts.load_from_file(filename)
    stream_name = ts['stream_name']
    v = ts.view(stream_name)
    if v['stream_type'] == 'sdp.vis':
        return MeerKATTelescopeProductMetExtractor
    elif v['stream_type'] == 'sdp.flags':
        return MeerKATFlagProductMetExtractor
    raise ProductTypeDetectionError('%s is not a recognisable stream type'
                                    % v['stream_type'])
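A short usage sketch, assuming a hypothetical .rdb path and that the returned extractor class is constructed with the same file, as MeerKATFlagProductMetExtractor is in Example 3.

    # Hedged sketch: the .rdb path below is hypothetical
    extractor_cls = telstate_detection('/data/1234567890_sdp_l0.rdb')
    extractor = extractor_cls('/data/1234567890_sdp_l0.rdb')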
Example 8
 def test_rdb_support(self):
     telstate = katsdptelstate.TelescopeState()
     view, cbid, sn, _, _ = make_fake_data_source(telstate, self.store,
                                                  (5, 16, 40), PREFIX)
     telstate['capture_block_id'] = cbid
     telstate['stream_name'] = sn
     # Save telstate to temp RDB file since RDBWriter needs a filename and not a handle
     rdb_filename = f'{cbid}_{sn}.rdb'
     temp_filename = os.path.join(self.tempdir, rdb_filename)
     with RDBWriter(temp_filename) as rdbw:
         rdbw.save(telstate)
     # Read the file back in and upload it to S3
     with open(temp_filename, mode='rb') as rdb_file:
         rdb_data = rdb_file.read()
     rdb_url = urllib.parse.urljoin(self.store_url,
                                    self.store.join(cbid, rdb_filename))
     self.store.create_array(cbid)
     self.store.complete_request('PUT', rdb_url, data=rdb_data)
     # Check that data source can be constructed from URL (with auto chunk store)
     source_from_url = TelstateDataSource.from_url(rdb_url,
                                                   **self.store_kwargs)
     source_direct = TelstateDataSource(view, cbid, sn, self.store)
     assert_telstate_data_source_equal(source_from_url, source_direct)
Example 9
def test_telescope_state_fetcher_override(telstate_fetcher,
                                          mock_responses) -> None:
    telstate2 = katsdptelstate.TelescopeState()
    telstate2['another_key'] = 'rfi_mask_ranges.h5'
    model = telstate_fetcher.get('another_key', DummyModel, telstate=telstate2)
    assert len(model.ranges) == 2
Example 10
 def setup(self):
     self.tempdir = tempfile.mkdtemp()
     self.store = NpyFileChunkStore(self.tempdir)
     self.telstate = katsdptelstate.TelescopeState()
     self.cbid = 'cb'
Example 11
    def from_url(cls, url, chunk_store='auto', **kwargs):
        """Construct TelstateDataSource from URL (RDB file / REDIS server).

        Parameters
        ----------
        url : string
            URL serving as entry point to dataset (typically RDB file or REDIS)
        chunk_store : :class:`katdal.ChunkStore` object, optional
            Chunk store for visibility data (obtained automatically by default,
            or set to None for metadata-only dataset)
        kwargs : dict, optional
            Extra keyword arguments passed to init, telstate view, chunk store init
        """
        url_parts = urllib.parse.urlparse(url, scheme='file')
        # Merge key-value pairs from URL query with keyword arguments
        # of function (the latter takes precedence)
        url_kwargs = dict(urllib.parse.parse_qsl(url_parts.query))
        url_kwargs.update(kwargs)
        kwargs = url_kwargs
        # Extract Redis database number if provided
        db = int(kwargs.pop('db', '0'))
        if url_parts.scheme == 'file':
            # RDB dump file
            telstate = katsdptelstate.TelescopeState()
            try:
                telstate.load_from_file(url_parts.path)
            except (OSError, katsdptelstate.RdbParseError) as e:
                raise DataSourceNotFound(str(e)) from e
        elif url_parts.scheme == 'redis':
            # Redis server
            try:
                telstate = katsdptelstate.TelescopeState(url_parts.netloc, db)
            except katsdptelstate.ConnectionError as e:
                raise DataSourceNotFound(str(e)) from e
        elif url_parts.scheme in {'http', 'https'}:
            # Treat URL prefix as an S3 object store (with auth info in kwargs)
            store_url = urllib.parse.urljoin(url, '..')
            # Strip off parameters, query strings and fragments to get basic URL
            rdb_url = urllib.parse.urlunparse(
                (url_parts.scheme, url_parts.netloc, url_parts.path, '', '',
                 ''))
            telstate = katsdptelstate.TelescopeState()
            try:
                rdb_store = S3ChunkStore(store_url, **kwargs)
                with rdb_store.request('GET', rdb_url) as response:
                    telstate.load_from_file(io.BytesIO(response.content))
            except ChunkStoreError as e:
                raise DataSourceNotFound(str(e)) from e
            # If the RDB file is opened via archive URL, use that URL and
            # corresponding S3 credentials or token to access the chunk store
            if chunk_store == 'auto' and not kwargs.get('s3_endpoint_url'):
                chunk_store = rdb_store
        else:
            raise DataSourceNotFound(
                f"Unknown URL scheme '{url_parts.scheme}' - "
                'telstate expects file, redis, or http(s)')
        telstate, capture_block_id, stream_name = view_l0_capture_stream(
            telstate, **kwargs)
        if chunk_store == 'auto':
            chunk_store = infer_chunk_store(url_parts, telstate, **kwargs)
        # Remove these from kwargs since they have already been extracted by view_l0_capture_stream
        kwargs.pop('capture_block_id', None)
        kwargs.pop('stream_name', None)
        return cls(telstate,
                   capture_block_id,
                   stream_name,
                   chunk_store,
                   source_name=url_parts.geturl(),
                   **kwargs)
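A hedged usage sketch for this newer version: stream selection can be supplied through the URL query string, which the code merges into kwargs before calling view_l0_capture_stream. The dump path, capture block ID and stream name below are hypothetical.

    # Hedged sketch: path, capture_block_id and stream_name are hypothetical
    source = TelstateDataSource.from_url(
        'file:///data/1234567890_sdp_l0.rdb'
        '?capture_block_id=1234567890&stream_name=sdp_l0')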
Example 12
            local_redis = subprocess.Popen(shlex.split(launch_cmd),
                                           stderr=subprocess.PIPE,
                                           stdout=subprocess.PIPE)
        time.sleep(3)
        if local_redis.poll():
            logger.error(
                "Failed to launch local Redis instance, terminating. %s",
                local_redis.communicate())
            sys.exit()
        logger.info("Local Redis instance launched successfully")
        redis_host = 'localhost'
        redis_endpoint = '{}:{}'.format(redis_host, args.redis_port)
    else:
        redis_host = args.redis
        redis_endpoint = '{}:{}'.format(redis_host, args.redis_port)
    ts = katsdptelstate.TelescopeState(redis_endpoint)
    logger.info("Connected to Redis on %s. DB has %d existing keys",
                redis_endpoint, len(ts.keys()))

    r_str = ""
    for attr in h5_file['TelescopeState'].attrs:
        r_str += redis_gen_proto("SET", attr,
                                 h5_file['TelescopeState'].attrs[attr])
    if redis_endpoint:
        redis_bulk_str(r_str, redis_host, args.redis_port)

    if not args.obj_only:
        for d_count, dset in enumerate(h5_file['TelescopeState'].keys()):
            st = time.time()
            r_str = ""
            d_val = h5_file['TelescopeState'][dset].value
Example 13
def main():
    args = parse_args()
    try:
        h5_file = h5py.File(args.file)
    except Exception as e:
        logger.error("Failed to open specified HDF5 file. {}".format(e))
        sys.exit()
    try:
        data = h5_file['Data/correlator_data']
    except KeyError:
        logger.error("This does not appear to be a valid MeerKAT HDF5 file")
        sys.exit()

    if args.redis_only:
        logger.warning("Building Redis DB only - no data will be written...")

    if args.redis is None:
        logger.info("Launching local Redis instance")
        try:
            launch_cmd = "/usr/bin/redis-server --port {}".format(
                args.redis_port)
            local_redis = subprocess.Popen(shlex.split(launch_cmd),
                                           stderr=subprocess.PIPE,
                                           stdout=subprocess.PIPE)
        except OSError:
            launch_cmd = "/usr/local/bin/redis-server --port {}".format(
                args.redis_port)
            local_redis = subprocess.Popen(shlex.split(launch_cmd),
                                           stderr=subprocess.PIPE,
                                           stdout=subprocess.PIPE)
        time.sleep(3)
        if local_redis.poll():
            logger.error(
                "Failed to launch local Redis instance, terminating. {}".
                format(local_redis.communicate()))
            sys.exit()
        logger.info("Local Redis instance launched successfully")
        redis_host = 'localhost'
    else:
        redis_host = args.redis
    ts = katsdptelstate.TelescopeState(
        endpoint='{}:{}'.format(redis_host, args.redis_port))
    logger.info("Connected to Redis on {}:{}. DB has {} existing keys".format(
        redis_host, args.redis_port, len(ts.keys())))

    im_count = 0
    r_str = ""
    for attr in h5_file['TelescopeState'].attrs:
        r_str += gen_redis_proto("SET", attr,
                                 h5_file['TelescopeState'].attrs[attr])
        im_count += 1
    redis_bulk_str(r_str, redis_host, args.redis_port)

    d_count = 0
    for dset in h5_file['TelescopeState'].keys():
        i_count = 0
        st = time.time()
        r_str = ""
        d_val = h5_file['TelescopeState'][dset].value
        # much quicker to read it first and then iterate
        for (timestamp, pval) in d_val:
            packed_ts = struct.pack('>d', float(timestamp))
            r_str += gen_redis_proto("ZADD", dset, "0", packed_ts + pval)
            i_count += 1
        bss = time.time()
        redis_bulk_str(r_str, redis_host, args.redis_port)
        logger.info(
            "Added {} items in {}s to key {}. Bulk insert time: {}".format(
                i_count,
                time.time() - st, dset, (time.time() - bss)))
        d_count += 1
    logger.info("Added {} ranged keys to TelescopeState".format(d_count))

    if args.redis_only:
        if args.redis is None:
            logger.warning(
                "Terminating locally launched redis instance (also saves telstate to local dump.rdb)"
            )
            try:
                cli_cmd = "/usr/bin/redis-cli -p {} SHUTDOWN SAVE".format(
                    args.redis_port)
                subprocess.call(shlex.split(cli_cmd))
            except OSError:
                cli_cmd = "/usr/local/bin/redis-cli -p {} SHUTDOWN SAVE".format(
                    args.redis_port)
                subprocess.call(shlex.split(cli_cmd))
            local_redis.terminate()
        sys.exit(0)
    cluster = rados.Rados(conffile=args.ceph_conf)
    cluster.connect()
    available_pools = cluster.list_pools()
    if args.pool not in available_pools:
        logger.error(
            "Specified pool {} not available in this cluster ({})".format(
                args.pool, available_pools))
        sys.exit()
    ioctx = cluster.open_ioctx(args.pool)
    pool_stats = ioctx.get_stats()
    logger.info(
        "Connected to pool {}. Currently holds {} objects totalling {} GB".
        format(args.pool, pool_stats['num_objects'],
               pool_stats['num_bytes'] / 2**30))

    (freq_chunk, obj_size) = get_freq_chunk(data.shape, args.obj_size)
    if args.ts_limit > 0:
        ts_limit = args.ts_limit
    else:
        ts_limit = data.shape[0]
    obj_count_per_ts = data.shape[1] // freq_chunk
    obj_count = ts_limit * obj_count_per_ts

    ts.add("obj_basename", args.basename)
    ts.add("obj_chunk_size", freq_chunk)
    ts.add("obj_size", obj_size)
    ts.add("obj_count", obj_count)
    ts.add("obj_pool", args.pool)
    with open(args.ceph_conf, "r") as f:
        ts.add("obj_ceph_conf", f.readlines())
    logger.info("Inserted obj schema metadata into telstate")

    logger.info(
        "Processing {} timestamps into {} objects of size {} with basename {}".
        format(ts_limit, obj_count, obj_size, args.basename))
    ts_index = 0
    for ts_slice in data:
        st = time.time()
        (bytes_written, objs_written) = write_ts(ioctx, ts_index, ts_slice,
                                                 args.basename, freq_chunk)
        et = time.time() - st
        if objs_written == obj_count_per_ts and bytes_written == (
                obj_size * obj_count_per_ts):
            logger.info(
                "Stored ts index {} in {}s ({} objects totalling {}MBps)".
                format(ts_index, et, obj_count_per_ts,
                       obj_count_per_ts * obj_size / (1024 * 1024) / et))
        else:
            logger.error(
                "Failed to full write ts index {}. Wrote {}/{} objects and {}/{} bytes"
                .format(ts_index, objs_written, obj_count_per_ts,
                        bytes_written, obj_size * obj_count_per_ts))
        ts_index += 1
        if ts_index >= ts_limit:
            logger.info("Reached specified ts limit ({}).".format(ts_limit))
            break
    logger.info("Staging complete...")
    if args.redis is None:
        raw_input(
            "You have started a local Redis server. Hit enter to kill this and cleanup."
        )
        local_redis.terminate()
Example 14
 def setUp(self) -> None:
     self.tmpdir = tempfile.mkdtemp()
     self.addCleanup(shutil.rmtree, self.tmpdir)
     self.port = 7148
     self.n_channels = 1024
     self.spectra_per_heap = 256
     # No data actually travels through these multicast groups;
     # it gets mocked out to use local TCP sockets instead.
     self.endpoints = endpoint.endpoint_list_parser(self.port)(
         '239.102.2.0+7:{}'.format(self.port))
     self.tcp_acceptors = [
         _make_listen_socket() for endpoint in self.endpoints
     ]
     self.tcp_endpoints = [
         endpoint.Endpoint(*sock.getsockname())
         for sock in self.tcp_acceptors
     ]
     self.n_bengs = 16
     self.ticks_between_spectra = 8192
     self.adc_sample_rate = 1712000000.0
     self.heaps_per_stats = 6
     self.channels_per_heap = self.n_channels // self.n_bengs
     self.channels_per_endpoint = self.n_channels // len(self.endpoints)
     attrs = {
         'i0_tied_array_channelised_voltage_0x_n_chans':
         self.n_channels,
         'i0_tied_array_channelised_voltage_0x_n_chans_per_substream':
         self.channels_per_heap,
         'i0_tied_array_channelised_voltage_0x_spectra_per_heap':
         self.spectra_per_heap,
         'i0_tied_array_channelised_voltage_0x_src_streams':
         ['i0_antenna_channelised_voltage'],
         'i0_tied_array_channelised_voltage_0x_bandwidth':
         self.adc_sample_rate / 2,
         'i0_tied_array_channelised_voltage_0x_center_freq':
         3 * self.adc_sample_rate / 2,
         'i0_antenna_channelised_voltage_ticks_between_spectra':
         self.ticks_between_spectra,
         'i0_antenna_channelised_voltage_instrument_dev_name':
         'i0',
         'i0_sync_time':
         111111111.0,
         'i0_scale_factor_timestamp':
         self.adc_sample_rate
     }
     telstate = katsdptelstate.TelescopeState()
     for key, value in attrs.items():
         telstate[key] = value
     stats_int_time = (self.heaps_per_stats * self.ticks_between_spectra *
                       self.spectra_per_heap / self.adc_sample_rate)
     self.args = bf_ingest_server.parse_args([
         '--cbf-spead=' + endpoint.endpoints_to_str(self.endpoints),
         '--channels=128:768', '--file-base=' + self.tmpdir,
         '--stream-name=i0_tied_array_channelised_voltage_0x',
         '--interface=lo', '--stats=239.102.3.0:7149',
         '--stats-int-time={}'.format(stats_int_time),
         '--stats-interface=lo'
     ], argparse.Namespace(telstate=telstate))
     self.loop = asyncio.get_event_loop()
     self.patch_add_endpoint()
     self.patch_create_session_config()
     self.patch_session_factory()
Example 15
def telstate() -> katsdptelstate.TelescopeState:
    """Telescope state with some attributes populated."""
    telstate = katsdptelstate.TelescopeState()
    telstate['sdp_model_base_url'] = test_utils.BASE_URL
    telstate['model_key'] = 'rfi_mask_ranges.h5'
    return telstate