# Example 1
async def test_write_rdb_s3_full(telstate, s3_args, tmp_path_factory, mocker):
    """A full (non-lite) RDB dump uploads to S3 and round-trips intact."""
    botocore_dict = katsdpmetawriter.make_botocore_dict(s3_args)
    dump_path = tmp_path_factory.mktemp('dump') / 'dump.rdb.uploading'
    ctx = mocker.MagicMock()
    # Fake the timer so the upload appears to take exactly 5 seconds,
    # making the reported transfer rate deterministic.
    mocker.patch('katsdpmetawriter.timer', side_effect=[100.0, 105.0])
    rate_bytes, key_errors = await katsdpmetawriter._write_rdb(
        ctx,
        telstate,
        str(dump_path),
        CBID,
        STREAM_NAME,
        botocore_dict=botocore_dict,
        key_name='dump.rdb',
        lite=False)
    expected_rate = dump_path.stat().st_size / 5.0
    assert rate_bytes == expected_rate
    assert key_errors == 0, 'Should never be key errors with a full dump'
    ctx.inform.assert_called()

    # The object stored in S3 must match the local file byte-for-byte.
    session = botocore.session.Session()
    s3_conn = session.create_client('s3', **botocore_dict)
    stored = s3_conn.get_object(Bucket=CBID, Key='dump.rdb')['Body'].read()
    assert stored == dump_path.read_bytes()

    # Reload the dump into a fresh telstate: every original key must survive,
    # and exactly two extra keys (capture_block_id, stream_name) are added.
    telstate2 = katsdptelstate.aio.TelescopeState()
    _load_from_file(telstate2, dump_path)
    for key in await telstate.keys():
        assert await key_info(telstate, key) == await key_info(telstate2, key)
    assert await telstate2.get('capture_block_id') == CBID
    assert await telstate2.get('stream_name') == STREAM_NAME
    assert len(await telstate2.keys()) == len(await telstate.keys()) + 2
# Example 2
async def test_write_rdb_lost_connection(telstate, tmp_path_factory, mocker,
                                         caplog):
    """When no S3 connection can be made, _write_rdb reports failure."""
    @asynccontextmanager
    async def failed_get_s3_connection(*args, **kwargs):
        # Simulate get_s3_connection yielding no usable connection.
        yield None

    s3_args = argparse.Namespace(
        access_key=ADMIN_USER.access_key,
        secret_key=ADMIN_USER.secret_key,
        s3_host='test.invalid',
        s3_port=0)
    botocore_dict = katsdpmetawriter.make_botocore_dict(s3_args)
    mocker.patch('katsdpmetawriter.get_s3_connection',
                 failed_get_s3_connection)

    dump_path = tmp_path_factory.mktemp('dump') / 'dump.rdb.uploading'
    ctx = mocker.MagicMock()
    with caplog.at_level(logging.ERROR):
        rate_bytes, key_errors = await katsdpmetawriter._write_rdb(
            ctx,
            telstate,
            str(dump_path),
            CBID,
            STREAM_NAME,
            botocore_dict=botocore_dict,
            key_name='dump.rdb',
            lite=False)
    # No rate is reported and the failure is logged.
    assert rate_bytes is None
    assert 'Unable to store RDB dump' in caplog.text
# Example 3
async def test_get_s3_connection_bad_host(s3_args, caplog):
    """An unresolvable S3 host yields no connection and logs a network error."""
    s3_args.s3_host = 'test.invalid'
    cfg = katsdpmetawriter.make_botocore_dict(s3_args)
    with caplog.at_level(logging.ERROR):
        async with katsdpmetawriter.get_s3_connection(cfg) as s3_conn:
            assert s3_conn is None
    assert 'Please check network and host address' in caplog.text
# Example 4
async def test_get_s3_connection_bad_secret_key(s3_args, caplog):
    """A wrong secret key yields no connection and logs a credential error."""
    s3_args.secret_key = 'wrong'
    cfg = katsdpmetawriter.make_botocore_dict(s3_args)
    with caplog.at_level(logging.ERROR):
        async with katsdpmetawriter.get_s3_connection(cfg) as s3_conn:
            assert s3_conn is None
    assert 'secret key is not valid' in caplog.text
# Example 5
async def test_get_s3_connection_no_permissions(s3_args, caplog):
    """Valid credentials without permissions yield no connection."""
    s3_args.access_key = NOBODY_USER.access_key
    s3_args.secret_key = NOBODY_USER.secret_key
    cfg = katsdpmetawriter.make_botocore_dict(s3_args)
    with caplog.at_level(logging.ERROR):
        async with katsdpmetawriter.get_s3_connection(cfg) as s3_conn:
            assert s3_conn is None
    assert 'has no permissions' in caplog.text
# Example 6
async def test_get_s3_connection_fail_on_boto(s3_args, caplog):
    """With fail_on_boto=True the botocore error propagates to the caller."""
    s3_args.secret_key = 'wrong'
    cfg = katsdpmetawriter.make_botocore_dict(s3_args)
    with caplog.at_level(logging.ERROR):
        with pytest.raises(botocore.exceptions.ClientError):
            async with katsdpmetawriter.get_s3_connection(cfg,
                                                          fail_on_boto=True):
                pass
    # The error is still logged as well as raised.
    assert 'secret key is not valid' in caplog.text
# Example 7
async def device_server(request, s3_args, telstate, tmp_path_factory):
    """Create and start a :class:`~.MetaWriterServer`.

    It is parametrized by whether to connect it to an S3 server
    (``request.param`` truthy -> pass a botocore dict, falsy -> ``None``).
    """
    rdb_path = tmp_path_factory.mktemp('dump')
    botocore_dict = katsdpmetawriter.make_botocore_dict(
        s3_args) if request.param else None
    # Port 0 lets the OS pick a free port, avoiding collisions between tests.
    server = katsdpmetawriter.MetaWriterServer('127.0.0.1', 0, botocore_dict,
                                               str(rdb_path), telstate)
    await server.start()
    try:
        yield server
    finally:
        # Always stop the server, even if an exception is thrown into the
        # generator during teardown, so the port and tasks are released.
        await server.stop()
# Example 8
async def test_write_rdb_s3_permission_error(telstate, s3_args,
                                             tmp_path_factory, mocker, caplog):
    """A read-only S3 user cannot upload: no rate is returned and it logs."""
    s3_args.access_key = READONLY_USER.access_key
    s3_args.secret_key = READONLY_USER.secret_key
    cfg = katsdpmetawriter.make_botocore_dict(s3_args)
    dump_path = tmp_path_factory.mktemp('dump') / 'dump.rdb.uploading'
    ctx = mocker.MagicMock()
    with caplog.at_level(logging.ERROR):
        rate_bytes, key_errors = await katsdpmetawriter._write_rdb(
            ctx,
            telstate,
            str(dump_path),
            CBID,
            STREAM_NAME,
            botocore_dict=cfg,
            key_name='dump.rdb',
            lite=False)
    assert rate_bytes is None
    assert 'does not have permission' in caplog.text
# Example 9
async def main():
    """Entry point: parse arguments, verify S3 connectivity (if enabled),
    start the KATCP meta-writer server and run until shut down.

    Exits with status 2 if the configured RDB path does not exist.
    """
    katsdpservices.setup_logging()
    logger = logging.getLogger("katsdpmetawriter")
    katsdpservices.setup_restart()

    parser = katsdpservices.ArgumentParser()
    parser.add_argument('--rdb-path',
                        default="/var/kat/data",
                        metavar='RDBPATH',
                        help='Root in which to write RDB dumps')
    parser.add_argument('--store-s3',
                        dest='store_s3',
                        default=False,
                        action='store_true',
                        help='Enable storage of RDB dumps in S3')
    parser.add_argument(
        '--access-key',
        default="",
        metavar='ACCESS',
        help='S3 access key with write permission to the specified bucket '
             '[unauthenticated]')
    parser.add_argument(
        '--secret-key',
        default="",
        metavar='SECRET',
        help='S3 secret key for the specified access key [unauthenticated]')
    parser.add_argument('--s3-host',
                        default='localhost',
                        metavar='HOST',
                        help='S3 gateway host address [%(default)s]')
    parser.add_argument('--s3-port',
                        # type=int so a CLI-supplied port matches the int
                        # default instead of arriving as a string.
                        type=int,
                        default=7480,
                        metavar='PORT',
                        help='S3 gateway port [%(default)s]')
    parser.add_argument('-p',
                        '--port',
                        type=int,
                        default=2049,
                        metavar='N',
                        help='KATCP host port [%(default)s]')
    parser.add_argument('-a',
                        '--host',
                        default="",
                        metavar='HOST',
                        help='KATCP host address [all hosts]')

    args = parser.parse_args()

    if not os.path.exists(args.rdb_path):
        logger.error("Specified RDB path, %s, does not exist.", args.rdb_path)
        sys.exit(2)

    botocore_dict = None
    if args.store_s3:
        botocore_dict = katsdpmetawriter.make_botocore_dict(args)
        async with katsdpmetawriter.get_s3_connection(
                botocore_dict, fail_on_boto=True) as s3_conn:
            if s3_conn:
                # we rebuild the connection each time we want to write a meta-data dump
                logger.info("Successfully tested connection to S3 endpoint.")
            else:
                logger.warning(
                    "S3 endpoint %s:%s not available. Files will only be written locally.",
                    args.s3_host, args.s3_port)
    else:
        logger.info(
            "Running in disk only mode. RDB dumps will not be written to S3")

    telstate = await get_async_telstate(args.telstate_endpoint)
    server = katsdpmetawriter.MetaWriterServer(args.host, args.port,
                                               botocore_dict, args.rdb_path,
                                               telstate)
    logger.info("Started meta-data writer server.")
    # We are inside a coroutine, so a running loop is guaranteed;
    # get_running_loop() is the non-deprecated way to obtain it.
    loop = asyncio.get_running_loop()
    await server.start()
    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, lambda: on_shutdown(loop, server))
    await server.join()
    # Close the telstate backend cleanly before exiting.
    telstate.backend.close()
    await telstate.backend.wait_closed()
# Example 10
async def test_get_s3_connection(s3_args):
    """With valid credentials and a reachable host a connection is returned."""
    cfg = katsdpmetawriter.make_botocore_dict(s3_args)
    async with katsdpmetawriter.get_s3_connection(cfg) as s3_conn:
        assert s3_conn is not None