    def test_containers_matched_by_label(self):
        """
        There are two Docker containers. Matching is by the label org.riotkit.domain: {{ domain }}

        Checks:
            - Parsing of the label syntax
        """

        first = DockerContainer(image='nginx:1.19').with_kwargs(
            labels={
                'org.riotkit.domain': 'duckduckgo.com'
            }).start()
        second = DockerContainer(image='nginx:1.19')\
            .with_kwargs(labels={'org.riotkit.domain': 'riseup.net,bing.com'}).start()

        try:
            wait_for_logs(first, 'ready for start up')
            wait_for_logs(second, 'ready for start up')

            os.environ['PATH'] = path + ':' + os.environ['PATH']
            check = TlsDockerNetworkCheck(param_type='label', param_name='org.riotkit.domain', alert_days_before=1) \
                .main()

        finally:
            first.stop()
            second.stop()

        self.assertIn('Domain duckduckgo.com is OK', check[0])
        self.assertIn('Domain bing.com is OK', check[0])
        self.assertIn('Domain riseup.net is OK', check[0])
        self.assertTrue(check[1])

    def test_multiple_containers_with_multiple_domains_per_container(self):
        """
        There are two containers. Matching is by the environment variable INFR_VIRTUAL_HOST={{ domain }}
        One with a single domain: google.com
        The other with two domains: duckduckgo.com and bing.com

        Checks:
            - The connection to Docker works and the data returned by Docker is parsed correctly
            - The "tls" check is called correctly
        """

        first = DockerContainer(image='nginx:1.19').with_env(
            'INFR_VIRTUAL_HOST', 'duckduckgo.com,bing.com').start()
        second = DockerContainer(image='nginx:1.19').with_env(
            'INFR_VIRTUAL_HOST', 'google.com').start()

        try:
            wait_for_logs(first, 'ready for start up')
            wait_for_logs(second, 'ready for start up')

            os.environ['PATH'] = path + ':' + os.environ['PATH']
            check = TlsDockerNetworkCheck(param_type='environment', param_name='INFR_VIRTUAL_HOST', alert_days_before=1)\
                .main()

        finally:
            first.stop()
            second.stop()

        self.assertIn('Domain google.com is OK', check[0])
        self.assertIn('Domain bing.com is OK', check[0])
        self.assertIn('Domain duckduckgo.com is OK', check[0])
        self.assertTrue(check[1])
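
Both tests above follow the same testcontainers pattern: start an nginx container (matched either by a label or by an environment variable), wait for its "ready for start up" log line, run the check, and always stop the container in a finally block. A minimal standalone sketch of that pattern, assuming the testcontainers package and a placeholder domain:

from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_for_logs

# Start an nginx container carrying the matching label, wait until nginx
# reports readiness, then always stop the container.
container = DockerContainer(image='nginx:1.19').with_kwargs(
    labels={'org.riotkit.domain': 'example.org'})  # placeholder domain
container.start()
try:
    wait_for_logs(container, 'ready for start up')
    # ... run the actual check here ...
finally:
    container.stop()
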
Example #3
class RedisOnlineStoreCreator(OnlineStoreCreator):
    def __init__(self, project_name: str):
        super().__init__(project_name)
        self.container = DockerContainer("redis").with_exposed_ports("6379")

    def create_online_store(self) -> Dict[str, str]:
        self.container.start()
        log_string_to_wait_for = "Ready to accept connections"
        wait_for_logs(
            container=self.container, predicate=log_string_to_wait_for, timeout=5
        )
        exposed_port = self.container.get_exposed_port("6379")
        return {"type": "redis", "connection_string": f"localhost:{exposed_port},db=0"}

    def teardown(self):
        self.container.stop()
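
A hedged usage sketch of the creator above; in Feast this lifecycle is driven by the integration-test harness, so the direct calls and the project name here are illustrative only:

creator = RedisOnlineStoreCreator("example_project")  # placeholder project name
try:
    config = creator.create_online_store()
    # config looks like {"type": "redis", "connection_string": "localhost:<mapped-port>,db=0"}
    print(config["connection_string"])
finally:
    creator.teardown()
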
Example #4
    def test_waits_for_nginx_to_be_ready(self):
        container = DockerContainer(
            image='nginx:1.19-alpine').with_name('nginx')
        container.start()

        try:
            io = IO()
            signal = execute_app(
                WaitForOutputApp(
                    container='ngin(.*)',
                    command='',
                    pattern='Configuration complete; ready for start up',
                    timeout=15,
                    io=io))

            self.assertIn('Match found', signal.message)
            self.assertEqual(0, signal.exit_code)

        finally:
            container.stop()
Example #5
    def test_regexp_patterns(self):
        container = DockerContainer(
            image='nginx:1.19-alpine').with_name('nginx_0')
        container.start()

        try:
            io = IO()

            for pattern in ['ngin*', 'nginx.*', 'ng.*x']:
                signal = execute_app(
                    WaitForOutputApp(
                        container=pattern,
                        command='',
                        pattern='Configuration complete; ready for start up',
                        timeout=15,
                        io=io))

                self.assertIn('Match found', signal.message)
                self.assertEqual(0, signal.exit_code)

        finally:
            container.stop()
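
The three patterns above are treated as regular expressions against the container name. A quick standalone check with Python's re module (an illustration of why each pattern matches 'nginx_0', not necessarily how WaitForOutputApp matches internally):

import re

# 'ngin*' matches 'ngi' plus zero or more 'n'; the other two use '.*' wildcards.
for pattern in ['ngin*', 'nginx.*', 'ng.*x']:
    print(pattern, bool(re.search(pattern, 'nginx_0')))  # True for all three
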
Example #6
File: dynamodb.py Project: feast-dev/feast
class DynamoDBOnlineStoreCreator(OnlineStoreCreator):
    def __init__(self, project_name: str):
        super().__init__(project_name)
        self.container = DockerContainer(
            "amazon/dynamodb-local:latest").with_exposed_ports("8000")

    def create_online_store(self) -> Dict[str, str]:
        self.container.start()
        log_string_to_wait_for = (
            "Initializing DynamoDB Local with the following configuration:")
        wait_for_logs(container=self.container,
                      predicate=log_string_to_wait_for,
                      timeout=5)
        exposed_port = self.container.get_exposed_port("8000")
        return {
            "type": "dynamodb",
            "endpoint_url": f"http://localhost:{exposed_port}",
            "region": "us-west-2",
        }

    def teardown(self):
        self.container.stop()
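
The returned config points at DynamoDB Local; a sketch of connecting a client to it, assuming boto3 is available (boto3 and the dummy credentials below are not part of the snippet above):

import boto3  # assumption: boto3 is installed; it is not used by the creator itself

creator = DynamoDBOnlineStoreCreator("example_project")  # placeholder project name
try:
    cfg = creator.create_online_store()
    client = boto3.client(
        "dynamodb",
        endpoint_url=cfg["endpoint_url"],
        region_name=cfg["region"],
        aws_access_key_id="dummy",       # DynamoDB Local accepts arbitrary credentials
        aws_secret_access_key="dummy",
    )
    print(client.list_tables()["TableNames"])
finally:
    creator.teardown()
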
Example #7
    def test_too_many_containers_found(self):
        first = DockerContainer(image='nginx:1.19-alpine').with_name('nginx_0')
        second = DockerContainer(
            image='nginx:1.19-alpine').with_name('nginx_1')

        first.start()
        second.start()

        io = IO()

        try:
            signal = execute_app(
                WaitForOutputApp(
                    container='nginx_*',
                    command='',
                    pattern='Configuration complete; ready for start up',
                    timeout=15,
                    io=io))

            self.assertEqual('Too many containers found', signal.message)
            self.assertEqual(1, signal.exit_code)
        finally:
            first.stop()
            second.stop()
Example #8
class SpannerHelper(object):
  def __init__(self, project_id, instance_id, table, use_emulator):
    self.use_emulator = use_emulator
    self.table = table
    self.host = None
    if use_emulator:
      self.emulator = DockerContainer(
          'gcr.io/cloud-spanner-emulator/emulator:latest').with_exposed_ports(
              9010, 9020)
      retry(self.emulator.start, 3, 'Could not start spanner emulator.')
      time.sleep(3)
      self.host = f'{self.emulator.get_container_host_ip()}:' \
                  f'{self.emulator.get_exposed_port(9010)}'
      os.environ['SPANNER_EMULATOR_HOST'] = self.host
    self.client = spanner.Client(project_id)
    self.instance = self.client.instance(instance_id)
    if use_emulator:
      self.create_instance()

  def create_instance(self):
    self.instance.create().result(120)

  def create_database(self, database_id):
    database = self.instance.database(
        database_id,
        ddl_statements=[
            f'''
          CREATE TABLE {self.table} (
              f_string  STRING(1024) NOT NULL,
              f_int64   INT64,
              f_boolean BOOL
          ) PRIMARY KEY (f_string)'''
        ])
    database.create().result(120)

  def insert_values(self, database_id, values, columns=None):
    values = values or []
    columns = columns or ('f_string', 'f_int64', 'f_boolean')
    with self.instance.database(database_id).batch() as batch:
      batch.insert(
          table=self.table,
          columns=columns,
          values=values,
      )

  def get_emulator_host(self):
    return f'http://{self.host}'

  def read_data(self, database_id, prefix):
    database = self.instance.database(database_id)
    with database.snapshot() as snapshot:
      results = snapshot.execute_sql(
          f'''SELECT * FROM {self.table}
              WHERE f_string LIKE "{prefix}%"
              ORDER BY f_int64''')
      try:
        rows = list(results) if results else None
      except IndexError:
        raise ValueError(f"Spanner results not found for {prefix}.")
    return rows

  def drop_database(self, database_id):
    database = self.instance.database(database_id)
    database.drop()

  def shutdown(self):
    if self.use_emulator:
      try:
        self.emulator.stop()
      except:  # pylint: disable=bare-except
        logging.error('Could not stop Spanner Cloud emulator.')
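
A sketch of how this helper might be exercised against the emulator; the project, instance, database and table identifiers below are placeholders rather than values from the snippet:

# SPANNER_EMULATOR_HOST is exported by SpannerHelper itself when use_emulator=True.
helper = SpannerHelper(
    project_id='test-project',
    instance_id='test-instance',
    table='test_table',
    use_emulator=True,
)
try:
    helper.create_database('test-db')
    helper.insert_values('test-db', [('row-1', 1, True)])
    print(helper.read_data('test-db', prefix='row-'))
finally:
    helper.drop_database('test-db')
    helper.shutdown()
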
Example #9
class CrossLanguageKinesisIOTest(unittest.TestCase):
    @unittest.skipUnless(
        TestPipeline().get_option('aws_kinesis_stream'),
        'Cannot test on real AWS without pipeline options provided')
    def test_kinesis_io_roundtrip(self):
        # TODO: enable this test for localstack once BEAM-10664 is resolved
        self.run_kinesis_write()
        self.run_kinesis_read()

    @unittest.skipIf(
        TestPipeline().get_option('aws_kinesis_stream'),
        'Do not test on localstack when pipeline options were provided')
    def test_kinesis_write(self):
        # TODO: remove this test once BEAM-10664 is resolved
        self.run_kinesis_write()
        records = self.kinesis_helper.read_from_stream(self.aws_kinesis_stream)
        self.assertEqual(
            sorted(records),
            sorted([RECORD + str(i).encode() for i in range(NUM_RECORDS)]))

    def run_kinesis_write(self):
        with TestPipeline(options=PipelineOptions(self.pipeline_args)) as p:
            p.not_use_test_runner_api = True
            _ = (
                p
                | 'Impulse' >> beam.Impulse()
                | 'Generate' >> beam.FlatMap(lambda x: range(NUM_RECORDS))  # pylint: disable=bad-option-value
                | 'Map to bytes' >> beam.Map(
                    lambda x: RECORD + str(x).encode()).with_output_types(bytes)
                | 'WriteToKinesis' >> WriteToKinesis(
                    stream_name=self.aws_kinesis_stream,
                    aws_access_key=self.aws_access_key,
                    aws_secret_key=self.aws_secret_key,
                    region=self.aws_region,
                    service_endpoint=self.aws_service_endpoint,
                    verify_certificate=(not self.use_localstack),
                    partition_key='1',
                    producer_properties=self.producer_properties,
                ))

    def run_kinesis_read(self):
        records = [RECORD + str(i).encode() for i in range(NUM_RECORDS)]

        with TestPipeline(options=PipelineOptions(self.pipeline_args)) as p:
            result = (p
                      | 'ReadFromKinesis' >> ReadDataFromKinesis(
                          stream_name=self.aws_kinesis_stream,
                          aws_access_key=self.aws_access_key,
                          aws_secret_key=self.aws_secret_key,
                          region=self.aws_region,
                          service_endpoint=self.aws_service_endpoint,
                          verify_certificate=not self.use_localstack,
                          max_num_records=NUM_RECORDS,
                          max_read_time=MAX_READ_TIME,
                          request_records_limit=REQUEST_RECORDS_LIMIT,
                          watermark_policy=WatermarkPolicy.ARRIVAL_TIME,
                          watermark_idle_duration_threshold=MAX_READ_TIME,
                          initial_position_in_stream=InitialPositionInStream.AT_TIMESTAMP,
                          initial_timestamp_in_stream=NOW_MILLIS,
                      ).with_output_types(bytes))
            assert_that(result, equal_to(records))

    def set_localstack(self):
        self.localstack = DockerContainer('localstack/localstack:{}'
                                          .format(LOCALSTACK_VERSION))\
          .with_env('SERVICES', 'kinesis')\
          .with_env('KINESIS_PORT', '4568')\
          .with_env('USE_SSL', 'true')\
          .with_exposed_ports(4568)\
          .with_volume_mapping('/var/run/docker.sock', '/var/run/docker.sock', 'rw')

        # Retry a few times; container start can fail transiently (e.g. ReadTimeout).
        for i in range(4):
            try:
                self.localstack.start()
                break
            except Exception as e:  # pylint: disable=broad-except
                if i == 3:
                    logging.error('Could not initialize localstack container')
                    raise e

        self.aws_service_endpoint = 'https://{}:{}'.format(
            self.localstack.get_container_host_ip(),
            self.localstack.get_exposed_port('4568'),
        )

    def setUp(self):
        parser = argparse.ArgumentParser()

        parser.add_argument(
            '--aws_kinesis_stream',
            default='beam_kinesis_xlang',
            help='Kinesis stream name',
        )
        parser.add_argument(
            '--aws_access_key',
            default='accesskey',
            help='AWS access key',
        )
        parser.add_argument(
            '--aws_secret_key',
            default='secretkey',
            help='AWS secret key',
        )
        parser.add_argument(
            '--aws_region',
            default='us-east-1',
            help='AWS region',
        )
        parser.add_argument(
            '--aws_service_endpoint',
            default=None,
            help='URL to an external AWS endpoint',
        )
        parser.add_argument(
            '--use_real_aws',
            default=False,
            dest='use_real_aws',
            action='store_true',
            help='Whether to use real AWS for the tests',
        )
        parser.add_argument(
            '--expansion_service',
            help='URL to an externally launched expansion service.',
        )

        pipeline = TestPipeline()
        argv = pipeline.get_full_options_as_args()

        known_args, self.pipeline_args = parser.parse_known_args(argv)

        self.aws_kinesis_stream = known_args.aws_kinesis_stream
        self.aws_access_key = known_args.aws_access_key
        self.aws_secret_key = known_args.aws_secret_key
        self.aws_region = known_args.aws_region
        self.aws_service_endpoint = known_args.aws_service_endpoint
        self.use_localstack = not known_args.use_real_aws
        self.expansion_service = known_args.expansion_service
        self.producer_properties = {
            'CollectionMaxCount': str(NUM_RECORDS),
            'ConnectTimeout': str(MAX_READ_TIME),
        }

        if self.use_localstack:
            self.set_localstack()

        self.kinesis_helper = KinesisHelper(
            self.aws_access_key,
            self.aws_secret_key,
            self.aws_region,
            self.aws_service_endpoint.replace('https', 'http')
            if self.aws_service_endpoint else None,
        )

        if self.use_localstack:
            self.kinesis_helper.create_stream(self.aws_kinesis_stream)

    def tearDown(self):
        if self.use_localstack:
            self.kinesis_helper.delete_stream(self.aws_kinesis_stream)

            try:
                self.localstack.stop()
            except:  # pylint: disable=bare-except
                logging.error('Could not stop the localstack container')