def __init__(self, test_context):
    self.s3_bucket_name = f"panda-bucket-{uuid.uuid1()}"
    self._extra_rp_conf = dict(
        cloud_storage_enabled=True,
        cloud_storage_access_key=ArchivalTest.s3_access_key,
        cloud_storage_secret_key=ArchivalTest.s3_secret_key,
        cloud_storage_region=ArchivalTest.s3_region,
        cloud_storage_bucket=self.s3_bucket_name,
        cloud_storage_disable_tls=True,
        cloud_storage_api_endpoint=ArchivalTest.s3_host_name,
        cloud_storage_api_endpoint_port=9000,
        cloud_storage_reconciliation_interval_ms=500,
        cloud_storage_max_connections=5,
        log_compaction_interval_ms=self.log_compaction_interval_ms,
        log_segment_size=self.log_segment_size,
    )

    if test_context.function_name == "test_timeboxed_uploads":
        # Use an oversized segment so only the 1-second upload interval,
        # not a segment roll, can trigger uploads in the timeboxed test.
        self._extra_rp_conf.update(
            log_segment_size=1024 * 1024 * 1024,
            cloud_storage_segment_max_upload_interval_sec=1)

    super(ArchivalTest, self).__init__(test_context=test_context,
                                       extra_rp_conf=self._extra_rp_conf)

    self.kafka_tools = KafkaCliTools(self.redpanda)
    self.rpk = RpkTool(self.redpanda)
    self.s3_client = S3Client(
        region='panda-region',
        access_key=u"panda-user",
        secret_key=u"panda-secret",
        endpoint=f'http://{ArchivalTest.s3_host_name}:9000',
        logger=self.logger)
def __init__(self, test_context):
    self.s3_bucket_name = f"panda-bucket-{uuid.uuid1()}"
    extra_rp_conf = dict(
        developer_mode=True,
        cloud_storage_enabled=True,
        cloud_storage_access_key=ArchivalTest.s3_access_key,
        cloud_storage_secret_key=ArchivalTest.s3_secret_key,
        cloud_storage_region=ArchivalTest.s3_region,
        cloud_storage_bucket=self.s3_bucket_name,
        cloud_storage_disable_tls=True,
        cloud_storage_api_endpoint=ArchivalTest.s3_host_name,
        cloud_storage_api_endpoint_port=9000,
        cloud_storage_reconciliation_interval_ms=500,
        cloud_storage_max_connections=5,
        log_segment_size=1048576  # 1MB
    )

    super(ArchivalTest, self).__init__(test_context=test_context,
                                       extra_rp_conf=extra_rp_conf)

    self.kafka_tools = KafkaCliTools(self.redpanda)
    self.s3_client = S3Client(
        region='panda-region',
        access_key=u"panda-user",
        secret_key=u"panda-secret",
        endpoint=f'http://{ArchivalTest.s3_host_name}:9000',
        logger=self.logger)
def __init__(self, test_context):
    self.s3_bucket_name = f"panda-bucket-{uuid.uuid1()}"
    extra_rp_conf = dict(
        log_segment_size=self.segment_size,
        cloud_storage_enabled=True,
        cloud_storage_access_key=self.s3_access_key,
        cloud_storage_secret_key=self.s3_secret_key,
        cloud_storage_region=self.s3_region,
        cloud_storage_bucket=self.s3_bucket_name,
        cloud_storage_disable_tls=True,
        cloud_storage_api_endpoint=self.s3_host_name,
        cloud_storage_api_endpoint_port=9000,
        cloud_storage_reconciliation_interval_ms=500,
        cloud_storage_max_connections=5,
        enable_idempotence=True,
        enable_transactions=True,
        enable_leader_balancer=False,
        enable_auto_rebalance_on_node_add=False,
    )

    super(ShadowIndexingTxTest, self).__init__(test_context=test_context,
                                               extra_rp_conf=extra_rp_conf)

    s3client = S3Client(
        region=self.s3_region,
        access_key=self.s3_access_key,
        secret_key=self.s3_secret_key,
        endpoint=f"http://{self.s3_host_name}:9000",
        logger=self.logger,
    )
    s3client.create_bucket(self.s3_bucket_name)
def __init__(self, test_context):
    self.s3_bucket_name = f"panda-bucket-{uuid.uuid1()}"
    self._extra_rp_conf = dict(
        cloud_storage_enabled=True,
        cloud_storage_access_key=MultiRestartTest.s3_access_key,
        cloud_storage_secret_key=MultiRestartTest.s3_secret_key,
        cloud_storage_region=MultiRestartTest.s3_region,
        cloud_storage_bucket=self.s3_bucket_name,
        cloud_storage_disable_tls=True,
        cloud_storage_api_endpoint=MultiRestartTest.s3_host_name,
        cloud_storage_api_endpoint_port=9000,
        cloud_storage_reconciliation_interval_ms=500,
        cloud_storage_max_connections=5,
        log_compaction_interval_ms=self.log_compaction_interval_ms,
        log_segment_size=self.log_segment_size,
    )

    super(MultiRestartTest, self).__init__(test_context=test_context,
                                           extra_rp_conf=self._extra_rp_conf)

    self.s3_client = S3Client(
        region='panda-region',
        access_key=u"panda-user",
        secret_key=u"panda-secret",
        endpoint=f'http://{MultiRestartTest.s3_host_name}:9000',
        logger=self.logger)
def __init__(self, test_context):
    self.s3_bucket = test_context.globals.get(self.GLOBAL_S3_BUCKET, None)
    self.s3_region = test_context.globals.get(self.GLOBAL_S3_REGION, None)
    self.s3_access_key = test_context.globals.get(
        self.GLOBAL_S3_ACCESS_KEY, None)
    self.s3_secret_key = test_context.globals.get(
        self.GLOBAL_S3_SECRET_KEY, None)
    self.s3_endpoint = None
    # Only target real S3 when every bucket/region/credential global is set.
    self.real_thing = self.s3_bucket and self.s3_region and self.s3_access_key and self.s3_secret_key
    if self.real_thing:
        extra_rp_conf = dict(
            developer_mode=True,
            disable_metrics=False,
            cloud_storage_enabled=True,
            cloud_storage_access_key=self.s3_access_key,
            cloud_storage_secret_key=self.s3_secret_key,
            cloud_storage_region=self.s3_region,
            cloud_storage_bucket=self.s3_bucket,
            cloud_storage_reconciliation_interval_ms=10000,
            cloud_storage_max_connections=10,
            cloud_storage_trust_file="/etc/ssl/certs/ca-certificates.crt",
            log_segment_size=32 * 1048576  # 32MB
        )
    else:
        # Otherwise fall back to a local MinIO endpoint with a unique
        # per-run bucket name.
        bucket_name = f"{ArchivalTest.MINIO_BUCKET_NAME}-{uuid.uuid1()}"
        self.s3_bucket = bucket_name
        self.s3_region = ArchivalTest.MINIO_REGION
        self.s3_access_key = ArchivalTest.MINIO_ACCESS_KEY
        self.s3_secret_key = ArchivalTest.MINIO_SECRET_KEY
        extra_rp_conf = dict(
            developer_mode=True,
            disable_metrics=False,
            cloud_storage_enabled=True,
            cloud_storage_access_key=ArchivalTest.MINIO_ACCESS_KEY,
            cloud_storage_secret_key=ArchivalTest.MINIO_SECRET_KEY,
            cloud_storage_region=ArchivalTest.MINIO_REGION,
            cloud_storage_bucket=bucket_name,
            cloud_storage_disable_tls=True,
            cloud_storage_api_endpoint=ArchivalTest.MINIO_HOST_NAME,
            cloud_storage_api_endpoint_port=9000,
            cloud_storage_reconciliation_interval_ms=10000,
            cloud_storage_max_connections=5,
            log_segment_size=32 * 1048576  # 32MB
        )
        self.s3_endpoint = f'http://{ArchivalTest.MINIO_HOST_NAME}:9000'

    super(ArchivalTest, self).__init__(test_context=test_context,
                                       extra_rp_conf=extra_rp_conf)

    self.kafka_tools = KafkaCliTools(self.redpanda)
    self.s3_client = S3Client(region=self.s3_region,
                              access_key=self.s3_access_key,
                              secret_key=self.s3_secret_key,
                              endpoint=self.s3_endpoint,
                              logger=self.logger)
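This constructor never creates the bucket itself (only the ShadowIndexingTxTest variant calls create_bucket directly), so a test like this typically prepares and tears down the bucket around each run. A minimal sketch under that assumption: create_bucket appears in the snippets above, while empty_bucket and delete_bucket are placeholder names for whatever cleanup helpers the S3Client actually provides.

def setUp(self):
    if not self.real_thing:
        # Local MinIO run: create the per-test bucket before the brokers
        # start uploading segments into it.
        self.s3_client.create_bucket(self.s3_bucket)
    super().setUp()

def tearDown(self):
    if not self.real_thing:
        # Assumed cleanup helpers (names are placeholders); a real S3
        # bucket supplied via globals is left alone.
        self.s3_client.empty_bucket(self.s3_bucket)
        self.s3_client.delete_bucket(self.s3_bucket)
    super().tearDown()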
def __init__(self, test_context):
    super(EndToEndShadowIndexingTest,
          self).__init__(test_context=test_context)

    self.s3_bucket_name = f"panda-bucket-{uuid.uuid1()}"
    self.topic = EndToEndShadowIndexingTest.s3_topic_name
    self._extra_rp_conf = dict(
        cloud_storage_enabled=True,
        cloud_storage_enable_remote_read=True,
        cloud_storage_enable_remote_write=True,
        cloud_storage_access_key=EndToEndShadowIndexingTest.s3_access_key,
        cloud_storage_secret_key=EndToEndShadowIndexingTest.s3_secret_key,
        cloud_storage_region=EndToEndShadowIndexingTest.s3_region,
        cloud_storage_bucket=self.s3_bucket_name,
        cloud_storage_disable_tls=True,
        cloud_storage_api_endpoint=EndToEndShadowIndexingTest.s3_host_name,
        cloud_storage_api_endpoint_port=9000,
        cloud_storage_reconciliation_interval_ms=500,
        cloud_storage_max_connections=5,
        log_segment_size=EndToEndShadowIndexingTest.segment_size,  # 1MB
    )

    self.scale = Scale(test_context)
    self.redpanda = RedpandaService(
        context=test_context,
        num_brokers=3,
        client_type=KafkaCliTools,
        extra_rp_conf=self._extra_rp_conf,
        topics=EndToEndShadowIndexingTest.topics,
    )

    self.kafka_tools = KafkaCliTools(self.redpanda)
    self.s3_client = S3Client(
        region=EndToEndShadowIndexingTest.s3_region,
        access_key=EndToEndShadowIndexingTest.s3_access_key,
        secret_key=EndToEndShadowIndexingTest.s3_secret_key,
        endpoint=f"http://{EndToEndShadowIndexingTest.s3_host_name}:9000",
        logger=self.logger,
    )
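With remote write enabled and a 1MB segment size, a test like this usually confirms that segments actually reached the bucket. A hedged sketch of such a check: only the S3Client constructor and create_bucket appear above, so list_objects and the Key attribute on its results are assumptions, and wait_until is ducktape's ducktape.utils.util helper.

def _segments_in_bucket(self):
    # Assumed helper: list_objects(bucket) yielding objects with a Key
    # attribute; the real S3Client API may name these differently.
    keys = [obj.Key for obj in self.s3_client.list_objects(self.s3_bucket_name)]
    return [k for k in keys if k.endswith(".log")]

def _wait_for_uploads(self, min_segments, timeout_sec=120):
    # Poll until at least min_segments log segments are visible in the bucket.
    wait_until(lambda: len(self._segments_in_bucket()) >= min_segments,
               timeout_sec=timeout_sec,
               backoff_sec=5,
               err_msg="segments were not uploaded to S3 in time")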