def test_integration_broker_connect(
    ensure_broker_service: Fixture, ensure_connect_service: Fixture
) -> None:
    """Test kafkaconnect with a Kafka broker and Kafka Connect.

    pytest-docker uses the docker-compose.yaml in the test directory.
    """
    broker_url = Config.broker_url
    admin_client = AdminClient({"bootstrap.servers": broker_url})
    t1 = NewTopic(topic="test.t1", num_partitions=1)
    t2 = NewTopic(topic="test.t2", num_partitions=1)
    t3 = NewTopic(topic="test.t3", num_partitions=1)
    # Create test topics in Kafka; if the broker is unreachable the test
    # exits early rather than failing (best-effort integration test).
    try:
        admin_client.create_topics([t1, t2, t3])
        # Give the broker a moment to register the new topics.
        time.sleep(1)
    except KafkaException:
        return None
    # Test topic discovery: test.t1 is excluded, test.t2/t3 must be found.
    topic = Topic(
        broker_url=broker_url, topic_regex="test.*", excluded_topics="test.t1"
    )
    assert "test.t2" in topic.names
    assert "test.t3" in topic.names
    # Configure the connector
    connect = Connect(connect_url=Config.connect_url)
    connect_config = InfluxConfig()
    connect_config.update_topics(topic.names)
    # Create the connector using the Kafka Connect API
    connect.create_or_update(
        name="influxdb-sink", connect_config=connect_config.asjson()
    )
    # List connectors from the Kafka Connect API.
    # (Renamed from `list` to avoid shadowing the builtin.)
    connectors = connect.list()
    assert "influxdb-sink" in connectors
def test_create_or_update() -> None:
    """Verify that create_or_update echoes the connector name back."""
    client = Connect(connect_url="http://localhost:8083")
    config = InfluxConfig()
    config.update_topics(["t1", "t2", "t3"])
    response = client.create_or_update(
        name="influxdb-sink", connect_config=config.asjson()
    )
    assert "influxdb-sink" in response
def test_integration_broker_connect(
    ensure_broker_service: Fixture, ensure_connect_service: Fixture
) -> None:
    """Test kafkaconnect with a Kafka broker and Kafka Connect.

    pytest-docker uses the docker-compose.yaml in the test directory.
    """
    admin_client = AdminClient({"bootstrap.servers": BROKER_URL})
    t1 = NewTopic(topic="test.t1", num_partitions=1)
    t2 = NewTopic(topic="test.t2", num_partitions=1)
    t3 = NewTopic(topic="test.t3", num_partitions=1)
    # Create test topics in Kafka; if the broker is unreachable the test
    # exits early rather than failing (best-effort integration test).
    try:
        admin_client.create_topics([t1, t2, t3])
        # Give the broker time to register the new topics.
        time.sleep(5)
    except KafkaException:
        return None
    # Test topic discovery: test.t1 is excluded, test.t2/t3 must be found.
    topic = Topic(
        broker_url=BROKER_URL,
        topic_regex="test.*",
        excluded_topic_regex="test.t1",
    )
    assert "test.t1" not in topic.names
    assert "test.t2" in topic.names
    assert "test.t3" in topic.names
    # Configure the connector
    connect = Connect(connect_url=CONNECT_URL)
    connect_config = InfluxConfig(
        name="influxdb-sink",
        connect_influx_url="http://localhost:8086",
        connect_influx_db="mydb",
        tasks_max=1,
        connect_influx_username="******",
        connect_influx_password="******",
        connect_influx_error_policy="foo",
        connect_influx_max_retries="1",
        connect_influx_retry_interval="1",
        connect_progress_enabled=True,
    )
    connect_config.update_topics(topic.names)
    # Create the connector using the Kafka Connect API
    connect.create_or_update(
        name="influxdb-sink", connect_config=connect_config.asjson()
    )
    # List connectors from the Kafka Connect API.
    # (Renamed from `list` to avoid shadowing the builtin.)
    connectors = connect.list()
    assert "influxdb-sink" in connectors
def test_create_or_update() -> None:
    """Check that create_or_update reports the new connector's name."""
    client = Connect(connect_url="http://localhost:8083")
    sink_config = InfluxConfig(
        name="influxdb-sink",
        connect_influx_url="http://localhost:8086",
        connect_influx_db="mydb",
        tasks_max=1,
        connect_influx_username="******",
        connect_influx_password="******",
        connect_influx_error_policy="foo",
        connect_influx_max_retries="1",
        connect_influx_retry_interval="1",
        connect_progress_enabled=True,
    )
    sink_config.update_topics(["t1", "t2", "t3"])
    response = client.create_or_update(
        name="influxdb-sink", connect_config=sink_config.asjson()
    )
    assert "influxdb-sink" in response
def create_influxdb_sink(
    ctx: click.Context,
    topiclist: tuple,
    name: str,
    connect_influx_url: str,
    connect_influx_db: str,
    tasks_max: str,
    connect_influx_username: str,
    connect_influx_password: str,
    topic_regex: str,
    dry_run: bool,
    auto_update: bool,
    validate: bool,
    check_interval: str,
    excluded_topic_regex: str,
    connect_influx_error_policy: str,
    connect_influx_max_retries: str,
    connect_influx_retry_interval: str,
    connect_progress_enabled: str,
    timestamp: str,
) -> int:
    """Create an instance of the InfluxDB Sink connector.

    A list of topics can be specified using the TOPICLIST argument.
    If not, topics are discovered from Kafka. Use the ``--topic-regex``
    and ``--excluded_topics`` options to help in selecting the topics
    that you want to write to InfluxDB. To check for new topics and
    update the connector configuration use the ``--auto-update`` and
    ``--check-interval`` options.

    Returns 0 on success and 1 when the configuration fails validation.
    """
    # Get configuration from the main command. Previously `config` was
    # only assigned inside `if ctx.parent:` but used unconditionally
    # below, which raised NameError when the parent context was missing;
    # fail with an explicit error instead.
    if ctx.parent:
        config = ctx.parent.obj["config"]
    else:
        raise click.ClickException("Could not get the main command configuration.")
    # Connector configuration
    influx_config = InfluxConfig(
        name=name,
        connect_influx_url=connect_influx_url,
        connect_influx_db=connect_influx_db,
        tasks_max=int(tasks_max),
        connect_influx_username=connect_influx_username,
        connect_influx_password=connect_influx_password,
        connect_influx_error_policy=connect_influx_error_policy,
        connect_influx_max_retries=connect_influx_max_retries,
        connect_influx_retry_interval=connect_influx_retry_interval,
        connect_progress_enabled=(connect_progress_enabled == "true"),
    )
    # The variadic argument is a tuple
    topics: List[str] = list(topiclist)
    if not topics:
        click.echo("Discovering Kafka topics...")  # typo fixed ("Discoverying")
        topics = Topic(config.broker_url, topic_regex, excluded_topic_regex).names
        # Guard against discovery returning None/empty.
        n = 0 if not topics else len(topics)
        click.echo(f"Found {n} topics.")
    connect = Connect(connect_url=config.connect_url)
    if topics:
        influx_config.update_topics(topics, timestamp)
    # --validate option: print the validation results and exit.
    if validate:
        click.echo(
            connect.validate(
                name=influx_config.connector_class,
                connect_config=influx_config.asjson(),
            )
        )
        return 0
    # --dry-run option returns the connector configuration
    if dry_run:
        click.echo(influx_config.asjson())
        return 0
    # Validate configuration before creating the connector
    validation = connect.validate(
        name=influx_config.connector_class,
        connect_config=influx_config.asjson(),
    )
    try:
        error_count = json.loads(validation)["error_count"]
        click.echo(f"Validation returned {error_count} error(s).")
        if error_count > 0:
            click.echo(
                "Use the ``--validate`` option to return the validation "
                "results."
            )
            return 1
    except Exception:
        # Validation response was not the expected JSON; show it as-is.
        click.echo(validation)
        return 1
    click.echo(f"Uploading {name} connector configuration...")
    connect.create_or_update(name=name, connect_config=influx_config.asjson())
    if auto_update:
        # Poll Kafka for new topics and refresh the connector config.
        while True:
            time.sleep(int(check_interval) / 1000)
            try:
                # Current list of topics from Kafka
                current_topics = Topic(
                    config.broker_url, topic_regex, excluded_topic_regex
                ).names
                new_topics = list(set(current_topics) - set(topics))
                if new_topics:
                    click.echo("Found new topics, updating the connector...")
                    influx_config.update_topics(current_topics, timestamp)
                    connect.create_or_update(
                        name=name, connect_config=influx_config.asjson()
                    )
                    topics = current_topics
            except KeyboardInterrupt:
                raise click.ClickException("Interrupted.")  # typo fixed
    return 0
def create_s3_sink(
    ctx: click.Context,
    topiclist: tuple,
    name: str,
    s3_bucket_name: str,
    s3_region: str,
    topics_dir: str,
    flush_size: int,
    rotate_interval_ms: int,
    partition_duration_ms: int,
    path_format: str,
    tasks_max: int,
    topic_regex: str,
    dry_run: bool,
    auto_update: bool,
    validate: bool,
    check_interval: int,
    excluded_topics: str,
    locale: str,
    timezone: str,
    timestamp_extractor: str,
    timestamp_field: str,
) -> int:
    """Create an instance of the S3 Sink connector.

    A list of topics can be specified using the TOPICLIST argument.
    If not, topics are discovered from Kafka. Use the ``--topic-regex``
    and ``--excluded_topics`` options to help in selecting the topics
    that you want to write to S3. To check for new topics and update
    the connector configuration use the ``--auto-update`` and
    ``--check-interval`` options.

    Returns 0 on success and 1 when the configuration fails validation.
    """
    # Connector configuration
    s3config = S3Config(
        name=name,
        s3_bucket_name=s3_bucket_name,
        s3_region=s3_region,
        topics_dir=topics_dir,
        flush_size=flush_size,
        rotate_interval_ms=rotate_interval_ms,
        partition_duration_ms=partition_duration_ms,
        path_format=path_format,
        tasks_max=tasks_max,
        locale=locale,
        timezone=timezone,
        timestamp_extractor=timestamp_extractor,
        timestamp_field=timestamp_field,
    )
    # Get configuration from the main command. Previously `config` was
    # only assigned inside `if ctx.parent:` but used unconditionally
    # below, which raised NameError when the parent context was missing;
    # fail with an explicit error instead.
    if ctx.parent:
        config = ctx.parent.obj["config"]
    else:
        raise click.ClickException("Could not get the main command configuration.")
    # The variadic argument is a tuple
    topics: List[str] = list(topiclist)
    if not topics:
        click.echo("Discovering Kafka topics...")  # typo fixed ("Discoverying")
        topics = Topic(config.broker_url, topic_regex, excluded_topics).names
        # Guard against discovery returning None/empty.
        n = 0 if not topics else len(topics)
        click.echo(f"Found {n} topics.")
    connect = Connect(connect_url=config.connect_url)
    if topics:
        s3config.update_topics(topics)
    # --validate option: print the validation results and exit.
    if validate:
        click.echo(
            connect.validate(
                name=s3config.connector_class,
                connect_config=s3config.asjson(),
            )
        )
        return 0
    # --dry-run option returns the connector configuration
    if dry_run:
        click.echo(s3config.asjson())
        return 0
    # Validate configuration before creating the connector
    validation = connect.validate(
        name=s3config.connector_class,
        connect_config=s3config.asjson(),
    )
    try:
        error_count = json.loads(validation)["error_count"]
        click.echo(f"Validation returned {error_count} error(s).")
        if error_count > 0:
            click.echo(
                "Use the ``--validate`` option to return the validation "
                "results."
            )
            # Bug fix: return a failure code on validation errors
            # (was `return 0`, inconsistent with create_influxdb_sink).
            return 1
    except Exception:
        # Validation response was not the expected JSON; show it as-is.
        click.echo(validation)
        return 1
    click.echo(f"Uploading {name} connector configuration...")
    connect.create_or_update(name=name, connect_config=s3config.asjson())
    if auto_update:
        # Poll Kafka for new topics and refresh the connector config.
        while True:
            time.sleep(int(check_interval) / 1000)
            try:
                # Current list of topics from Kafka
                current_topics = Topic(
                    config.broker_url, topic_regex, excluded_topics
                ).names
                new_topics = list(set(current_topics) - set(topics))
                if new_topics:
                    click.echo("Found new topics, updating the connector...")
                    s3config.update_topics(current_topics)
                    connect.create_or_update(
                        name=name, connect_config=s3config.asjson()
                    )
                    topics = current_topics
            except KeyboardInterrupt:
                raise click.ClickException("Interrupted.")  # typo fixed
    return 0