def test_topic_deletions_piped(
    non_interactive_cli_runner: CliRunner,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic: str,
):
    """Piping topic names (plus one bogus name) into `delete topics --no-verify`
    removes exactly the listed topics and leaves everything else untouched."""
    doomed = [randomly_generated_topics(confluent_admin_client) for _ in range(3)]
    survivor = randomly_generated_topics(confluent_admin_client)

    before = confluent_admin_client.list_topics(timeout=5).topics.keys()
    for name in doomed:
        assert name in before
    assert survivor in before
    assert "not_in_the_list_of_topics" not in before

    result = non_interactive_cli_runner.invoke(
        esque,
        args=["delete", "topics", "--no-verify"],
        input="\n".join(doomed + ["not_in_the_list_of_topics"]),
        catch_exceptions=False,
    )
    assert result.exit_code == 0

    after = confluent_admin_client.list_topics(timeout=5).topics.keys()
    for name in doomed:
        assert name not in after
    assert survivor in after
    # Nothing new may appear: every surviving topic already existed beforehand.
    assert all(name in before for name in after)
def test_create_topic_without_topic_name_fails(
    non_interactive_cli_runner: CliRunner,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
):
    """`create topic` without a topic name must exit non-zero and create nothing."""
    count_before = len(confluent_admin_client.list_topics(timeout=5).topics)

    result = non_interactive_cli_runner.invoke(esque, args=["create", "topic"])

    count_after = len(confluent_admin_client.list_topics(timeout=5).topics)
    assert result.exit_code != 0
    assert count_before == count_after
def test_delete_topic_singular_without_topic_name_is_handled(
    interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient
):
    """`delete topic` with no name exits cleanly, deletes nothing, and explains why."""
    count_before = len(confluent_admin_client.list_topics(timeout=5).topics)

    result = interactive_cli_runner.invoke(esque, args=["delete", "topic"])

    count_after = len(confluent_admin_client.list_topics(timeout=5).topics)
    assert result.exit_code == 0
    assert count_before == count_after
    assert "doesn't exist on the cluster." in result.output
def test_delete_topic_plural_without_topic_name_is_handled(
    interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient
):
    """`delete topics` with an empty list exits cleanly, deletes nothing, and explains why."""
    count_before = len(confluent_admin_client.list_topics(timeout=5).topics)

    result = interactive_cli_runner.invoke(esque, args=["delete", "topics"])

    count_after = len(confluent_admin_client.list_topics(timeout=5).topics)
    assert result.exit_code == 0
    assert count_before == count_after
    assert "The provided list contains no existing topics" in result.output
def test_topic_creation_works(
    topic_controller: TopicController,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic_id: str,
):
    """A topic created via the controller becomes visible to the admin client."""
    assert topic_id not in confluent_admin_client.list_topics(timeout=5).topics.keys()

    topic_controller.create_topics([Topic(topic_id, replication_factor=1)])

    assert topic_id in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_topic_deletion_works(
    topic_controller: TopicController,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic: str,
):
    """A topic deleted via the controller disappears from the admin client's view."""
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()

    topic_controller.delete_topic(Topic(topic))

    # Poll once so the admin client refreshes its cached cluster metadata.
    confluent_admin_client.poll(timeout=1)
    assert topic not in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_topic_deletion_singular_without_verification_does_not_work(
    interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient, topic: str
):
    """Without an affirmative confirmation, `delete topic <name>` must not delete the topic."""
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()

    result = interactive_cli_runner.invoke(esque, args=["delete", "topic", topic], catch_exceptions=False)
    assert result.exit_code == 0

    # The topic survives because the prompt was never answered with "y".
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_topic_deletion_stops_in_non_interactive_mode_without_no_verify(
    non_interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient, topic: str
):
    """Without a TTY and without --no-verify, deletion must abort instead of proceeding."""
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()

    result = non_interactive_cli_runner.invoke(esque, args=["delete", "topics"], input=topic)
    assert result.exit_code != 0
    assert isinstance(result.exception, NoConfirmationPossibleException)

    # The abort must leave the topic untouched.
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_topic_deletion_as_stdin_works(
    non_interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient, topic: str
):
    """A topic name piped on stdin is deleted when --no-verify is supplied."""
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()

    result = non_interactive_cli_runner.invoke(
        esque, args=["delete", "topics", "--no-verify"], input=topic, catch_exceptions=False
    )
    assert result.exit_code == 0

    assert topic not in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_topic_deletion_as_argument_plural_works(
    interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient, topic: str
):
    """`delete topics <name>` deletes the topic once the prompt is confirmed with "y"."""
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()

    result = interactive_cli_runner.invoke(
        esque, args=["delete", "topics", topic], input="y\n", catch_exceptions=False
    )
    assert result.exit_code == 0

    assert topic not in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_create_topic_as_argument_with_verification_works(
    interactive_cli_runner: CliRunner,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic_id: str,
):
    """`create topic <name>` creates the topic once the prompt is confirmed with "Y"."""
    assert topic_id not in confluent_admin_client.list_topics(timeout=5).topics.keys()

    result = interactive_cli_runner.invoke(
        esque, args=["create", "topic", topic_id], input="Y\n", catch_exceptions=False
    )
    assert result.exit_code == 0

    assert topic_id in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_edit_topic_works(
    interactive_cli_runner: CliRunner,
    monkeypatch: MonkeyPatch,
    topic_controller: TopicController,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic: str,
):
    """`edit topic` applies the (mocked) editor output to the cluster topic config."""
    assert topic in confluent_admin_client.list_topics(timeout=5).topics.keys()

    # Full editable config as the user would "save" it in the editor.
    config_dict = {
        "config": {
            "cleanup.policy": "delete",
            "compression.type": "producer",
            "delete.retention.ms": "123456789",
            "file.delete.delay.ms": "60000",
            "flush.messages": "123456789",
            "flush.ms": "9223372036854775807",
            "follower.replication.throttled.replicas": "",
            "index.interval.bytes": "4096",
            "leader.replication.throttled.replicas": "",
            "max.message.bytes": "1000012",
            "message.downconversion.enable": "true",
            "message.format.version": "2.2-IV1",
            "message.timestamp.difference.max.ms": "123456789",
            "message.timestamp.type": "CreateTime",
            "min.cleanable.dirty.ratio": "0.5",
            "min.compaction.lag.ms": "0",
            "min.insync.replicas": "1",
            "preallocate": "false",
            "retention.bytes": "-1",
            "retention.ms": "123456789",
            "segment.bytes": "123456789",
            "segment.index.bytes": "123456789",
            "segment.jitter.ms": "0",
            "segment.ms": "123456789",
            "unclean.leader.election.enable": "true",
        }
    }

    # Replace click's editor round-trip with the canned YAML above.
    monkeypatch.setattr(
        click, "edit", lambda *args, **kwargs: yaml.dump(config_dict, default_flow_style=False)
    )

    result = interactive_cli_runner.invoke(edit_topic, topic, input="y\n", catch_exceptions=False)
    assert result.exit_code == 0

    actual_config = topic_controller.get_cluster_topic(topic).as_dict(only_editable=True)["config"]
    for key, expected in config_dict["config"].items():
        # Compare (key, value) pairs so a failure message names the offending key.
        assert (key, actual_config[key]) == (key, expected)
def test_keep_dash_delete_dot(
    interactive_cli_runner: CliRunner, confluent_admin_client: confluent_kafka.admin.AdminClient, topic_factory
):
    """Deleting "basic.topic" must not touch "basic-topic" (dot vs dash must not be conflated)."""
    dashed_topic, _ = topic_factory(1, "basic-topic")
    dotted_topic, _ = topic_factory(1, "basic.topic")

    existing = confluent_admin_client.list_topics(timeout=5).topics.keys()
    assert dashed_topic in existing
    assert dotted_topic in existing

    result = interactive_cli_runner.invoke(
        esque, args=["delete", "topics", dotted_topic], input="y\n", catch_exceptions=False
    )
    assert result.exit_code == 0

    existing = confluent_admin_client.list_topics(timeout=5).topics.keys()
    assert dotted_topic not in existing
    assert dashed_topic in existing
def test_topic_creation_raises_for_wrong_config(
    topic_controller: TopicController,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic_id: str,
):
    """Creating a topic with an unsatisfiable config must raise a KafkaException."""
    assert topic_id not in confluent_admin_client.list_topics(timeout=5).topics.keys()

    # The test cluster has a single broker, so replication_factor=2 cannot be satisfied.
    with pytest.raises(KafkaException):
        topic_controller.create_topics([Topic(topic_id, replication_factor=2)])
def test_produce_to_non_existent_topic_fails(
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    non_interactive_cli_runner: CliRunner,
    topic_id: str,
):
    """Producing to an unknown topic without a TTY aborts (cannot ask to create it)."""
    target_topic_id = topic_id
    stdin_payload = json.dumps(dict(key='"key1"', value='"value1"')) + "\n"

    result = non_interactive_cli_runner.invoke(
        esque, args=["produce", "--stdin", target_topic_id], input=stdin_payload
    )
    assert isinstance(result.exception, NoConfirmationPossibleException)

    # The aborted run must not have created the topic as a side effect.
    assert target_topic_id not in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_produce_to_non_existant_topic_fails(
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    interactive_cli_runner: CliRunner,
    topic_id: str,
):
    """Declining the "create topic?" prompt makes produce fail with TopicDoesNotExistException."""
    target_topic_id = topic_id

    result = interactive_cli_runner.invoke(produce, args=["--stdin", target_topic_id], input="n\n")
    assert isinstance(result.exception, TopicDoesNotExistException)

    # Answering "n" must leave the cluster without the topic.
    assert target_topic_id not in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_create_without_confirmation_does_not_create_topic(
    interactive_cli_runner: CliRunner,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic_id: str,
):
    """`create topic <name>` without an affirmative answer must not create the topic."""
    result = interactive_cli_runner.invoke(esque, args=["create", "topic", topic_id], catch_exceptions=False)
    assert result.exit_code == 0

    assert topic_id not in confluent_admin_client.list_topics(timeout=5).topics.keys()
def test_topic_list_output_compatibility_for_piping(
    non_interactive_cli_runner: CliRunner,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
):
    """The output of `get topics` can be piped verbatim into `delete topics`."""
    prefix = "foo_"
    doomed = [randomly_generated_topics(confluent_admin_client, prefix=prefix) for _ in range(3)]

    existing = confluent_admin_client.list_topics(timeout=5).topics.keys()
    for name in doomed:
        assert name in existing

    # Capture the list command's stdout exactly as a shell pipe would see it.
    listing_output = non_interactive_cli_runner.invoke(esque, args=["get", "topics", "--prefix", prefix]).stdout

    result = non_interactive_cli_runner.invoke(
        esque, args=["delete", "topics", "--no-verify"], input=listing_output, catch_exceptions=False
    )
    assert result.exit_code == 0

    existing = confluent_admin_client.list_topics(timeout=5).topics.keys()
    assert not any(name in existing for name in doomed)
def test_topic_creation_with_template_works(
    non_interactive_cli_runner: CliRunner,
    state: State,
    confluent_admin_client: confluent_kafka.admin.AdminClient,
    topic_id: str,
):
    """`create topic -l <template>` copies partitions, replication, and config to the new topic."""
    template_topic = topic_id + "_1"
    new_topic = topic_id + "_2"
    assert template_topic not in confluent_admin_client.list_topics(timeout=5).topics.keys()

    replication_factor = 1
    num_partitions = 1
    config = {
        "cleanup.policy": "delete",
        "delete.retention.ms": "123456",
        "file.delete.delay.ms": "789101112",
        "flush.messages": "12345678910111213",
        "flush.ms": "123456789",
    }
    # Create the template topic with a distinctive, non-default configuration.
    state.cluster.topic_controller.create_topics(
        [Topic(template_topic, replication_factor=replication_factor, num_partitions=num_partitions, config=config)]
    )

    result = non_interactive_cli_runner.invoke(
        esque,
        args=["create", "topic", "--no-verify", "-l", template_topic, new_topic],
        catch_exceptions=False,
    )
    assert result.exit_code == 0

    created = state.cluster.topic_controller.get_cluster_topic(new_topic)
    assert created.replication_factor == replication_factor
    assert created.num_partitions == num_partitions
    for config_key, expected in config.items():
        assert created.config[config_key] == expected