Example #1
    def __verify_topology(self):
        # Every configured input topic must exist before the app can start.
        for input_topic in self.input_topic_names:
            input_topic_exists = exist_topic(self.brokers, input_topic)
            if not input_topic_exists:
                raise ValueError(f"The input topic {input_topic} doesn't exist")

        output_topic_exists = exist_topic(self.brokers, self.output_topic_name)
        if not output_topic_exists:
            raise ValueError(f"The output topic {self.output_topic_name} doesn't exist")

        # The error topic is optional; verify it only when one is configured.
        if self.error_topic_name:
            error_topic_exists = exist_topic(self.brokers, self.error_topic_name)
            if not error_topic_exists:
                raise ValueError(f"The error topic {self.error_topic_name} doesn't exist")
Example #2
def test_topic_exist(cluster_metadata):
    client_admin = create_admin_client_from_metadata(cluster_metadata)
    setup_cluster_with_topics(client_admin, ["test_topic_exist"])
    brokers = [
        f"kafka://{cluster_metadata['kafka']['host']}:{cluster_metadata['kafka']['port']}"
    ]
    # Check the freshly created topic, then clean it up before asserting.
    result = exist_topic(brokers, "test_topic_exist")
    delete_topic(client_admin, "test_topic_exist")
    assert result is True
def test_command_cleanup_with_output_topic_delete(cluster_metadata,
                                                  client_schema_registry,
                                                  schema_registry_url,
                                                  dummy_avro_schema,
                                                  monkeypatch, app):
    clean_arguments()
    input_topic_name = "dummy-topic-input"
    output_topic_name = "dummy-topic-output"
    key_subject = f"{output_topic_name}-key"
    value_subject = f"{output_topic_name}-value"
    brokers = [
        f"kafka://{cluster_metadata['kafka']['host']}:{cluster_metadata['kafka']['port']}"
    ]
    amount_of_messages = 5
    group_id = "dummy-group"
    client_admin = create_admin_client_from_metadata(cluster_metadata)

    set_environment_test(brokers, group_id, input_topic_name, monkeypatch,
                         output_topic_name, schema_registry_url, "true")

    client_schema_registry.register(key_subject, dummy_avro_schema)
    client_schema_registry.register(value_subject, dummy_avro_schema)
    subjects = client_schema_registry.get_subjects()

    assert key_subject in subjects
    assert value_subject in subjects

    setup_cluster_with_topics(client_admin,
                              [input_topic_name, output_topic_name])

    generate_dummy_messages(cluster_metadata, amount_of_messages,
                            input_topic_name)
    read_messages_from_topic(cluster_metadata, input_topic_name, group_id,
                             amount_of_messages, [0, 1, 2, 3, 4])

    app.start_app()
    time.sleep(5)

    read_messages_from_topic(cluster_metadata, input_topic_name, group_id,
                             amount_of_messages, [0, 1, 2, 3, 4])

    output_topic_exist = exist_topic(brokers, output_topic_name)
    assert output_topic_exist is False

    subjects = client_schema_registry.get_subjects()
    assert key_subject not in subjects
    assert value_subject not in subjects

    delete_topic(client_admin, input_topic_name)
Example #4
def clean_up_command(app: App,
                     brokers: List[str],
                     input_topic: str,
                     output_topic: str,
                     app_name: str,
                     schema_registry_url: str,
                     error_topic: str = None,
                     schema_registry_config: dict = None,
                     delete_output_topic: bool = False):
    """Clean the app's tables, reset offsets on the input topic and,
    optionally, delete the output/error topics and their schema subjects."""
    schema_registry = build_schema_registry_client(schema_registry_url,
                                                   schema_registry_config)
    clean_tables_from_app(app)
    client = build_admin_client(brokers)
    brokers_endpoint = parse_brokers_from_kafka_format(brokers)

    # Reset offsets on the input topic only if it actually exists.
    input_topic_exist = exist_topic(brokers, input_topic)
    if input_topic_exist:
        reset_offsets_from_partitions(client, ",".join(brokers_endpoint),
                                      app_name, input_topic)

    if delete_output_topic:
        output_topic_exist = exist_topic(brokers, output_topic)
        if output_topic_exist:
            delete_topic(client, output_topic)
            delete_subject(schema_registry, output_topic)

        # The error topic is optional; remove it and its subjects only when configured.
        if error_topic:
            error_topic_exist = exist_topic(brokers, error_topic)
            if error_topic_exist:
                delete_topic(client, error_topic)
                delete_subject(schema_registry, error_topic)
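For context, a hypothetical call to clean_up_command could look like the following; the broker address, application name, and registry URL are made-up values, and app stands for whatever App instance the project builds elsewhere:

# Illustrative values only; none of these come from the examples' fixtures.
clean_up_command(app,
                 brokers=["kafka://localhost:9092"],
                 input_topic="dummy-topic-input",
                 output_topic="dummy-topic-output",
                 app_name="dummy-group",
                 schema_registry_url="http://localhost:8081",
                 error_topic="dummy-topic-error",
                 delete_output_topic=True)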
def test_command_cleanup_with_error_topic_delete(cluster_metadata,
                                                 client_schema_registry,
                                                 schema_registry_url,
                                                 dummy_avro_schema,
                                                 monkeypatch, app):
    clean_arguments()
    input_topic_name = "dummy-topic-input"
    output_topic_name = "dummy-topic-output"
    error_topic_name = "dummy-topic-error"
    output_key_subject = f"{output_topic_name}-key"
    output_value_subject = f"{output_topic_name}-value"
    error_key_subject = f"{error_topic_name}-key"
    error_value_subject = f"{error_topic_name}-value"

    client_schema_registry.register(output_key_subject, dummy_avro_schema)
    client_schema_registry.register(output_value_subject, dummy_avro_schema)
    client_schema_registry.register(error_key_subject, dummy_avro_schema)
    client_schema_registry.register(error_value_subject, dummy_avro_schema)
    subjects = client_schema_registry.get_subjects()

    assert error_key_subject in subjects
    assert error_value_subject in subjects

    group_id = "dummy-group"
    brokers = [
        f"kafka://{cluster_metadata['kafka']['host']}:{cluster_metadata['kafka']['port']}"
    ]
    client_admin = create_admin_client_from_metadata(cluster_metadata)

    set_environment_test(brokers, group_id, input_topic_name, monkeypatch,
                         output_topic_name, schema_registry_url, "true",
                         error_topic_name)

    setup_cluster_with_topics(
        client_admin, [input_topic_name, output_topic_name, error_topic_name])

    app.start_app()
    time.sleep(5)

    error_topic_exist = exist_topic(brokers, error_topic_name)
    assert error_topic_exist is False

    subjects = client_schema_registry.get_subjects()
    assert error_key_subject not in subjects
    assert error_value_subject not in subjects

    delete_topic(client_admin, input_topic_name)
Example #6
def test_topic_not_exist(cluster_metadata):
    brokers = [
        f"kafka://{cluster_metadata['kafka']['host']}:{cluster_metadata['kafka']['port']}"
    ]
    # The topic is never created in this test, so the lookup should fail.
    result = exist_topic(brokers, "test_topic_exist")
    assert result is False
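The schema-registry helpers used in the cleanup examples (build_schema_registry_client and delete_subject) are likewise not shown. A rough sketch, assuming they wrap confluent-kafka's SchemaRegistryClient and the <topic>-key / <topic>-value subject naming the tests check:

from confluent_kafka.schema_registry import SchemaRegistryClient


def build_schema_registry_client(url, config=None):
    # Merge the URL into whatever extra configuration was supplied.
    conf = dict(config or {})
    conf["url"] = url
    return SchemaRegistryClient(conf)


def delete_subject(schema_registry, topic_name):
    # One subject is registered per key and per value of a topic,
    # so removing a topic means deleting both subjects.
    for subject in (f"{topic_name}-key", f"{topic_name}-value"):
        schema_registry.delete_subject(subject)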