# Example #1
def instance_manager_requires_kafka(loaded_instance_manager):
    """Register the Kafka-dependent artifacts on the instance manager,
    yield the manager to the test, then delete the topics they used.
    """
    # things that require kafka

    # kafka_message XF pointed at a fresh, per-run topic
    messages_topic_name = str(uuid4())
    xf_kafka_msg = deepcopy(examples.XF_KAFKA_MESSAGE)
    xf_kafka_msg['topic'] = messages_topic_name

    # error reporting pipeline with its own fresh error topic
    reports_topic_name = str(uuid4())
    reporting_pipeline = deepcopy(examples.PIPELINE_KAFKA_LOGS)
    reporting_pipeline['error_handling']['error_topic'] = reports_topic_name

    artifacts = (
        ('kafkamessage', xf_kafka_msg),
        ('pipeline', reporting_pipeline),
    )
    for artifact_type, body in artifacts:
        # each artifact body carries its own id
        loaded_instance_manager.update(body['id'], artifact_type, TENANT, body)

    yield loaded_instance_manager

    # clean-up: drop the tenant-prefixed topics created for this run
    kadmin = get_admin_client(config.get_kafka_admin_config())
    for topic in (messages_topic_name, reports_topic_name):
        delete_topic(kadmin, f'{TENANT}.{topic}')
def create_remote_kafka_assets(request, sample_generator, *args):
    """Create a tenant-scoped test topic, publish generated samples to it,
    yield to the test, then delete the topic.

    Skips all Kafka work (but still yields, so pytest accepts the
    generator fixture) when the test run is not an integration run.
    """
    # @mark annotation does not work with autouse=True.
    if 'integration' not in request.config.invocation_params.args:
        LOG.debug('NOT creating Kafka Assets')
        # Bug fix: the original had this early exit commented out, so
        # assets were created even for non-integration runs despite the
        # log message. A bare `return` before `yield` would also break
        # the generator-fixture contract, hence yield-then-return.
        yield None
        return
    LOG.debug('Creating Kafka Assets')
    kafka_security = config.get_kafka_admin_config()
    kadmin = get_admin_client(kafka_security)
    new_topic = f'{TENANT}.{TEST_TOPIC}'
    create_topic(kadmin, new_topic)
    # record everything we publish so tests can compare against it
    GENERATED_SAMPLES[new_topic] = []
    producer = get_producer(kafka_security)
    schema = parse(json.dumps(ANNOTATED_SCHEMA))
    for subset in sample_generator(max=100, chunk=10):
        GENERATED_SAMPLES[new_topic].extend(subset)
        produce(subset, schema, new_topic, producer)
    yield None  # end of work before clean-up
    LOG.debug(f'deleting topic: {new_topic}')
    delete_topic(kadmin, new_topic)