def create_remote_kafka_assets(request, sample_generator, *args):
    # @mark annotation does not work with autouse=True, so check the
    # invocation args directly and skip setup outside integration runs.
    if 'integration' not in request.config.invocation_params.args:
        LOG.debug('NOT creating Kafka Assets')
        yield None  # a generator fixture must still yield exactly once
        return
    LOG.debug('Creating Kafka Assets')
    kafka_security = config.get_kafka_admin_config()
    kadmin = get_admin_client(kafka_security)
    new_topic = f'{TENANT}.{TEST_TOPIC}'
    create_topic(kadmin, new_topic)
    GENERATED_SAMPLES[new_topic] = []
    producer = get_producer(kafka_security)
    schema = parse(json.dumps(ANNOTATED_SCHEMA))
    # Produce samples in chunks, recording each batch so tests can
    # compare what was sent against what is later consumed.
    for subset in sample_generator(max=100, chunk=10):
        GENERATED_SAMPLES[new_topic].extend(subset)
        produce(subset, schema, new_topic, producer)
    yield None  # end of work before clean-up
    LOG.debug(f'deleting topic: {new_topic}')
    delete_topic(kadmin, new_topic)
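
# A minimal sketch of the `sample_generator` fixture the setup above
# assumes: its value is a callable yielding `max` records in lists of
# `chunk`. The record shape here is illustrative only, not the
# project's actual ANNOTATED_SCHEMA.
import pytest


@pytest.fixture
def sample_generator():
    def _gen(max=100, chunk=10):
        records = [{'id': str(i), 'value': i} for i in range(max)]
        for i in range(0, max, chunk):
            yield records[i:i + chunk]
    return _gen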

from typing import Any, List


def _send_kafka(objs: List[Any], schema, _type,
                max_size=MAX_KAFKA_MESSAGE_SIZE, callback=None):
    # Refuse oversized batches up front; the caller is expected to
    # catch the RuntimeError and retry with smaller chunks.
    total_size = fb_utils.utf8size(schema) + fb_utils.utf8size(objs)
    _logger.debug(
        f'Sending {len(objs)} of {_type} to kafka @ size {total_size}')
    if total_size >= max_size:
        raise RuntimeError(
            f'Message size: {total_size} exceeds maximum: {max_size}. Chunking.'
        )
    if not get_broker_info(KADMIN):
        raise ConnectionError('Could not connect to Kafka.')
    schema = parse(schema)
    tenant = CONF.get('tenant')
    topic = fb_utils.sanitize_topic(f'{tenant}.fbs.{_type}')
    produce(objs, schema, topic, PRODUCER, callback=callback)
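
# Hypothetical caller sketch (not part of the original module): the
# "Chunking." RuntimeError above signals an oversized batch, so a
# caller can catch it and retry with smaller halves until each payload
# fits. Safe to call repeatedly, since _send_kafka re-parses the
# schema string on every invocation.
def send_with_chunking(objs, schema, _type):
    try:
        _send_kafka(objs, schema, _type)
    except RuntimeError:
        if len(objs) <= 1:
            raise  # a single record exceeds the limit; cannot split further
        mid = len(objs) // 2
        send_with_chunking(objs[:mid], schema, _type)
        send_with_chunking(objs[mid:], schema, _type)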