def __enter__(self):
    logger.debug("Start class OutputData!")
    self.content_format = self.args.op_format.upper()
    self.output_type = self.args.op_type.upper()
    if self.output_type == "FILE":
        self.fp = open(self.args.op_file, "w")
        self.csv_list = []
    elif self.output_type == "KAFKA":
        # Recreate the topic so output starts from a clean state.
        self.topic = self.args.op_topic
        kafka_topic = NewTopic(name=self.topic,
                               num_partitions=1,
                               replication_factor=1)
        client = KafkaAdminClient(bootstrap_servers=self.args.op_bootstrap)
        try:
            # delete_topics takes topic names; create_topics takes NewTopic objects.
            client.delete_topics([self.topic])
            client.create_topics([kafka_topic])
        except KafkaError:
            logger.warning(
                "delete or create kafka topic raised error, ignore it!")
        self.producer = KafkaProducer(
            bootstrap_servers=self.args.op_bootstrap)
    elif self.output_type in ("ES", "ELASTICSEARCH"):
        self.es = Elasticsearch(
            hosts=self.args.op_es_hosts,
            sniff_on_start=True,
            # sniff_on_connection_fail=True,
            sniffer_timeout=20,
            # http_auth=('user', 'secret')
        )
        self.es_index = self.args.op_index
    return self
def delete_kafka_topic():
    logger.info("Request received - DELETE /delete_kafka_topic")
    if not request.is_json:
        logger.warning("Format not valid")
        return 'Format not valid', 400
    try:
        admin_client = KafkaAdminClient(bootstrap_servers=kafka_ip_port,
                                        client_id='delete_kafka_topic')
        # Parse JSON
        data = request.get_json()
        logger.info("Data received: %s", data)
        topic = data["topic"]
        logger.info("Deleting topic %s in Kafka", topic)
        admin_client.delete_topics(topics=[topic])
        admin_client.close()
    except Exception as e:
        logger.error("Error while parsing request")
        logger.exception(e)
        return str(e), 400
    return '', 201
class AdminClient:
    def __init__(self, record):
        '''
        record: not null, a list of inputs needed to create a Kafka topic
        '''
        self.topic_name = record[0]
        self.replication_factor = record[1]
        self.partition = record[2]
        self.zoo_conn = record[3]
        self.client = KafkaAdminClient(bootstrap_servers=self.zoo_conn)

    def create_kafka_topic(self):
        try:
            # create_topics expects a list of NewTopic objects.
            self.client.create_topics([NewTopic(
                name=self.topic_name,
                num_partitions=self.partition,
                replication_factor=self.replication_factor)],
                validate_only=False)
        except Exception as ex:
            print("Error creating Kafka Topic \n")
            print(str(ex))

    def delete_kafka_topic(self, topic_name):
        try:
            # delete_topics expects a list of topic names, not NewTopic objects.
            self.client.delete_topics([topic_name])
            print(topic_name + ' is successfully deleted')
        except Exception as ex:
            print('error deleting topic')
            print(str(ex))
def test_validate_number_in_interval_double(self):
    prod, cons = app.run(Config.K_MONITOR_TEST_TOPIC,
                         Config.PS_DATABASE_NAME,
                         Config.PS_TEST_WEBSITE_TABLE_NAME,
                         "tests/t_monitor_heavy_test.yml")
    interval = File.read_time_interval("tests/t_monitor_heavy_test.yml")
    time.sleep(interval * 2)
    app.stop_monitor(prod, cons)
    admin_client = KafkaAdminClient(
        bootstrap_servers=[Config.K_HOST + ':' + Config.K_PORT],
        security_protocol=Config.K_SECURITY_PROTOCOL,
        ssl_cafile=Config.K_SSL_CAT_FILE,
        ssl_certfile=Config.K_SSL_CERT_FILE,
        ssl_keyfile=Config.K_SSL_KEY_FILE)
    admin_client.delete_topics([Config.K_MONITOR_TEST_TOPIC])
    monitors = File.read_monitors("tests/t_monitor_heavy_test.yml")
    # Over two monitoring cycles, the message count should be double the URL count.
    self.assertEqual(prod.get_message_count(), len(monitors) * 2)
def delete_topic(username, title):
    # Deletes a Kafka topic, based on username and title
    admin_client = KafkaAdminClient(bootstrap_servers='localhost:9092')
    topic_name = "{}-{}".format(username, title)
    admin_client.delete_topics([topic_name])
def delete(self):
    logger.debug('Deleting topic %s', self.name)
    try:
        admin_client = KafkaAdminClient(
            bootstrap_servers=self._cluster.kafka.brokers,
            request_timeout_ms=300000)
        admin_client.delete_topics([self.name])
    except Exception as e:
        logger.warning('Unable to delete Kafka topic %s: %s', self.name,
                       str(e))
    finally:
        self.topic = None
class Kafka:
    def __init__(self, bootstrap_servers):
        self.bootstrap_servers = bootstrap_servers
        self.client = KafkaAdminClient(bootstrap_servers=bootstrap_servers)
        self.producer = KafkaProducer(bootstrap_servers=bootstrap_servers)

    def create_topics(self, new_topics):
        '''
        create a list of topics
        :param new_topics: a list of ('topic_name', 'partition_number', 'replica_factor') tuples
        :return:
        '''
        topics = [NewTopic(item[0], item[1], item[2]) for item in new_topics]
        self.client.create_topics(topics)

    def delete_topics(self, topic_names):
        '''
        delete a list of topics
        :param topic_names: a list of topic names
        :return:
        '''
        self.client.delete_topics(topic_names)

    def send(self, topic_name, msg):
        '''
        produce a message to this topic
        :param topic_name: topic name
        :param msg: message string
        :return:
        '''
        self.producer.send(topic_name, msg.encode('utf-8'))

    def get_consumer(self, topic_names, group_id=None):
        '''
        subscribe a consumer to a list of topics
        :param topic_names: a list of topic names for subscription
        :return: a KafkaConsumer subscribed to the given topics
        '''
        consumer = KafkaConsumer(bootstrap_servers=self.bootstrap_servers,
                                 group_id=group_id)
        consumer.subscribe(topic_names)
        return consumer

    def pause(self, consumer, topic, partition):
        consumer.pause(TopicPartition(topic=topic, partition=partition))

    def resume(self, consumer, topic, partition):
        consumer.resume(TopicPartition(topic=topic, partition=partition))
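# A minimal usage sketch for the Kafka wrapper above; the broker address,
# topic name, and group id are illustrative assumptions, not part of the
# original code.
kafka = Kafka(bootstrap_servers='localhost:9092')
kafka.create_topics([('demo-topic', 1, 1)])  # (name, partitions, replicas)
kafka.send('demo-topic', 'hello')
consumer = kafka.get_consumer(['demo-topic'], group_id='demo-group')
kafka.delete_topics(['demo-topic'])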
def main():
    logging.basicConfig(format='%(levelname)s:%(message)s',
                        level=logging.INFO)
    topic_name = [sys.argv[1]]
    admin_client = KafkaAdminClient(bootstrap_servers="localhost:9092",
                                    client_id='test')
    admin_client.delete_topics(topics=topic_name)
    logging.info('Topic {} has been deleted'.format(topic_name))
def delete(self, topicName):
    """ Delete Topic. """
    app.logger.info(
        "Request to delete topic with name {0}.".format(topicName))
    try:
        admin = KafkaAdminClient(
            bootstrap_servers=config['cluster.broker.listeners'],
            security_protocol=config['cluster.security.protocol'],
            ssl_cafile=config['cluster.ssl.cafile'],
            ssl_certfile=config['cluster.ssl.certfile'],
            ssl_keyfile=config['cluster.ssl.keyfile'])
        result = admin.delete_topics([topicName])
    except UnknownTopicOrPartitionError as e:
        api.abort(400, e.description)
    except Exception as e:
        api.abort(500, str(e.args))
    finally:
        admin.close()
    app.logger.debug(result)
    if result.topic_error_codes[0][1] == 0:
        return {"deleted": topicName}
    else:
        api.abort(400, "Bad Request(Topic Deletion Failed)")
def __delTopic(topic):
    try:
        admin_client = KafkaAdminClient(
            bootstrap_servers=current_app.config["KFKA_URL"],
        )
        if admin_client is None:
            return "admin_client is none", RespCode.ARGS_ERROR
        res = admin_client.delete_topics([topic])
        admin_client.close()
        with MsgCount.get_lock():
            MsgCount.value = 0
        return ""
    except KafkaError as ke:
        admin_client.close()
        return f"{ke.errno}-{ke.message}", RespCode.ARGS_ERROR
    except Exception as e:
        admin_client.close()
        logging.info(f"exception ={e}")
        return "exception ", RespCode.ARGS_ERROR
def handle(event, context):
    if event.method == "POST":
        try:
            data = json.loads(event.body)
            topic = data["topic"]
            admin_client = KafkaAdminClient(
                bootstrap_servers="kafka.deployment8:9092",
                client_id="delete_kafka_topic",
            )
            admin_client.delete_topics(topics=[topic])
            admin_client.close()
            return {"statusCode": 201, "body": "No Content"}
        except Exception as e:
            return {"statusCode": 400, "body": f"Error parsing request: {e}"}
    else:
        return {"statusCode": 200, "body": "No action for this endpoint"}
def delete_topic(self) -> bool:
    log.debug("Deleting topic " + self.topic_id)
    ## Connect to kafka
    admin_client = KafkaAdminClient(
        bootstrap_servers=self._get_bootstrap_servers(),
        client_id=self._get_client_id())
    ## Delete topic
    try:
        admin_client.delete_topics([self.topic_id])
        return True
    except UnknownTopicOrPartitionError as e:
        log.error(f"Topic {self.topic_id} does not exist.")
        raise EventTopicDeleteException from e
    except Exception as e:
        log.error(f"Error deleting the Topic {self.topic_id} --> {e}",
                  exc_info=True)
        raise EventGeneralException from e
def test_producer_equal_consumer(self):
    prod, cons = app.run(Config.K_MONITOR_TEST_TOPIC,
                         Config.PS_DATABASE_NAME,
                         Config.PS_TEST_WEBSITE_TABLE_NAME)
    interval = File.read_time_interval()
    time.sleep(interval - 1)
    app.stop_monitor(prod, cons)
    admin_client = KafkaAdminClient(
        bootstrap_servers=[Config.K_HOST + ':' + Config.K_PORT],
        security_protocol=Config.K_SECURITY_PROTOCOL,
        ssl_cafile=Config.K_SSL_CAT_FILE,
        ssl_certfile=Config.K_SSL_CERT_FILE,
        ssl_keyfile=Config.K_SSL_KEY_FILE)
    admin_client.delete_topics([Config.K_MONITOR_TEST_TOPIC])
    self.assertEqual(prod.get_message_count(), cons.get_message_count())
def create_kafka_topic(server: str, topic_name_list: list):
    topic_list = [
        NewTopic(name=topic_name, num_partitions=1, replication_factor=1)
        for topic_name in topic_name_list
    ]
    admin_client = KafkaAdminClient(bootstrap_servers=[server],
                                    client_id='test')
    mock_consumer = KafkaConsumer(
        bootstrap_servers=[server],
        auto_offset_reset="earliest",
        enable_auto_commit=True,
        auto_commit_interval_ms=300,
        value_deserializer=lambda x: loads(x.decode("utf-8")),
        group_id="my-group")
    admin_client.delete_topics(mock_consumer.topics())
    # Re-check the topic list before closing the consumer; calling topics()
    # on a closed consumer would fail.
    remaining_topics = mock_consumer.topics()
    mock_consumer.close()
    if len(remaining_topics) == 0:
        admin_client.create_topics(new_topics=topic_list, validate_only=False)
        return True
    else:
        return False
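# Note: delete_topics() only initiates deletion; brokers complete it
# asynchronously, so the re-check above can still see the old topics.
# A minimal polling sketch under that assumption (the helper name and
# timeouts are illustrative, not part of the original code):
import time

def wait_for_topics_deleted(consumer, topics, timeout_s=10.0, poll_s=0.5):
    """Poll consumer.topics() until none of the given topics remain."""
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        if not set(topics) & consumer.topics():
            return True
        time.sleep(poll_s)
    return False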
class KafkaClient:
    def __init__(self, client, url="localhost:9092"):
        try:
            self.client = KafkaAdminClient(bootstrap_servers=url,
                                           client_id=client)
        except Exception as excp:
            print(f"[ERROR] - could not connect to kafka: {str(excp)}")

    def create_topics(self, topics):
        topics_list = [
            NewTopic(name=topic, num_partitions=1, replication_factor=1)
            for topic in topics
        ]
        self.client.create_topics(new_topics=topics_list, validate_only=False)

    def close_topics(self, topics):
        self.client.delete_topics(topics)

    def close(self):
        self.client.close()
        print("[INFO] - kafka connection closed")
class KafkaAdminClient:
    def __init__(self, host):
        self.admin_client = VanillaKafkaAdminClient(bootstrap_servers=host)
        self.client = KafkaClient(host)

    def create_topics(self, topics, num_partitions=1, replication_factor=1):
        new_topics = [
            NewTopic(
                name=topic,
                num_partitions=num_partitions,
                replication_factor=replication_factor,
            )
            for topic in topics
        ]
        return self.admin_client.create_topics(
            new_topics=new_topics, validate_only=False
        )

    def list_topics(self):
        return self.client.consumer.topics()

    def delete_topics(self, topics):
        self.admin_client.delete_topics(topics, timeout_ms=300)
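# A minimal usage sketch for the wrapper above; the broker address and topic
# names are illustrative assumptions. Note the class intentionally shadows
# kafka.admin.KafkaAdminClient (imported here as VanillaKafkaAdminClient).
admin = KafkaAdminClient(host="localhost:9092")
admin.create_topics(["demo-a", "demo-b"], num_partitions=2)
print(admin.list_topics())
admin.delete_topics(["demo-a", "demo-b"])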
from kafka.admin import KafkaAdminClient
from kafka import KafkaConsumer
from config import *

delete_topic = "mytopic"
admin_client = KafkaAdminClient(bootstrap_servers=BOOTSTRAP_SERVERS)
consumer = KafkaConsumer(bootstrap_servers=[BOOTSTRAP_SERVERS])

if delete_topic in consumer.topics():
    admin_client.delete_topics(topics=[delete_topic])
    # Deletion is asynchronous, so this immediate re-check may still see the topic.
    if delete_topic not in consumer.topics():
        print(f"Topic: {delete_topic} deleted successfully !!")
    else:
        print(f"Unable to delete topic: {delete_topic}")
else:
    print(f"Topic: {delete_topic} not found !!")

consumer.close()
admin_client.close()
def delete_topic(client: KafkaAdminClient, name: str):
    client.delete_topics([name])
    'cam_32', 'cam_33', 'cam_34', 'cam_35', 'cam_36', 'cam_37', 'cam_38',
    'cam_39', 'cam_40', 'cam_41', 'cam_42', 'cam_43', 'cam_44', 'cam_45',
    'cam_46', 'cam_47', 'cam_48', 'cam_49', 'cam_50', 'cam_51', 'cam_52',
    'cam_53', 'cam_54', 'cam_55', 'cam_56']

topic_list = all_cams
admin_client.delete_topics(topics=topic_list)
def delete_topic(topic_name):
    client = KafkaAdminClient(bootstrap_servers='192.168.1.6:19092')
    # delete_topics expects a list of names; a bare string would be
    # iterated character by character.
    client.delete_topics([topic_name])
def rep(self):
    """Builds and runs the pipeline for the current parameter permutation."""
    origin, pipeline_builder = self._get_origin(self.origin)
    destination, pipeline_builder = self._get_destination(
        self.destination, pipeline_builder)
    pipeline_builder.add_error_stage('Discard')

    if self.number_of_processors == 4:
        stream_selector, pipeline_builder = self._get_stream_selector(
            pipeline_builder)
        expression_evaluator, pipeline_builder = self._get_expression_evaluator(
            pipeline_builder)
        field_type_converter, pipeline_builder = self._get_field_type_converter(
            pipeline_builder)
        schema_generator, pipeline_builder = self._get_schema_generator(
            pipeline_builder)
        trash, pipeline_builder = self._get_destination(
            'Trash', pipeline_builder)
        origin >> stream_selector
        stream_selector >> trash
        stream_selector >> expression_evaluator >> field_type_converter >> schema_generator >> destination
        stream_selector.condition = [{
            'outputLane': stream_selector.output_lanes[0],
            'predicate': '${record:attribute("sourceId") == "DOESNOTEXIST"}'
        }, {
            'outputLane': stream_selector.output_lanes[1],
            'predicate': 'default'
        }]
    else:
        origin >> destination

    for environment in self.environments.values():
        if environment is not None:
            pipeline = pipeline_builder.build().configure_for_environment(
                environment)

    results = self.sdc_executor.benchmark_pipeline(
        pipeline, record_count=self.record_count, runs=self.runs)
    results['generated_date'] = str(datetime.now())
    results['sdc_version'] = self.sdc_builder.version
    results['origin'] = self.origin
    results['destination'] = self.destination
    results['record_count'] = self.record_count
    results['threads'] = self.number_of_threads
    results['dataset'] = DATASETS[self.dataset]['label']
    results['batch_size'] = self.batch_size
    results['destination_data_format'] = self.destination_format
    results['processor_count'] = self.number_of_processors
    results['cpu_count'] = len(psutil.Process().cpu_affinity())
    results['memory_gb'] = round(psutil.virtual_memory().total / (1000 ** 3))
    try:
        results['instance_type'] = urlopen(
            'http://169.254.169.254/latest/meta-data/instance-type'
        ).read().decode('utf-8')
    except (HTTPError, URLError):
        results['instance_type'] = 'unknown'
    results['origin_system'] = self.origin_system
    results['destination_system'] = self.destination_system

    # Remove outliers
    if len(results['runs']) > 1:
        results['runs'] = [
            x for x in results['runs']
            if -STD_DEV_THRESHOLD < (x - results['throughput_mean']) /
            results['throughput_std_dev'] < STD_DEV_THRESHOLD
        ]
        results['throughput_mean'] = statistics.mean(results['runs'])

    with open(f"results/{results['pipeline_id']}.json", "w") as file:
        json.dump(results, file)

    # Cleanup
    if self.destination == 'Kafka Producer':
        admin_client = KafkaAdminClient(
            bootstrap_servers=self.environments['cluster'].kafka.brokers,
            request_timeout_ms=180000)
        admin_client.delete_topics([self.destination_kafka_topic])
    if self.destination == 'JDBC Producer':
        self.destination_table.drop(self.environments['database'].engine)
    if self.origin == 'HTTP Client':
        self.http_mock.delete_mock()
def RemoveKafkaTopic(topic_name):
    admin_client = KafkaAdminClient(bootstrap_servers="localhost:9092",
                                    client_id='test')
    admin_client.delete_topics([topic_name], timeout_ms=6000)
class WarriorKafkaClient():
    """
    This class contains all kafka admin client related methods
    """
    def __init__(self, **configs):
        """
        create a kafka client
        """
        print_info("Creating kafka client")
        try:
            self.kafka_client = KafkaAdminClient(**configs)
        except KafkaError as exc:
            print_error("kafka client - Exception during connecting to broker- {}".format(exc))

    def create_topics(self, topic_sets, **kwargs):
        """
        create topics for the producer or consumer to use
        Arguments:
          topic_sets(list) : list of ['topic_name', 'num_partitions',
                             'replication_factor'] lists
          example : ['topic1',1,1]
          timeout(int): time in milliseconds
        Returns:
          result(bool) : False if exception occurs, True otherwise
        """
        timeout = kwargs.get("timeout", None)
        validate = kwargs.get("validate", False)
        new_topics = [NewTopic(name=tup[0], num_partitions=tup[1],
                               replication_factor=tup[2]) for tup in topic_sets]
        print_info("creating topics")
        try:
            self.kafka_client.create_topics(new_topics=new_topics,
                                            timeout_ms=timeout,
                                            validate_only=validate)
            result = True
        except KafkaError as exc:
            print_error("Exception during creating topics - {}".format(exc))
            result = False
        return result

    def delete_topics(self, topics, timeout=None):
        """
        Delete topics
        Arguments:
          topics(list): list of topic names
          timeout(int): timeout in milliseconds
        Returns:
          result(bool) : False if exception occurs, True otherwise
        """
        print_info("deleting topics {}".format(topics))
        try:
            self.kafka_client.delete_topics(topics=topics, timeout_ms=timeout)
            result = True
        except KafkaError as exc:
            print_error("Exception during deleting topics - {}".format(exc))
            result = False
        return result

    def create_partitions_in_topic(self, partitions, **kwargs):
        """
        create partitions in topic
        Arguments:
          partitions(list) : list of ['topic_name','num_partitions'] lists
          example : [['topic1',4], ['topic2',5]]
          timeout(int): timeout in milliseconds
        Returns:
          result(bool) : False if exception occurs, True otherwise
        """
        timeout = kwargs.get("timeout", None)
        validate = kwargs.get("validate", False)
        topic_partitions = {tup[0]: NewPartitions(total_count=tup[1])
                            for tup in partitions}
        print_info("creating partitions in topic")
        try:
            self.kafka_client.create_partitions(topic_partitions=topic_partitions,
                                                timeout_ms=timeout,
                                                validate_only=validate)
            result = True
        except KafkaError as exc:
            print_error("Exception during creating partitions - {}".format(exc))
            result = False
        return result
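# A minimal usage sketch for WarriorKafkaClient above; the broker address,
# topic names, and timeouts are illustrative assumptions.
wkc = WarriorKafkaClient(bootstrap_servers='localhost:9092')
wkc.create_topics([['topic1', 1, 1]], timeout=5000)
wkc.create_partitions_in_topic([['topic1', 4]], timeout=5000)
wkc.delete_topics(['topic1'], timeout=5000)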
existingTopics = consumer.topics()

# clean out old topics?
if args.cleantopics:
    cleanTopics = [
        t for t in existingTopics
        if t.startswith('w-') or t.startswith('wc-') or t.startswith('wf-')
    ]
    if cleanTopics:
        print('\nThe following topics will be removed:\n',
              ','.join(cleanTopics), '\n')
        res = input('enter \'yes\' to proceed:')
        if res == 'yes':
            log.info("removing topics: " + ','.join(cleanTopics))
            try:
                adm.delete_topics(cleanTopics, timeout_ms=10)
            except Exception:
                # 'finally' would have logged this unconditionally; only log on failure.
                pw_log("An error occurred while deleting topics")
        else:
            pi_log("Aborted removing of topics")
    else:
        pi_log('No topics found to delete')

newTopics = [
    NewTopic(f"wc-{c}", 1, 2) for c in cities
    if not f"wc-{c}" in existingTopics
]
newTopics.extend([
    NewTopic(f"wf-{c}", 1, 2) for c in cities
    if not f"wf-{c}" in existingTopics
])
from kafka.admin import KafkaAdminClient, NewTopic
from kafka.errors import KafkaError

print('\nSTART PROGRAM <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
print('')

try:
    admin_client = KafkaAdminClient(bootstrap_servers="localhost:9092",
                                    client_id='test')
except Exception:
    print('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
    print('A CLIENT ADMIN CONNECTION error occurred.')
    print('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
    print('')

topic_name = "sports-event"
topic_list = [topic_name]

try:
    admin_client.delete_topics(topic_list, timeout_ms=None)
    print('TOPIC: ' + topic_name + ' DELETED CORRECTLY ...')
    print('')
except Exception:
    print('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
    print('A CLIENT DELETE TOPIC error occurred.')
    print('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
    print('')

print('END PROGRAM <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
print('')
class KafkaAdmin:
    """
    Kafka administration via the kafka_proc-python library

    1. Topic management
    """

    """
    Initialization
    """

    def __init__(self, hosts=None, connection_option=None, confluent=False):
        """
        :param hosts:
        :param connection_option:
        """
        if hosts is None:
            self.hosts = GeneralConfig.KAFKA_URL.split(',')
        else:
            self.hosts = hosts

        if connection_option is None:
            self.connection_option = default_cfg.DEFAULT_CONNECTION_OPTION_ADMIN
        else:
            self.connection_option = connection_option

        self.confluent = confluent

    """
    Context management
    """

    def __enter__(self):
        """
        Activate the admin client
        :return:
        """
        if self.confluent:
            self.admin = AdminClient(
                {"bootstrap.servers": ",".join(self.hosts)})
        else:
            try:
                self.admin = DefaultAdminKafka(bootstrap_servers=self.hosts)
            except Exception as exp:
                # The confluent admin client is too limited and cannot even
                # report how many topics exist
                raise ErrorKafkaAdmin(exp)

                # self.admin = AdminClient({
                #     "bootstrap.servers": ",".join(self.hosts)
                # })
                #
                # self.confluent = True

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Close the admin client
        :param exc_type:
        :param exc_val:
        :param exc_tb:
        :return:
        """
        if self.confluent:
            pass
        else:
            self.admin.close()

    """
    Topics

    with KafkaAdmin() as ka:
        ka.create_topic(topics={'name': 'top_func4', 'num_partitions': 3, 'replication_factor': 3})
    """

    def create_topic(self, topics, topic_global_option=None):
        """
        Create topics
        :param topics: list[dict={'name': 'top_func', 'num_partitions': 3, 'replication_factor': 3}]
            or a single dict - see the NewTopic class
        :param topic_global_option: extra params passed to create_topics
        :return:
        """
        if self.confluent:
            NewTopicClass = NewTopicConf
        else:
            NewTopicClass = NewTopic

        topic_to_kafka = []
        if isinstance(topics, list):
            for topic in topics:
                if topic.get('replication_factor') is None:
                    topic['replication_factor'] = len(self.hosts)
                if self.confluent:
                    topic['topic'] = topic.pop('name')
                topic_to_kafka.append(NewTopicClass(**topic))
        elif isinstance(topics, dict):
            if topics.get('replication_factor') is None:
                topics['replication_factor'] = len(self.hosts)
            if self.confluent:
                topics['topic'] = topics.pop('name')
            topic_to_kafka.append(NewTopicClass(**topics))
        else:
            raise ValueError('incorrect topics!')

        if self.confluent:
            result = self.admin.create_topics(topic_to_kafka)
        else:
            if topic_global_option is None:
                topic_global_option = {
                    'validate_only': False,
                    'timeout_ms': default_cfg.DEFAULT_BROKER_TIMEOUT_MS_OPERATIONS
                }
            result = self.admin.create_topics(new_topics=topic_to_kafka,
                                              **topic_global_option)
        return result

    def delete_topics(self, topics_name, timeout_ms=None):
        """
        Delete topics. DO NOT USE!!!

        WARNING! Deleting all topics once destroyed the cluster: Docker simply
        could not recover them, and they had to be recreated from scratch
        after first clearing everything stored in the volumes. In short, it
        does not work at all - the container dies, and nothing works without
        wiping the volumes.

        :param topics_name: comma-separated list of topic names
        :param timeout_ms: how long to wait for a broker response
        :return:
        """
        raise ValueError('THIS MAY DESTROY YOUR SYSTEM')

        if timeout_ms is None:
            timeout_ms = default_cfg.DEFAULT_BROKER_TIMEOUT_MS_OPERATIONS
        result = self.admin.delete_topics(topics=topics_name,
                                          timeout_ms=timeout_ms)
        return result

    def get_topics(self):
        if self.confluent:
            return None
        else:
            return self.admin.list_topics()
class KafkaService(object):
    def __init__(self, str_bootstrap_server, log_obj):
        self.logger = log_obj.logger
        self.__str_host = str_bootstrap_server
        self.kafka_client = KafkaClient(hosts=self.__str_host)
        self.__admin = KafkaAdminClient(bootstrap_servers=self.__str_host,
                                        client_id='iot_platform')

    def get_topic(self, kafka_topic_name):
        return self.kafka_client.topics[kafka_topic_name]

    def consumer(self, kafka_topic_name):
        return self.get_topic(
            kafka_topic_name=kafka_topic_name).get_simple_consumer()

    def publisher(self, kafka_topic):
        return self.get_topic(kafka_topic_name=kafka_topic).get_producer()

    def publish(self, kafka_topic, msg):
        try:
            self.logger.info(
                'Published message [MESSAGE: %s] -> kafka_topic [TOPIC: %s]'
                % (msg, kafka_topic))
            self.publisher(kafka_topic=kafka_topic).produce(message=msg)
        except Exception as e:
            self.logger.error(
                'Cannot publish message: [MESSAGE: %s] to topic: [TOPIC: %s], [ERROR: %s]'
                % (msg, kafka_topic, e))

    def create_topic(self, list_topic_names, num_partitions=1,
                     replication_factor=1):
        try:
            list_topics = []
            topics = self.get_all_topics()
            for name in list_topic_names:
                if name in topics:
                    return False
                topic = NewTopic(name=name,
                                 num_partitions=num_partitions,
                                 replication_factor=replication_factor)
                list_topics.append(topic)
            self.__admin.create_topics(new_topics=list_topics)
            self.logger.info('Create topics: [NAMES: %s]' % list_topic_names)
        except Exception as e:
            self.logger.error('Cannot create topic: [ERROR: %s]' % e)

    def delete_topic(self, topic_names):
        """
        :param topic_names: a list of string topic names
        :return:
        """
        try:
            self.__admin.delete_topics(topics=topic_names, timeout_ms=6000)
            self.logger.info('Deleted topic [TOPIC: %s]' % topic_names)
        except Exception as e:
            self.logger.error('Cannot delete topic [TOPIC: %s], [ERROR: %s]'
                              % (topic_names, e))

    def get_all_topics(self):
        return self.__admin.list_topics()
import yaml

from kafka import KafkaConsumer
from kafka.admin import KafkaAdminClient, NewTopic
from kafka.errors import KafkaError

# Load Config
config = yaml.load(open('./kafka_admin.yaml'), Loader=yaml.FullLoader)

# Initialize Consumer
consumer = KafkaConsumer(bootstrap_servers=config['bootstrap_servers'])

# Initialize Admin
admin = KafkaAdminClient(bootstrap_servers=config['bootstrap_servers'])

# Clean up topics
topics = consumer.topics()
print("Found {} topics".format(str(len(topics))))
if config['clean_topics_on_start']:
    for topic in topics:
        print("Deleting Topic: {}".format(topic))
        admin.delete_topics([topic])

# Create topics
print("Creating {} topics...".format(config['num_topics']))
for i in range(1, config['num_topics'] + 1):
    topic_name = config['topic_prefix'] + str(i)
    print("Creating Topic: {}".format(topic_name))
    admin.create_topics([
        NewTopic(name=topic_name,
                 num_partitions=config['num_partitions'],
                 replication_factor=config['replication_factor'])
    ])
print("Done!")
        replication_factor=1,
        topic_configs={'retention.ms': '300000'}
    )
]

# Retrieve the already-created list of topics, then delete the target topic
client = KafkaClient(bootstrap_servers=['localhost:9092'])
metadata = client.cluster
future = client.cluster.request_update()
client.poll(future=future)
broker_topics = metadata.topics()

admin_client = KafkaAdminClient(bootstrap_servers=['localhost:9092'])
if topic_name in broker_topics:
    deletion = admin_client.delete_topics([topic_name])
    sleep(2)
    try:
        future = client.cluster.request_update()
        client.poll(future=future)
    except KafkaError as e:
        print(e)
        pass

# admin_client.create_topics(new_topics=topic_list, validate_only=False)
producer = KafkaProducer(bootstrap_servers=['localhost:9092'],
                         value_serializer=lambda x: dumps(x).encode('utf-8'))

def addToMongo(data):
    collection.insert_one(data)
class Admin(object):
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        self.kwargs['bootstrap_servers'] = self.kwargs.get(
            'bootstrap_servers', ['localhost:9092'])
        self._client = None
        self.connect()

    def __enter__(self):
        return self

    def __exit__(self, errortype, value, traceback):
        self.close()

    def _connect(self):
        """Connect to kafka admin client"""
        try:
            self._client = KafkaAdminClient(**self.kwargs)
            logger.info('Connected to Kafka Admin client!')
        except Exception as ex:
            logger.error('Unable to connect to Kafka Admin client. %s',
                         str(ex))

    def connect(self):
        """Connect to kafka admin"""
        if self._client is None:
            self._connect()

    def create_topics(self, topics: List[dict]):
        """Create topics in kafka"""
        topics_to_create = []
        for topic in topics:
            _topic = NewTopic(name=topic['name'],
                              num_partitions=topic.get('num_partitions', 1),
                              replication_factor=topic.get(
                                  'replication_factor', 1))
            topics_to_create.append(_topic)
        try:
            self._client.create_topics(new_topics=topics_to_create,
                                       validate_only=False)
            logger.info('Topic {%s} created successfully', topics)
        except TopicAlreadyExistsError as ex:
            logger.debug('Skipping topic creation as topic already exists %s',
                         str(ex))
        except Exception as ex:
            logger.error('Unable to create topics. %s', str(ex))

    def delete_topics(self, topics: List[str]):
        """Delete topics"""
        try:
            self._client.delete_topics(topics)
            logger.info('Deleted topics: {%s}', topics)
        except UnknownTopicOrPartitionError as ex:
            logger.debug('Skipping topic delete as topic does not exist %s',
                         str(ex))
        except Exception as ex:
            logger.error('Unable to delete topics. %s', str(ex))

    def close(self):
        if self._client is None:
            return
        try:
            self._client.close()
            logger.info('Closed Kafka Admin client connection')
        except Exception as ex:
            logger.error('Unable to close Kafka Admin client connection. %s',
                         str(ex))
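# A minimal usage sketch for the Admin context manager above; the broker
# address and topic names are illustrative assumptions. Because __exit__
# calls close(), the client connection is released automatically.
with Admin(bootstrap_servers=['localhost:9092']) as admin:
    admin.create_topics([{'name': 'demo', 'num_partitions': 2}])
    admin.delete_topics(['demo'])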