def create_job_queue_topic(topic_name: str, number_of_partitions: int, broker: str,
                           stop_topic: str, job_report_topic: str):
    print("Checking if Kafka is up...", flush=True)
    admin_client = AdminClient({'bootstrap.servers': broker})
    kafka_up = False
    while not kafka_up:
        try:
            print("Checking if Kafka is up...")
            admin_client.list_topics(timeout=10)
        except Exception:
            continue
        kafka_up = True
        print("Kafka is up!")

    fs = admin_client.create_topics([
        NewTopic(topic_name, num_partitions=number_of_partitions, replication_factor=1),
        NewTopic(stop_topic, num_partitions=1, replication_factor=1),
        NewTopic(job_report_topic, num_partitions=1, replication_factor=1),
    ])
    for topic, f in fs.items():
        try:
            f.result()  # The result itself is None
            print("Topic {} created".format(topic))
        except Exception as e:
            print("Failed to create topic {}: {}".format(topic, e))
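# A minimal invocation sketch for create_job_queue_topic() above; the broker
# address, topic names, and partition count are illustrative assumptions, not
# values from the original source.
create_job_queue_topic(
    topic_name="job-queue",
    number_of_partitions=8,
    broker="localhost:9092",
    stop_topic="job-stop",
    job_report_topic="job-reports",
)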
def create_topic(topicname, partitions, replication_factor, new_conf):
    # type: (str, int, int, dict)
    """Create a new topic, setting partitions and replication factor immediately.

    Keyword arguments:
    topicname -- name of the topic
    partitions -- number of partitions
    replication_factor -- number of replicas; once set, it can only be changed
                          through ZooKeeper
    new_conf -- dictionary with topic configs, for example containing retention.ms
    """
    all_valid = False
    # Loop until every entry in new_conf is a topic config the cluster supports.
    while not all_valid:
        topic = [
            NewTopic(topicname, num_partitions=partitions,
                     replication_factor=replication_factor, config=new_conf)
        ]
        try:
            # Only validate the topic creation, do not create it yet.
            fs = admin.create_topics(topic, validate_only=True)
            y = list(fs.values())
            y[0].result()
            all_valid = True
        except KafkaException as e:
            # Error code 40 (INVALID_CONFIG) means an unsupported topic config was set.
            if e.args[0].code() == 40:
                faulty_msg = e.args[0].str().split(" ")
                # Extract which config is not supported ...
                faulty_conf = faulty_msg[-1]
                # ... and remove it.
                new_conf.pop(faulty_conf)
                module.warn(
                    "Will not set topic config %s, because this Kafka cluster"
                    " does not support it." % (faulty_conf))
            else:
                msg = ("Unable to create topic %s: %s." % (topic, e))
                fail_module(msg)

    # Create the topic for real
    topic = [
        NewTopic(topicname, num_partitions=partitions,
                 replication_factor=replication_factor, config=new_conf)
    ]
    try:
        fs = admin.create_topics(topic)
        y = list(fs.values())
        y[0].result()
    except KafkaException as e:
        msg = ("Failed to create topic %s: %s." % (topic, e))
        fail_module(msg)
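# Illustrative call of the create_topic() function above, assuming the
# module-level `admin` AdminClient is already configured; the topic name and
# config values here are made up for the example.
create_topic(
    topicname="example-topic",
    partitions=3,
    replication_factor=1,
    new_conf={"retention.ms": "86400000", "cleanup.policy": "delete"},
)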
def test_integration_broker_connect(ensure_broker_service: Fixture,
                                    ensure_connect_service: Fixture) -> None:
    """Test kafkaconnect with a Kafka broker and Kafka Connect.

    pytest-docker uses the docker-compose.yaml in the test directory.
    """
    broker_url = Config.broker_url
    admin_client = AdminClient({"bootstrap.servers": broker_url})
    t1 = NewTopic(topic="test.t1", num_partitions=1)
    t2 = NewTopic(topic="test.t2", num_partitions=1)
    t3 = NewTopic(topic="test.t3", num_partitions=1)
    # Create test topics in Kafka
    try:
        admin_client.create_topics([t1, t2, t3])
        time.sleep(1)
    except KafkaException:
        return None
    # Test topic discovery
    topic = Topic(broker_url=broker_url, topic_regex="test.*",
                  excluded_topics="test.t1")
    assert "test.t2" in topic.names
    assert "test.t3" in topic.names
    # Configure the connector
    connect = Connect(connect_url=Config.connect_url)
    connect_config = InfluxConfig()
    connect_config.update_topics(topic.names)
    # Create the connector using the Kafka Connect API
    connect.create_or_update(name="influxdb-sink",
                             connect_config=connect_config.asjson())
    # List connectors from the Kafka Connect API
    connectors = connect.list()
    assert "influxdb-sink" in connectors
def create_topics(adminclient: AdminClient, topics: list) -> None:
    """ Create topics """
    new_topics = [
        NewTopic(topic, num_partitions=PARTITIONS, replication_factor=1)
        for topic in topics if '_connect' not in topic
    ]
    connect_topics = [
        NewTopic(topic, num_partitions=CONNECT_PARTITIONS, replication_factor=1)
        for topic in topics if '_connect' in topic
    ]
    if connect_topics:
        new_topics.extend(connect_topics)

    print('\tCreating topics. . .')
    # Call create_topics to asynchronously create topics; a dict
    # of <topic, future> is returned.
    futures = adminclient.create_topics(new_topics)
    # Wait for each operation to finish.
    # Timeouts are preferably controlled by passing request_timeout=15.0
    # to the create_topics() call.
    # All futures will finish at the same time.
    for topic, future in futures.items():
        try:
            future.result()  # The result itself is None
            print(f"\t\tTopic {topic} created")
        except Exception as error:
            print(f"\t\tFailed to create topic {topic}: ", error)
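# Minimal usage sketch for the create_topics() helper above, assuming the
# module-level PARTITIONS and CONNECT_PARTITIONS constants exist and a broker
# is reachable at localhost:9092 (both are assumptions for this example).
from confluent_kafka.admin import AdminClient

admin = AdminClient({'bootstrap.servers': 'localhost:9092'})
create_topics(admin, ['orders', 'payments', '_connect-offsets'])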
def __init__(self):
    self.admin_client = AdminClient({
        'bootstrap.servers': 'ip-172-31-87-92.ec2.internal:9092,ip-172-31-92-117.ec2.internal:9092,ip-172-31-93-248.ec2.internal:9092'
    })
    self.topic_list = []
    self.topic_list.append(
        NewTopic(topic="speed", num_partitions=2, replication_factor=1))
    self.topic_list.append(
        NewTopic(topic="lap", num_partitions=3, replication_factor=1))
    self.topic_list.append(
        NewTopic(topic="telemetry", num_partitions=5, replication_factor=1))
def register_message_type(self, admin_id: str, message_types: list,
                          partitions: int):
    """Creates a list of message types.

    Parameters:
    admin_id        A string that represents the Admin client ID.
    message_types   Essentially equivalent to the list of queue/topic names,
                    e.g. ["Alert"].
    partitions      Integer that represents the number of partitions to create.
    """
    admin = self._clients['admin'][admin_id]
    new_message_type = [
        NewTopic(each_message_type, num_partitions=partitions)
        for each_message_type in message_types
    ]
    created_message_types = admin.create_topics(new_message_type)
    self._task_status(created_message_types)

    for each_message_type in message_types:
        for list_retry in range(1, self._max_list_message_type_count + 2):
            if each_message_type not in list(self._get_metadata(admin).keys()):
                if list_retry > self._max_list_message_type_count:
                    raise MessageBusError(
                        errno.EINVAL,
                        "Maximum retries exceeded for creating %s.",
                        each_message_type)
                time.sleep(list_retry * 1)
                continue
            else:
                break
def create_topic(self):
    """Creates the producer topic if it does not already exist"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    if self.check_topic_exists(client, self.topic_name):
        print(f"the topic {self.topic_name} has already been created")
        logger.info(f"the topic {self.topic_name} has already been created - skipping")
        return

    futures = client.create_topics([
        NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
        )
    ])
    for topic, future in futures.items():
        try:
            future.result()
            print(f"topic created {self.topic_name}")
            logger.info(f"topic created successfully, {self.topic_name}")
        except Exception as e:
            print(f"failed to create topic {self.topic_name}: {e}")
            logger.error(f"failed to create topic {self.topic_name}: {e}")
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient({"bootstrap.servers": BROKER_URL})

    topic_existence = client.list_topics(timeout=5)
    if self.topic_name in set(
            a.topic for a in iter(topic_existence.topics.values())):
        logger.info(f"topic already exists {self.topic_name}")
        return
    else:
        futures = client.create_topics([
            NewTopic(topic=self.topic_name,
                     num_partitions=self.num_partitions,
                     replication_factor=self.num_replicas,
                     config={
                         "cleanup.policy": "delete",
                         "compression.type": "lz4",
                         "delete.retention.ms": "2000",
                         "file.delete.delay.ms": "2000",
                     })
        ])
        for topic, future in futures.items():
            try:
                future.result()
                print("topic created")
            except Exception as e:
                print(f"failed to create topic {self.topic_name}: {e}")
def create_topic(self, topic_name):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    does_topic_exist = self.check_topic_exists(client, self.topic_name)
    if does_topic_exist:
        logger.info(f"Topic {self.topic_name} exists. Will not create")
        return

    logger.info(f"Creating topic: {self.topic_name}")
    topic = NewTopic(
        self.topic_name,
        num_partitions=self.num_partitions,
        replication_factor=self.num_replicas,
    )
    futures = client.create_topics([topic])
    for topic, future in futures.items():
        try:
            future.result()
            print("topic created")
        except Exception as e:
            msg = f"failed to create topic {self.topic_name}: {e}"
            logger.fatal(msg)
            print(msg)
            raise
def create_topic(conf, topic):
    """Create a topic if needed.

    Examples of additional admin API functionality:
    https://github.com/confluentinc/confluent-kafka-python/blob/master/examples/adminapi.py
    """
    a = AdminClient({
        'bootstrap.servers': conf['bootstrap.servers'],
        'sasl.mechanisms': 'PLAIN',
        'security.protocol': 'SASL_SSL',
        'sasl.username': conf['sasl.username'],
        'sasl.password': conf['sasl.password'],
    })

    fs = a.create_topics([NewTopic(
        topic,
        num_partitions=1,
        replication_factor=3,
    )])
    for topic, f in fs.items():
        try:
            f.result()  # The result itself is None
            print("Topic {} created".format(topic))
        except Exception as e:
            # Continue if error code TOPIC_ALREADY_EXISTS, which may be true.
            # Otherwise fail fast.
            if e.args[0].code() != KafkaError.TOPIC_ALREADY_EXISTS:
                print("Failed to create topic {}: {}".format(topic, e))
                sys.exit(1)
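# Hypothetical configuration for the create_topic() helper above; the cluster
# endpoint and SASL credentials are placeholders, not values from the original
# source.
conf = {
    'bootstrap.servers': '<CLUSTER_ENDPOINT>:9092',
    'sasl.username': '<API_KEY>',
    'sasl.password': '<API_SECRET>',
}
create_topic(conf, 'example-topic')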
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": "PLAINTEXT://localhost:9092"})

    # Get the list of existing topics
    exists_topic_list = client.list_topics()
    if self.topic_name in exists_topic_list.topics:
        logger.info(f"Topic Skipped - Exists: {self.topic_name}")
        return

    futures = client.create_topics([
        NewTopic(topic=self.topic_name,
                 num_partitions=5,
                 replication_factor=1)
    ])
    for topic, future in futures.items():
        try:
            future.result()
            logger.info(f"Topic has been created: {topic}")
        except Exception as e:
            logger.warning(f"failed to create topic {topic}: {e}")
def create_topic(self):
    """Creates the producer topic if it does not already exist"""
    admin_client = AdminClient(self.broker_properties)
    new_topic = NewTopic(self.topic_name, num_partitions=1, replication_factor=1)
    admin_client.create_topics([new_topic])
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    a = AdminClient(
        {'bootstrap.servers': self.broker_properties['broker_url']})
    new_topics = [
        NewTopic(self.topic_name,
                 num_partitions=self.num_partitions,
                 replication_factor=self.num_replicas,
                 config={
                     "cleanup.policy": "compact",
                     "compression.type": "lz4",
                     "delete.retention.ms": "100",
                     "file.delete.delay.ms": "100",
                 })
    ]
    fs = a.create_topics(new_topics)
    for topic, f in fs.items():
        try:
            f.result()
            logger.info('Topic {} is created.'.format(topic))
        except Exception as e:
            logger.error("Failed to create topic {}: {}".format(
                self.topic_name, e))
def create_topic(self):
    """Creates the producer topic if it does not already exist"""
    logger.info(f"Creating topic: {self.topic_name}")
    futures = self.adminclient.create_topics([
        NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
            config={
                "cleanup.policy": "delete",
                "compression.type": "lz4",
                "delete.retention.ms": "2000",
                "file.delete.delay.ms": "2000",
            },
        )
    ])
    for topic, future in futures.items():
        try:
            future.result()
        except Exception as e:
            # Error code 36 is TOPIC_ALREADY_EXISTS
            if e.args[0].code() == 36:
                logger.info(f"topic {topic} already exists")
            else:
                logger.error(f"failed to create topic {topic}: {e}")
                raise
def create_or_ensure_topic(admin, topic, min_partitions, min_replicas):
    metadata = admin.list_topics(timeout=10)
    if topic not in metadata.topics:
        # Create the topic for the first time
        print("Topic %s does not exist - creating..." % (topic))
        new_topics = [
            NewTopic(topic, num_partitions=min_partitions,
                     replication_factor=min_replicas)
        ]
        fs = admin.create_topics(new_topics)
        for topic, f in fs.items():
            try:
                f.result()
                print("Topic %s created" % (topic))
            except Exception as e:
                print("Failed to create topic %s - %s" % (topic, e))
    else:
        # Check that the existing partition count is at least high enough
        if min_partitions > len(metadata.topics[topic].partitions):
            new_partitions = [NewPartitions(topic, min_partitions)]
            fs = admin.create_partitions(new_partitions)
            for topic, f in fs.items():
                try:
                    f.result()
                    print("Additional partitions for %s created" % (topic))
                except Exception as e:
                    print("Failed to add partitions to topic %s - %s" % (topic, e))
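# Sketch of calling create_or_ensure_topic() above; the broker address, topic
# name, and sizing are assumptions made for this example.
from confluent_kafka.admin import AdminClient

admin = AdminClient({'bootstrap.servers': 'localhost:9092'})
create_or_ensure_topic(admin, 'events', min_partitions=6, min_replicas=1)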
def main():
    """Runs the exercise"""
    # Configure the AdminClient with `bootstrap.servers`
    # See: https://docs.confluent.io/current/clients/confluent-kafka-python/#confluent_kafka.admin.AdminClient
    client = AdminClient({"bootstrap.servers": BROKER_URL})

    # Create a NewTopic object with partitions and replication factor set to 1
    # See: https://docs.confluent.io/current/clients/confluent-kafka-python/#confluent_kafka.admin.NewTopic
    topic = NewTopic(TOPIC_NAME, num_partitions=1, replication_factor=1)

    # Using `client`, create the topic
    # See: https://docs.confluent.io/current/clients/confluent-kafka-python/#confluent_kafka.admin.AdminClient.create_topics
    client.create_topics([topic])

    try:
        asyncio.run(produce_consume())
    except KeyboardInterrupt:
        print("shutting down")
    finally:
        # Using `client`, delete the topic by name
        # See: https://docs.confluent.io/current/clients/confluent-kafka-python/#confluent_kafka.admin.AdminClient.delete_topics
        client.delete_topics([TOPIC_NAME])
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    logger.info("beginning topic creation for %s", self.topic_name)
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    topic_metadata = client.list_topics(timeout=5)
    if self.topic_name in set(
            t.topic for t in iter(topic_metadata.topics.values())):
        logger.info("not recreating existing topic %s", self.topic_name)
        return

    logger.info(
        "creating topic %s with partition %s replicas %s",
        self.topic_name,
        self.num_partitions,
        self.num_replicas,
    )
    futures = client.create_topics([
        NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
        )
    ])
    # Wait on each creation future so failures are surfaced and logged.
    for topic, future in futures.items():
        try:
            future.result()
            logger.info("topic %s created", topic)
        except Exception as e:
            logger.error("failed to create topic %s: %s", topic, e)
def create_topic(client, topic_name):
    """Creates the topic with the given topic name"""
    # Create the topic with the given name, number of partitions, and replication
    # factor, plus a config with a cleanup policy of delete, a compression type of
    # lz4, a delete retention of 2 seconds, and a file delete delay of 2 seconds.
    #
    # See: https://docs.confluent.io/current/clients/confluent-kafka-python/#confluent_kafka.admin.NewTopic
    # See: https://docs.confluent.io/current/installation/configuration/topic-configs.html
    futures = client.create_topics([
        NewTopic(topic=topic_name,
                 num_partitions=5,
                 replication_factor=1,
                 config={
                     'cleanup.policy': 'delete',
                     'compression.type': 'lz4',
                     'delete.retention.ms': '2000',
                     'file.delete.delay.ms': '2000',
                 })
    ])
    for topic, future in futures.items():
        try:
            future.result()
            print("topic created")
        except Exception as e:
            print(f"failed to create topic {topic_name}: {e}")
            raise
def create_topic(self):
    """Creates the producer topic if it does not already exist"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    topic_metadata = client.list_topics(timeout=5)
    if self.topic_name in set(
            t.topic for t in iter(topic_metadata.topics.values())):
        logger.info("Skipping existing topic {}".format(self.topic_name))
        return

    logger.info("Creating topic {} with partition {} replicas {}".format(
        self.topic_name, self.num_partitions, self.num_replicas))
    futures = client.create_topics([
        NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
        )
    ])
    for topic, future in futures.items():
        try:
            future.result()
            logger.info("Topic created")
        except Exception as e:
            logger.fatal("Failed to create topic {}: {}".format(topic, e))
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": "PLAINTEXT://localhost:9092"})

    # Skip creation if the topic already exists on the broker
    topic_meta = client.list_topics()
    if self.topic_name in topic_meta.topics:
        return

    futures = client.create_topics([
        NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
            config={
                "cleanup.policy": "delete",
                "compression.type": "lz4",
                "delete.retention.ms": "100",
                "file.delete.delay.ms": "100",
            },
        )
    ])
    for topic, future in futures.items():
        try:
            future.result()
            print("topic created")
        except Exception as e:
            print(f"failed to create topic {topic}: {e}")
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    topic_metadata = client.list_topics(timeout=5)
    if topic_metadata.topics.get(self.topic_name) is None:
        new_topic = NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
            config={
                "cleanup.policy": "compact",
                "compression.type": "lz4",
                "delete.retention.ms": "100",
                "file.delete.delay.ms": "100",
            },
        )
        client.create_topics([new_topic])
        logger.info("topic creation requested: %s", self.topic_name)
def main():
    """Checks for topic and creates the topic if it does not exist"""
    client = AdminClient({"bootstrap.servers": "PLAINTEXT://kafka0:19092"})
    pages = NewTopic("com.udacity.streams.pages", 10, 1)
    clickevents = NewTopic("com.udacity.streams.clickevents", 10, 1)

    # create_topics() is asynchronous; wait on the returned futures so any
    # per-topic creation errors are surfaced here.
    futures = client.create_topics([pages, clickevents])
    for topic, future in futures.items():
        try:
            future.result()
            print(f"created topic {topic}")
        except Exception as e:
            print(f"encountered error creating topic {topic}: {e}")

    try:
        produce()
    except KeyboardInterrupt:
        print("shutting down")
def create_topic(self):
    """Creates the producer topic if it does not already exist"""
    client = AdminClient({
        'bootstrap.servers': config['broker']['bootstrap.servers']
    })

    if len(Producer.existing_topics) == 0:
        topics = client.list_topics(timeout=10).topics
        for topic in topics.values():
            Producer.existing_topics.add(str(topic))

    if self.topic_name in Producer.existing_topics:
        return
    Producer.existing_topics.add(self.topic_name)

    futures = client.create_topics([
        NewTopic(
            self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
        )
    ])
    for _, future in futures.items():
        try:
            future.result()
        except KafkaException as ke:
            logger.error(
                f"topic creation error - {self.topic_name} - {ke}"
            )
def create_topic(self):
    """Creates the producer topic if it does not already exist"""
    # If the topic does not exist, create it
    if not self.topic_exists():
        # Create a new topic object
        new_topic = NewTopic(
            self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas)
        # Create the new topic
        self.admin_client.create_topics([new_topic], operation_timeout=10.0)

        # Confirm that the topic has been created
        logger.info(f"Confirming topic {self.topic_name} creation")
        # Waiting here may not be ideal, but it is better than swallowing
        # errors silently; waiting on the returned futures would be an
        # alternative.
        time.sleep(0.1)
        if not self.topic_exists():
            logger.error(f"Could not create topic {self.topic_name}")
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    if self.topic_name in client.list_topics().topics:
        logger.info(
            f'Topic {self.topic_name} already exists - it will not be created.'
        )
        return

    futures = client.create_topics([
        NewTopic(topic=self.topic_name,
                 num_partitions=self.num_partitions,
                 replication_factor=self.num_replicas)
    ])
    for topic, future in futures.items():
        try:
            future.result()
            print("topic created")
        except Exception as e:
            print(f"failed to create topic {self.topic_name}: {e}")
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    topic_metadata = client.list_topics(timeout=5)
    if self.topic_name in set(
            t.topic for t in iter(topic_metadata.topics.values())):
        return

    futures = client.create_topics([
        NewTopic(
            topic=self.topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
        )
    ])
    for topic, future in futures.items():
        try:
            future.result()
            logger.info("Topic created")
        except Exception as e:
            logger.error("failed to create topic %s: %s", topic, e)
def bootstrap(bootstrap_server, force):
    """
    Warning: Not intended to be used in production yet.
    """
    if not force:
        raise click.ClickException('Must use --force to run')

    from confluent_kafka.admin import AdminClient, NewTopic

    client = AdminClient({
        'bootstrap.servers': ','.join(bootstrap_server),
        'socket.timeout.ms': 1000,
    })

    topics = [
        NewTopic(o.pop('topic'), **o)
        for o in settings.KAFKA_TOPICS.values()
    ]

    for topic, future in client.create_topics(topics).items():
        try:
            future.result()
            print("Topic %s created" % topic)
        except Exception as e:
            print("Failed to create topic %s: %s" % (topic, e))

    from snuba.clickhouse import ClickhousePool, get_table_definition, get_test_engine

    # Need to better figure out if we are configured to use replicated
    # tables or distributed tables, etc.
    ClickhousePool().execute(
        get_table_definition(
            settings.DEFAULT_LOCAL_TABLE,
            get_test_engine(),
        )
    )
def create_topic(client, topic_name):
    """
    Creates a Kafka topic

    Args:
        client (Kafka Client): The Kafka admin client
        topic_name (str): the topic to be created
    """
    futures = client.create_topics([
        NewTopic(
            topic=topic_name,
            num_partitions=5,
            replication_factor=1,
            config={
                "cleanup.policy": "delete",
                "delete.retention.ms": "2000",
                "file.delete.delay.ms": "2000",
            },
        )
    ])
    for topic, future in futures.items():
        try:
            future.result()
            logger.info("topic created")
        except Exception as e:
            logger.error(f"failed to create topic {topic_name}: {e}")
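# Possible invocation of the create_topic() helper above; the broker address
# and topic name are assumptions for this example, not from the original source.
from confluent_kafka.admin import AdminClient

client = AdminClient({"bootstrap.servers": "localhost:9092"})
create_topic(client, "example.topic")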
def create_topics(self, *topic_name, num_partitions=1, replication_factor=1):
    """Create topics

    Args:
        *topic_name: one or more topic names to create
        num_partitions: number of partitions per topic
        replication_factor: replication factor per topic
    """
    new_topics = [
        NewTopic(topic, num_partitions=num_partitions,
                 replication_factor=replication_factor)
        for topic in topic_name
    ]
    # Call create_topics to asynchronously create topics; a dict
    # of <topic, future> is returned.
    fs = self.a.create_topics(new_topics)
    # Wait for each operation to finish.
    # Timeouts are preferably controlled by passing request_timeout=15.0
    # to the create_topics() call.
    # All futures will finish at the same time.
    for topic, f in fs.items():
        try:
            f.result()  # The result itself is None
            print("Topic {} created".format(topic))
        except Exception as e:
            print("Failed to create topic {}: {}".format(topic, e))
def create_topic(self):
    """Creates the producer topic if it does not already exist on the Kafka broker"""
    client = AdminClient(
        {"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

    topic_exists = self.check_topic_exists(client, self.topic_name)
    if topic_exists:
        logger.info(f'Topic {self.topic_name} exists')
        return

    logger.info(f"Creating topic: {self.topic_name}")
    futures = client.create_topics([
        NewTopic(topic=self.topic_name,
                 num_partitions=self.num_partitions,
                 replication_factor=self.num_replicas)
    ])
    for topic, future in futures.items():
        try:
            future.result()
            logger.info("topic created")
        except Exception as e:
            logger.fatal("failed to create topic %s: %s", topic, e)