Example 1
 def __init__(self,
              brokers,
              n_workers=5,
              topic_work=mjolnir.kafka.TOPIC_REQUEST,
              topic_result=mjolnir.kafka.TOPIC_RESULT,
              topic_complete=mjolnir.kafka.TOPIC_COMPLETE,
              max_request_size=4 * 1024 * 1024):
     self.brokers = brokers
     self.n_workers = n_workers
     self.topic_work = topic_work
     self.topic_result = topic_result
     self.topic_complete = topic_complete
     # Standard producer for query results
     self.producer = kafka.KafkaProducer(
         bootstrap_servers=brokers,
         max_request_size=max_request_size,
         compression_type='gzip',
         api_version=mjolnir.kafka.BROKER_VERSION)
     # More reliable producer for reflecting end run sigils. As this
     # is only used for sigils and not large messages like es responses
     # compression is unnecessary here.
     self.ack_all_producer = kafka.KafkaProducer(
         bootstrap_servers=brokers,
         acks='all',
         api_version=mjolnir.kafka.BROKER_VERSION)
     # TODO: 10 items? No clue how many is appropriate...10 seems reasonable
     # enough.  We want enough to keep the workers busy, but not so many
     # that the committed offsets are significantly ahead of the work
     # actually being performed.
     self.work_queue = Queue.Queue(10)
Example 2
    def __init__(self, *, config):
        self.log = logging.getLogger(self.__class__.__name__)
        self.config = config

        object_storage_config = self.config.get("object_storage", {})
        object_storage_type = object_storage_config.get("type")
        if object_storage_type == "gcs":
            self.object_storage = GCSProvider(config=object_storage_config)
        else:
            raise ValueError(
                f"Unknown object storage type: {object_storage_type}")

        kafka_config = self.config.get("kafka", {})

        if ("ssl_cafile" in kafka_config
                and "ssl_access_certificate_file" in kafka_config
                and "ssl_access_key_file" in kafka_config):
            self.kafka_producer = kafka.KafkaProducer(
                bootstrap_servers=kafka_config["kafka_url"],
                security_protocol="SSL",
                ssl_cafile=kafka_config["ssl_cafile"],
                ssl_certfile=kafka_config["ssl_access_certificate_file"],
                ssl_keyfile=kafka_config["ssl_access_key_file"],
            )
        else:
            self.kafka_producer = kafka.KafkaProducer(
                bootstrap_servers=kafka_config["kafka_url"])
Example 3
    def publishKafkaData(self, jsonData):
        """ Publish data to Kafka topic """
        logging.debug("Executing sendStatistics.publishKafkaData")

        logging.info("Sending data to Kafka topic")
        logging.debug("Sending the following JSON to Kafka: %s" % (jsonData))

        kafkaError = False
        result = True

        try:
            ssl_context = ssl.create_default_context(
                cafile=self.kafkaTrustCAFile)

        except FileNotFoundError:
            logging.error("SSL CA file for Kafka can't be found")
            ssl_context = None

        try:
            if self.kafkaSaslMechanism is not None and self.kafkaSecurityProtocol is not None:
                producer = kafka.KafkaProducer(
                    bootstrap_servers=self.kafkaBrokers,
                    security_protocol=self.kafkaSecurityProtocol,
                    sasl_mechanism=self.kafkaSaslMechanism,
                    ssl_context=ssl_context)
            else:
                producer = kafka.KafkaProducer(
                    bootstrap_servers=self.kafkaBrokers,
                    ssl_context=ssl_context)

            producer.send(topic=self.kafkaTopic,
                          value=jsonData.encode('utf-8'))
            # send() is asynchronous; flush so the message is delivered before the producer goes away
            producer.flush()

        except AssertionError as e:
            logging.error("Kafka Error: %s" % e)
            kafkaError = True

        except kafka.errors.NoBrokersAvailable:
            logging.error("None of the Kafka brokers configured are available")
            kafkaError = True

        except kafka.errors.UnrecognizedBrokerVersion:
            logging.error(
                "Unrecognized broker version. Are you connecting to a secure Kafka broker without the correct security protocols?"
            )
            kafkaError = True

        if kafkaError:
            result = False

        logging.debug("Executing sendStatistics.publishKafkaData - Finished")
        return result
Example 4
    def _create_producer(self) -> None:
        import kafka

        if self.security_protocol == "PLAINTEXT":
            self.producer = kafka.KafkaProducer(
                client_id=self.client_id,
                bootstrap_servers=self.url,
                value_serializer=lambda v: json.dumps(v).encode(
                    DEFAULT_ENCODING),
                security_protocol=self.security_protocol,
                ssl_check_hostname=False,
            )
        elif self.security_protocol == "SASL_PLAINTEXT":
            self.producer = kafka.KafkaProducer(
                client_id=self.client_id,
                bootstrap_servers=self.url,
                value_serializer=lambda v: json.dumps(v).encode(
                    DEFAULT_ENCODING),
                sasl_plain_username=self.sasl_username,
                sasl_plain_password=self.sasl_password,
                sasl_mechanism="PLAIN",
                security_protocol=self.security_protocol,
            )
        elif self.security_protocol == "SSL":
            self.producer = kafka.KafkaProducer(
                client_id=self.client_id,
                bootstrap_servers=self.url,
                value_serializer=lambda v: json.dumps(v).encode(
                    DEFAULT_ENCODING),
                ssl_cafile=self.ssl_cafile,
                ssl_certfile=self.ssl_certfile,
                ssl_keyfile=self.ssl_keyfile,
                ssl_check_hostname=False,
                security_protocol=self.security_protocol,
            )
        elif self.security_protocol == "SASL_SSL":
            self.producer = kafka.KafkaProducer(
                client_id=self.client_id,
                bootstrap_servers=self.url,
                value_serializer=lambda v: json.dumps(v).encode(
                    DEFAULT_ENCODING),
                sasl_plain_username=self.sasl_username,
                sasl_plain_password=self.sasl_password,
                ssl_cafile=self.ssl_cafile,
                ssl_certfile=self.ssl_certfile,
                ssl_keyfile=self.ssl_keyfile,
                ssl_check_hostname=self.ssl_check_hostname,
                security_protocol=self.security_protocol,
                sasl_mechanism="PLAIN",
            )
        else:
            raise ValueError(
                f"Cannot initialise `KafkaEventBroker`: "
                f"Invalid `security_protocol` ('{self.security_protocol}').")
Example 5
    def __init__(self, config: Dict[str, Any]) -> None:
        token = config['slack']['api_token']
        self.slack_client = BlockingSlackClient(token)

        self.destination_kafka_topic = config['kafka']['from_slack']['topic']
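        # Producer for forwarding messages received from Slack to the destination topic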
        self.kafka_producer = kafka.KafkaProducer(
            bootstrap_servers=config['kafka']['bootstrap_servers'])
Example 6
    def __init__(self, client_id, host, port, ca_file, cert_file, key_file,
                 kafka_topic):
        # Set up kafka producer
        self.producer = kafka.KafkaProducer(
            bootstrap_servers="{}:{}".format(host, port),
            client_id=client_id,
            security_protocol="SSL",
            ssl_cafile=ca_file,
            ssl_certfile=cert_file,
            ssl_keyfile=key_file,
        )
        self.client_id = client_id
        self.kafka_topic = kafka_topic
        self.cpu_percentages = None
        self.memory_percentages = None

        # Get basic OS information
        self.mac_id = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
        self.os_name = platform.system()
        self.os_release = platform.release()
        self.os_architecture = "{} ({})".format(platform.architecture()[0],
                                                platform.machine())
        self.os_version = platform.version()

        # Get information about processor and memory
        self.processor = platform.processor()
        self.cpu_physical_cores = psutil.cpu_count(logical=False)
        self.cpu_logical_cores = psutil.cpu_count(logical=True)
        self.total_ram = psutil.virtual_memory().total
        self.total_swap_space = psutil.swap_memory().total
Example 7
def a():
    # Normal send: if close() is never called explicitly nothing gets sent, the process just exits abruptly
    producer = kafka.KafkaProducer(**conf)
    for i in range(3):
        producer.send('partopic', str(i).encode())
    logging.info('manually triggering close----------')
    producer.close()
Example 8
def _main(camid, mjpg, kafka, topic):
    stream = urllib.urlopen(mjpg)
    bytes = ''

    # Create the producer once, outside the capture loop, rather than on every frame
    producer = kafka_client.KafkaProducer(bootstrap_servers=kafka,
                                          batch_size=512000,
                                          api_version=(0, 10, 1))

    while True:
        bytes += stream.read(1024)
        a = bytes.find('\xff\xd8')
        b = bytes.find('\xff\xd9')
        if a != -1 and b != -1:
            jpg = bytes[a:b + 2]
            bytes = bytes[b + 2:]
            frame = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8),
                                 cv2.IMREAD_COLOR)
            payload = {
                'camera_id': camid,
                'timestamp': int(time.time()),
                'rows': frame.shape[0],
                'cols': frame.shape[1],
                'type': 'uint8',
                'data': base64.b64encode(jpg)
            }

            producer.send(topic, key=camid, value=json.dumps(payload))
Example 9
def main(argv=sys.argv):
    """ Produce simulated data to Kafka for stress testing

        Return: Void
    """
    # PUBLIC_DNS (broker address) and TIME_PERIOD are expected to be defined at module level
    producer = kafka.KafkaProducer(bootstrap_servers=PUBLIC_DNS)
    periodic_request(producer, TIME_PERIOD)
Example 10
def stopKafkaProducer():
	import kafka
	producer_id = "diskeyteststop"
	producer = kafka.KafkaProducer(bootstrap_servers='192.168.1.130:9092')
	producer.flush()	
	producer.close()
	return json.dumps({'status': 'OK stop producer','producer': producer_id})
Example 11
 def __init__(self, topic=settings.DEFAULT_TOPIC):
     log.debug("producer.init")
     self.topic = topic
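     # Messages are serialized as UTF-8 JSON and published to the broker from settings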
     self.producer = kafka.KafkaProducer(
         value_serializer=lambda m: json.dumps(m).encode("utf-8"),
         bootstrap_servers=settings.KAFKA_SERVER)
     log.debug("producer.created")
Example 12
def producer(args):
    # connect to kafka
    producer = kafka.KafkaProducer(
        bootstrap_servers=args.kafka_brokers.split(","),
        partitioner=partitioner)

    # initialize packet capture
    capture = pcapy.open_live(args.interface, 65535, True, 3000)
    packet_count = 0

    # start packet capture
    while True:
        (pkt_hdr, pkt_raw) = capture.next()
        if pkt_hdr is not None:

            # send packet to kafka
            pkt_ts = timestamp(pkt_hdr)
            producer.send(args.topic, key=pack_ts(pkt_ts), value=pkt_raw)

            # debug messages, if needed
            packet_count += 1
            if args.debug > 0 and packet_count % args.debug == 0:
                print 'Sent Packet: count=%s dt=%s topic=%s' % (
                    packet_count, to_date(pkt_ts), args.topic)
                print to_hex(pkt_raw)
Example 13
 def __init__(self,
              hosts='127.0.0.1:9092',
              topic='default',
              timeout=None,
              compression=None,
              acks=1,
              retries=0,
              job_size=1048576,
              cafile=None,
              certfile=None,
              keyfile=None,
              crlfile=None):
     self._hosts = hosts
     self._topic = topic
     self._timeout = timeout
     self._logger = logging.getLogger('kq')
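     # Buffer memory is kept at least as large as one job (default 32 MB) so a single large job fits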
     self._producer = kafka.KafkaProducer(
         bootstrap_servers=self._hosts,
         compression_type=compression,
         acks=acks,
         retries=retries,
         max_request_size=job_size,
         buffer_memory=max(job_size, 33554432),
         ssl_cafile=cafile,
         ssl_certfile=certfile,
         ssl_keyfile=keyfile,
         ssl_crlfile=crlfile
     )
Example 14
 def test_create_producer(self):
     print("test_create_producer")
     is_exception = False
     config_file = "config.ini"
     config = configparser.ConfigParser()
     config.read(config_file)
     try:
         kafka_config = config["kafka"]
         topic = kafka_config["topic"]
         sleep_second = int(kafka_config["sleep"])
         producer_config = config["producer"]
         records = int(producer_config["records"])
     except KeyError as e:
         print("Config file missing section or key \"" + e.args[0] + "\"")
         is_exception = True
     try:
         producer = kafka.KafkaProducer(
             bootstrap_servers=kafka_config["host"] + ":" +
             kafka_config["port"],
             security_protocol="SSL",
             ssl_cafile=kafka_config["cafile"],
             ssl_certfile=kafka_config["certfile"],
             ssl_keyfile=kafka_config["keyfile"],
         )
     except KeyError as e:
         print("Kafka config missing key \"" + e.args[0] + "\"")
         is_exception = True
     self.assertEqual(False, is_exception)
Example 15
    def task_2():
        try:
            connection = psycopg2.connect(
                user="******",
                password="******",
                host="localhost",
                port="5432",
                database="test_aut",
            )
            cursor = connection.cursor()

            postgresSQL_select_Query = "select * from table_1"

            cursor.execute(postgresSQL_select_Query)
            user_record = cursor.fetchall()
            # print(user_record)
            producer = kafka.KafkaProducer(
                bootstrap_servers=["localhost:9092"],
                value_serializer=lambda x: dumps(x).encode("utf-8"),
            )

            for record in user_record:
                id_, name1, name2, name3, name4, name5 = record
                print(name1)
                data = {"name": name1}
                producer.send("trump", value=data)

        except (Exception, psycopg2.Error) as error:
            print("Error:::::", str(error))
        else:
            if connection:
                cursor.close()
                connection.close()
                print("connection closed")
Example 16
def face_submit():
    ans = {'status': 200, 'err_msg': ''}
    try:
        face_name1 = flask.request.form.get('face_name1')
        face_name2 = flask.request.form.get('face_name2')
        print(face_name1, face_name2)
        info = {
            'id': str(uuid.uuid1()),
            'face_name1': face_name1,
            'face_name2': face_name2,
            'status': 'unfinished',
            'owner': flask_login.current_user.id,
            'create_date': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        }
        info_str = json.dumps(info)
        producer = kafka.KafkaProducer(
            bootstrap_servers=[configs.app_kafka_host])
        future = producer.send(configs.app_kafka_topic,
                               key=bytes(configs.app_kafka_key,
                                         encoding='utf-8'),
                               value=bytes(info_str, encoding='utf-8'))
        try:
            future.get(timeout=5)  # check whether the send actually succeeded
        except Exception as e:  # a failed send raises a kafka error
            traceback.print_exc()
        producer.close()
        ans['id'] = info['id']
        ans['face_name1'] = info['face_name1']
        ans['face_name2'] = info['face_name2']
    except Exception as e:
        ans['status'] = 500
        ans['err_msg'] = str(e)
    return ans
Example 17
 def connect(self, *args: Any, **kwargs: Any) -> kafka.KafkaProducer:
     if 'value_serializer' not in self.config and 'value_serializer' not in kwargs:
         kwargs['value_serializer'] = lambda x: json.dumps(x).encode('utf-8')
     self.session = kafka.KafkaProducer(**self.config, **kwargs)
     self._closed = False
     return self.session
Example 18
def createProducer(**configOptions):
	"""This initializes a Kafka producer for use in the main function"""

	while(True):
		# Checking to see if the user wants to use a non-default address
		while(True):
			brokerId = input('Would you like to connect to a non-default broker address? [Default: localhost:9092] (Y/N)')
			if(brokerId.lower().startswith('y')):
				brokerId = input("Please enter your broker address in the format 'host:port' ")
				break
			elif(brokerId.lower().startswith('n')):
				brokerId = 'localhost:9092'
				break

		# Configuring the producer with the necessary arguments
		# The value serializer is given a lambda that converts the sent data into bytes for submission to the cluster
		producer = kf.KafkaProducer(client_id="pii-generator", 
			bootstrap_servers=[brokerId], 
			value_serializer=lambda v: json.dumps(v).encode('utf-8'))

		# ! Make sure that this break statement is not cutting the creation of a producer off
		if(producer.bootstrap_connected()):
			print("Successfully connected to bootstrap server...")
			break

	return producer
Example 19
 def __init__(self, config_file):
     super().__init__()
     self.args = AlphaConfig.loadConfig(
         os.path.join(AlphaPathLookUp.ConfigPath, config_file))
     #self.is_ready = Event()
     self.producer = kafka.KafkaProducer(
         bootstrap_servers=self.args.producer["servers"],
         value_serializer=lambda m: json.dumps(m).encode('ascii'))
     self.consumer = kafka.KafkaConsumer(
         self.args.consumer["topic"],
         group_id=self.args.consumer["group_id"],
         bootstrap_servers=self.args.consumer["servers"],
         value_deserializer=lambda m: json.loads(m.decode("ascii")))
Example 20
 def doPreinit(self, mode):
     if mode != SIMULATION:
         self._producer = kafka.KafkaProducer(
             bootstrap_servers=self.brokers,
             max_request_size=self.max_request_size)
     else:
         self._producer = None
Example 21
def main(consumer_topic=TOPIC,
         producer_topic=VALIDATION_TOPIC,
         boot_servers=BOOT_SERVERS):
    consumer = kafka.KafkaConsumer(consumer_topic,
                                   bootstrap_servers=boot_servers,
                                   auto_offset_reset="earliest",
                                   value_deserializer=_deserializer)
    producer = kafka.KafkaProducer(
        bootstrap_servers=boot_servers,
        value_serializer=lambda m: json.dumps(m).encode('utf-8'))

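    # Report the validation outcome for a request back on the producer topic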
    def validate_data(request_id, result="success"):
        msg = {"request_id": request_id, "validation": result}
        producer.send(producer_topic, msg)

    for msg in consumer:
        data = msg.value
        if not data or not isinstance(data, dict):
            # something is wrong, not processing
            continue
        reports = fetch_report(data["url"])
        if not reports:
            # couldn't fetch report data, discarding message
            validate_data(data.get("request_id"), result="handoff")
        else:
            LOG.info("Fetched and validated tgz from %s", data["url"])
            # data is valid, waving green flag
            validate_data(data.get("request_id"))
            # process data (upload to db in our case)
            for report in reports:
                res = store_in_db(report)
                LOG.info("Uploaded: %s", json.dumps(res).encode('utf-8'))
Example 22
def entry_point():
    params = parse_args()

    if params.cp == ConsumerProducer.PRODUCER:
        producer = kafka.KafkaProducer(
            bootstrap_servers=[params.addr],
            value_serializer=lambda x: json.dumps(x).encode('utf-8'))

        while True:
            for i in range(0, sys.maxsize):
                data = {'number': i}
                producer.send(params.topic, value=data)
                time.sleep(1)
    else:
        consumer = kafka.KafkaConsumer(
            params.topic,
            bootstrap_servers=[params.addr],
            group_id='my-group',
            value_deserializer=lambda x: json.loads(x.decode('utf-8')))

        for message in consumer:
            message = message.value
            print("message is %s" % message)

Example 23
    def run(self):
        i = 0

        try:
            producer = kafka.KafkaProducer(
                bootstrap_servers='broker-0.broker.default.svc.cluster.local:9092')
        except kafka.errors.NoBrokersAvailable:
            time.sleep(30)
            self.run()
            return  # the retry is handled by the recursive call; don't fall through with no producer

        while not self.stop_event.is_set():
            # Send a job in the kafka queue
            offset = i + 1
            requests.get(
                "http://pacman:80/metrics/" + str(offset)
            )  # Request pacman to tell him that a new job is in the queue
            producer.send('topic_1', ("ciao" + str(i)).encode())
            i += 1
            #w = random()
            #t = np.exp(w)
            b = 1.0
            t = np.random.exponential(b)
            time.sleep(t)

        producer.close()
Example 24
    def __init__(self, connection_info, advanced_info, topic_in, topic_out,
                 predictor, _type):
        self.connection_info = connection_info
        self.advanced_info = advanced_info
        self.predictor = predictor
        self.stream_in_name = topic_in
        self.stream_out_name = topic_out
        self.consumer = kafka.KafkaConsumer(
            **self.connection_info, **self.advanced_info.get('consumer', {}))
        self.consumer.subscribe(topics=[self.stream_in_name])
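        # Producer for the output stream and an admin client used to create the output topic if missing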
        self.producer = kafka.KafkaProducer(
            **self.connection_info, **self.advanced_info.get('producer', {}))
        self.admin = kafka.KafkaAdminClient(**self.connection_info)
        try:
            self.topic = NewTopic(self.stream_out_name,
                                  num_partitions=1,
                                  replication_factor=1)
            self.admin.create_topics([self.topic])
        except kafka.errors.TopicAlreadyExistsError:
            pass
        self._type = _type
        self.native_interface = NativeInterface()
        self.format_flag = 'explain'

        self.stop_event = Event()
        self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)
        self.caches = {}
        if self._type == 'timeseries':
            super().__init__(target=KafkaStream.make_timeseries_predictions,
                             args=(self, ))
        else:
            super().__init__(target=KafkaStream.make_prediction, args=(self, ))
Example 25
def sendmessage(host, label, series, message):
    if kafka.__version__.startswith("0"):
        c = kafka.client.KafkaClient(hosts=[host])
        if series:
            p = kafka.producer.keyed.KeyedProducer(c)
        else:
            p = kafka.producer.simple.SimpleProducer(c)
    else:
        p = kafka.KafkaProducer(bootstrap_servers=host)
    success = False
    t = 0.2
    while not success:
        try:
            if kafka.__version__.startswith("0"):
                if series:
                    p.send_messages(label, series.encode("utf-8"),
                                    message.encode("utf-8"))
                else:
                    p.send_messages(label, message.encode("utf-8"))
            else:
                p.send(label,
                       key=series.encode("utf-8"),
                       value=message.encode("utf-8"))
                p.close()
            print("success")
            success = True
        except Exception as e:
            print("error (sleep {})".format(t), e)
            time.sleep(t)
            t *= 2
Example 26
    def __init__(self, *args):
        super(Srv, self).__init__(*args)
        self.ListenForEvent(serverApi.GetEngineNamespace(),
                            serverApi.GetEngineSystemName(),
                            'AddServerPlayerEvent', self, self.connection_made)
        self.ListenForEvent(serverApi.GetEngineNamespace(),
                            serverApi.GetEngineSystemName(),
                            'DelServerPlayerEvent', self, self.connection_lost)
        self.ListenForEvent(serverApi.GetEngineNamespace(),
                            serverApi.GetEngineSystemName(),
                            'LoadServerAddonScriptsAfter', self,
                            self.serve_forever)
        self.ListenForEvent(cfg.MOD_NAMESPACE, cfg.MOD_CLI_NAME, 'G_DEBUT',
                            self, self.debut)
        self.ListenForEvent(cfg.MOD_NAMESPACE, cfg.MOD_CLI_NAME, 'G_MATCH',
                            self, self.match)
        self.ListenForEvent(cfg.MOD_NAMESPACE, cfg.MOD_CLI_NAME, 'G_COURT',
                            self, self.rcall)

        self.CreateComponent(serverApi.GetLevelId(), 'Minecraft',
                             'game').OpenCityProtect()

        self.s = set()
        self._mapping = {}
        self._q = Queue()
        self._alive = True
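        # Duel events travel through Kafka as pickled Python objects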
        self._consumer = kafka.KafkaConsumer('duel',
                                             bootstrap_servers=KAFKA_SERVERS,
                                             value_deserializer=pickle.loads)
        self._producer = kafka.KafkaProducer(bootstrap_servers=KAFKA_SERVERS,
                                             value_serializer=pickle.dumps)

        redisPool.InitDB(10)
        mysqlPool.InitDB(20)
Example 27
    def _send_message(self, mes, topic):
        '''
        Send the message to the next topic
        :param mes{map}: the message to be sent
        :param topic{string}: the topic of the message
        '''
        self.logger.info(mes)
        self.logger.info(type(mes))
        self.logger.info(topic)


        producer = None  # avoid a NameError in the except block if the producer can't be created
        try:
            mesg = str(json.dumps(mes)).encode('utf-8')
            producer = kafka.KafkaProducer(
                bootstrap_servers=self.kafka_cluster)
            if producer:
                producer.send(topic, mesg)
                self.logger.info("Sent the message to the next topic successfully!")
                producer.close()
        except Exception:
            self.logger.error(
                "Errors occurred while sending message to next topic")
            if producer:
                producer.close()
            raise
Example 28
 def Produce(self, topic, value):
     """ Produce a message
     topic:  topic name
     value:  value, binary bytes, e.g. b'12'
     """
     try:
         if isinstance(value, str):
             print('Produce invalid data: %s:%s' % (topic, value))
             value = value.encode('utf8')
         elif isinstance(value, bytes) is not True:
             print('Produce invalid data: %s:%s' % (topic, value))
             return
         err = None
         for i in range(3):
             try:
                 self.producer.send(topic, value)
                 return True
             except Exception as e:
                 err = e
                 self.producer = kafka.KafkaProducer(
                     bootstrap_servers=self.brokers)
         Log(LOG_ERROR, "KafkaProducer",
             'Exception sending message to kafka(%s), message %s: %s' % (topic, value, err))
         return False
     except Exception as err:
         Log(LOG_ERROR, "KafkaProducer",
             'Exception sending message to kafka(%s), message %s: %s' % (topic, value, err))
         return False
Example 29
 def start(self):
     try:
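         # Connect eagerly so an unreachable broker surfaces as a clear startup error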
         self.producer = kafka.KafkaProducer(
             bootstrap_servers=self.bootstrap_servers)
     except kafka.errors.NoBrokersAvailable as err:
         log.error(err, exc_info=True)
         raise NapalmLogsException(err)
Example 30
def _main(base_word, kafka, topic):
    """TweePub reads tweets from Twitter Streaming API with provided
       characteristics and pushes them to specified Apache Kafka instance.
    """
    click.echo("Using base word: %s" % base_word)
    click.echo("Kafka bootstrap servers: %s" % kafka)
    producer = kafka_client.KafkaProducer(bootstrap_servers=kafka,
                                          value_serializer=str.encode)

    for _ in range(0, 100):
        hashtags = []

        for i in range(1, 16):
            if random.randint(1, i+1) == i:
                hashtags.append(base_word + "-" + str(i))

        click.echo("Pushing hashtags: %s" % hashtags)

        producer.send(topic, json.dumps(dict(
            entities=dict(
                hashtags=[
                    dict(text=ht) for ht in hashtags
                ]
            )
        )))
        time.sleep(5)

    producer.flush()