Example #1
    def __init__(self, host, port, schema_path, topic, nbmsg, consumer_timeout):
        self.topic = topic
        self.nbmsg = nbmsg
        self.sent_msg = 0
        self.host = host
        self.port = port
        self.sent = [-100] * self.nbmsg
        self.rcv = [-100] * self.nbmsg
        self.runtag = str(random.randint(10, 100000))
        try:
            self.broker = KafkaClient("%s:%d" % (self.host, self.port))
        except Exception:
            raise ValueError(
                "KafkaClient (%s:%d) - init failed" % (self.host, self.port))
        try:
            self.producer = SimpleProducer(self.broker)
        except Exception:
            raise ValueError(
                "SimpleProducer (%s:%d) - init failed" % (self.host, self.port))
        try:
            self.consumer = SimpleConsumer(
                self.broker, "testbot", topic, iter_timeout=consumer_timeout)
        except Exception:
            raise ValueError(
                "SimpleConsumer (%s:%d) - init failed" % (self.host, self.port))
        try:
            self.schema = avro.schema.parse(open(schema_path).read())
        except Exception:
            raise ValueError(
                "Prod2Cons load schema (%s) - init failed" % (schema_path))
Example #2
    def run(self):
        client = KafkaClient("localhost:9092")
        producer = SimpleProducer(client)

        ctime_start = int(time.mktime(time.strptime("30-12-2010 14:00", "%d-%m-%Y %H:%M")))

        for _ in range(1):
            ctime_length = 60
            ctime_interval = 60 * 60

            print range(ctime_start,
                        ctime_start + ctime_length,
                        ctime_interval)

            ctime_starts = [ctime_start]

            for i, photo_id in enumerate(GetPhotoIDs_batch_iter(ctime_starts,
                                                                interval=ctime_interval)):
                print i, ctime_start, photo_id
                producer.send_messages('flickr-photo_id-dist', photo_id)
                #print photo_id
                time.sleep(0.3)

            ctime_start += ctime_interval

            time.sleep(1)
Example #3
    def run(self):
        client = KafkaClient(settings.KAFKA_SERVER)
        producer = SimpleProducer(client)

        while True:
            producer.send_messages('heartbeats', """{"id":"yes-is-a-fake-uuide", "status": 200, "serviceName":"chewit_cam" }""")
            time.sleep(5)
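For reference, SimpleProducer and KafkaClient were deprecated and later removed from kafka-python; a sketch of the same heartbeat loop on the newer KafkaProducer API (assuming kafka-python >= 1.0 and the same settings module) might look like this.
    def run(self):
        # requires: from kafka import KafkaProducer
        producer = KafkaProducer(bootstrap_servers=settings.KAFKA_SERVER)

        while True:
            # send() expects bytes unless a value_serializer is configured
            producer.send(
                'heartbeats',
                b'{"id":"yes-is-a-fake-uuide", "status": 200, "serviceName":"chewit_cam" }')
            time.sleep(5)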
Example #4
class KafkaMetrics(object):

    def __init__(self, broker_list, kafka_topic):
        logging.basicConfig(level=logging.INFO)
        self.log = logging.getLogger('druid-kafka-metrics')
        self.log.info("Kafka (brokers=%s, topic=%s)" % (broker_list, kafka_topic))
        client = KafkaClient(broker_list)
        self.producer = SimpleProducer(client)
        self.msg_count = 0
        self.kafka_topic = kafka_topic

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.json_in()
    def metrics(self):
        messages = cherrypy.request.json

        for message in messages:
            self.msg_count += 1
            self.log.debug("%s - %s" % (self.msg_count, str(message)))
            self.producer.send_messages(self.kafka_topic, json.dumps(message))

            if self.msg_count % 100 == 0:
                self.log.info("%s messages processed." % (self.msg_count,))

        return {'code': 200}
Example #5
    def run(self):
        #client = KafkaClient("localhost:9092")
        client = KafkaClient("kafka_host:9092")
        producer = SimpleProducer(client)

        while True:
            try:
                messages = []
                for i in xrange(1, 10):
                    messageStr = SelfGeneratedMessage().asJson()
                    logger.debug('Generated message: %s', messageStr)
                    messages.append(messageStr)

                producer.send_messages('test', *messages)
                # producer.send_messages('test', '{"publisher": "publisher-id", "time": "2015-11-03 15:03:30.352", "readings": [ 1, 1,1,1,1,1,1,1,1,1,1,1,4,3,3,3,32,2,1,1,1,1]}')

                time.sleep(1)
            except LeaderNotAvailableError as e:
                logging.exception('LeaderNotAvailableError')
                time.sleep(10)
            except KafkaUnavailableError as e:
                logging.exception('KafkaUnavailableError')
                time.sleep(30)
            except ConnectionError as e:
                logging.exception('ConnectionError')
                time.sleep(60)
            except KafkaError as e:
                logging.exception('KafkaError')
                time.sleep(60)
            except Exception as e:
                logging.exception('Exception')
                time.sleep(60)
Example #6
def run():
    db = getdb()

    writer = WriterProcess()
    writer.start()

    client = KafkaClient(config.KAFKA_HOST)
    producer = SimpleProducer(client)

    timings = []

    for i in range(1000, 2000):
        key = random_key()
        start = time.clock()
        message = IWAMessage(key, i)
        # print('Sending: %s' % message.dumps())
        producer.send_messages(config.KAFKA_TOPIC, message.dumps())
        # print('Queued: %s => %s' % (message.key, message.value))
        while True:
            with db.begin() as txn:
                if txn.get(key):
                    timings.append(time.clock() - start)
                    break

    writer.terminate()

    print('Average write time: %s' % (sum(timings) / len(timings)))
    print('Total write time: %s' % (sum(timings)))
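time.clock(), used above, was deprecated in Python 3.3 and removed in 3.12; on Python 3 the timing loop could use time.perf_counter() instead. A sketch under the same IWAMessage/config assumptions as the example above:
    for i in range(1000, 2000):
        key = random_key()
        start = time.perf_counter()  # monotonic, high-resolution timer
        producer.send_messages(config.KAFKA_TOPIC, IWAMessage(key, i).dumps())
        # poll the database until the writer process has persisted the key
        while True:
            with db.begin() as txn:
                if txn.get(key):
                    timings.append(time.perf_counter() - start)
                    break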
Example #7
class Producer(object):  # threading.Thread
    daemon = True

    # Initializes producer with commandline options
    def __init__(self, options):
        self.host = options.host
        self.port = options.port
        self.topic = options.topic
        self.logfile = options.logfile
        self.metadata = options.metadata
        self.batch_size = options.batch_size
        self.delay = options.delay
        self.pending_messages = []

    # Formats message to be sent to kafka
    def create_message_data(self, data):
        if self.metadata is not None:
            return "%s::%s" % (self.metadata, data)
        elif re.search("GET", data) is not None:
            data = re.split('[ ,]', data)
            csv = data[0] + ' ' + data[1] + ',' + data[7] + ',' + data[9]
            return csv

    ''' batch not currently working
    def flush_messages(self):
        global pending_messages
        print "flushing %d messages " % len(pending_messages)
        self.producer.send_messages(self.topic, pending_messages)
        pending_messages = []

    def send_to_kafka(self, message_text):
        global pending_messages
        pending_messages.append(message_text)
        if len(pending_messages) == self.batch_size:
            self.flush_messages(self.producer)
    '''

    def log_lines_generator(self, logfile, delay_between_iterations=None):
        global should_stop
        cmd = ['tail', '-n', '0', '-F']
        if delay_between_iterations is not None:
            cmd.append('-s')
            cmd.append(str(delay_between_iterations))
        cmd.append(logfile)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=None)
        while not should_stop:
            line = process.stdout.readline().strip()
            yield line

    def run(self):
        self.client = KafkaClient(self.host + ':' + str(self.port))
        self.producer = SimpleProducer(self.client)

        try:
            for line in self.log_lines_generator(self.logfile):
                msg = self.create_message_data(line)
                self.producer.send_messages(self.topic, msg)

        except KeyboardInterrupt:
            pass
Example #8
    def emit(self, container, event):
        try:
            producer = self.producers[event.evt_type]
        except KeyError:
            producer = SimpleProducer(self.client)
            self.producers[event.evt_type] = producer
        # send_messages() takes the topic as its first argument;
        # SimpleProducer itself is not bound to a topic
        producer.send_messages(event.evt_type, json.dumps(event.serialize()))
Example #9
def output_kafka(graph_db, registry,
                 kafka_url=None):
    ldict = {"step": MODULEFILE + "/" + inspect.stack()[0][3],
             "hostname": platform.node().split(".")[0]}
    l = logging.LoggerAdapter(common.fetch_lg(), ldict)
    kafka_topic = "cs"
    if kafka_url is None:
        kafka_url = registry.get_config("kafka_url",
                                        "localhost:9092")
    else:
        l.info("Updating registry with kafka_url: {}".format(kafka_url))
        registry.put_config("kafka_url",
                            kafka_url)
    (nodes, rels) = out.output_json(graph_db, None, None, as_list=True)
    l.info("Connecting to kafka_url {}".format(kafka_url))
    kafka = KafkaClient(kafka_url)
    # To send messages asynchronously
    producer = SimpleProducer(kafka)
    l.info("Sending nodes to kafka {}/{}".format(kafka_url, kafka_topic))
    for n in nodes:
        producer.send_messages(kafka_topic, n)
    l.info("Sending rels to kafka {}/{}".format(kafka_url, kafka_topic))
    for n in rels:
        producer.send_messages(kafka_topic, n)
    kafka.close()
Example #10
def main():
    happy_log_probs, sad_log_probs = readSentimentList(
        'twitter_sentiment_list.csv')

    consumer = KafkaConsumer("tweets", bootstrap_servers=["localhost:9092"],
                             auto_offset_reset='smallest')

    kafka = KafkaClient("localhost:9092")
    producer = SimpleProducer(kafka)
    topic = 'hashtag_sentiment'

    positive_tags = Counter()
    negative_tags = Counter()

    while True:
        for message in consumer.fetch_messages():
            txt = message.value
            txt = re.sub(r'[^\x00-\x7F]', ' ', txt)

            hashtags, sentiment = classifySentiment(
                txt, happy_log_probs, sad_log_probs)

            for hashtag in hashtags:
                if sentiment > 0:
                    positive_tags[hashtag] += 1
                else:
                    negative_tags[hashtag] += 1

        results = {}
        for key, val in positive_tags.most_common(20):
            results[key] = val

        producer.send_messages(topic, json.dumps(results))
        time.sleep(10)
Example #11
class KafkaConnector(object):

    def __init__(self, host_name, host_port):
        self.client = KafkaClient(host_name + ":" + host_port)
        self.producer = SimpleProducer(self.client)

    def create_topic(self, topic_name):
        topic_exists = self.client.has_metadata_for_topic(topic_name)
        if not topic_exists:
            self.client.ensure_topic_exists(topic_name)

    def send_message(self, topic_name, message):
        self.producer.send_messages(topic_name, message)

    def register_consumer(self, callback, parse_json, topic_group, topic_name):
        consumer = SimpleConsumer(self.client, topic_group, topic_name)
        consumer_thread = ConsumerThread(consumer, callback, parse_json)
        consumer_thread.start()

    def blocking_consumer(self, message_consume_function, parse_json, topic_group, topic_name):
        print "starting blocking consumer with topic group %s and topic name %s" % (topic_group, topic_name)
        consumer = SimpleConsumer(self.client, topic_group, topic_name)
        consumer.seek(0,2)

        for message in consumer:
            message = parse_json(message)
            print "=============" + str(message) + "============"
            message_consume_function(message)
            print "called message consume function"
Example #12
def main():
    client = KafkaClient("localhost:9092")
    producer = SimpleProducer(client)

    for i in range(5):
        producer.send_messages('mytopic', "This is my test message, number {0}".format(i))
        time.sleep(1)
Example #13
class Producer(threading.Thread):
    daemon = True

    def __init__(self, broker, topic, directory):
        threading.Thread.__init__(self)
        self.client = KafkaClient(broker)
        self.producer = SimpleProducer(self.client)
        self.message_dir = directory
        self.topic = topic

    def send_file(self, filename):
        print "Send:", filename
        with open(filename) as f:
            file_contents = f.read()
        self.producer.send_messages(self.topic, file_contents)

    def run(self):
        onlyfiles = [
            f for f in listdir(self.message_dir)
            if isfile(join(self.message_dir, f))
        ]
        for f in onlyfiles:
            filename = join(self.message_dir, f)
            self.send_file(filename)
            time.sleep(10)
Example #15
def main():
    global freq_array
    client = KafkaClient('ip-172-31-28-55.ec2.internal:6667')
    producer = SimpleProducer(client)

    fft_size = 1000
    fs = 92
    freq_array = np.array((1 * fs / fft_size))
    for i in range(2, int(fft_size / 2)):
        freq_i = np.array((i * fs / fft_size))
        freq_array = np.vstack((freq_array, freq_i))

    with open('xfourmag.csv', 'rt') as f:
        print('opening csv')
        reader = csv.reader(f)
        row = next(reader)
        #global mags
        mags = np.array(row)
        for row in reader:
            #mags += row
            mags = np.vstack((mags, row))
    #print(mags)
    #print(freq_array)

    json_data = {
        'time': int(time.time()),
        'fft': np.hstack((freq_array[0:31], mags[0:31])).tolist(),
        'sensor_id': '1',
        'reading_type': '0'
    }
    print('sending data...')
    producer.send_messages('shm', (json.dumps(json_data)).encode('utf-8'))
    print('data sent! :)')
Example #16
    def forwarder(self):
        client = KafkaClient(hosts(self.server_list, self.kafka_port))
        client.ensure_topic_exists(self.topic_name)
        producer = SimpleProducer(client, batch_send=False)
        print producer
        for i in xrange(1, 100):
            with open(self.csvfile, 'r') as FR:
                fields = next(FR).strip().split('\t')
                print fields
                for cnc_log in FR:
                    values = cnc_log.strip().split('\t')
                    zipped = dict(zip(fields, values))
                    zipped['lower_bound'] = float(zipped['lower_bound'])
                    zipped['upper_bound'] = float(zipped['upper_bound'])
                    zipped['temperature'] = float(zipped['temperature'])
                    zipped['no'] = int(zipped['no'])
                    print json.dumps(zipped, sort_keys=True, indent=4)
                    # prob = 0.8
                    # y = lambda x, prob: '<span style="background-color:#bd362f; color:white">FAIL</span>' if randint(0,x) > x*prob  else 'PASS'
                    # cnc_log = (datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')+"\t"+y(10,0.8)+'\t'+cnc_log.strip()).split('\t')
                    # zipped = dict(zip(fields,cnc_log))
                    # node = zipped
                    sleep_sec = random.uniform(0, 3) * 5
                    time.sleep(sleep_sec)
                    producer.send_messages(self.topic_name, json.dumps(zipped))
Example #17
def kafkahighlevelproducer(kafka_conn, schema, bytes):
    """
    kafka High level API
    """
    print "SimpleProducer start."

    writer = cStringIO.StringIO()
    encoder = avro.io.BinaryEncoder(writer)
    #JSONEncoder is not implemented ..
    #encoder = avro.io.JSONEncoder(writer)
    datum_writer = avro.io.DatumWriter(schema)

    producer = SimpleProducer(kafka_conn)
    for topic in ["DUMMY_LOG"]:
        writer.truncate(0)
        datum_writer.write({"id": 100L, "logTime": 20140401L, "muchoStuff": {"test": "test1value"}}, encoder)
        bytes = writer.getvalue()
        producer.send_messages(topic, bytes)
        writer.truncate(0)
        datum_writer.write({"id": 101L, "logTime": 20140402L, "muchoStuff": {"test": "test2value"}}, encoder)
        bytes = writer.getvalue()
        producer.send_messages(topic, bytes)

    writer.close()

    print "SimpleProducer done."
Example #19
def seed_kafka(kafka_hosts=None, topic_name=None, num_emails=100000):
    """Seed the local Kafka cluster's "dumpmon" topic with sample email data."""
    topic_name = topic_name or "dumpmon"
    kafka_hosts = kafka_hosts or "127.0.0.1:9092"

    kafka = KafkaClient(kafka_hosts)
    producer = SimpleProducer(kafka)
    # producer = SimpleProducer(kafka, batch_send=True, batch_send_every_n=1000,
    #                           batch_send_every_t=5)

    print("Seeding Kafka ({}) topic '{}' with {:,} fake emails."
           .format(kafka_hosts, topic_name, num_emails))
    emails = random_email_generator()
    for i in range(num_emails):
        email = json.dumps(next(emails)).encode("utf-8", "ignore")
        producer.send_messages(topic_name, email)
    print("Done.")
    
    
    
    
    
    
    
    
Example #20
class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = SimpleProducer(self.client)

    def produce_msgs(self):
        msg_cnt = 0
        while True:
            timestamp = rutil.randomDate("2015,10,01,00,00,00", "2015,10,06,23,59,00", random.random())
            phone = fake.phone_number()
            latdep, longdep = rutil.randomSingleLatLon(LATDEP, LONGDEP, RADIUS)
            latarr, longarr = rutil.randomSingleLatLon(LATARR, LONGARR, RADIUS)

            message_info = rutil.create_json_message(phone=phone,
                                                     latdep=latdep,
                                                     longdep=longdep,
                                                     latarr=latarr,
                                                     longarr=longarr,
                                                     timestamp=timestamp,
                                                     dr_flag=random.choice([0, 1]),
                                                     user_id=random.choice(range(NUM_USERS)),
                                                     message_id=msg_cnt)
            print 'message ' + str(msg_cnt) + ' ---------- ' + message_info
            self.producer.send_messages('RideRequests', message_info)
            msg_cnt += 1
Example #21
    def forwarder(self):
        client = KafkaClient(hosts(self.server_list, self.kafka_port))
        client.ensure_topic_exists(self.topic_name)
        producer = SimpleProducer(client, batch_send=False)
        print producer
        no = 1
        for i in xrange(1, 10000):
            with open(self.csvfile, 'r') as FR:
                first_line = next(FR)
                print first_line
                fields = first_line.strip().split('\t')
                print fields
                for cnc_log in FR:
                    print cnc_log
                    values = cnc_log.strip().split('\t')
                    zipped = dict(zip(fields, values))
                    zipped['lower_bound'] = float(zipped['lower_bound'])
                    zipped['upper_bound'] = float(zipped['upper_bound'])
                    zipped['spindle'] = float(zipped['spindle'])
                    # zipped['no'] = int(zipped['no'])
                    zipped['no'] = no
                    zipped['tool_no'] = int(zipped['tool_no'])
                    # zipped['tool_no'] = i
                    print json.dumps(zipped, sort_keys=True, indent=4)
                    sleep_sec = 1
                    time.sleep(sleep_sec)
                    producer.send_messages(self.topic_name, json.dumps(zipped))
                    no = no + 1
Example #22
    def __init__(self):
        comlog.init_logger("./../log/vdata.log")
        self.ub_conf = UbConfig("./../conf/log_tail.conf")
        self._conf_info = self.ub_conf.get_conf_info()
        self._file_path = self._conf_info["[LOG_FILE_CONF]"]["file_path"]
        self._file_name_pattern = self._conf_info["[LOG_FILE_CONF]"][
            "file_name_pattern"]
        self._log_max_length = int(
            self._conf_info["[LOG_FILE_CONF]"]["log_max_length"])
        self._batch_flush_counter = int(
            self._conf_info["[LOG_FILE_CONF]"]["batch_flush_counter"])
        self._topic_name = self._conf_info["[KAFKA]"]["topic_name"]
        self._interval_time = self._conf_info["[TIME_INTERVAL]"]["interval"]

        self.init_data_file()
        self.s = sched.scheduler(time.time, time.sleep)
        if 'broker_list' in self._conf_info["[KAFKA]"]:
            self.broker_list = self._conf_info["[KAFKA]"]["broker_list"]
        elif 'zookeeper' in self._conf_info["[KAFKA]"]:
            self.broker_list = ','.join(
                self.get_broker_list(self._conf_info["[KAFKA]"]['zookeeper']))
        else:
            raise ValueError(
                "zookeeper and broker_list are both null in config file")
        self.client = KafkaClient(self.broker_list)
        self.producer = SimpleProducer(self.client)

        #tags
        self.tags = {}
        for key in self._conf_info["[TAGS]"]:
            self.tags[key] = self._conf_info["[TAGS]"][key]
Example #23
    def initialize(self, storm_conf, context):
        # TODO debug storm_conf, context
        self.client = InfluxDBClient("streamparse-box", 8086, "root", "root",
                                     "prices_development")
        kafka = KafkaClient(KAFKA_HOST)
        self.queue = SimpleProducer(
            kafka, batch_send=True, batch_send_every_n=1,
            batch_send_every_t=1)  # batching config can be tuned
Example #24
    def run(self):
        client = KafkaClient("localhost:9092")
        producer = SimpleProducer(client)
        self.sent = 0

        while True:
            producer.send_messages('my-topic', self.big_msg)
            self.sent += 1
Example #25
    def get_producer(self):
        """
        :return: SimpleProducer
        """
        if not self.producer:
            self.get_client()
            self.producer = SimpleProducer(self.client)
        return self.producer
Example #26
    def __init__(self, topic, hosts=None, log_level=logging.WARNING):
        hosts = hosts or "localhost:9092"
        self.topic = "{}_{}".format("kafque", topic)
        self.client = KafkaClient(hosts)
        self.client.ensure_topic_exists(str(self.topic))
        self.producer = SimpleProducer(
            self.client, req_acks=SimpleProducer.ACK_AFTER_CLUSTER_COMMIT)
        self.logger = setup_logger(__name__, level=log_level)
Example #28
    def run(self):
        avro_serde = AvroSerDe(AVRO_SCHEMA_STRING)
        client = KafkaClient('localhost:9092')
        producer = SimpleProducer(client)
        while True:
            raw_bytes = avro_serde.obj_to_bytes(get_login_event())
            producer.send_messages(KAFKA_TOPIC, raw_bytes)
            time.sleep(1)
Example #29
    def __init__(self, settings):
        super(ImageConvertProcess, self).__init__(settings)
        self.kafka_client = KafkaClient(self.settings.get("KAFKA_HOSTS"))
        self.kafka_client.ensure_topic_exists(self.topic_name)
        self.producer = SimpleProducer(self.kafka_client)
        #self.lock = RLock()
        self.IC = ImageConvert(settings)
        self.IC.set_logger(self.logger)
Example #31
def sendTokafka(partitionData):
    record = partitionData
    Records = []
    new_record = {'Data': record}
    Records.append(new_record)
    kafka1 = KafkaClient("localhost:9092")
    producer1 = SimpleProducer(kafka1)
    # JSON-encode the record list for the message payload
    producer1.send_messages("too", json.dumps(Records))
    return 0
Example #32
def run(self):
    client = KafkaClient("localhost:9092")
    producer = SimpleProducer(client)
    self.sent = 0
    i = 1
    while i < 10:
        producer.send_messages('my-topic', self.big_msg)
        self.sent += 1
        i += 1
Example #33
    def run(self):
        client = KafkaClient(self.bootstrap_server)
        producer = SimpleProducer(client)
        i = 0
        while True:
            d = datetime.now()
            producer.send_messages(self.topic, ("%s:%s" % (i, d)).encode('utf-8'))
            i += 1
            time.sleep(2)
Example #34
    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = SimpleProducer(self.client)

        with open('AdReports/FB_Report/accessToken.txt', 'rb') as f:
            self.access_token_list = pickle.load(f)

        with open('AdReports/FB_Report/accountID.txt', 'rb') as d:
            self.account_id_list = pickle.load(d)
Example #35
    def run(self):
        client = KafkaClient("localhost:9092")
        producer = SimpleProducer(client)

        while True:
            producer.send_messages('my-topic', "test")
            producer.send_messages('my-topic', "\xc2Hola, mundo!")

            time.sleep(1)
Example #36
    def run(self):
        client = KafkaClient("10.206.216.13:19092,10.206.212.14:19092,10.206.209.25:19092")
        producer = SimpleProducer(client, async=True)

        while True:
            producer.send_messages('guantest', "test")
            producer.send_messages('guantest', "test")

            time.sleep(1)
Example #38
    def __init__(self, broker):
        try:
            self.client = KafkaClient(broker)
            self.prod = SimpleProducer(self.client)
        except KafkaUnavailableError:
            log.critical("\nCluster Unavailable %s : Check broker string\n",
                         broker)
            raise
Example #39
    def run(self):
        client = KafkaClient("localhost:6379")
        producer = SimpleProducer(client)
        i = 0
        while i < 5:
            a = askOrder()
            producer.send_messages(a.product, str(a.mode) + "," + str(a.volume) + "," + str(a.price))

            time.sleep(1)
            i += 1
Example #40
def _feed(settings_file, json_item):
    settings = importlib.import_module(settings_file[:-3])
    kafka_conn = KafkaClient(settings.KAFKA_HOSTS)
    topic = settings.KAFKA_INCOMING_TOPIC
    producer = SimpleProducer(kafka_conn)
    print "=> feeding JSON request into {0}...".format(topic)
    print json.dumps(json_item, indent=4)
    kafka_conn.ensure_topic_exists(topic)
    producer.send_messages(topic, json.dumps(json_item))
    print "=> done feeding request."
Example #41
    def run(self):
        client = KafkaClient("172.17.8.101:9092")
        producer = SimpleProducer(client)
        global_counter = 0

        while True:
            reading = {'P_1': rand(), 'P_2': rand(), 'P_3': rand(),
                       'Q_1': rand(), 'Q_2': rand(), 'Q_3': rand(),
                       'timestamp': int(time.time()) * 1000, 'id': str(uuid.uuid4())}
            producer.send_messages('topic', json.dumps(reading))
            global_counter += 1
            print global_counter
Example #42
    def __init__(self, factory, destination, message_size):
        self.factory = factory
        self.destination = destination
        self.producer = SimpleProducer(factory, self.destination)
        self.rate = PerfRate()
        self.message_size = message_size
        self.message = "a" * message_size
        threading.Thread.__init__(self)
Example #44
    def run(self):
        client = KafkaClient("10.206.216.13:19092,10.206.212.14:19092,10.206.209.25:1909")
        producer = SimpleProducer(client)
        #producer = KeyedProducer(client,partitioner=HashedPartitioner)

        while True:
            producer.send_messages('jiketest', "test")
            producer.send_messages('jiketest', "test")

            time.sleep(1)
Example #45
    def run(self):
        client = KafkaClient(
            "10.206.216.13:19092,10.206.212.14:19092,10.206.209.25:19092")
        producer = SimpleProducer(client)
        #producer = KeyedProducer(client,partitioner=HashedPartitioner)

        while True:
            producer.send_messages('guantest', "test")
            producer.send_messages('guantest', "test")

            time.sleep(1)
Example #46
def main():
    kafka = KafkaClient("localhost:9092")

    producer = SimpleProducer(kafka)
    consumer = SimpleConsumer(kafka, "my-group", "activity.stream", max_buffer_size=None)

    producer.send_messages("activity.stream", "some message test")
    for message in consumer:
        print(message)

    kafka.close()
Example #47
    def run(self):
        self.client = KafkaClient(settings.KAFKA_SERVER)
        self.producer = SimpleProducer(self.client)

        while True:
            data = {
                "id": str(uuid.uuid4()),
                "status": 200,
                "serviceName": settings.RIDE
            }
            self.producer.send_messages("heartbeats", json.dumps(data))
            time.sleep(5)
Example #48
class Producer(object):
    """Kafka producer class with functions to send messages.

    Messages are sent to a single kafka topic "messages" as a json formatted
    string

    Attributes:
        client: string representing IP:port of the kafka broker
        producer: Producer object using the previously specified kafka client
        county_state_list: a list of lists containing the counties and their
            associated state abbreviation
    """
    def __init__(self, addr):
        """Initialize Producer with address of the kafka broker ip address."""
        self.client = KafkaClient(addr)
        self.producer = SimpleProducer(self.client)
        self.county_state_list = IngUt.parse_county_list('ingestion/county_list.txt')

    def sim_msg_stream(self, sleep_time=0.25):
        """Sends a stream of messages to the Kafka topic "messages".

        Args:
            sleep_time: float number in seconds representing the rate messages
                should be sent to the Kafka topic

        Returns:
            None
        """
        msg_cnt = 0

        while True:
            print len(self.county_state_list)
            county, state = IngUt.select_random_county(self.county_state_list)

            timestamp = list(time.localtime()[0:6])

            message_info = IngUt.create_json_message(county=county,
                                                     state=state,
                                                     rank=0,
                                                     timestamp=timestamp,
                                                     creator_id=random.randint(0, NUM_USERS-1),
                                                     sender_id=random.randint(0, NUM_USERS-1),
                                                     message_id=msg_cnt,
                                                     message=fake.text())

            self.producer.send_messages('messages', message_info)
            print timestamp

            if sleep_time != 0:
                time.sleep(sleep_time)

            msg_cnt += 1
Example #50
    def feed(self, json_item):
        '''
        Feeds a json item into the Kafka topic

        @param json_item: The loaded json object
        '''
        topic = self.settings.KAFKA_INCOMING_TOPIC
        producer = SimpleProducer(self.kafka_conn)
        print "=> feeding JSON request into {0}...".format(topic)
        print json.dumps(json_item, indent=4)
        self.kafka_conn.ensure_topic_exists(topic)
        producer.send_messages(topic, json.dumps(json_item))
        print "=> done feeding request."
Example #51
def publish_training_data(kafka_server, kafka_topic, num_to_send, sleep_every_N, target_a, target_b):
    kafka = KafkaClient(kafka_server)
    producer = SimpleProducer(kafka)

    for c in range(num_to_send):
        x, y = util.generate_linear_x_y_data(1, target_a, target_b, c)

        # print "%d %.20f %.20f" % (int(round(time.time() * 1000)), x[0], y[0])
        producer.send_messages(
            kafka_topic,
            "%d %.5f %.5f %.20f %.20f" % (util.now_millis(), target_a, target_b, x[0], y[0]))
        if (c + 1) % sleep_every_N == 0:
            time.sleep(1)
Example #52
class Producer(object):
    def __init__(self, server_list, kafka_port, topic_name):
        self.server_list = server_list
        self.kafka_port = kafka_port
        self.topic_name = topic_name
        self.client = KafkaClient(hosts(self.server_list, self.kafka_port))
        self.producer = SimpleProducer(self.client, batch_send=False)

    def ensure_topic_exists(self):
        self.client.ensure_topic_exists(self.topic_name)

    def forwarder(self, message):
        self.producer.send_messages(self.topic_name, message)
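A hypothetical driver for the wrapper above (broker names, port, and topic are placeholders; hosts() is assumed to come from the same module as the class):
if __name__ == '__main__':
    producer = Producer(['kafka01', 'kafka02'], 9092, 'cnc-logs')
    producer.ensure_topic_exists()
    producer.forwarder(json.dumps({'no': 1, 'temperature': 21.5}))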
Example #53
def executions(test=False, kafka_hosts=None, topic_name='executions', num_fills=1):
    """Seed random executions"""
    kafka_hosts = kafka_hosts or KAFKA_HOST
    kafka = KafkaClient(kafka_hosts)
    producer = SimpleProducer(kafka, batch_send=True, batch_send_every_n=1,
                              batch_send_every_t=1)
    fills = random_execution_generator()
    for i in range(num_fills):
        fill = json.dumps(next(fills)).encode("utf-8", "ignore")
        print(fill)
        if test is False:
            producer.send_messages(topic_name, fill)
    print("Done.")
Example #54
def queue_fills(test=False, kafka_hosts=None, topic_name='fills', num_fills=1):
    """Seed the local Kafka cluster's "fills" topic with sample fill data."""
    kafka_hosts = kafka_hosts or KAFKA_HOST
    kafka = KafkaClient(kafka_hosts)
    producer = SimpleProducer(kafka, batch_send=True, batch_send_every_n=1,
                              batch_send_every_t=1)
    fills = random_fill_generator()
    for i in range(num_fills):
        fill = json.dumps(next(fills)).encode("utf-8","ignore")
        print(fill)
        if test is False:
            producer.send_messages(topic_name, fill)
    print("Done.")
Example #55
class KafkaProducer(KafkaBase, TerminatorBlock):
    """ A block for producing Kafka messages """

    version = VersionProperty("1.0.1")

    def __init__(self):
        super().__init__()
        self._producer = None

    def configure(self, context):
        super().configure(context)
        self._connect()

    def stop(self):
        self._disconnect()
        super().stop()

    def process_signals(self, signals, input_id='default'):
        msgs = []
        for signal in signals:
            if self.connected:
                try:
                    if type(signal) is not bytes:
                        signal = pickle.dumps(signal)
                except Exception:
                    self.logger.exception(
                        "Signal: {0} could not be serialized".format(signal))
                    return
                msgs.append(signal)
            else:
                return

        try:
            if self.connected:
                self._producer.send_messages(self._encoded_topic, *msgs)
        except Exception:
            self.logger.exception("Failure sending signal")

    def _connect(self):
        super()._connect()
        self._producer = SimpleProducer(self._kafka)

    def _disconnect(self):
        if self._producer:
            self._producer.stop()
            self._producer = None
        super()._disconnect()

    @property
    def connected(self):
        return super().connected and self._producer
Example #56
def send_msg(msgs):
    cli = KafkaClient("localhost:9092")
    producer = SimpleProducer(cli)
    if isinstance(msgs, list):
        content = [(json.dumps(msg) if isinstance(msg, dict) else msg) for msg in msgs]
    else:
        content = [msgs]
    try:
        resp = producer.send_messages("tp_test1", *content)
        print resp
    except Exception:
        print traceback.format_exc()
    finally:
        cli.close()
Example #57
class OSCollector(threading.Thread):
    path = None
    kafka = None
    producer = None
    kafkaHost = None  #'9.110.95.141:9092'
    hostname = None

    def __init__(self, kafkaHost):

        if kafkaHost is None:
            raise SyntaxError('Unknown kafka server')

        self.kafkaHost = kafkaHost

        threading.Thread.__init__(self)

        p = subprocess.Popen('hostname -f',
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        for line in p.stdout.readlines():
            self.hostname = line.strip()
            break

        self.path = '/root/x.json'

        self.kafka = KafkaClient(self.kafkaHost)
        self.producer = SimpleProducer(self.kafka)

    def run(self):
        while True:
            cpu = self.get_cpu_usage()
            memory = self.get_mem_usage()

            message = {}
            message["node"] = self.hostname
            systemObject = {}
            systemObject["cpu"] = cpu
            systemObject["memory"] = memory
            message["system"] = systemObject

            try:
                print str(message)
                self.producer.send_messages("collector",
                                            str(json.dumps(message)))
            except Exception as e:
                print str(e)

            time.sleep(10)