from kafka import KafkaProducer


def sendTwitterTweetsToKafka(body):
    # bootstrap_servers entries must not contain spaces between hosts
    producer = KafkaProducer(bootstrap_servers="10.0.0.9:9092,10.0.0.12:9092,10.0.0.6:9092,10.0.0.14:9092")
    # send() is asynchronous and returns a FutureRecordMetadata
    resp = producer.send('marsstreaming', body)
    # flush() blocks until all buffered records are delivered
    producer.flush()
    return resp
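Creating a new KafkaProducer on every call is expensive, since each instance opens its own broker connections. A minimal sketch of the same function with a module-level producer reused across calls (broker list carried over from the example above):

from kafka import KafkaProducer

# One producer per process; kafka-python producers are thread-safe
_producer = KafkaProducer(
    bootstrap_servers="10.0.0.9:9092,10.0.0.12:9092,10.0.0.6:9092,10.0.0.14:9092")


def sendTwitterTweetsToKafka(body):
    # Reuse the shared producer instead of reconnecting on every call
    return _producer.send('marsstreaming', body)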
Example #2
from json import dumps
from datetime import datetime

from kafka import KafkaProducer

bootstrap_servers = ['localhost:9091', 'localhost:9092', 'localhost:9093']
topicName = 'my-topic-msg02-sem-flush'

# Kafka configuration: serialize each dict as UTF-8 JSON
producer = KafkaProducer(bootstrap_servers=bootstrap_servers,
                         value_serializer=lambda x: dumps(x).encode('utf-8'))

print("Ctrl+c to Stop")
total = 0
while True:
    total += 1
    data_e_hora_completa = datetime.now()
    data_string = data_e_hora_completa.strftime('%Y-%m-%d %H:%M:%S')
    msg = 'Hello folks, now without flush and with the consumer switched off!!!'
    dados = {"msg": str(msg), "horario": data_string, "total": total}
    # Send without flush(): records leave the producer's internal
    # buffer in the background rather than on an explicit barrier
    producer.send(topicName, dados)
    print(dados)
Example #3
from kafka import KafkaProducer
from tweepy import OAuthHandler
import threading
import metrics
import json

#JoeBiden id: 939091
#realDonaldTrump id: 25073877

# Twitter API access keys
access_token = "<ACCESS_TOKEN>"
access_token_secret = "<ACCESS_TOKEN_SECRET>"
consumer_key = "<CONSUMER_KEY>"
consumer_secret = "<CONSUMER_SECRET>"
# Topic and producer
topic = "default_topic"
producer = KafkaProducer(bootstrap_servers='localhost:9092')


class TwitterAuthenticator():
    def authenticate_twitter_app(self):
        auth = OAuthHandler(consumer_key, consumer_secret)
        auth.set_access_token(access_token, access_token_secret)
        return auth


class TwitterStreamProducer():
    def __init__(self, topic_name):
        global topic
        topic = topic_name
        self.twitter_authenticator = TwitterAuthenticator()
        self.metrics = metrics.Metrics(producer=producer)
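The snippet cuts off before the stream callback that would feed the producer. A hypothetical sketch of that missing piece, assuming tweepy 3.x's StreamListener interface; the TweetForwarder name and payload handling are illustrative, not from the original project:

from tweepy.streaming import StreamListener


class TweetForwarder(StreamListener):
    """Illustrative listener: forwards each raw tweet payload to Kafka."""

    def on_data(self, raw_data):
        # raw_data is the raw JSON string of one tweet
        producer.send(topic, raw_data.encode('utf-8'))
        return True  # keep the stream open

    def on_error(self, status_code):
        # Stop on rate limiting (420) instead of hammering the endpoint
        return status_code != 420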
Example #4
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 22 16:44:22 2021

@author: Saba Kiriako
"""

import json
from kafka import KafkaConsumer, KafkaProducer

stations = {}
consumer = KafkaConsumer("velib-stations", bootstrap_servers='localhost:9092')
port = "9092"
producer = KafkaProducer(bootstrap_servers="localhost:" + port)
topic_name = "stations-status"

statuses = {}
for message in consumer:
    stations = json.loads(message.value.decode())
    for station in stations:
        key = "{}-{}-{}".format(station["number"], station["name"],
                                station["contract_name"])
        status = station["available_bikes"]
        if key not in statuses:
            statuses[key] = status
        elif status != statuses[key]:
            print("Current Status :" + str(status) + " Previous Status :" +
                  str(statuses[key]))
            # Publish the change (payload shape is illustrative) and
            # remember the new status so it is reported only once
            producer.send(topic_name, json.dumps(
                {"station": key, "available_bikes": status}).encode('utf-8'))
            statuses[key] = status
Example #5
from kafka import KafkaProducer
from kafka.errors import KafkaError
from setting import log

producer = KafkaProducer(bootstrap_servers=['broker1:1234'])

# Asynchronous by default
future = producer.send('my-topic', b'raw_bytes')

# Block for 'synchronous' sends
try:
    record_metadata = future.get(timeout=10)
except KafkaError:
    # Decide what to do if produce request failed...
    log.exception("produce request failed")

# Successful result returns assigned partition and offset
print(record_metadata.topic)
print(record_metadata.partition)
print(record_metadata.offset)

# produce keyed messages to enable hashed partitioning
producer.send('my-topic', key=b'foo', value=b'bar')

# encode objects via msgpack
import msgpack
producer = KafkaProducer(value_serializer=msgpack.dumps)
producer.send('msgpack-topic', {'key': 'value'})

# produce json messages
import json
producer = KafkaProducer(
    value_serializer=lambda m: json.dumps(m).encode('ascii'))
producer.send('json-topic', {'key': 'value'})
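Fire-and-forget sends fail silently unless callbacks are attached to the returned future. A minimal sketch using add_callback/add_errback (the handler names are illustrative):

def on_send_success(record_metadata):
    # Runs on the producer's I/O thread once the broker acknowledges
    print(record_metadata.topic, record_metadata.partition,
          record_metadata.offset)


def on_send_error(excp):
    log.error('send failed', exc_info=excp)


producer.send('my-topic', b'raw_bytes') \
    .add_callback(on_send_success) \
    .add_errback(on_send_error)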
Example #6
from kafka import KafkaProducer, KafkaConsumer
# KafkaClient/SimpleConsumer are legacy APIs (removed in kafka-python 2.0)
from kafka import KafkaClient, SimpleConsumer
import time
import sys
from app import app
from flask import Flask, render_template, request, redirect, Response
import random, json

SEND_TIME = None
print("At top of module \n\n")
RECEIVE_TIME = None

bs = ['54.218.73.149:9092', '50.112.197.74:9092', '34.222.135.111:9092']
PRODUCER = KafkaProducer(bootstrap_servers=bs)
CLIENT = KafkaClient(bs)



@app.route('/')
def home():
    return render_template('setuser.html')


@app.route('/<user>')
def serve_user(user):
    global RECEIVE_TIME  # assign the module-level timestamp, not a local
    consumer = SimpleConsumer(CLIENT, 'testing', 'user{}_sess{}'.format(user, user))
    msg = consumer.get_message()
    RECEIVE_TIME = time.time()
    color = 'yellow'
Example #7
from xmlrpc.server import SimpleXMLRPCServer
import requests

from time import sleep
from json import dumps
from kafka import KafkaProducer

# Wait for the Kafka broker to come up before connecting
sleep(20)

producer = KafkaProducer(bootstrap_servers=['localhost:9092'],
                         value_serializer=lambda x: dumps(x).encode('utf-8'))

pollServer = SimpleXMLRPCServer(('0.0.0.0', 9000),
                                logRequests=True,
                                allow_none=True)

# ROCKETS_STATES_BASE_URL = "http://localhost:5000"
# ELON_URL = "http://localhost:8000/"
# TORY_URL = "http://localhost:3000/"


def getResponsesPoll(siteName, rocketName):
    print("-----------------------------------")
    print(siteName + "\n" + rocketName)
    data = {'siteName': siteName, 'rocketName': rocketName}
    producer.send('Pollrequesttopic', value=data)
Example #8
 def __init__(self):
     # localhost:9092 = default Kafka broker address (Zookeeper uses 2181)
     self.producer = KafkaProducer(bootstrap_servers=['localhost:9092'])
Example #9
    stream.map(pair).reduceByKey(lambda a, b: (a[0] + b[0], a[1] + b[1])).map(
        lambda kv: (kv[0], kv[1][0] / kv[1][1])).foreachRDD(send_to_kafka)


if __name__ == '__main__':
    if len(sys.argv) != 4:
        print("Usage: stream-process.py [topic] [target-topic] [broker-list]")
        sys.exit(1)

    # - create SparkContext and StreamingContext
    sc = SparkContext("local[2]", "StockAveragePrice")
    sc.setLogLevel('INFO')
    ssc = StreamingContext(sc, 5)

    topic, target_topic, brokers = sys.argv[1:]

    # - instantiate a kafka stream for processing
    directKafkaStream = KafkaUtils.createDirectStream(
        ssc, [topic], {'metadata.broker.list': brokers})
    process_stream(directKafkaStream)

    # - instantiate a simple kafka producer
    kafka_producer = KafkaProducer(bootstrap_servers=brokers)

    # - setup proper shutdown hook
    atexit.register(shutdown_hook, kafka_producer)

    ssc.start()
    ssc.awaitTermination()
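shutdown_hook is registered with atexit but its body is not shown. A plausible sketch, assuming the hook simply drains and closes the producer:

def shutdown_hook(producer):
    """Flush buffered records and close the producer at exit."""
    try:
        producer.flush(10)   # block up to 10 seconds for in-flight records
        producer.close(10)
    except Exception:
        pass  # best effort only; we are already shutting down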
Example #10
from kafka import KafkaProducer
import sys
import time
import datetime
import pandas as pd
import json

producer = KafkaProducer(bootstrap_servers=['kafka1:9092'])

topic_name = "streamKafka"
print("sending messages to topic:" + str(topic_name))

# Compatible with logstash
for chunk_df in pd.read_csv("../Data/miniTraffic.csv", chunksize=100):
    for index, point in chunk_df.iterrows():
        # Dots in column names don't play well with Spark
        # point.index = [x.replace(".", "_") for x in point.index]
        point['location'] = [
            point['coordinates_long'], point['coordinates_lat']
        ]
        point['dateSend'] = datetime.datetime.now()
        # Round-trip through json to get a plain JSON string
        msg = point.to_json()
        msg2 = json.loads(msg)
        msgJson = json.dumps(msg2)
        producer.send(topic_name, bytes(msgJson + '\n', 'utf-8'))
        time.sleep(0.0016)
producer.close()
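A minimal sketch of the same pipeline with the JSON encoding moved into a value_serializer, so the loop body only hands over a dict; default=str is needed because dateSend is a datetime (broker and names carried over from the example above):

import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['kafka1:9092'],
    # Serialize each value to UTF-8 JSON; default=str handles datetimes
    value_serializer=lambda v: json.dumps(v, default=str).encode('utf-8'))

# Inside the loop the send then becomes:
# producer.send(topic_name, point.to_dict())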
Example #11
from kafka import KafkaConsumer, KafkaProducer
import os
import time
import json
from MyConsumer import *
from log import KafkaLog
from conf.getConf import *

consumer = KafkaConsumer('testyg',
                         group_id="test_group_1",
                         bootstrap_servers=['10.4.10.239:9092'],
                         enable_auto_commit=False)
producer = KafkaProducer(bootstrap_servers='10.4.10.239:9092')  # connect to Kafka
clintInf = ClientInf(0, 18)
log = KafkaLog()
localHost = getLocalhost()
pwd = os.path.dirname(__file__)

with open(pwd + '/imgs/label/imagenetLabel.json') as f:
    labels = json.load(f)


def class_id_to_label(i):
    return labels[i]


def dataConsume():
    cnt = 0
    for msg in consumer:
        # recv = "%s:%d:%d: key=%s value=%s" % (msg.topic, msg.partition,
        #                                       msg.offset, msg.key, msg.value)
        # print(recv)
        cnt += 1
    return cnt
Example #12
        trace[AUX_VALID_TRACE] = 1
    return trace


def print_active_connections():
    print("############################")
    print(active_connections)


def get_active_connections():
    return active_connections


consumer = KafkaConsumer('sniffer', bootstrap_servers=['10.40.39.22:1025'])

producer = KafkaProducer(bootstrap_servers='10.40.39.22:1025')

print('Started Consumer')
for message in consumer:
    package_info = message.value.decode('utf-8').replace("[", "").replace(
        "]", "").replace('"', "").replace("'", "").split(' ')
    parsed_trace, trace, protocol, total_active_connections = update_active_connections(
        package_info)
    if parsed_trace is not None:
        new_parsed_trace = np.array([])
        for i in range(len(parsed_trace)):
            new_parsed_trace = np.append(new_parsed_trace,
                                         str(parsed_trace[i]))
        new_parsed_trace = np.append(new_parsed_trace, str(protocol))
        producer.send('connections', np.array_str(new_parsed_trace).encode())
        print(new_parsed_trace)
Example #13
    'https://polyglot-academy-pub.nyc3.digitaloceanspaces.com/liquor-ml')
bucket_source_name = bucket_source.split('/')[-1]
bucket_destination = os.getenv('BUCKET_BASE_NAME', 'liquor-images')

# Helper database
db_user = os.getenv('DATABASE_USER', 'liquorlab')
db_password = os.getenv('DATABASE_PASSWORD', 'liquorlab')
db_host = os.getenv('DATABASE_HOST', 'liquorlabdb')
db_db = os.getenv('DATABASE_DB', 'liquorlabdb')

# Delay between images
seconds_wait = float(os.getenv('SECONDS_WAIT', 2))

# Kafka producer
producer = KafkaProducer(
    bootstrap_servers='my-cluster-kafka-bootstrap:9092',
    value_serializer=lambda v: json.dumps(v).encode('utf-8'))


########
# Code #
########
def copy_file(source, image_key, destination, image_name):
    """Copies an object from a URL source to a destination bucket."""

    image_url = source + '/' + image_key
    req_for_file = requests.get(image_url, stream=True)

    # Init File-like object (to be used by upload_fileobj method)
    file_object_from_req = req_for_file.raw
Example #14
import json
from kafka import KafkaProducer


def send_topic(text):
    producer = KafkaProducer(
        bootstrap_servers='localhost:9092',
        value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    producer.send('nifi', key=b'text', value=text)
    producer.flush()  # deliver before the short-lived producer is dropped
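Note that the constant key b'text' hashes to the same partition every time, so all messages land on one partition. A sketch with a per-message key instead; the user_id parameter is illustrative:

import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='localhost:9092',
    value_serializer=lambda v: json.dumps(v).encode('utf-8'))


def send_topic_keyed(user_id, text):
    # Records with the same key stay on the same partition (preserving
    # per-key ordering) while different keys spread across partitions
    producer.send('nifi', key=user_id.encode('utf-8'), value=text)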
Example #15
    scrape_config_file = 'scrape.yml'
    reddit_config = {
        k: os.environ[k]
        for k in
        ['client_id', 'client_secret', 'password', 'user_agent', 'username']
    }
    kafka_config = {k: os.environ[k] for k in ['host', 'port', 'topic']}
    subreddit_names = os.environ['subreddits'].split(',')
    print(f'kafka_config: {kafka_config}')
    print(f'reddit_config: {reddit_config}')
    print(f'subreddit_names: {subreddit_names}')
    time.sleep(5)

    producer = KafkaProducer(
        api_version=(2, 6),
        bootstrap_servers=f'{kafka_config["host"]}:{kafka_config["port"]}',
        value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    topic = kafka_config["topic"]
    reddit = praw.Reddit(**reddit_config)
    thread_list = []
    for subreddit_name in subreddit_names:
        thread = threading.Thread(target=print_submissions,
                                  group=None,
                                  args=(reddit, subreddit_name,
                                        kafka_submission),
                                  daemon=True)
        thread_list.append(thread)
        thread.start()
    for thread in thread_list:
        thread.join()
Example #16
# in the file data/stream_apple.json

import tweepy
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import time
import argparse
import string
import config
import json

from kafka import KafkaProducer

# Single producer with both the broker list and a JSON value serializer
producer = KafkaProducer(
    bootstrap_servers='localhost:9092',
    value_serializer=lambda v: json.dumps(v).encode('utf-8'))


def get_parser():
    """Get parser for command line arguments."""
    parser = argparse.ArgumentParser(description="Twitter Downloader")
    parser.add_argument("-q",
                        "--query",
                        dest="query",
                        help="Query/Filter",
                        default='-')
    parser.add_argument("-d",
                        "--data-dir",
                        dest="data_dir",
Example #17
 def __init__(self, host: str, port: int) -> None:
     h = f'{host}:{port}'
     self.kf_producer = KafkaProducer(bootstrap_servers=h)
Example #18
import csv
import json
from configparser import ConfigParser

from kafka import KafkaProducer

config = ConfigParser()
config.read('config.ini')

TOPIC = config.get('kafka', 'topic')
BOOTSTRAP_SERVERS = config.get('kafka', 'bootstrap_servers')
FILEPATH = config.get('kafka', 'csv_filepath')

# Create a producer object to ingest data into the Kafka topic
producer = KafkaProducer(bootstrap_servers=BOOTSTRAP_SERVERS,
                         api_version=(0, 10, 1))

if __name__ == "__main__":
    # Ingest each CSV row into the Kafka topic as JSON
    with open(FILEPATH, 'r') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        next(csv_reader)  # skip the header row
        msg = {}
        for row in csv_reader:
            msg['Invoice'] = row[0]
            msg['StockCode'] = row[1]
            msg['Description'] = row[2]
            msg['Quantity'] = row[3]
            msg['InvoiceDate'] = row[4]
            msg['Price'] = row[5]
            msg['CustomerID'] = row[6]
            producer.send(TOPIC, json.dumps(msg).encode('utf-8'))
        producer.flush()
Example #19
 def __init__(self, bootstrap_servers, topic):
     self.bootstrap_servers = bootstrap_servers
     self.topic = topic
     self.producer = KafkaProducer(bootstrap_servers=bootstrap_servers)
Example #20
 def _setup_kafka_producer(self, bootstrap_servers):
     return KafkaProducer(bootstrap_servers=bootstrap_servers)
Example #21
from time import sleep
from json import dumps
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers=['localhost:9092'],
                         value_serializer=lambda m: dumps(m).encode('ascii'))

for e in range(1000):
    data = {'number': e}
    print(data)
    producer.send('test', value=data)
    sleep(5)
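A matching consumer sketch for the loop above, assuming the same broker and topic, with the mirror-image deserializer:

from json import loads
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    'test',
    bootstrap_servers=['localhost:9092'],
    auto_offset_reset='earliest',  # start from the oldest available record
    value_deserializer=lambda m: loads(m.decode('ascii')))

for message in consumer:
    print(message.value)  # e.g. {'number': 0}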
Example #22
def create_kafkaProducer():
    # SASL settings require a SASL security_protocol; SASL_PLAINTEXT is
    # assumed here, use SASL_SSL when the brokers speak TLS
    return KafkaProducer(bootstrap_servers=kafkaServerEndPoint,
                         security_protocol='SASL_PLAINTEXT',
                         sasl_mechanism=kafkaAuth['sasl_mechanism'],
                         sasl_plain_username=kafkaAuth['sasl_plain_username'],
                         sasl_plain_password=kafkaAuth['sasl_plain_password'])
Example #23
def create_producer(broker_ids):
    producer = KafkaProducer(bootstrap_servers=broker_ids)
    return producer
Example #24
parser.add_argument('id', type=str)
parser.add_argument('user_id', type=str)
parser.add_argument('timestamp', type=lambda x: dateutil.parser.parse(x))
parser.add_argument('creation_timestamp',
                    type=lambda x: dateutil.parser.parse(x))
parser.add_argument('modified_timestamp',
                    type=lambda x: dateutil.parser.parse(x))
parser.add_argument('type', type=str)
parser.add_argument('data', type=dict)

basePath = Path(__file__).parent.parent / 'kafka_auth'
producer = KafkaProducer(
    bootstrap_servers=['kafka-demo-parametrix-b70f.aivencloud.com:12744'],
    value_serializer=lambda x: json.dumps(x).encode('utf-8'),
    security_protocol="SSL",
    ssl_cafile=basePath / "ca.pem",
    ssl_certfile=basePath / "service.cert",
    ssl_keyfile=basePath / "service.key",
    api_version=(2, 5),
)


class EventApi(Resource):
    @marshal_with(event_fields)
    def get(self, event_id):
        return Event.objects().get(id=event_id)

    @marshal_with(event_fields)
    def delete(self, event_id):
        event = Event.objects().get(id=event_id)
        event.delete()
        return event
Example #25
# Attempt connection; let any failure propagate
ucscauthlogindb.connect()
print("Connected!")

# Connect KafkaProducer
kafkaserver = [
    "itsec-prod-elk-3.ucsc.edu:9092", "itsec-prod-elk-8.ucsc.edu:9092",
    "itsec-prod-elk-9.ucsc.edu:9092"
]
topic = 'secinc'
kproducer = KafkaProducer(bootstrap_servers=kafkaserver)

# Set up time for data pull
nowDate = datetime.datetime.today().strftime("%Y-%m-%d %H:%M:%S")
pastDate = (datetime.datetime.today() -
            datetime.timedelta(hours=24)).strftime("%Y-%m-%d %H:%M:%S")

authenticationsources = ['shibboleth', 'google', 'vpn']

# Loop through each authentication source and pull records related to potential compromises
for authsource in authenticationsources:

    # Run query for the authentication source
    results = ucscauthlogindb.getUserLoginsForAuthsource(
Example #26
 def __init__(self):
     self.producer = KafkaProducer(bootstrap_servers='192.168.100.122:9092',
                                   value_serializer=lambda v: json.dumps(v).encode('utf-8'))
Example #27
File: Kafka.py Project: kenser/one
#!/usr/bin/python3
# yum -y install python36-setuptools
# easy_install-3.6 pip
# pip install kafka kafka-python

from kafka import KafkaProducer
from kafka import KafkaConsumer

#Producer
producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092')
future = producer.send('TopicName', b'hello_world')
result = future.get(timeout=10)
#print(result)

#Consumer
consumer = KafkaConsumer('TopicName', group_id='GroupID', bootstrap_servers=['127.0.0.1:9092'])
for msg in consumer:
    print(msg)
Example #28
 def __init__(self):
     self._producer = KafkaProducer(
         bootstrap_servers=config.CONFIG['kafka_broker'] + ":" +
         config.CONFIG['port'])
Example #29
from kafka import KafkaProducer
from time import sleep
import parsApi
import json

bootstrapServers = ['localhost:9092']

producer = KafkaProducer(
    bootstrap_servers=bootstrapServers,
    value_serializer=lambda v: json.dumps(v).encode('utf-8'))

weather_data = parsApi.get_Api(city='london')

data_weather_forcast_5day = parsApi.get_date_weatherstatus_tmp_windspeed(
    weather_data)

for temporary_data in data_weather_forcast_5day:
    print('done')
    producer.send('weather_forcast', temporary_data)
    sleep(3)

producer.close()
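The producers in these examples run with the library defaults (acks=1, no retries). A hedged sketch of a more durability-focused configuration; the parameter values are illustrative, not from the original script:

import json
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    value_serializer=lambda v: json.dumps(v).encode('utf-8'),
    acks='all',      # wait for the full in-sync replica set to acknowledge
    retries=5,       # retry transient broker errors before failing the send
    linger_ms=20)    # small batching delay trades latency for throughput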
Example #30
import logging
import os
import uuid
from datetime import datetime
from json import dumps

from dateutil import tz
from kafka import KafkaProducer

from inferencer.YAMnet import YAMnet
from nivelDeRuido.nivelRuido import NivelRuido
from resources.recorder import recorder
from resources.deviceInfo import deviceInfo

logging.getLogger().setLevel(logging.INFO)

device = deviceInfo()
info = device.getInfoObj()
inferencer = YAMnet()
nivel_ruido = NivelRuido()
recorder = recorder(dirname='./', time=10)
inferencer_identifier = uuid.uuid4().__str__()

producer = KafkaProducer(
    bootstrap_servers=[os.environ["KAFKA_BOOTSTRAP_SERVER_ONE"]],
    value_serializer=lambda x: dumps(x).encode("utf-8"))

try:

    while True:

        filename = "raspberry-" + uuid.uuid1().__str__()
        ruta = recorder.record(id=filename)
        now = datetime.now(tz=tz.tzutc())
        date_time = now.strftime("%Y-%m-%dT%H:%M:%S")
        info['audio_uuid'] = filename
        info['time'] = date_time
        start = datetime.now()
        inferencer_result = inferencer.run_inferencer(filename)
        Leq = nivel_ruido.calcular_db(filename)