# Ejemplo 1: consume pipe-delimited instrument records from a kafka topic.
# NOTE(review): this fragment is truncated — the remainder of the for-loop
# body (including the matching `source = tmp` half of the swap) is not
# visible in this view.
topic = client.topics[args.topic]
# Auto-commit offsets under the consumer group supplied on the command line;
# EARLIEST means a brand-new group starts from the beginning of the topic.
consumer = topic.get_simple_consumer(auto_commit_enable=True,
                                     consumer_group=args.group,
                                     auto_offset_reset=OffsetType.EARLIEST,
                                     reset_offset_on_start=False)

sys.stderr.write("connected to kafka\n")

# Watchdog thread: terminates the whole process if no message has been
# processed for args.timeout seconds (daemonized so it never blocks exit).
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write(
    "Monitor will kill application if unable to process a message for %d seconds\n"
    % args.timeout)

# Small status web server exposing the most recently processed message.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)

if args.verbose:
    sys.stdout.write("\n")
oldtime = time.time()
file_path = None  # path of the output file currently being written, if any
f_all = None      # open file handle for that path, if any
for message in consumer:
    if message is not None:
        # Messages are pipe-delimited: timestamp|source|data.
        # NOTE(review): maxsplit=3 can yield FOUR fields when `data`
        # itself contains a '|', which would break this 3-way unpack —
        # maxsplit=2 was probably intended; confirm against producers.
        (timestamp, source, data) = message.value.split('|', 3)
        if source.startswith("20"):  # wrong order on some early belmullet
            # First two fields arrive swapped on some feeds; swap them back.
            tmp = timestamp
            timestamp = source
# ==== Ejemplo n.º 2 ====
# Ejemplo 2: consume YSI sonde CSV records from kafka and insert them into
# an MSSQL database.
# NOTE(review): this fragment is truncated — the body of the final
# `with conn.cursor()` block is not visible in this view.
conn = pymssql.connect(server=args.db_server, user=args.db_user, database=args.db_name, password=args.db_password,autocommit=True)
client = KafkaClient(hosts="kafka01:9092,kafka02:9092,kafka03:9092")
topic = client.topics[instrument_id]
# Offsets are NOT auto-committed here — presumably committed manually only
# after a successful DB insert; TODO confirm against the truncated loop body.
consumer = topic.get_simple_consumer(auto_commit_enable=False,
                                     consumer_group="ysi2mssql_v1", 
                                     auto_offset_reset=OffsetType.EARLIEST,
                                     reset_offset_on_start=False)

sys.stderr.write("connected to kafka\n")

# Watchdog thread: kills the process if no message is processed for
# args.timeout seconds.
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write("Monitor will kill application if unable to process a message for %d seconds\n" % args.timeout)

# Status web server exposing the most recently processed message.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)

if args.verbose:
    sys.stdout.write("\n")
for message in consumer:
   if message is not None:
        # Comma-separated sensor record; fields 2..9 must all be numeric.
        values = message.value.split(',')
        if(len(values) >= 11) and all(is_number(i) for i in values[2:10]):

            # NOTE(review): this unpack requires EXACTLY 12 fields, but the
            # guard above only checks len(values) >= 11 — a 11- or 13-field
            # record passes the guard and then raises ValueError here.
            (clock_date, clock_time, temp, cond, salinity, depth, ph, ph_mv, turbidity, do_mgl, battery,dummy) = values
            # Dates arrive as dd/mm/yyyy; rebuild an ISO-8601-style UTC stamp.
            (dd,mm,yyyy) = clock_date.split('/')
            timestamp = "{0}-{1}-{2}T{3}Z".format(yyyy,mm,dd,clock_time)
            with conn.cursor() as cursor:
# ==== Ejemplo n.º 3 ====
# Ejemplo 3: read raw ADCP data from a TCP socket and (in the truncated
# continuation) forward it via kafkacat.
# NOTE(review): fragment starts mid-script (parser was created earlier) and
# ends before the receive loop that consumes the state initialized below.
parser.add_argument('--kafkacat',
                    default='/usr/local/bin/kafkacat',
                    help='path to kafkacat')
args = parser.parse_args()

# Destination folder for raw ADCP output and the logical device name.
folder = '/home/gcouser/adcp'
device = args.source

# Watchdog thread: kills the process if no message is processed for
# args.timeout seconds.
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write(
    "Monitoring %s:%d will kill application if unable to process a message for %d seconds\n"
    % (args.server, args.port, args.timeout))

# Status web server exposing the most recently processed message.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)

# TCP connection to the instrument, switched to non-blocking mode.
# NOTE(review): F_SETFL with a bare os.O_NONBLOCK REPLACES the existing
# status flags instead of OR-ing them in — harmless for a fresh socket,
# but worth confirming that was intended.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((args.server, args.port))
fcntl.fcntl(s, fcntl.F_SETFL, os.O_NONBLOCK)

# Receive-loop state (the loop itself lies beyond this fragment's view):
chunks = []        # buffered partial reads awaiting assembly
nodata = True      # whether anything has been received yet
f = None           # current output file handle
filename = None    # current output file name
first_time = True  # first-iteration flag

# ==== Ejemplo n.º 4 ====
# Ejemplo 4: consume raw AIS NMEA from kafka, decode it to JSON with
# gpsdecode, and republish the JSON to another kafka topic via kafkacat.
import argparse
import os
import subprocess
import sys  # FIX: was missing — sys.stderr is used below (NameError before)
from midas import WebServer, KillerMonitor

parser = argparse.ArgumentParser(description='Reads ais data from kafka, converts to json using gpsd, and writes to kafka')
parser.add_argument('--timeout', type=int, default=600, help='Number of seconds to wait for messages before giving up, default=600 (10 minutes)')
parser.add_argument('--http-port', type=int, default=8078, help='HTTP web server port showing latest message, default is 8078')
args = parser.parse_args()

# Watchdog thread: kills the process if no message is processed for
# args.timeout seconds (daemonized so it never blocks exit).
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write("Monitor will kill application if unable to process a message for %d seconds\n" % args.timeout)

# Status web server exposing the most recently processed line.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)

# kafkacat persists its consumer offsets on disk; ensure the directory exists.
offsets_dir = "/var/lib/consumer-offsets/ais-rinville-1-2gpsdjson"
if not os.path.exists(offsets_dir):
    os.makedirs(offsets_dir)


# Shell pipeline: consume raw AIS -> strip the "timestamp|source|" prefix
# with sed -> decode to JSON with gpsdecode -> republish with kafkacat.
# stdbuf -oL keeps every stage line-buffered so messages flow promptly.
# (Fixed command string, so shell=True is not an injection risk here.)
command = """kafkacat -X topic.offset.store.path=/var/lib/consumer-offsets/ais-rinville-1-2gpsdjson -o stored -C -u -b kafka01,kafka02,kafka03 -t ais-rinville-1 | stdbuf -oL sed -e 's/^[^|]*|[^|]*|//' | stdbuf -oL gpsdecode | stdbuf -oL kafkacat -P -T -b kafka01,kafka02,kafka03 -p 0 -t ais-rinville-1-gpsdjson """
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
# Each decoded JSON line feeds the watchdog and the status page.
for line in iter(process.stdout.readline, ''):
    killer.ping()
    webserver.update(line)
# ==== Ejemplo n.º 5 ====
# Ejemplo 5: read lines from a serial device (local, rfc2217, or URL) and
# emit timestamped, separator-joined records on stdout (Python 2 script).
# NOTE(review): fragment starts mid-script — parser creation and the
# imports of Serial/rfc2217/serial_for_url/datetime are not visible here.
parser.add_argument('--timeout', type=int, default=300, help='Number of seconds to wait for messages before giving up, default=300 (5 minutes)')
parser.add_argument('--http-port', type=int, default=8082, help='HTTP web server port showing latest message, default is 8082')
args = parser.parse_args()

# Pick the serial transport from the device string: rfc2217 URL, any other
# pyserial URL scheme, or a plain local device path.
if "rfc2217://" in args.device:
  ser = rfc2217.Serial(args.device,args.baud,timeout=args.timeout)
elif "://" in args.device:
  ser = serial_for_url(args.device,baudrate=args.baud,timeout=args.timeout)
else:
  ser = Serial(args.device,baudrate=args.baud,timeout=args.timeout)

# Watchdog thread: kills the process if no line is read for args.timeout
# seconds (note the same value doubles as the serial read timeout above).
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write("Monitoring %s will kill application if unable to process a message for %d seconds\n" % (args.device, args.timeout))

# Status web server exposing the most recently emitted record.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)

while 1:
    # readline() returns '' on timeout; only non-empty lines are emitted.
    line = ser.readline().rstrip()
    if line:
      # [:-3] trims microseconds to milliseconds — assumes isoformat()
      # includes a microsecond part, which is not guaranteed; TODO confirm.
      timestamp = datetime.datetime.utcnow().isoformat()[:-3]+"Z"
      # Record layout: timestamp<sep>source<sep>line
      output = u"{1}{0}{2}{0}{3}".format(args.separator,timestamp,args.source,line)
      print output
      sys.stdout.flush()
      killer.ping()
      webserver.update(output)
# ==== Ejemplo n.º 6 ====
# Ejemplo 6: variant of Ejemplo 3 — read raw ADCP data from a TCP socket
# for forwarding to kafka via kafkacat (loop lies beyond this fragment).
# NOTE(review): args.server, args.port and args.source are used below but
# not added by any visible add_argument call — presumably defined earlier
# in the (unseen) top of this script; verify before reuse.
parser.add_argument('--timeout', type=int, default=1200, help='Number of seconds to wait for messages before giving up, default=1200 (20 minutes)')
parser.add_argument('--http-port', type=int, default=8085, help='HTTP web server port showing latest message, default is 8085')
parser.add_argument('--kafka-server', default='localhost', help='kafka server, default is localhost')
parser.add_argument('--kafka-topic', default='spiddal-adcp', help='kafka topic, default is spiddal-adcp')
parser.add_argument('--kafkacat', default='/usr/local/bin/kafkacat', help='path to kafkacat')
args = parser.parse_args()

# Destination folder for raw ADCP output and the logical device name.
folder = '/home/gcouser/adcp'
device = args.source

# Watchdog thread: kills the process if no message is processed for
# args.timeout seconds.
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write("Monitoring %s:%d will kill application if unable to process a message for %d seconds\n" % (args.server, args.port, args.timeout))

# Status web server exposing the most recently processed message.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)


# TCP connection to the instrument, switched to non-blocking mode.
# NOTE(review): F_SETFL with bare os.O_NONBLOCK replaces existing status
# flags rather than OR-ing them in — confirm that was intended.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((args.server,args.port))
fcntl.fcntl(s, fcntl.F_SETFL, os.O_NONBLOCK)

# Receive-loop state (the loop itself lies beyond this fragment's view):
chunks = []        # buffered partial reads awaiting assembly
nodata = True      # whether anything has been received yet
f = None           # current output file handle
filename = None    # current output file name
first_time = True  # first-iteration flag
# ==== Ejemplo n.º 7 ====
# Ejemplo 7: consume gpsd-style JSON from one kafka topic and republish
# (presumably as GeoJSON, per the consumer group name) to another.
# NOTE(review): fragment is truncated — the cache lookup/update and the
# produce call inside the final `if` are not visible in this view.
subtopic = client.topics[args.consume]
consumer = subtopic.get_simple_consumer(auto_commit_enable=True,
                                     consumer_group="gpsdjson2geojson_v1", 
                                     auto_offset_reset=OffsetType.EARLIEST,
                                     reset_offset_on_start=False)

pubtopic = client.topics[args.publish]

sys.stderr.write("connected to kafka\n")

# Watchdog thread: kills the process if no message is processed for
# args.timeout seconds.
killer = KillerMonitor(args.timeout)
killer.setDaemon(True)
killer.start()
sys.stderr.write("Monitor will kill application if unable to process a message for %d seconds\n" % args.timeout)

# Status web server exposing the most recently processed message.
webserver = WebServer(args.http_port)
webserver.setDaemon(True)
webserver.start()
sys.stderr.write("Web server running on port %d\n" % args.http_port)

# Synchronous producer: each publish blocks until acknowledged.
with pubtopic.get_sync_producer() as producer:
  for message in consumer:
    if message is not None:
        o = None
        try:
          o = json.loads(message.value)
        # Python 2 except syntax; silently skips lines that are not valid JSON.
        except ValueError,e:
          continue
        if "mmsi" in o:
            # Vessel identifier; `cache` is defined outside this fragment.
            mmsi = o["mmsi"]
            if not mmsi in cache: