Example #1
import argparse
import os
import sys

import alertConsumer  # project-local consumer wrapper; exact import path may differ

# alert_filter() is assumed to be defined elsewhere in the original script.


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('broker',
                        type=str,
                        help='Hostname or IP and port of Kafka broker.')
    parser.add_argument('topic',
                        type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument('interval', type=int, help='Print every Nth alert.')
    parser.add_argument('--group',
                        type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Default is value of $HOSTNAME.')
    parser.add_argument('--stampDir',
                        type=str,
                        help='Output directory for writing postage stamp '
                        'cutout files. **THERE ARE NO STAMPS RIGHT NOW.**')

    args = parser.parse_args()

    # Configure consumer connection to Kafka broker
    conf = {
        'bootstrap.servers': args.broker,
        'default.topic.config': {
            'auto.offset.reset': 'smallest'
        }
    }
    if args.group:
        conf['group.id'] = args.group
    else:
        conf['group.id'] = os.environ['HOSTNAME']

    # Start consumer and print alert stream
    with alertConsumer.AlertConsumer(args.topic, **conf) as streamReader:
        msg_count = 0
        while True:
            try:
                schema, msg = streamReader.poll()

                if msg is None:
                    continue
                else:
                    msg_count += 1
                    if msg_count % args.interval == 0:
                        # Apply filter to each alert
                        alert_filter(msg, args.stampDir)

            except alertConsumer.EopError as e:
                # Write when reaching end of partition
                sys.stderr.write(e.message)
            except IndexError:
                sys.stderr.write('%% Data cannot be decoded\n')
            except UnicodeDecodeError:
                sys.stderr.write('%% Unexpected data format received\n')
            except KeyboardInterrupt:
                sys.stderr.write('%% Aborted by user\n')
                sys.exit()
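
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <broker> <topic> <interval> [--group NAME] [--stampDir DIR]
if __name__ == '__main__':
    main()
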
Example #2

import argparse
import os
import sys

import alertConsumer  # project-local consumer wrapper; exact import path may differ


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('host',
                        type=str,
                        help='Hostname or IP of Kafka host to connect to.')
    parser.add_argument('topic',
                        type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument('--group',
                        type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Default is value of $HOSTNAME.')
    args = parser.parse_args()

    # Configure consumer connection to Kafka broker
    conf = {
        'bootstrap.servers':
        '{}:9092,{}:9093,{}:9094'.format(args.host, args.host, args.host),
        'default.topic.config': {
            'auto.offset.reset': 'smallest'
        }
    }
    if args.group:
        conf['group.id'] = args.group
    else:
        conf['group.id'] = os.environ['HOSTNAME']

    # Start consumer and monitor alert stream
    with alertConsumer.AlertConsumer(args.topic, **conf) as streamWatcher:

        while True:
            try:
                msg = streamWatcher.poll(decode=False, verbose=False)

                if msg is None:
                    continue
                else:
                    print(msg)

            except alertConsumer.EopError as e:
                # Write when reaching end of partition
                sys.stderr.write(e.message)
            except KeyboardInterrupt:
                sys.stderr.write('%% Aborted by user\n')
                sys.exit()
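
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <host> <topic> [--group NAME]
if __name__ == '__main__':
    main()
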
Example #3

import argparse
import os
import sys

import alertConsumer  # project-local consumer wrapper; exact import path may differ

# alert_filter() is assumed to be defined elsewhere in the original script.


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('host',
                        type=str,
                        help='Hostname or IP of Kafka host to connect to.')
    parser.add_argument('topic',
                        type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument('--group',
                        type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Default is value of $HOSTNAME.')
    parser.add_argument('--stampDir',
                        type=str,
                        help='Output directory for writing postage stamp '
                        'cutout files.')
    avrogroup = parser.add_mutually_exclusive_group()
    avrogroup.add_argument('--decode',
                           dest='avroFlag',
                           action='store_true',
                           help='Decode from Avro format. (default)')
    avrogroup.add_argument('--decode-off',
                           dest='avroFlag',
                           action='store_false',
                           help='Do not decode from Avro format.')
    parser.set_defaults(avroFlag=True)

    args = parser.parse_args()

    # Configure consumer connection to Kafka broker
    conf = {
        'bootstrap.servers':
        '{}:9092,{}:9093,{}:9094'.format(args.host, args.host, args.host),
        'default.topic.config': {
            'auto.offset.reset': 'smallest'
        }
    }
    if args.group:
        conf['group.id'] = args.group
    else:
        conf['group.id'] = os.environ['HOSTNAME']

    # Configure Avro reader schema
    schema_files = [
        "../ztf-avro-alert/schema/candidate.avsc",
        "../ztf-avro-alert/schema/cutout.avsc",
        "../ztf-avro-alert/schema/prv_candidate.avsc",
        "../ztf-avro-alert/schema/alert.avsc"
    ]

    # Start consumer and print alert stream
    with alertConsumer.AlertConsumer(args.topic, schema_files,
                                     **conf) as streamReader:
        while True:
            try:
                msg = streamReader.poll(decode=args.avroFlag)

                if msg is None:
                    continue
                else:
                    for record in msg:
                        # Apply filter to each alert
                        alert_filter(record, args.stampDir)

            except alertConsumer.EopError as e:
                # Write when reaching end of partition
                sys.stderr.write(e.message)
            except IndexError:
                sys.stderr.write('%% Data cannot be decoded\n')
            except UnicodeDecodeError:
                sys.stderr.write('%% Unexpected data format received\n')
            except KeyboardInterrupt:
                sys.stderr.write('%% Aborted by user\n')
                sys.exit()
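
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <host> <topic> [--group NAME] [--stampDir DIR] [--decode | --decode-off]
if __name__ == '__main__':
    main()
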
Example #4

import argparse
import io
import os
import sys
import tarfile
import time
from time import strftime

import fastavro

import alertConsumer  # project-local consumer wrapper; exact import path may differ
from avroUtils import combineSchemas  # assumed project helper that merges the .avsc files


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('topic',
                        type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument('--group',
                        type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Default is value of $HOSTNAME.')
    parser.add_argument('--tarName', type=str, help='Name of tar file.')
    avrogroup = parser.add_mutually_exclusive_group()
    avrogroup.add_argument('--decode',
                           dest='avroFlag',
                           action='store_true',
                           help='Decode from Avro format. (default)')
    avrogroup.add_argument('--decode-off',
                           dest='avroFlag',
                           action='store_false',
                           help='Do not decode from Avro format.')
    parser.set_defaults(avroFlag=True)

    args = parser.parse_args()

    # Configure consumer connection to Kafka broker
    conf = {
        'bootstrap.servers':
        'epyc.astro.washington.edu:9092,'
        'epyc.astro.washington.edu:9093,'
        'epyc.astro.washington.edu:9094',
        'default.topic.config': {
            'auto.offset.reset': 'smallest'
        }
    }
    if args.group:
        conf['group.id'] = args.group
    else:
        conf['group.id'] = os.environ['HOSTNAME']

    # Configure Avro reader schema
    schema_files = [
        "./ztf-avro-alert/schema/candidate.avsc",
        "./ztf-avro-alert/schema/cutout.avsc",
        "./ztf-avro-alert/schema/prv_candidate.avsc",
        "./ztf-avro-alert/schema/alert.avsc"
    ]

    # Start consumer and collect alerts in a stream
    with alertConsumer.AlertConsumer(args.topic, schema_files,
                                     **conf) as streamReader:

        with tarfile.open("./" + args.tarName + ".tar", "a") as tar:
            while True:

                try:
                    msg = streamReader.poll(decode=args.avroFlag)

                    if msg is None:

                        # stopTIME (hour of day at which to stop consuming)
                        # is assumed to be defined elsewhere in the original
                        # script.
                        current_hour = int(strftime('%H'))
                        print('current hour:', current_hour)
                        if current_hour >= stopTIME:
                            print('break')
                            break
                        else:
                            print('continue')
                            continue

                    else:
                        for record in msg:

                            candidate_data = record.get('candidate')
                            fn = str(candidate_data['candid']) + ".avro"

                            with io.BytesIO() as avro_file:
                                fastavro.writer(avro_file,
                                                combineSchemas(schema_files),
                                                [record])
                                avro_file.seek(0)
                                # Describe the in-memory Avro file as a tar member
                                tarinfo = tarfile.TarInfo(name=fn)
                                tarinfo.size = len(avro_file.getvalue())
                                tarinfo.mtime = time.time()
                                tarinfo.mode = 0o744
                                tarinfo.type = tarfile.REGTYPE
                                tarinfo.uid = tarinfo.gid = 0
                                tarinfo.uname = tarinfo.gname = "root"
                                tar.addfile(tarinfo, avro_file)


                except alertConsumer.EopError as e:
                    # Write when reaching end of partition
                    sys.stderr.write(e.message)
                except IndexError:
                    sys.stderr.write('%% Data cannot be decoded\n')
                except UnicodeDecodeError:
                    sys.stderr.write('%% Unexpected data format received\n')
                except KeyboardInterrupt:
                    sys.stderr.write('%% Aborted by user\n')
                    break

            print('Reached end of stream at: {}'.format(
                strftime("%b %d %Y %H:%M:%S")))

            sys.exit()
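
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <topic> --tarName alerts [--group NAME] [--decode | --decode-off]
if __name__ == '__main__':
    main()
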
Example #5

import argparse
import inspect
import os
import sys

import alertConsumer  # project-local consumer wrapper; exact import path may differ
import alertProducer  # project-local producer wrapper
import filterBase     # provides StreamExporter
import filters        # module containing the deployable filter classes


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('broker', type=str,
                        help='Hostname or IP and port of Kafka broker.')
    parser.add_argument('topic', type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument('filterNum', type=int,
                        help='Number of the filter in range '
                        '(1-100) to deploy.')
    parser.add_argument('--group', type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Default is value of $HOSTNAME.')
    avrogroup = parser.add_mutually_exclusive_group()
    avrogroup.add_argument('--decode', dest='avroFlag', action='store_true',
                           help='Decode from Avro format. (default)')
    avrogroup.add_argument('--decode-off', dest='avroFlag',
                           action='store_false',
                           help='Do not decode from Avro format.')
    parser.set_defaults(avroFlag=True)

    args = parser.parse_args()
    fnum = args.filterNum

    # Configure Avro reader schema
    schema_files = ["../sample-avro-alert/schema/diasource.avsc",
                    "../sample-avro-alert/schema/diaobject.avsc",
                    "../sample-avro-alert/schema/ssobject.avsc",
                    "../sample-avro-alert/schema/cutout.avsc",
                    "../sample-avro-alert/schema/alert.avsc"]

    # Configure consumer connection to Kafka broker
    cconf = {'bootstrap.servers': args.broker,
             'default.topic.config': {'auto.offset.reset': 'smallest'}}
    if args.group:
        cconf['group.id'] = args.group
    else:
        cconf['group.id'] = os.environ['HOSTNAME']

    pconf = {'bootstrap.servers': args.broker}

    # Choose filter class to deploy from filters module
    filter_class = inspect.getmembers(filters, inspect.isclass)[fnum][1]

    # Name output stream using filter class name
    topic_name = filter_class.__name__

    prod = alertProducer.AlertProducer(topic_name, schema_files, **pconf)
    exp = filterBase.StreamExporter(prod)
    apply_filter = filter_class(exp)

    # Start consumer and print alert stream
    with alertConsumer.AlertConsumer(args.topic, schema_files,
                                     **cconf) as streamReader:

        while True:
            try:
                msg = streamReader.poll(decode=True)

                if msg is None:
                    continue
                else:
                    # Apply filter to each alert
                    apply_filter(msg)

            except alertConsumer.EopError as e:
                # Write when reaching end of partition
                sys.stderr.write(e.message)
            except IndexError:
                sys.stderr.write('%% Data cannot be decoded\n')
            except UnicodeDecodeError:
                sys.stderr.write('%% Unexpected data format received\n')
            except KeyboardInterrupt:
                sys.stderr.write('%% Aborted by user\n')
                sys.exit()
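
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <broker> <topic> <filterNum> [--group NAME]
if __name__ == '__main__':
    main()
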
Example #6
import argparse
import sys

import redis

import alertConsumer  # project-local consumer wrapper; exact import path may differ

# process_alert() is assumed to be defined elsewhere in the original script.


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('topic',
                        type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument("--kafka",
                        type=str,
                        default="localhost:9092",
                        help="Address of Kafka server")
    parser.add_argument('--group',
                        type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Defaults to "cts_test".')

    args = parser.parse_args()

    # Configure consumer connection to Kafka broker
    conf = {
        'bootstrap.servers': args.kafka,
        'default.topic.config': {
            'auto.offset.reset': 'earliest'
        }
    }
    if args.group:
        conf['group.id'] = args.group
    else:
        conf['group.id'] = "cts_test"

    redis_conn = redis.StrictRedis("127.0.0.1")

    # Configure Avro reader schema
    schema_files = [
        "../ztf-avro-alert/schema/candidate.avsc",
        "../ztf-avro-alert/schema/cutout.avsc",
        "../ztf-avro-alert/schema/prv_candidate.avsc",
        "../ztf-avro-alert/schema/alert.avsc"
    ]

    # Start consumer and print alert stream
    streamReader = alertConsumer.AlertConsumer(args.topic, schema_files,
                                               **conf)

    while True:
        try:
            messages = streamReader.poll(decode=True, timeout=2)

            if messages is None:
                continue

            for msg in messages:
                process_alert(msg, redis_conn, date=True)

        except alertConsumer.EopError as e:
            # Write when reaching end of partition
            sys.stderr.write(e.message)
        except IndexError:
            sys.stderr.write('%% Data cannot be decoded\n')
        except UnicodeDecodeError:
            sys.stderr.write('%% Unexpected data format received\n')
        except KeyboardInterrupt:
            sys.stderr.write('%% Aborted by user\n')
            sys.exit()
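
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <topic> [--kafka HOST:PORT] [--group NAME]
if __name__ == '__main__':
    main()
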
Example #7
import argparse
import os
import sys

import alertConsumer  # project-local consumer wrapper; exact import path may differ

# msg_text() and write_stamp_file() are assumed to be defined elsewhere
# in the original script.


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('topic',
                        type=str,
                        help='Name of Kafka topic to listen to.')
    parser.add_argument('--group',
                        type=str,
                        help='Globally unique name of the consumer group. '
                        'Consumers in the same group will share messages '
                        '(i.e., only one consumer will receive a message, '
                        'as in a queue). Default is value of $HOSTNAME.')
    parser.add_argument('--stampDir',
                        type=str,
                        help='Output directory for writing postage stamp '
                        'cutout files.')
    avrogroup = parser.add_mutually_exclusive_group()
    avrogroup.add_argument('--decode',
                           dest='avroFlag',
                           action='store_true',
                           help='Decode from Avro format. (default)')
    avrogroup.add_argument('--decode-off',
                           dest='avroFlag',
                           action='store_false',
                           help='Do not decode from Avro format.')
    parser.set_defaults(avroFlag=True)

    args = parser.parse_args()

    # Configure consumer connection to Kafka broker
    conf = {
        'bootstrap.servers': 'kafka:9092',
        'default.topic.config': {
            'auto.offset.reset': 'smallest'
        }
    }
    if args.group:
        conf['group.id'] = args.group
    else:
        conf['group.id'] = os.environ['HOSTNAME']

    # Configure Avro reader schema
    schema_files = [
        "../sample-avro-alert/schema/diasource.avsc",
        "../sample-avro-alert/schema/diaobject.avsc",
        "../sample-avro-alert/schema/ssobject.avsc",
        "../sample-avro-alert/schema/cutout.avsc",
        "../sample-avro-alert/schema/alert.avsc"
    ]

    # Start consumer and print alert stream
    streamReader = alertConsumer.AlertConsumer(args.topic, schema_files,
                                               **conf)

    msg_count = 0
    while True:
        try:
            msg = streamReader.poll(decode=args.avroFlag)

            if msg is None:
                continue
            else:
                msg_count += 1
                if msg_count % 100 == 0:
                    print(msg_count, flush=True)
                    print(msg_text(msg), flush=True)
                if args.stampDir:  # Collect postage stamps
                    write_stamp_file(msg.get('cutoutDifference'),
                                     args.stampDir)
                    write_stamp_file(msg.get('cutoutTemplate'), args.stampDir)

        except alertConsumer.EopError as e:
            # Write when reaching end of partition
            sys.stderr.write(e.message)
        except IndexError:
            sys.stderr.write('%% Data cannot be decoded\n')
        except UnicodeDecodeError:
            sys.stderr.write('%% Unexpected data format received\n')
        except KeyboardInterrupt:
            sys.stderr.write('%% Aborted by user\n')
            sys.exit()
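
# Standard script entry point (assumed; not part of the original snippet).
# Usage sketch: python <script>.py <topic> [--group NAME] [--stampDir DIR] [--decode | --decode-off]
if __name__ == '__main__':
    main()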