Example #1
    def setUp(self):

        data_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data'))
        schema_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         '../schemas/distribution_schema_0p2.avsc'))

        r = AlertReader(data_path)
        alerts = r.to_list()

        conf = load_credentials(tmp=True)

        kafka_servers = conf['servers']
        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})

        for alert in alerts:
            avro_data = encode_into_avro(alert, schema_path)
            topic = get_legal_topic_name(alert['cdsxmatch'])
            p.produce(topic, avro_data)
        p.flush()

        # instantiate an AlertConsumer
        mytopics = conf["mytopics"]

        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': conf['group_id']
        }

        self.consumer = AlertConsumer(mytopics,
                                      myconfig,
                                      schema_path=schema_path)
Example #2
    def setUp(self):

        data_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), 'data'))
        schema_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), '../schemas/distribution_schema_0p2.avsc'))

        r = AlertReader(data_path)
        alerts = r.to_list()

        kafka_servers = 'localhost:9093, localhost:9094, localhost:9095'
        p = confluent_kafka.Producer({
            'bootstrap.servers': kafka_servers})

        for alert in alerts:
            avro_data = encode_into_avro(alert, schema_path)
            topic = get_legal_topic_name(alert['cdsxmatch'])
            p.produce(topic, avro_data)
        p.flush()

        # instantiate an AlertConsumer
        mytopics = ["rrlyr"]

        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': 'test_group'}

        self.consumer = AlertConsumer(mytopics, myconfig, schema=schema_path)
Example #3
def poll_single_alert(myconfig, topics) -> None:
    """ Connect to and poll fink servers once.
    """
    maxtimeout = 5

    # Instantiate a consumer
    consumer = AlertConsumer(topics, myconfig)

    # Poll the servers
    topic, alert = consumer.poll(maxtimeout)

    # Analyse output
    if topic is not None:
        print("-" * 65)
        row = [
            alert['timestamp'], topic, alert['objectId'],
            alert['roid'], alert['rfscore'], alert['snn_snia_vs_nonia']
        ]
        print("{:<25}|{:<10}|{:<15}|{}|{:<10}|{:<10}".format(*row))
    else:
        print(
            'No alerts received in the last {} seconds'.format(
                maxtimeout
            )
        )

    # Close the connection to the servers
    consumer.close()
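
A minimal usage sketch for the helper above, assuming a reachable Kafka broker; the broker address, group id, and topic below are placeholders to adapt to your own setup, not values taken from the example:

# Hypothetical invocation of poll_single_alert; the broker address,
# group id and topic are placeholders.
myconfig = {
    'bootstrap.servers': 'localhost:9093',  # placeholder broker address
    'group_id': 'my_group',                 # placeholder consumer group
}
poll_single_alert(myconfig, topics=['rrlyr'])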
Example #4
File: test.py  Project: cAbhi15/fink-client
    def setUp(self):

        data_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data'))

        alert_reader = read_avro_alerts(data_path)

        kafka_servers = 'localhost:9093, localhost:9094, localhost:9095'
        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})

        for alert in alert_reader:
            avro_data = encode_into_avro(alert)
            topic = get_legal_topic_name(alert['cross_match_alerts_per_batch'])
            p.produce(topic, avro_data)
        p.flush()

        # instantiate an AlertConsumer
        mytopics = ["rrlyr", "ebwuma", "unknown"]
        test_schema = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "test_schema.avsc"))
        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': 'test_group'
        }

        self.consumer = AlertConsumer(mytopics, myconfig, schema=test_schema)
Example #5
def poll_single_alert(outdir: str) -> (str, dict):
    """ Connect to and poll fink servers once.

    Parameters
    ----------
    outdir: str
        Directory to store incoming alerts. It must exist.

    Returns
    ---------
    topic: str
        Topic name. None if no alert has been returned from the servers.
    alert: dict
        Alert data in a dictionary.
        None if no alert has been returned from the servers.
    """
    # Load configuration parameters
    conf = load_credentials()

    myconfig = {
        "username": conf['username'],
        'bootstrap.servers': conf['servers'],
        'group_id': conf['group_id']}

    if conf['password'] is not None:
        myconfig['password'] = conf['password']

    # Instantiate a consumer
    consumer = AlertConsumer(conf['mytopics'], myconfig)

    # Poll the servers
    topic, alert = consumer.poll_and_write(outdir, conf['maxtimeout'])

    # Analyse output
    if topic is not None:
        # Distance to known solar system object
        ssdistnr = alert['candidate']['ssdistnr']

        print("-" * 65)
        row = [
            alert['timestamp'], topic, alert['objectId'],
            alert['cdsxmatch'], alert['rfscore'], ssdistnr
        ]
        print("{:<25}|{:<10}|{:<15}|{:<10}|{:<5}|{:<10}".format(*row))
    else:
        print(
            'No alerts received in the last {} seconds'.format(
                conf['maxtimeout']))

    # Close the connection to the servers
    consumer.close()

    return topic, alert
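
As a hedged aside, this variant reads all connection parameters from load_credentials(), so a caller only supplies an existing output directory; the path below is a placeholder:

# Hypothetical call; '/tmp/fink_alerts' is a placeholder directory that must exist.
topic, alert = poll_single_alert('/tmp/fink_alerts')
if topic is None:
    print('Nothing received this time.')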
Example #6
class TestIntegration(unittest.TestCase):

    def setUp(self):

        data_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), 'data'))
        schema_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), '../schemas/distribution_schema_0p2.avsc'))

        r = AlertReader(data_path)
        alerts = r.to_list()

        kafka_servers = 'localhost:9093, localhost:9094, localhost:9095'
        p = confluent_kafka.Producer({
            'bootstrap.servers': kafka_servers})

        for alert in alerts:
            avro_data = encode_into_avro(alert, schema_path)
            topic = get_legal_topic_name(alert['cdsxmatch'])
            p.produce(topic, avro_data)
        p.flush()

        # instantiate an AlertConsumer
        mytopics = ["rrlyr"]

        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': 'test_group'}

        self.consumer = AlertConsumer(mytopics, myconfig, schema=schema_path)

    def test_poll(self):
        topic, alert = self.consumer.poll()
        self.assertIsNotNone(alert)
        self.assertTrue(fastavro.validate(alert, self.consumer._parsed_schema))

    def test_consume(self):
        num_messages = 1
        alerts = self.consumer.consume(num_messages)
        self.assertEqual(len(alerts), num_messages)

    def tearDown(self):
        self.consumer.close()
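
For completeness, a class like this would typically be driven by the standard unittest runner; the module path below is a placeholder for wherever the test file actually lives:

# Hypothetical command (module path is a placeholder):
#   python -m unittest test_integration.TestIntegration -v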
Example #7
File: test.py  Project: cAbhi15/fink-client
class TestIntegration(unittest.TestCase):
    def setUp(self):

        data_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data'))

        alert_reader = read_avro_alerts(data_path)

        kafka_servers = 'localhost:9093, localhost:9094, localhost:9095'
        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})

        for alert in alert_reader:
            avro_data = encode_into_avro(alert)
            topic = get_legal_topic_name(alert['cross_match_alerts_per_batch'])
            p.produce(topic, avro_data)
        p.flush()

        # instantiate an AlertConsumer
        mytopics = ["rrlyr", "ebwuma", "unknown"]
        test_schema = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "test_schema.avsc"))
        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': 'test_group'
        }

        self.consumer = AlertConsumer(mytopics, myconfig, schema=test_schema)

    def test_poll(self):
        alert, topic = self.consumer.poll()
        self.assertIsNotNone(alert)

    def test_consume(self):
        num_messages = 3
        alerts = self.consumer.consume(num_messages)
        self.assertEqual(len(alerts), num_messages)

    def tearDown(self):
        self.consumer.close()
Example #8
class TestIntegration(unittest.TestCase):
    def setUp(self):

        data_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'data'))
        schema_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         '../schemas/distribution_schema_0p2.avsc'))

        r = AlertReader(data_path)
        alerts = r.to_list()

        conf = load_credentials(tmp=True)

        kafka_servers = conf['servers']
        p = confluent_kafka.Producer({'bootstrap.servers': kafka_servers})

        for alert in alerts:
            avro_data = encode_into_avro(alert, schema_path)
            topic = get_legal_topic_name(alert['cdsxmatch'])
            p.produce(topic, avro_data)
        p.flush()

        # instantiate an AlertConsumer
        mytopics = conf["mytopics"]

        myconfig = {
            'bootstrap.servers': kafka_servers,
            'group_id': conf['group_id']
        }

        self.consumer = AlertConsumer(mytopics,
                                      myconfig,
                                      schema_path=schema_path)

    def test_poll(self):
        topic, alert, key = self.consumer.poll()
        self.assertIsNotNone(alert)
        self.assertTrue(fastavro.validate(alert, self.consumer._parsed_schema))

    def test_consume(self):
        num_messages = 1
        alerts = self.consumer.consume(num_messages)
        self.assertEqual(len(alerts), num_messages)

    def test_topics(self):
        topics = self.consumer.available_topics()
        self.assertTrue('rrlyr' in topics.keys())

    def test_broker_name(self):
        brokers = self.consumer.available_brokers()
        self.assertTrue(0 in brokers.keys())

    def tearDown(self):
        self.consumer.close()
Example #9
BASE_PATH = pathlib.Path(__file__).parent.resolve()

# Path to put in a configuration file
DATA_PATH = BASE_PATH.joinpath("data").resolve()

myconfig = {
    "username": fcc.username,
    'bootstrap.servers': fcc.servers,
    'group_id': fcc.group_id
}

if fcc.password is not None:
    myconfig['password'] = fcc.password

# Instantiate a consumer
consumer = AlertConsumer(fcc.mytopics, myconfig, schema=fcc.schema)

# List the available topics, excluding Kafka internal topics (names starting
# with "__") and keeping only those the user subscribed to
topic_dic = consumer._consumer.list_topics().topics.keys()
topic_list = [i for i in topic_dic if i[0:2] != "__" and i in fcc.mytopics]


def build_tabs():
    """ Build the two tabs of the dashboard.

    - Tab 1: Stream connector. Pull alerts and save them on disk. You can also
        watch number of alerts per topic received in the previous 5 minutes.
    - Tab 2: Alert explorer. Display properties of each received alert.

    Returns
    ---------
Example #10
def main():
    """ """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--display',
        action='store_true',
        help="If specified, print on screen information about incoming alert.")
    parser.add_argument(
        '-limit',
        type=int,
        default=None,
        help="If specified, download only `limit` alerts. Default is None.")
    parser.add_argument(
        '--available_topics',
        action='store_true',
        help="If specified, print on screen information about available topics."
    )
    parser.add_argument(
        '--save',
        action='store_true',
        help="If specified, save alert data on disk (Avro). See also -outdir.")
    parser.add_argument(
        '-outdir',
        type=str,
        default='.',
        help="Folder to store incoming alerts if --save is set. It must exist."
    )
    parser.add_argument(
        '-schema',
        type=str,
        default=None,
        help="Avro schema to decode the incoming alerts. "
             "Default is None (latest version downloaded from server)")
    args = parser.parse_args(None)

    # load user configuration
    conf = load_credentials()

    myconfig = {
        "username": conf['username'],
        'bootstrap.servers': conf['servers'],
        'group_id': conf['group_id']
    }

    if conf['password'] is not None:
        myconfig['password'] = conf['password']

    # Instantiate a consumer
    if args.schema is None:
        schema = None
    else:
        schema = args.schema
    consumer = AlertConsumer(conf['mytopics'], myconfig, schema_path=schema)

    if args.available_topics:
        print(consumer.available_topics().keys())
        sys.exit(0)

    # Time to wait before polling again if no alerts
    maxtimeout = conf['maxtimeout']

    # infinite loop
    maxpoll = args.limit if args.limit else 1e10
    try:
        poll_number = 0
        while poll_number < maxpoll:
            if args.save:
                # Save alerts on disk
                topic, alert, key = consumer.poll_and_write(outdir=args.outdir,
                                                            timeout=maxtimeout,
                                                            overwrite=True)
            else:
                # TODO: polling an alert and doing nothing with it is wasted work;
                # consider a handler mechanism (e.g. similar to Comet)?
                topic, alert, key = consumer.poll(timeout=maxtimeout)

            if topic is not None:
                poll_number += 1

            if args.display and topic is not None:
                utc = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
                table = [
                    [
                        alert['timestamp'], utc, topic, alert['objectId'],
                        alert['cdsxmatch'], alert['rfscore']
                    ],
                ]
                headers = [
                    'Emitted at (UTC)', 'Received at (UTC)', 'Topic',
                    'objectId', 'Simbad', 'RF score'
                ]
                print(tabulate(table, headers, tablefmt="pretty"))
            elif args.display:
                print('No alerts received in the last {} seconds'.format(
                    maxtimeout))
    except KeyboardInterrupt:
        sys.stderr.write('%% Aborted by user\n')
    finally:
        consumer.close()
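
A hedged example of driving this entry point from the command line, assuming the module is saved as a standalone script; the file name fink_consumer.py is a placeholder, and only flags defined by the parser above are used:

# Hypothetical invocations (script name is a placeholder):
#   python fink_consumer.py --available_topics
#   python fink_consumer.py --display -limit 10
#   python fink_consumer.py --save -outdir ./alerts --display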