Example 1
def main():
    """Entry point: parse CLI options and launch the multiprocess
    Keras/TensorFlow classifier."""
    arguments = docopt(__doc__, version='0.1')
    arguments = cleanOptions(arguments)

    # Wrap the options dict in utils.Struct so legacy optparse-style
    # attribute access keeps working.
    runKerasTensorflowClassifierMultiprocess(Struct(**arguments))
Example 2
def main():
    """Entry point: parse CLI options and fetch the ATLAS training-set
    cutouts."""
    arguments = docopt(__doc__, version='0.1')
    arguments = cleanOptions(arguments)

    # utils.Struct gives the dict attribute access, matching the old
    # optparse-based interface the worker expects.
    getATLASTrainingSetCutouts(Struct(**arguments))
Example 3
def main():
    """Entry point: parse CLI options and plot results from the given CSV."""
    arguments = docopt(__doc__, version='0.1')
    arguments = cleanOptions(arguments)

    # utils.Struct converts the dict into an object for compatibility with
    # old optparse-style attribute access.
    options = Struct(**arguments)

    print(options.csvfile)
    plotResults(options.csvfile, options.outputFile, options=options)
Example 4
def main(argv=None):
    """Entry point: read Avro message(s) from a file and print them.

    With --schemalessMessage the file holds a single schemaless payload and
    the combined schema must be supplied to the reader; otherwise the file
    is read in bulk and each message printed in turn.  Returns 0.
    """
    arguments = docopt(__doc__, version='0.1')
    arguments = cleanOptions(arguments)

    # utils.Struct gives the options dict optparse-style attribute access.
    options = Struct(**arguments)

    alert_schema = combine_schemas(options.schema)

    if options.schemalessMessage:
        # Schemaless payloads require the schema at read time.
        printMessage(read_avro_data_from_file(options.messageFile, alert_schema))
        return 0

    for message in (read_avro_data_bulk(options.messageFile) or []):
        printMessage(message)

    return 0
Example 5
def main():
    """Entry point: parse CLI options and generate the plots."""
    arguments = docopt(__doc__, version='0.1')
    arguments = cleanOptions(arguments)

    # Struct wraps the dict for optparse-style attribute access.
    doPlots(Struct(**arguments))
Example 6
def main():
    """Entry point: parse CLI options and fetch the PS1 training-set
    cutouts."""
    arguments = docopt(__doc__, version='0.1')
    arguments = cleanOptions(arguments)

    # Struct wraps the dict for optparse-style attribute access.
    getPS1TrainingSetCutouts(Struct(**arguments))
def _print_message(m, avro_bytes, options):
    """Print a decoded alert message (minus bulky cutouts), write any stamps
    to the local 'output' directory, verify their hashes against the input
    files, and report json/avro payload sizes."""
    message_text = {
        k: m[k]
        for k in m if k not in
        ['cutoutScience', 'cutoutDifference', 'cutoutTemplate']
    }
    print(message_text)

    # Collect stamps as files written to local directory 'output' and check
    # hashes match expected.  (Original code referenced undefined names
    # `message` and `args` here — fixed to `m` and `options`.)
    if m.get('cutoutScience') is not None:
        stamp_sci_out = write_stamp_file(m.get('cutoutScience'), 'output')
        print('Science stamp ok:', check_md5(options.cutoutSci, stamp_sci_out))

    if m.get('cutoutTemplate') is not None:
        stamp_temp_out = write_stamp_file(m.get('cutoutTemplate'), 'output')
        print('Template stamp ok:', check_md5(options.cutoutTemp, stamp_temp_out))

    if m.get('cutoutDifference') is not None:
        write_stamp_file(m.get('cutoutDifference'), 'output')

    print("size in bytes of json text: %d" % sys.getsizeof(message_text))
    raw_bytes = avro_bytes.getvalue()
    print("size in bytes of avro message: %d" % sys.getsizeof(raw_bytes))


def _process_json_alert(options, alert_schema):
    """Build a single alert from a JSON file (plus optional cutout stamps),
    optionally write it to /tmp/alert.avro and echo it back.  Returns 0."""
    with open(options.data) as file_text:
        json_data = json.load(file_text)

    # Attach any stamps BEFORE serialising — the original code serialised
    # first, so the cutouts never made it into the Avro message.
    if options.cutoutSci is not None:
        json_data['cutoutScience'] = load_stamp(options.cutoutSci)

    if options.cutoutTemp is not None:
        json_data['cutoutTemplate'] = load_stamp(options.cutoutTemp)

    if options.cutoutDiff is not None:
        json_data['cutoutDifference'] = load_stamp(options.cutoutDiff)

    avro_bytes = write_avro_data(json_data, alert_schema)

    if options.writeFile:
        # NOTE - This code writes a schemaless message. To read it we need to
        #        pass the schema to the reader. How we pass this message to
        #        Kafka is the next problem to be resolved.
        with open('/tmp/alert.avro', 'wb') as f:
            avro_bytes.seek(0)
            f.write(avro_bytes.read())

    if options.readMessage:
        m = read_avro_data(avro_bytes, alert_schema)
        if m:
            _print_message(m, avro_bytes, options)

    return 0


def _process_database_alerts(options, alert_schema):
    """Read ATLAS detections from the database and emit one alert per row,
    either bulk to /tmp/alerts_bulk.avro or one schemaless file per alert.
    Returns 0 on success, 1 if the database is unreachable."""
    import yaml
    with open(options.configfile) as yaml_file:
        # safe_load: the config is trusted, but there is no reason to allow
        # arbitrary-object construction (yaml.load without a Loader is
        # deprecated in PyYAML >= 5).
        config = yaml.safe_load(yaml_file)

    local_db = config['databases']['local']

    conn = dbConnect(local_db['hostname'], local_db['username'],
                     local_db['password'], local_db['database'])
    if not conn:
        print("Cannot connect to the database")
        return 1

    # Connect to the database and read out the ATLAS detections.
    records = getATLASIngestedDetections(conn, float(options.mjdThreshold))
    conn.close()

    def _make_alert(row):
        # One properly formed alert per detection row.
        return {
            'alertId': row['db_det_id'],
            'atlas_object_id': row['atlas_object_id'],
            'candidate': row
        }

    if options.writeFile and options.bulkMessage:
        alerts = [_make_alert(row) for row in records]
        write_avro_data_to_file_with_schema('/tmp/alerts_bulk.avro',
                                            alert_schema, alerts)
        return 0

    for row in records:
        avro_bytes = write_avro_data(_make_alert(row), alert_schema)

        if options.readMessage:
            m = read_avro_data(avro_bytes, alert_schema)
            if m:
                _print_message(m, avro_bytes, options)

        if options.writeFile:
            # bulkMessage was handled above, so here each alert gets its own
            # schemaless file.
            with open('/tmp/alert_%s.avro' % row['db_det_id'], 'wb') as f:
                avro_bytes.seek(0)
                f.write(avro_bytes.read())

    return 0


def main(argv=None):
    """Build Avro alert messages either from a JSON file (with optional
    cutout stamps) or from ATLAS detections in the local database.

    The alerts can be written to a file - but don't forget, we are using the
    schemaless writer, which means that the schemaless reader MUST be used
    to read the data!  Returns 0 on success, 1 on database-connection
    failure.
    """
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)

    # Use utils.Struct to convert the dict into an object for compatibility
    # with old optparse code.
    options = Struct(**opts)

    alert_schema = combine_schemas(options.schema)

    # If we just have some json data, process it.  Otherwise read from the
    # database.
    if options.data:
        return _process_json_alert(options, alert_schema)

    return _process_database_alerts(options, alert_schema)