Example #1
def flush_simultaneous_detections(utc_min, utc_max):
    """
    Remove all pre-existing observation groups from within a specified time period.

    :param utc_min:
        The earliest time for which we are to flush observation groups.
    :param utc_max:
        The latest time for which we are to flush observation groups.
    :return:
        None
    """
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    # Search for existing observation groups representing simultaneous events
    search = mp.ObservationGroupSearch(semantic_type=simultaneous_event_type,
                                       time_min=utc_min, time_max=utc_max, limit=1000000)
    existing_groups = db.search_obsgroups(search)
    existing_groups = existing_groups['obsgroups']

    logging.info("{:6d} existing observation groups within this time period (will be deleted).".
                 format(len(existing_groups)))

    # Delete existing observation groups
    for item in existing_groups:
        db.delete_obsgroup(item.id)

    # Commit to database
    db.commit()
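
A minimal usage sketch for the function above, assuming it is called from a script that has already defined it; the 24-hour window chosen here is purely illustrative:

# Hypothetical invocation: flush observation groups created during the last 24 hours,
# e.g. before regenerating them.
import time

now = time.time()
flush_simultaneous_detections(utc_min=now - 86400, utc_max=now)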
Example #2
def list_observatory_status(utc, obstory):
    """
    Display all the metadata fields set on a particular observatory at a particular time.

    :param utc:
        The unix time at which to query the observatory's status.
    :param obstory:
        The public ID of the observatory to query.
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    try:
        obstory_info = db.get_obstory_from_id(obstory_id=obstory)
    except ValueError:
        print(
            "Unknown observatory <{}>. Run ./listObservatories.py to see a list of available observatories."
            .format(obstory))
        sys.exit(0)

    title = "Observatory <{}>".format(obstory)
    print("\n\n{}\n{}".format(title, "-" * len(title)))

    metadata = db.get_obstory_status(obstory_id=obstory, time=utc)

    # Display list of items
    for item_key, item_value in metadata.items():
        print("  * {:16s} = {}".format(item_key, item_value))
Example #3
def list_disk_usage(utc_min, utc_max):
    """
    Search through all of the files in the database, and give a breakdown of the disk usage of different kinds
    of moving objects.

    :param utc_min:
        Only list disk usage after the specified unix time
    :param utc_max:
        Only list disk usage before the specified unix time
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    file_census = {}

    # Get list of files in each directory
    db.con.execute("""
SELECT f.mimeType, f.fileTime, f.fileSize, ot.name AS semanticType, am.stringValue AS web_type
FROM archive_files f
INNER JOIN archive_observations o on f.observationId = o.uid
INNER JOIN archive_semanticTypes ot on o.obsType = ot.uid
INNER JOIN archive_metadata am on o.uid = am.observationId
    AND am.fieldId=(SELECT x.uid FROM archive_metadataFields x WHERE x.metaKey="web:category")
WHERE f.fileTime BETWEEN %s AND %s;
""", (utc_min, utc_max))

    # Process each file in turn
    for item in db.con.fetchall():
        file_type = item['web_type']
        if file_type not in file_census:
            file_census[file_type] = 0
        file_census[file_type] += item['fileSize']

    # Render quick and dirty table
    out = sys.stdout
    cols = list(file_census.keys())
    cols.sort()

    # Render column headings
    for col_head in cols:
        out.write("{:25s} ".format(col_head))
    out.write("\n")

    # Render data
    data = []
    for col_head in cols:
        data.append(file_census[col_head])
    data_string = render_data_size_list(file_sizes=data)
    for i in range(len(cols)):
        out.write("{:25s} ".format(data_string[i]))
    out.write("\n")
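
The helper `render_data_size_list` called above is not shown in this listing. A minimal sketch of what it might do, assuming it merely converts a list of byte counts into human-readable strings in the same order (the real implementation may differ):

def render_data_size_list(file_sizes):
    """
    Hypothetical sketch of the helper used above: turn byte counts into
    human-readable strings such as '1.2 GB', preserving list order.
    """
    output = []
    for size in file_sizes:
        value = float(size)
        for unit in ("B", "kB", "MB", "GB", "TB"):
            if value < 1024 or unit == "TB":
                output.append("{:.1f} {}".format(value, unit))
                break
            value /= 1024
    return output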
Example #4
def list_export_status():
    """
    Display a list of all the export configurations in the database, together with the number of
    export jobs still to be done for each.

    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    sql = db.con

    sql.execute("SELECT * FROM archive_exportConfig;")
    export_configs = sql.fetchall()

    for config in export_configs:
        heading = "{} (UID {})".format(config['exportName'],
                                       config['exportConfigId'])
        print("\n{}\n{}\n\n".format(heading, "-" * len(heading)))

        if config['active']:
            print("  * Active")
        else:
            print("  * Disabled")
        n_total = n_pending = -1

        if config['exportType'] == "metadata":
            sql.execute("SELECT COUNT(*) FROM archive_metadataExport;")
            n_total = sql.fetchall()[0]['COUNT(*)']
            sql.execute(
                "SELECT COUNT(*) FROM archive_metadataExport WHERE exportState>0;"
            )
            n_pending = sql.fetchall()[0]['COUNT(*)']

        elif config['exportType'] == "observation":
            sql.execute("SELECT COUNT(*) FROM archive_observationExport;")
            n_total = sql.fetchall()[0]['COUNT(*)']
            sql.execute(
                "SELECT COUNT(*) FROM archive_observationExport WHERE exportState>0;"
            )
            n_pending = sql.fetchall()[0]['COUNT(*)']

        elif config['exportType'] == "file":
            sql.execute("SELECT COUNT(*) FROM archive_fileExport;")
            n_total = sql.fetchall()[0]['COUNT(*)']
            sql.execute(
                "SELECT COUNT(*) FROM archive_fileExport WHERE exportState>0;")
            n_pending = sql.fetchall()[0]['COUNT(*)']

        print("  * {:9d} jobs in export table".format(n_total))
        print("  * {:9d} jobs still to be done".format(n_pending))
Example #5
def add_export(url, username, password):
    """
    Set up default export configurations for observatory metadata, observations and files, all
    exporting to the same remote server.

    :param url:
        The URL of the remote server to export to.
    :param username:
        The username to use when connecting to the remote server.
    :param password:
        The password to use when connecting to the remote server.
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Set up default observatory metadata export configuration
    search = obsarchive_model.ObservatoryMetadataSearch(limit=None)
    config = obsarchive_model.ExportConfiguration(
        target_url=url,
        user_id=username,
        password=password,
        search=search,
        name="metadata_export",
        description="Export all observatory metadata to remote server",
        enabled=True)
    db.create_or_update_export_configuration(config)

    # Set up default observation export configuration
    search = obsarchive_model.ObservationSearch(limit=None)
    config = obsarchive_model.ExportConfiguration(
        target_url=url,
        user_id=username,
        password=password,
        search=search,
        name="obs_export",
        description="Export all observation objects to remote server",
        enabled=True)
    db.create_or_update_export_configuration(config)

    # Set up default file export configuration
    search = obsarchive_model.FileRecordSearch(limit=None)
    config = obsarchive_model.ExportConfiguration(
        target_url=url,
        user_id=username,
        password=password,
        search=search,
        name="file_export",
        description="Export all image files to remote server",
        enabled=True)
    db.create_or_update_export_configuration(config)

    # Commit changes to database
    db.commit()
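
A usage sketch, assuming the function is exposed through a simple command-line wrapper; the flag names are illustrative rather than taken from the real script:

# Hypothetical command-line entry point for add_export(); flag names are illustrative.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Register a remote server to export data to.")
    parser.add_argument("--url", required=True, help="URL of the remote server's import API")
    parser.add_argument("--username", required=True, help="Username to authenticate with")
    parser.add_argument("--password", required=True, help="Password to authenticate with")
    args = parser.parse_args()
    add_export(url=args.url, username=args.username, password=args.password)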
Example #6
def check_database_integrity(purge=False):
    """
    Check that all files listed in the database exist on disk, that all files on disk have
    database records, and that every observation has at least one associated file.

    :param purge:
        Boolean indicating whether we should delete database records which fail these checks.
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])
    sql = db.con

    # Check files exist
    file_list = {}
    logging.info("Checking whether files exist...")
    sql.execute("SELECT repositoryFname FROM archive_files;")
    for item in sql.fetchall():
        id = item['repositoryFname']
        file_list[id] = True
        if not os.path.exists(db.file_path_for_id(id)):
            logging.info("Files: Missing file ID <{}>".format(id))

            if purge:
                sql.execute("DELETE FROM archive_files WHERE repositoryFname=%s;", (id,))

    # Check for files which aren't in database
    logging.info("Checking for files with no database record...")
    for item in glob.glob(os.path.join(db.file_store_path, "*")):
        filename = os.path.split(item)[1]
        if filename not in file_list:
            logging.info("Files: File not in database <{}>".format(filename))

    # Checking for observations with no files
    logging.info("Checking for observations with no files...")
    sql.execute("SELECT publicId FROM archive_observations WHERE uid NOT IN (SELECT observationId FROM archive_files)")
    for item in sql.fetchall():
        logging.info("Files: Observation with no files <{}>".format(item['publicId']))

        if purge:
            sql.execute("DELETE FROM archive_observations WHERE publicId=%s;", (item['publicId'],))

    # Commit changes to database
    db.commit()
    db.close_db()
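
Because `purge=True` deletes database records irreversibly, a sensible calling pattern is a read-only pass first, followed by a purging pass once the log output has been reviewed. A sketch (the calling convention here is an assumption):

# Hypothetical two-pass invocation of check_database_integrity().
check_database_integrity(purge=False)  # first pass: only log missing files and orphaned records
# ... review the log output before continuing ...
check_database_integrity(purge=True)   # second pass: delete the offending database records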
Example #7
def list_observatories():
    """
    Display a list of all the observatories registered in the database.

    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Fetch observatory IDs
    obstory_ids = db.get_obstory_ids()
    obstory_ids.sort()

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # List information about each observatory
    print("{:6s} {:32s} {:32s} {:6s} {:6s} {:s}".format(
        "ObsID", "Public ID", "Name", "Lat", "Lng", "Observations"))
    for item in obstory_ids:
        obstory_info = db.get_obstory_from_id(obstory_id=item)

        # Count observations
        conn.execute(
            'SELECT COUNT(*) FROM archive_observations WHERE observatory=%s;',
            (obstory_info['uid'], ))
        results = conn.fetchall()
        obs_count = results[0]["COUNT(*)"]

        print("{:6d} {:32s} {:32s} {:6.1f} {:6.1f} {:7d}".format(
            obstory_info['uid'], obstory_info['publicId'],
            obstory_info['name'], obstory_info['latitude'],
            obstory_info['longitude'], obs_count))
Example #8
def list_users():
    """
    Display a list of all user accounts in the database.

    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Fetch list of users
    user_ids = db.get_users()
    user_ids.sort(key=operator.attrgetter('user_id'))

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # List information about each user in turn
    print("{:32s} {:32s} {:48s} {:s}".format("Username", "Name", "Roles",
                                             "Observations"))
    for user_info in user_ids:
        # Count observations
        conn.execute(
            'SELECT COUNT(*) FROM archive_observations WHERE userId=%s;',
            (user_info.user_id, ))
        results = conn.fetchall()
        obs_count = results[0]["COUNT(*)"]

        # Print user information
        print("{:32s} {:32s} {:48s} {:9d}".format(user_info.user_id,
                                                  user_info.name,
                                                  str(user_info.roles),
                                                  obs_count))
Example #9
def delete_export(export_id):
    """
    Delete an export configuration from the database.

    :param export_id:
        The ID of the export configuration to delete.
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Check that requested export exists
    configs = [item.id for item in db.get_export_configurations()]

    if export_id not in configs:
        print("Export <{}> does not exist".format(export_id))
        sys.exit(0)

    # Delete all export config
    db.delete_export_configuration(export_id)

    # Commit changes to database
    db.commit()
Example #10
def add_observatory_maintenance_event(metadata):
    """
    Record a maintenance event for an observatory by registering a 'refresh' metadata item at the
    specified time, and then make sure that all required status fields are populated.

    :param metadata:
        Dictionary containing the keys 'obstory_id', 'utc' and 'username'.
    :return:
        None
    """
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Make sure that observatory exists in known_observatories list
    assert metadata['obstory_id'] in known_observatories

    db.register_obstory_metadata(obstory_id=metadata['obstory_id'],
                                 key="refresh",
                                 value=1,
                                 metadata_time=metadata['utc'],
                                 time_created=time.time(),
                                 user_created=metadata['username'])

    # Commit changes to database
    db.commit()

    # Make sure that all required fields are populated
    add_observatory_status(metadata)
Example #11
def plane_determination(utc_min, utc_max, source):
    """
    Estimate the identity of aircraft observed between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should determine the identity of aircraft (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the identity of aircraft (unix time).
    :type utc_max:
        float
    :param source:
        The source we should use for plane trajectories. Either 'adsb' or 'fr24'.
    :type source:
        str
    :return:
        None
    """

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Starting aircraft identification.")

    # Count how many moving objects we manage to successfully identify
    outcomes = {
        'successful_fits': 0,
        'unsuccessful_fits': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Status update
    logging.info("Searching for aircraft within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for planes and satellites within this time period
    conn.execute(
        """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
INNER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s AND
      (am2.stringValue='Plane' OR am2.stringValue='Satellite' OR am2.stringValue='Junk')
ORDER BY ao.obsTime
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Display logging list of the images we are going to work on
    logging.info("Estimating the identity of {:d} aircraft.".format(
        len(results)))

    # Analyse each aircraft in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {item.key: item.value for item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if 'pigazing:path' not in all_metadata:
            logging.info(
                "Cannot process <{}> due to inadequate metadata.".format(
                    item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}]".format(
            date=date_string(utc=item['obsTime']), obs=item['observationId'])

        # Project path from (x,y) coordinates into (RA, Dec)
        projector = PathProjection(db=db,
                                   obstory_id=item['observatory'],
                                   time=item['obsTime'],
                                   logging_prefix=logging_prefix)

        path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list = projector.ra_dec_from_x_y(
            path_json=all_metadata['pigazing:path'],
            path_bezier_json=all_metadata['pigazing:pathBezier'],
            detections=all_metadata['pigazing:detectionCount'],
            duration=all_metadata['pigazing:duration'])

        # Check for error
        if projector.error is not None:
            if projector.error in outcomes:
                outcomes[projector.error] += 1
            continue

        # Check for notifications
        for notification in projector.notifications:
            if notification in outcomes:
                outcomes[notification] += 1

        # Check number of points in path
        path_len = len(path_x_y)

        # Look up list of aircraft tracks at the time of this sighting
        if source == 'adsb':
            aircraft_list = fetch_planes_from_adsb(utc=item['obsTime'])
        elif source == 'fr24':
            aircraft_list = fetch_planes_from_fr24(utc=item['obsTime'])
        else:
            raise ValueError("Unknown source <{}>".format(source))

        # List of aircraft this moving object might be
        candidate_aircraft = []

        # Check that we found a list of aircraft
        if aircraft_list is None:
            logging.info("{date} [{obs}] -- No aircraft records found.".format(
                date=date_string(utc=item['obsTime']),
                obs=item['observationId']))
            outcomes['insufficient_information'] += 1
            continue

        # Logging message about how many aircraft we're testing
        # logging.info("{date} [{obs}] -- Matching against {count:7d} aircraft.".format(
        #     date=date_string(utc=item['obsTime']),
        #     obs=item['observationId'],
        #     count=len(aircraft_list)
        # ))

        # Test for each candidate aircraft in turn
        for aircraft in aircraft_list:
            # Fetch aircraft position at each time point along trajectory
            ang_mismatch_list = []
            distance_list = []
            altitude_list = []

            def aircraft_angular_offset(index, clock_offset):
                # Fetch observed position of object at this time point
                pt_utc = sight_line_list[index]['utc']
                observatory_position = sight_line_list[index]['obs_position']
                observed_sight_line = sight_line_list[index]['line'].direction

                # Project position of this aircraft in space at this time point
                aircraft_position = path_interpolate(aircraft=aircraft,
                                                     utc=pt_utc + clock_offset)
                if aircraft_position is None:
                    return np.nan, np.nan, np.nan

                # Convert position to Cartesian coordinates
                aircraft_point = Point.from_lat_lng(
                    lat=aircraft_position['lat'],
                    lng=aircraft_position['lon'],
                    alt=aircraft_position['altitude'] * feet,
                    utc=None)

                # Work out offset of plane's position from observed moving object
                aircraft_sight_line = aircraft_point.to_vector(
                ) - observatory_position.to_vector()
                angular_offset = aircraft_sight_line.angle_with(
                    other=observed_sight_line)  # degrees
                distance = abs(aircraft_sight_line)
                altitude = aircraft_position['altitude'] * feet

                return angular_offset, distance, altitude

            def time_offset_objective(p):
                """
                Objective function that we minimise in order to find the best fit clock offset between the observed
                and model paths.

                :param p:
                    Vector with a single component: the clock offset
                :return:
                    Metric to minimise
                """

                # Turn input parameters into a time offset
                clock_offset = p[0]

                # Look up angular offset
                ang_mismatch, distance, altitude = aircraft_angular_offset(
                    index=0, clock_offset=clock_offset)

                # Return metric to minimise
                return ang_mismatch * exp(clock_offset / 8)

            # Work out the optimum time offset between the plane's path and the observed path
            # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/>
            # for more information about how this works
            parameters_initial = [0]
            parameters_optimised = scipy.optimize.minimize(
                time_offset_objective,
                np.asarray(parameters_initial),
                options={
                    'disp': False,
                    'maxiter': 100
                }).x

            # Construct best-fit linear trajectory for best-fitting parameters
            clock_offset = float(parameters_optimised[0])

            # Check clock offset is reasonable
            if abs(clock_offset) > global_settings['max_clock_offset']:
                continue

            # Measure the offset between the plane's position and the observed position at each time point
            for index in range(path_len):
                # Look up angular mismatch at this time point
                ang_mismatch, distance, altitude = aircraft_angular_offset(
                    index=index, clock_offset=clock_offset)

                # Keep list of the offsets at each recorded time point along the trajectory
                ang_mismatch_list.append(ang_mismatch)
                distance_list.append(distance)
                altitude_list.append(altitude)

            # Consider adding this plane to list of candidates
            mean_ang_mismatch = np.mean(
                np.asarray(ang_mismatch_list))  # degrees
            distance_mean = np.mean(np.asarray(distance_list))  # metres
            altitude_mean = np.mean(np.asarray(altitude_list))  # metres

            if mean_ang_mismatch < global_settings['max_mean_angular_mismatch']:
                start_time = sight_line_list[0]['utc']
                end_time = sight_line_list[-1]['utc']
                start_point = path_interpolate(aircraft=aircraft,
                                               utc=start_time + clock_offset)
                end_point = path_interpolate(aircraft=aircraft,
                                             utc=end_time + clock_offset)
                candidate_aircraft.append({
                    'call_sign': aircraft['call_sign'],  # string
                    'hex_ident': aircraft['hex_ident'],  # string
                    'distance': distance_mean / 1e3,  # km
                    'altitude': altitude_mean / 1e3,  # km
                    'clock_offset': clock_offset,  # seconds
                    'offset': mean_ang_mismatch,  # degrees
                    'start_point': start_point,
                    'end_point': end_point
                })

        # Add model possibility for null aircraft
        if len(candidate_aircraft) == 0:
            candidate_aircraft.append({
                'call_sign': "Unidentified",
                'hex_ident': "Unidentified",
                'distance': 0,
                'altitude': 0,
                'clock_offset': 0,
                'offset': 0,
                'start_point': None,
                'end_point': None
            })

        # Sort candidates by score
        for candidate in candidate_aircraft:
            candidate['score'] = hypot(
                candidate['offset'],
                candidate['clock_offset'],
            )
        candidate_aircraft.sort(key=itemgetter('score'))

        # Report possible aircraft identifications
        logging.info("{prefix} -- {aircraft}".format(
            prefix=logging_prefix,
            aircraft=", ".join([
                "{} ({:.1f} deg offset; clock offset {:.1f} sec; distance {:.1f} km)"
                .format(aircraft['call_sign'], aircraft['offset'],
                        aircraft['clock_offset'], aircraft['distance'])
                for aircraft in candidate_aircraft
            ])))

        # Identify most likely aircraft
        most_likely_aircraft = candidate_aircraft[0]

        # Fetch extra information about plane
        plane_info = fetch_aircraft_data(
            hex_ident=most_likely_aircraft['hex_ident'])

        # Store aircraft identification
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:call_sign",
                         value=most_likely_aircraft['call_sign']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:hex_ident",
                         value=most_likely_aircraft['hex_ident']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:clock_offset",
                         value=most_likely_aircraft['clock_offset']))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="plane:angular_offset",
                                        value=most_likely_aircraft['offset']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:distance",
                         value=most_likely_aircraft['distance']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:mean_altitude",
                         value=most_likely_aircraft['altitude']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:path",
                         value=json.dumps([
                             most_likely_aircraft['start_point'],
                             most_likely_aircraft['end_point']
                         ])))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:path_length",
                         value=ang_dist(ra0=path_ra_dec_at_epoch[0][0],
                                        dec0=path_ra_dec_at_epoch[0][1],
                                        ra1=path_ra_dec_at_epoch[-1][0],
                                        dec1=path_ra_dec_at_epoch[-1][1]) *
                         180 / pi))

        aircraft_operator = ""
        if 'operator' in plane_info and plane_info['operator']:
            aircraft_operator = plane_info['operator']
        elif 'owner' in plane_info and plane_info['owner']:
            aircraft_operator = plane_info['owner']

        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="plane:operator",
                                                 value=aircraft_operator))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="plane:model",
                                                 value=plane_info.get(
                                                     'model', '')))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="plane:manufacturer",
                                                 value=plane_info.get(
                                                     'manufacturername', '')))

        # Aircraft successfully identified
        if most_likely_aircraft['call_sign'] == "Unidentified":
            outcomes['unsuccessful_fits'] += 1
        else:
            outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} aircraft successfully identified.".format(
        outcomes['successful_fits']))
    logging.info("{:d} aircraft not identified.".format(
        outcomes['unsuccessful_fits']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))
    logging.info("{:d} aircraft with incomplete data.".format(
        outcomes['insufficient_information']))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
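
The helper `path_interpolate` used above is assumed to return an aircraft's position (latitude, longitude, altitude in feet) at an arbitrary UTC, or None when the requested time falls outside the recorded track. A minimal sketch under that assumption; the structure of the aircraft record shown here is hypothetical:

def path_interpolate(aircraft, utc):
    """
    Hypothetical sketch: linearly interpolate an aircraft's track to time <utc>.
    Assumes aircraft['track'] is a time-sorted list of dicts with the keys
    'utc', 'lat', 'lon' and 'altitude'; returns None outside the track.
    """
    track = aircraft.get('track', [])
    if not track or utc < track[0]['utc'] or utc > track[-1]['utc']:
        return None
    for p0, p1 in zip(track[:-1], track[1:]):
        if p0['utc'] <= utc <= p1['utc']:
            span = p1['utc'] - p0['utc']
            weight = (utc - p0['utc']) / span if span > 0 else 0
            return {
                'lat': p0['lat'] + weight * (p1['lat'] - p0['lat']),
                'lon': p0['lon'] + weight * (p1['lon'] - p0['lon']),
                'altitude': p0['altitude'] + weight * (p1['altitude'] - p0['altitude'])
            }
    return None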
Example #12
def empty_bin(dry_run):
    """
    Delete all observations which the web editor user has classified as 'bin'.

    :param dry_run:
        Boolean indicating whether we should do a dry run, without actually deleting anything

    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Open direct connection to database
    conn = db.con

    # Search for observations
    conn.execute("""
SELECT f.repositoryFname, f.fileSize, s.name AS semantic, o.publicId AS obs_id
FROM archive_files f
INNER JOIN archive_observations o ON f.observationId = o.uid
INNER JOIN archive_semanticTypes s ON f.semanticType = s.uid
INNER JOIN archive_metadata am on o.uid = am.observationId
    AND am.fieldId=(SELECT x.uid FROM archive_metadataFields x WHERE x.metaKey="web:category")
INNER JOIN archive_metadata am2 on o.uid = am2.observationId
    AND am2.fieldId=(SELECT x.uid FROM archive_metadataFields x WHERE x.metaKey="pigazing:amplitudePeak")
INNER JOIN archive_metadata am3 on o.uid = am3.observationId
    AND am3.fieldId=(SELECT x.uid FROM archive_metadataFields x WHERE x.metaKey="pigazing:duration")
WHERE o.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      (s.name='pigazing:movingObject/video' OR
       s.name='pigazing:movingObject/previousFrame' OR
       s.name='pigazing:movingObject/mapDifference' OR
       s.name='pigazing:movingObject/mapExcludedPixels' OR
       s.name='pigazing:movingObject/mapTrigger'
      ) AND (
      am.stringValue='Bin' OR (am.stringValue='Plane' AND am2.floatValue < 9000 AND am3.floatValue > 5)
      );
""")
    results_observations = conn.fetchall()

    # Keep track of how many bytes we cleaned up
    total_file_size = 0

    # Delete each observation in turn
    for observation in results_observations:
        # Check that observation is not featured
        conn.execute(
            """
SELECT COUNT(*)
FROM archive_files f
INNER JOIN archive_observations o ON f.observationId = o.uid
INNER JOIN archive_metadata d ON f.uid = d.fileId AND d.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='web:featured')
WHERE o.publicId=%s;
""", (observation['obs_id'], ))
        featured_file_count = conn.fetchall()[0]['COUNT(*)']

        if featured_file_count > 0:
            logging.info(
                "Not pruning observation <{}> because it is featured.".format(
                    observation['obs_id']))
            continue

        # Keep track of total file size we are deleting
        total_file_size += observation['fileSize']

        # Transfer file metadata to observation
        if observation['semantic'] == 'pigazing:movingObject/video':
            # Open observation object
            obs_obj = db.get_observation(observation_id=observation['obs_id'])
            obs_metadata = {item.key: item.value for item in obs_obj.meta}

            # Open file object
            file_obj = db.get_file(
                repository_fname=observation['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}

            # Transfer file metadata to observation
            for key in file_metadata:
                if key not in obs_metadata:
                    db.set_observation_metadata(
                        user_id='migrated',
                        observation_id=observation['obs_id'],
                        meta=obsarchive_model.Meta(key=key,
                                                   value=file_metadata[key]))

        # Delete file
        if not dry_run:
            os.unlink(
                os.path.join(settings['dbFilestore'],
                             observation['repositoryFname']))

        # Delete file record
        if not dry_run:
            conn.execute("DELETE FROM archive_files WHERE repositoryFname=%s",
                         (observation['repositoryFname'], ))

    # Report how much disk space we saved
    logging.info("Total storage saved: {:.3f} GB".format(total_file_size /
                                                         1e9))

    # Commit changes to database
    db.commit()
    db.close_db()
Example #13
def do_triangulation(utc_min, utc_max, utc_must_stop):
    """
    Triangulate the trajectories of moving objects which were detected simultaneously by multiple
    observatories between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should triangulate objects (unix time).
    :param utc_max:
        The end of the time period in which we should triangulate objects (unix time).
    :param utc_must_stop:
        The unix time after which we must abort and finish work as quickly as possible.
    :return:
        None
    """
    # We need to share the list of sight lines to each moving object with the objective function that we minimise
    global sight_line_list, time_span, seed_position

    # Start triangulation process
    logging.info(
        "Triangulating simultaneous object detections between <{}> and <{}>.".
        format(date_string(utc_min), date_string(utc_max)))

    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Count how many objects we manage to successfully fit
    outcomes = {
        'successful_fits': 0,
        'failed_fits': 0,
        'inadequate_baseline': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Compile search criteria for observation groups
    where = [
        "g.semanticType = (SELECT uid FROM archive_semanticTypes WHERE name=\"{}\")"
        .format(simultaneous_event_type)
    ]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)

    # Open direct connection to database
    conn = db.con

    # Search for observation groups containing groups of simultaneous detections
    conn.execute(
        """
SELECT g.publicId AS groupId, o.publicId AS observationId, o.obsTime, f.repositoryFname,
       am.stringValue AS objectType, l.publicId AS observatory
FROM archive_obs_groups g
INNER JOIN archive_obs_group_members m on g.uid = m.groupId
INNER JOIN archive_observations o ON m.childObservation = o.uid
INNER JOIN archive_observatories l ON o.observatory = l.uid
LEFT OUTER JOIN archive_files f on o.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video")
INNER JOIN archive_metadata am ON g.uid = am.groupId AND
    am.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE """ + " AND ".join(where) + """
ORDER BY o.obsTime;
""", args)
    results = conn.fetchall()

    # Compile list of events into list of groups
    obs_groups = {}
    obs_group_ids = []
    for item in results:
        key = item['groupId']
        if key not in obs_groups:
            obs_groups[key] = []
            obs_group_ids.append({
                'groupId': key,
                'time': item['obsTime'],
                'type': item['objectType']
            })
        obs_groups[key].append(item)

    # Loop over list of simultaneous event detections
    for group_info in obs_group_ids:
        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}/{type:16s}]".format(
            date=date_string(utc=group_info['time']),
            obs=group_info['groupId'],
            type=group_info['type'])

        # If we've run out of time, stop now
        time_now = time.time()
        if utc_must_stop is not None and time_now > utc_must_stop:
            break

        # Make a list of all our sight-lines to this object, from all observatories
        sight_line_list = []
        observatory_list = {}

        # Fetch information about each observation in turn
        for item in obs_groups[group_info['groupId']]:
            # Fetch metadata about this object, some of which might be on the file, and some on the observation
            obs_obj = db.get_observation(observation_id=item['observationId'])
            obs_metadata = {item.key: item.value for item in obs_obj.meta}
            if item['repositoryFname']:
                file_obj = db.get_file(
                    repository_fname=item['repositoryFname'])
                file_metadata = {
                    item.key: item.value
                    for item in file_obj.meta
                }
            else:
                file_metadata = {}
            all_metadata = {**obs_metadata, **file_metadata}

            # Project path from (x,y) coordinates into (RA, Dec)
            projector = PathProjection(db=db,
                                       obstory_id=item['observatory'],
                                       time=item['obsTime'],
                                       logging_prefix=logging_prefix)

            path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list_this = projector.ra_dec_from_x_y(
                path_json=all_metadata['pigazing:path'],
                path_bezier_json=all_metadata['pigazing:pathBezier'],
                detections=all_metadata['pigazing:detectionCount'],
                duration=all_metadata['pigazing:duration'])

            # Check for error
            if projector.error is not None:
                if projector.error in outcomes:
                    outcomes[projector.error] += 1
                continue

            # Check for notifications
            for notification in projector.notifications:
                if notification in outcomes:
                    outcomes[notification] += 1

            # Add to observatory_list, now that we've checked this observatory has all necessary information
            if item['observatory'] not in observatory_list:
                observatory_list[item['observatory']] = projector.obstory_info

            # Add sight lines from this observatory to list which combines all observatories
            sight_line_list.extend(sight_line_list_this)

        # If we have fewer than four sight lines, don't bother trying to triangulate
        if len(sight_line_list) < 4:
            logging.info(
                "{prefix} -- Giving up triangulation as we only have {x:d} sight lines to object."
                .format(prefix=logging_prefix, x=len(sight_line_list)))
            continue

        # Initialise maximum baseline between the stations which saw this object
        maximum_baseline = 0

        # Check the distances between all pairs of observatories
        obstory_info_list = [
            Point.from_lat_lng(lat=obstory['latitude'],
                               lng=obstory['longitude'],
                               alt=0,
                               utc=None)
            for obstory in observatory_list.values()
        ]

        pairs = [[obstory_info_list[i], obstory_info_list[j]]
                 for i in range(len(obstory_info_list))
                 for j in range(i + 1, len(obstory_info_list))]

        # Work out maximum baseline between the stations which saw this object
        for pair in pairs:
            maximum_baseline = max(
                maximum_baseline,
                abs(pair[0].displacement_vector_from(pair[1])))

        # If we have no baselines of over 1 km, don't bother trying to triangulate
        if maximum_baseline < 1000:
            logging.info(
                "{prefix} -- Giving up triangulation as longest baseline is only {x:.0f} m."
                .format(prefix=logging_prefix, x=maximum_baseline))
            outcomes['inadequate_baseline'] += 1
            continue

        # Set time range of sight lines
        time_span = [
            min(item['utc'] for item in sight_line_list),
            max(item['utc'] for item in sight_line_list)
        ]

        # Create a seed point to start the search for the object's path. We pick a point above the centroid of the observatories
        # that saw the object
        centroid_v = sum(item['obs_position'].to_vector()
                         for item in sight_line_list) / len(sight_line_list)
        centroid_p = Point(x=centroid_v.x, y=centroid_v.y, z=centroid_v.z)
        centroid_lat_lng = centroid_p.to_lat_lng(utc=None)
        seed_position = Point.from_lat_lng(lat=centroid_lat_lng['lat'],
                                           lng=centroid_lat_lng['lng'],
                                           alt=centroid_lat_lng['alt'] * 2e4,
                                           utc=None)

        # Attempt to fit a linear trajectory through all of the sight lines that we have collected
        parameters_initial = [0, 0, 0, 0, 0, 0]

        # Solve the system of equations
        # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/>
        # for more information about how this works
        parameters_optimised = scipy.optimize.minimize(
            angular_mismatch_objective,
            numpy.asarray(parameters_initial),
            options={
                'disp': False,
                'maxiter': 1e8
            }).x

        # Construct best-fit linear trajectory for best-fitting parameters
        best_triangulation = line_from_parameters(parameters_optimised)
        # logging.info("Best fit path of object is <{}>.".format(best_triangulation))

        # logging.info("Mismatch of observed sight lines from trajectory are {} deg.".format(
        #     ["{:.1f}".format(best_triangulation.find_closest_approach(s['line'])['angular_distance'])
        #      for s in sight_line_list]
        # ))

        # Find sight line with the worst match
        mismatch_list = sight_line_mismatch_list(trajectory=best_triangulation)
        maximum_mismatch = max(mismatch_list)

        # Reject trajectory if it deviates by more than 8 degrees from any observation
        if maximum_mismatch > 8:
            logging.info(
                "{prefix} -- Trajectory mismatch is too great ({x:.1f} deg).".
                format(prefix=logging_prefix, x=maximum_mismatch))
            outcomes['failed_fits'] += 1
            continue

        # Convert start and end points of path into (lat, lng, alt)
        start_point = best_triangulation.point(0).to_lat_lng(utc=None)
        start_point['utc'] = time_span[0]
        end_point = best_triangulation.point(1).to_lat_lng(utc=None)
        end_point['utc'] = time_span[1]

        # Calculate linear speed of object
        speed = abs(best_triangulation.direction) / (
            time_span[1] - time_span[0])  # m/s

        # Calculate radiant direction for this object
        radiant_direction_vector = best_triangulation.direction * -1
        radiant_direction_coordinates = radiant_direction_vector.to_ra_dec(
        )  # hours, degrees
        radiant_greenwich_hour_angle = radiant_direction_coordinates['ra']
        radiant_dec = radiant_direction_coordinates['dec']
        instantaneous_sidereal_time = sidereal_time(utc=(utc_min + utc_max) /
                                                    2)  # hours
        radiant_ra = radiant_greenwich_hour_angle + instantaneous_sidereal_time  # hours
        radiant_direction = [radiant_ra, radiant_dec]

        # Store triangulated information in database
        user = settings['pigazingUser']
        timestamp = time.time()
        triangulation_metadata = {
            "triangulation:speed":
            speed,
            "triangulation:mean_altitude":
            (start_point['alt'] + end_point['alt']) / 2 / 1e3,  # km
            "triangulation:max_angular_offset":
            maximum_mismatch,
            "triangulation:max_baseline":
            maximum_baseline,
            "triangulation:radiant_direction":
            json.dumps(radiant_direction),
            "triangulation:sight_line_count":
            len(sight_line_list),
            "triangulation:path":
            json.dumps([start_point, end_point])
        }

        # Set metadata on the observation group
        for metadata_key, metadata_value in triangulation_metadata.items():
            db.set_obsgroup_metadata(user_id=user,
                                     group_id=group_info['groupId'],
                                     utc=timestamp,
                                     meta=mp.Meta(key=metadata_key,
                                                  value=metadata_value))

        # Set metadata on each observation individually
        for item in obs_groups[group_info['groupId']]:
            for metadata_key, metadata_value in triangulation_metadata.items():
                db.set_observation_metadata(
                    user_id=user,
                    observation_id=item['observationId'],
                    utc=timestamp,
                    meta=mp.Meta(key=metadata_key, value=metadata_value))

        # Commit metadata to database
        db.commit()

        # Report outcome
        logging.info(
            "{prefix} -- Success -- {path}; speed {mph:11.1f} mph; {sight_lines:6d} detections."
            .format(
                prefix=logging_prefix,
                path="{:5.1f} {:5.1f} {:10.1f} -> {:5.1f} {:5.1f} {:10.1f}".
                format(
                    start_point['lat'],
                    start_point['lng'],
                    start_point['alt'] / 1e3,  # deg deg km
                    end_point['lat'],
                    end_point['lng'],
                    end_point['alt'] / 1e3),
                mph=speed / 0.44704,  # mph
                sight_lines=len(sight_line_list)))

        # Triangulation successful
        outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} objects successfully triangulated.".format(
        outcomes['successful_fits']))
    logging.info("{:d} objects could not be triangulated.".format(
        outcomes['failed_fits']))
    logging.info("{:d} objects had an inadequate baseline.".format(
        outcomes['inadequate_baseline']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))
    logging.info("{:d} objects with incomplete data.".format(
        outcomes['insufficient_information']))

    # Commit changes
    db.commit()
    db.close_db()
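
Neither `line_from_parameters` nor `angular_mismatch_objective` appears in this listing. Based on how they are used above (a six-element parameter vector, and the commented-out diagnostic which reads `find_closest_approach(...)['angular_distance']`), the objective plausibly resembles the sketch below; the way `line_from_parameters` maps the parameters onto a trajectory is an assumption and is only described in the comment:

def angular_mismatch_objective(p):
    """
    Hypothetical sketch of the objective minimised above. line_from_parameters(p)
    is assumed to build a candidate straight-line trajectory from the six fit
    parameters (e.g. by offsetting the global seed_position and scaling a
    direction vector over time_span); we then sum the angular offsets of all
    observed sight lines from that trajectory.
    """
    trajectory = line_from_parameters(p)
    return sum(trajectory.find_closest_approach(s['line'])['angular_distance']
               for s in sight_line_list)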
Example #14
def plot_orientation(obstory_ids, utc_min, utc_max):
    """
    Plot the orientation of a particular observatory within the time period between the unix times
    <utc_min> and <utc_max>.

    :param obstory_ids:
        The IDs of the observatories we want to plot the orientation for.
    :type obstory_ids:
        list<str>
    :param utc_min:
        The start of the time period in which we should plot the observatory's orientation (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should plot the observatory's orientation (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Plotting camera alignment for <{}>".format(obstory_ids))

    # Data filename stem
    filename_stem = "/tmp/orientation_plot_{:.1f}".format(utc_min)

    # Make data file for each observatory in turn
    for counter, obstory_id in enumerate(obstory_ids):
        # Search for background-subtracted time-lapse images within this time period, together with
        # their sky clarity and any orientation fit-quality metadata
        conn.execute(
            """
SELECT ao.obsTime, ao.publicId AS observationId,
       am.floatValue AS skyClarity, am2.stringValue AS fitQuality, am3.stringValue AS fitQualityToAverage
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality")
LEFT OUTER JOIN archive_metadata am3 ON ao.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality_to_daily")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
ORDER BY ao.obsTime
""", (utc_min, utc_max, obstory_id))
        results = conn.fetchall()

        # Data filename
        filename = "{}_{:02d}".format(filename_stem, counter)

        # Loop over results and write to data file
        with open("{}.dat".format(filename), "w") as f:
            for item in results:
                utc = float(item['obsTime'])
                sky_clarity = float(item['skyClarity'])
                fit_quality = -99
                fit_quality_to_average = -99

                if item['fitQuality'] is not None:
                    fit_quality = float(json.loads(item['fitQuality'])[0])

                if item['fitQualityToAverage'] is not None:
                    fit_quality_to_average = float(
                        json.loads(item['fitQualityToAverage'])[0])

                f.write("{:.1f} {:6.1f} {:6.3f} {:6.3f}\n".format(
                    utc, sky_clarity, fit_quality, fit_quality_to_average))

    # Write pyxplot command file
    with open("{}.ppl".format(filename_stem), "w") as ppl:
        plot_settings = {
            "x_min": utc_min,
            "x_max": utc_max,
            "width": 18,
            "spacing": 4,
            "pt": 17,
            "filename": filename_stem
        }
        ppl.write("""
set width {width}
set multiplot ; set nodisplay
set key below
set xlabel 'Time / hour' ; set xrange [{x_min}:{x_max}]
set xformat "%.1f"%((x/3600) % 24)
set ylabel 'Fit quality' ; set yrange [0:6]
set y2label 'Sky clarity' ; set y2range [0:1000]
    """.format(**plot_settings))

        for counter, obstory_id in enumerate(obstory_ids):
            ppl.write("""
            set origin ({width}+{spacing})*{counter}, 0
plot '{filename}_{counter:02d}.dat' title 'Fit quality' using 1:3 axes x1y1 with p col green pt {pt}, \
     '{filename}_{counter:02d}.dat' title 'Fit quality (to daily average)' using 1:4 axes x1y1 with p col red pt {pt}, \
     '{filename}_{counter:02d}.dat' title 'Sky clarity' using 1:2 axes x1y2 with p col blue pt {pt}
    """.format(**plot_settings, counter=counter))

        ppl.write("""
set term png ; set output '{filename}.png'
set term dpi 100
set display ; refresh
    """.format(**plot_settings))

    os.system("pyxplot {}.ppl".format(filename_stem))

    # Close database handles
    db.close_db()
    conn.close()
    db0.close()
    return
Example #15
def orientation_calc(obstory_id, utc_min, utc_max):
    """
    Use astrometry.net to determine the orientation of a particular observatory within each night within the time
    period between the unix times <utc_min> and <utc_max>.

    :param obstory_id:
        The ID of the observatory we want to determine the orientation for.
    :type obstory_id:
        str
    :param utc_min:
        The start of the time period in which we should determine the observatory's orientation (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the observatory's orientation (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info(
        "Starting calculation of camera alignment for <{}>".format(obstory_id))

    # Reduce time window we are searching to the interval in which observations are present (to save time)
    utc_max, utc_min = reduce_time_window(conn=conn,
                                          obstory_id=obstory_id,
                                          utc_max=utc_max,
                                          utc_min=utc_min)

    # Try to average the fits within each night to determine the sigma-clipped mean orientation
    average_daily_fits(conn=conn,
                       db=db,
                       obstory_id=obstory_id,
                       utc_max=utc_max,
                       utc_min=utc_min)
    measure_fit_quality_to_daily_fits(conn=conn,
                                      db=db,
                                      obstory_id=obstory_id,
                                      utc_max=utc_max,
                                      utc_min=utc_min)

    # Clean up and exit
    db.commit()
    db.close_db()
    db0.commit()
    conn.close()
    db0.close()
    return
Ejemplo n.º 16
0
def list_orientations(obstory_id, utc_min, utc_max):
    """
    List the worst orientation fits of a particular observatory within the time period between the unix times
    <utc_min> and <utc_max>.

    :param obstory_id:
        The ID of the observatory we want to determine the orientation for.
    :type obstory_id:
        str
    :param utc_min:
        The start of the time period in which we should determine the observatory's orientation (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the observatory's orientation (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Plotting camera alignment for <{}>".format(obstory_id))

    # Search for background-subtracted time-lapse images with sufficient sky clarity, together with their
    # orientation fit quality, within this time period
    conn.execute(
        """
SELECT ao.obsTime, f.repositoryFname AS observationId,
       am.floatValue AS skyClarity, am2.stringValue AS fitQuality, am3.stringValue AS fitQualityToAverage
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality")
LEFT OUTER JOIN archive_metadata am3 ON ao.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality_to_daily")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
    AND am.floatValue > %s
ORDER BY ao.obsTime
""", (utc_min, utc_max, obstory_id, minimum_sky_clarity))
    results = conn.fetchall()

    # Data filename
    filename = "/tmp/worst_orientations"

    # Loop over results
    data = []
    for item in results:
        utc = float(item['obsTime'])
        sky_clarity = float(item['skyClarity'])
        fit_quality = -99
        fit_quality_to_average = -99

        if item['fitQuality'] is not None:
            fit_quality = float(json.loads(item['fitQuality'])[0])

        if item['fitQualityToAverage'] is not None:
            fit_quality_to_average = float(
                json.loads(item['fitQualityToAverage'])[0])

        data.append({
            'uid': item['observationId'],
            'utc': utc,
            'sky_clarity': sky_clarity,
            'fit_quality': fit_quality,
            'fit_quality_to_average': fit_quality_to_average
        })

    # Sort on fit quality
    data.sort(key=itemgetter('fit_quality_to_average'))
    data.reverse()

    # Limit to 1000 worst points
    data = data[:1000]

    # Write to data file
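    # Columns in the data file: observation ID, sky clarity, fit quality, fit quality relative to the daily average fit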
    with open("{}.dat".format(filename), "w") as f:
        for item in data:
            f.write("{} {:6.1f} {:6.3f} {:6.3f}\n".format(
                item['uid'], item['sky_clarity'], item['fit_quality'],
                item['fit_quality_to_average']))

    # Close database handles
    db.close_db()
    conn.close()
    db0.close()
    return
Ejemplo n.º 17
0
def observing_loop():
    obstory_id = installation_info['observatoryId']

    logging.info("Observatory controller launched")

    # Fetch observatory status, e.g. location
    logging.info("Fetching observatory status")
    latitude = known_observatories[obstory_id]['latitude']
    longitude = known_observatories[obstory_id]['longitude']
    altitude = 0
    latest_position_update = 0
    flag_gps = 0

    # Make sure that observatory exists in the database

    # Start main observing loop
    while True:
        # Get a new MySQL connection because old one may not be connected any longer
        db = obsarchive_db.ObservationDatabase(
            file_store_path=settings['dbFilestore'],
            db_host=installation_info['mysqlHost'],
            db_user=installation_info['mysqlUser'],
            db_password=installation_info['mysqlPassword'],
            db_name=installation_info['mysqlDatabase'],
            obstory_id=installation_info['observatoryId'])

        # Get a GPS fix on the current time and our location
        gps_fix = get_gps_fix()
        if gps_fix:
            latitude = gps_fix['latitude']
            longitude = gps_fix['longitude']
            altitude = gps_fix['altitude']
            flag_gps = 1

        # Decide whether we should observe, or do some day-time maintenance tasks
        logging.info("Observation controller considering what to do next.")

        time_now = time.time()

        # How far below the horizon do we require the Sun to be before we start observing?
        angle_below_horizon = settings['sunRequiredAngleBelowHorizon']

        sun_times_yesterday = sunset_times.sun_times(
            unix_time=time_now - 3600 * 24,
            longitude=longitude,
            latitude=latitude,
            angle_below_horizon=angle_below_horizon)
        sun_times_today = sunset_times.sun_times(
            unix_time=time_now,
            longitude=longitude,
            latitude=latitude,
            angle_below_horizon=angle_below_horizon)
        sun_times_tomorrow = sunset_times.sun_times(
            unix_time=time_now + 3600 * 24,
            longitude=longitude,
            latitude=latitude,
            angle_below_horizon=angle_below_horizon)
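        # sun_times() is assumed to return a sequence whose element 0 is the sunrise time and element 2 is
        # the sunset time; that is how its return value is indexed below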

        logging.info("Sunrise at {}".format(
            dcf_ast.date_string(sun_times_yesterday[0])))
        logging.info("Sunset  at {}".format(
            dcf_ast.date_string(sun_times_yesterday[2])))
        logging.info("Sunrise at {}".format(
            dcf_ast.date_string(sun_times_today[0])))
        logging.info("Sunset  at {}".format(
            dcf_ast.date_string(sun_times_today[2])))
        logging.info("Sunrise at {}".format(
            dcf_ast.date_string(sun_times_tomorrow[0])))
        logging.info("Sunset  at {}".format(
            dcf_ast.date_string(sun_times_tomorrow[2])))

        sun_margin = settings['sunMargin']

        # Calculate whether it's currently night time, and how long until the next sunrise
        is_night_time = False
        seconds_till_sunrise = 0

        # Test whether it is night time, i.e. we are between yesterday's sunset and today's sunrise
        if (time_now > sun_times_yesterday[2] + sun_margin) and (
                time_now < sun_times_today[0] - sun_margin):
            logging.info("""
It is night time. We are between yesterday's sunset and today's sunrise.
""".strip())
            is_night_time = True
            seconds_till_sunrise = sun_times_today[0] - time_now

        # Test whether we are between yesterday's sunset and today's sunrise, but still within the sun margin
        elif (time_now > sun_times_yesterday[2]) and (time_now <
                                                      sun_times_today[0]):
            next_observing_time = sun_times_yesterday[2] + sun_margin
            next_observing_wait = next_observing_time - time_now
            if next_observing_wait > 0:
                logging.info("""
We are between yesterday's sunset and today's sunrise, but the sun has only recently set. \
Waiting {:.0f} seconds (until {}) to start observing.
""".format(next_observing_wait,
                dcf_ast.date_string(next_observing_time)).strip())
                db.commit()
                db.close_db()
                del db
                time.sleep(next_observing_wait + 2)
                continue

        # Test whether it is night time, i.e. we are between today's sunset and tomorrow's sunrise
        elif (time_now > sun_times_today[2] + sun_margin) and (
                time_now < sun_times_tomorrow[0] - sun_margin):
            logging.info("""
It is night time. We are between today's sunset and tomorrow's sunrise.
""".strip())
            is_night_time = True
            seconds_till_sunrise = sun_times_tomorrow[0] - time_now

        # Test whether we are between today's sunset and tomorrow's sunrise, but still within the sun margin
        elif (time_now > sun_times_today[2]) and (time_now <
                                                  sun_times_tomorrow[0]):
            next_observing_time = sun_times_today[2] + sun_margin
            next_observing_wait = next_observing_time - time_now
            if next_observing_wait > 0:
                logging.info("""
We are between today's sunset and tomorrow's sunrise, but the sun has only recently set. \
Waiting {:.0f} seconds (until {}) to start observing.
""".format(next_observing_wait,
                dcf_ast.date_string(next_observing_time)).strip())
                db.commit()
                db.close_db()
                del db
                time.sleep(next_observing_wait + 2)
                continue

        # Calculate time until the next sunset
        seconds_till_sunset = sun_times_yesterday[2] - time_now
        if seconds_till_sunset < -sun_margin:
            seconds_till_sunset = sun_times_today[2] - time_now
        if seconds_till_sunset < -sun_margin:
            seconds_till_sunset = sun_times_tomorrow[2] - time_now

        # If sunset was well in the past, and sunrise is well in the future, we should observe!
        minimum_time_worth_observing = 600
        if is_night_time and (seconds_till_sunrise >
                              (sun_margin + minimum_time_worth_observing)):

            # Check that observatory exists
            check_observatory_exists(db_handle=db,
                                     obs_id=obstory_id,
                                     utc=time.time())

            # Fetch updated observatory status
            obstory_status = db.get_obstory_status(obstory_id=obstory_id)

            # If we've not stored a GPS fix in the database within the past six hours, do so now
            if flag_gps and (time.time() > latest_position_update + 6 * 3600):
                latest_position_update = time.time()
                db.register_obstory_metadata(
                    obstory_id=obstory_id,
                    key="latitude_gps",
                    value=latitude,
                    metadata_time=time.time(),
                    time_created=time.time(),
                    user_created=settings['pigazingUser'])
                db.register_obstory_metadata(
                    obstory_id=obstory_id,
                    key="longitude_gps",
                    value=longitude,
                    metadata_time=time.time(),
                    time_created=time.time(),
                    user_created=settings['pigazingUser'])
                db.register_obstory_metadata(
                    obstory_id=obstory_id,
                    key="altitude_gps",
                    value=altitude,
                    metadata_time=time.time(),
                    time_created=time.time(),
                    user_created=settings['pigazingUser'])

            # Create clipping region mask file
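            # The mask file contains one polygon per blank-line-separated block, with one "x y" pixel pair per line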
            mask_file = "/tmp/triggermask_%d.txt" % os.getpid()
            with open(mask_file, "w") as mask_out:
                mask_out.write("\n\n".join([
                    "\n".join([("%d %d" % tuple(p)) for p in point_list])
                    for point_list in json.loads(obstory_status["clipping_region"])
                ]))

            # Commit updates to the database
            db.commit()
            db.close_db()
            del db

            # Calculate how long to observe for
            observing_duration = seconds_till_sunrise - sun_margin

            # Do not record too much video in a single file, otherwise the file becomes unmanageably large
            if not settings['realTime']:
                observing_duration = min(observing_duration,
                                         settings['videoMaxRecordTime'])

            # Start observing run
            t_stop = time_now + observing_duration
            logging.info("""
Starting observing run until {} (running for {:.0f} seconds).
""".format(dcf_ast.date_string(t_stop), observing_duration).strip())

            # Flick the relay to turn the camera on
            relay_control.camera_on()
            time.sleep(5)
            logging.info("Camera has been turned on.")

            # Observe! We use different binaries depending on whether we're using a webcam-like camera,
            # or a DSLR connected via gphoto2
            time_key = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')

            # Work out which C binary we're using to do observing
            if settings['realTime']:
                output_argument = ""
                if obstory_status["camera_type"] == "gphoto2":
                    binary = "realtimeObserve_dslr"
                else:
                    binary = "realtimeObserve"
            else:
                output_argument = """ --output \"{}/raw_video/{}_{}\" """.format(
                    settings['dataPath'], time_key, obstory_id)
                if settings['i_am_a_rpi']:
                    binary = "recordH264_openmax"
                else:
                    binary = "recordH264_libav"

            binary_full_path = "{path}{debug}/{binary}".format(
                path=settings['binaryPath'],
                debug="/debug" if settings['debug'] else "",
                binary=binary)
            cmd = """
timeout {timeout} \
{binary} --utc-stop {utc_stop:.1f} \
         --obsid \"{obsid}\" \
         --device \"{device}\" \
         --fps {fps} \
         --width {width:d} \
         --height {height:d} \
         --mask \"{mask_file}\" \
         --latitude {latitude} \
         --longitude {longitude} \
         --flag-gps {flag_gps} \
         --flag-upside-down {upside_down} \
         {output_argument}
""".format(timeout=float(observing_duration + 300),
            binary=binary_full_path,
            utc_stop=float(t_stop),
            obsid=obstory_id,
            device=settings['videoDev'],
            width=int(obstory_status['camera_width']),
            height=int(obstory_status['camera_height']),
            fps=float(obstory_status['camera_fps']),
            mask_file=mask_file,
            latitude=float(latitude),
            longitude=float(longitude),
            flag_gps=int(flag_gps),
            upside_down=int(obstory_status['camera_upside_down']),
            output_argument=output_argument).strip()

            logging.info("Running command: {}".format(cmd))
            os.system(cmd)

            # Flick the relay to turn the camera off
            relay_control.camera_off()
            time.sleep(5)
            logging.info("Camera has been turned off.")

            # Snooze for up to 10 minutes; we may rerun observing tasks in a while if they ended prematurely
            if time.time() < t_stop:
                snooze_duration = float(min(t_stop - time.time(), 600))
                logging.info(
                    "Snoozing for {:.0f} seconds".format(snooze_duration))
                time.sleep(snooze_duration)

            continue

        # It is day time, so consider running day time tasks

        # First, commit updates to the database
        db.commit()
        db.close_db()
        del db

        # Estimate roughly when we're next going to be able to observe (i.e. shortly after sunset)
        next_observing_wait = seconds_till_sunset + sun_margin

        # If we've got more than an hour, it's worth doing some day time tasks
        # Do daytime tasks on a RPi only if we are doing real-time observation
        if (next_observing_wait > 3600) and (settings['realTime']
                                             or not settings['i_am_a_rpi']):
            t_stop = time_now + next_observing_wait
            logging.info("""
Starting daytime tasks until {} (running for {:.0f} seconds).
""".format(dcf_ast.date_string(t_stop), next_observing_wait).strip())
            os.system("cd {} ; ./daytimeTasks.py --stop-by {}".format(
                os.path.join(settings['pythonPath'], "observe"), t_stop))

            # Snooze for up to 30 minutes; we may rerun daytime tasks in a while if they ended prematurely
            if time.time() < t_stop:
                snooze_duration = float(min(t_stop - time.time(), 1800))
                logging.info(
                    "Snoozing for {:.0f} seconds".format(snooze_duration))
                time.sleep(snooze_duration)

        else:
            if next_observing_wait < 0:
                next_observing_wait = 0
            next_observing_wait += 30
            t_stop = time_now + next_observing_wait
            logging.info("""
Not time to start observing yet, so sleeping until {} ({:.0f} seconds away).
""".format(dcf_ast.date_string(t_stop), next_observing_wait).strip())
            time.sleep(next_observing_wait)

        # Little snooze to prevent spinning around the loop
        snooze_duration = float(10)
        logging.info("Snoozing for {:.0f} seconds".format(snooze_duration))
        time.sleep(snooze_duration)
def calibrate_lens(obstory_id, utc_min, utc_max, utc_must_stop=None):
    """
    Use astrometry.net to determine the lens calibration (barrel distortion) of a particular observatory.

    :param obstory_id:
        The ID of the observatory whose lens we want to calibrate.
    :param utc_min:
        The start of the time period in which we should determine the lens calibration.
    :param utc_max:
        The end of the time period in which we should determine the lens calibration.
    :param utc_must_stop:
        The time by which we must finish work
    :return:
        None
    """
    global parameter_scales, fit_list

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info(
        "Starting estimation of lens calibration for <{}>".format(obstory_id))

    # Mathematical constants
    deg = pi / 180
    rad = 180 / pi

    # Count how many successful fits we achieve
    successful_fits = 0

    # Read properties of known lenses
    hw = hardware_properties.HardwareProps(
        path=os.path.join(settings['pythonPath'], "..", "configuration_global",
                          "camera_properties"))

    # Reduce time window to where observations are present
    conn.execute(
        """
SELECT obsTime
FROM archive_observations
WHERE obsTime BETWEEN %s AND %s
    AND observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
ORDER BY obsTime ASC LIMIT 1
""", (utc_min, utc_max, obstory_id))
    results = conn.fetchall()

    if len(results) == 0:
        logging.warning("No observations within requested time window.")
        db.close_db()
        conn.close()
        db0.close()
        return
    utc_min = results[0]['obsTime'] - 1

    conn.execute(
        """
SELECT obsTime
FROM archive_observations
WHERE obsTime BETWEEN %s AND %s
    AND observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
ORDER BY obsTime DESC LIMIT 1
""", (utc_min, utc_max, obstory_id))
    results = conn.fetchall()
    utc_max = results[0]['obsTime'] + 1

    # Divide up the time interval into hour-long blocks
    logging.info("Searching for images within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))
    block_size = 3600
    minimum_sky_clarity = 1e6 + 1400
    utc_min = (floor(utc_min / block_size + 0.5) -
               0.5) * block_size  # Align block boundaries to half past each hour
    time_blocks = list(
        np.arange(start=utc_min, stop=utc_max + block_size, step=block_size))

    # Start new block whenever we have a hardware refresh
    conn.execute(
        """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()
    for item in results:
        time_blocks.append(item['time'])

    # Make sure that start points for time blocks are in order
    time_blocks.sort()

    # Build list of images we are to analyse
    images_for_analysis = []

    for block_index, utc_block_min in enumerate(time_blocks[:-1]):
        utc_block_max = time_blocks[block_index + 1]
        logging.info("Calibrating lens within period {} to {}".format(
            date_string(utc_block_min), date_string(utc_block_max)))

        # Search for background-subtracted time lapse image with best sky clarity within this time period
        conn.execute(
            """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, am.floatValue AS skyClarity
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON f.uid = am2.fileId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
    AND am.floatValue > %s
    AND am2.uid IS NULL
    AND ao.astrometryProcessed IS NULL
ORDER BY am.floatValue DESC LIMIT 1
""", (utc_block_min, utc_block_max, obstory_id, minimum_sky_clarity))
        results = conn.fetchall()

        if len(results) > 0:
            images_for_analysis.append({
                'utc':
                results[0]['obsTime'],
                'skyClarity':
                results[0]['skyClarity'],
                'repositoryFname':
                results[0]['repositoryFname'],
                'observationId':
                results[0]['observationId']
            })

    # Sort images into order of sky clarity
    images_for_analysis.sort(key=itemgetter("skyClarity"))
    images_for_analysis.reverse()

    # Display logging list of the images we are going to work on
    logging.info("Estimating the calibration of {:d} images:".format(
        len(images_for_analysis)))
    for item in images_for_analysis:
        logging.info("{:17s} {:04.0f} {:32s}".format(date_string(item['utc']),
                                                     item['skyClarity'],
                                                     item['repositoryFname']))

    # Analyse each image in turn
    for item_index, item in enumerate(images_for_analysis):
        logging.info("Working on image {:32s} ({:4d}/{:4d})".format(
            item['repositoryFname'], item_index + 1, len(images_for_analysis)))

        # Make a temporary directory to store files in.
        # This is necessary as astrometry.net spams the cwd with lots of temporary junk
        tmp0 = "/tmp/dcf21_calibrateLens_{}".format(item['repositoryFname'])
        # logging.info("Created temporary directory <{}>".format(tmp))
        os.system("mkdir {}".format(tmp0))

        # Fetch observatory status
        obstory_info = db.get_obstory_from_id(obstory_id)
        obstory_status = None
        if obstory_info and ('name' in obstory_info):
            obstory_status = db.get_obstory_status(obstory_id=obstory_id,
                                                   time=item['utc'])
        if not obstory_status:
            logging.info("Skipping image -- no observatory status available.")
            continue

        # Fetch properties of the lens fitted to this observatory
        lens_name = obstory_status['lens']
        lens_props = hw.lens_data[lens_name]

        # This is an estimate of the *maximum* angular width we expect images to have.
        # It should be within a factor of two of correct!
        estimated_image_scale = lens_props.fov

        # Path to this image within the file store
        filename = os.path.join(settings['dbFilestore'],
                                item['repositoryFname'])

        if not os.path.exists(filename):
            logging.info("Error: File <{}> is missing!".format(
                item['repositoryFname']))
            continue

        # 1. Copy image into working directory
        # logging.info("Copying file")
        img_name = item['repositoryFname']
        command = "cp {} {}/{}_tmp.png".format(filename, tmp0, img_name)
        # logging.info(command)
        os.system(command)

        # 2. We estimate the distortion of the image by passing a series of small portions of the image to
        # astrometry.net. We use this to construct a mapping between (x, y) pixel coordinates to (RA, Dec).

        # Define the size of each small portion we pass to astrometry.net
        fraction_x = 0.15
        fraction_y = 0.15

        # Create a list of the centres of the portions we send
        fit_list = []
        portion_centres = [{'x': 0.5, 'y': 0.5}]

        # Points along the leading diagonal of the image
        for z in np.arange(0.1, 0.9, 0.1):
            if z != 0.5:
                portion_centres.append({'x': z, 'y': z})
                portion_centres.append({'x': (z + 0.5) / 2, 'y': z})
                portion_centres.append({'x': z, 'y': (z + 0.5) / 2})

        # Points along the trailing diagonal of the image
        for z in np.arange(0.1, 0.9, 0.1):
            if z != 0.5:
                portion_centres.append({'x': z, 'y': 1 - z})
                portion_centres.append({'x': (1.5 - z) / 2, 'y': z})
                portion_centres.append({'x': z, 'y': (1.5 - z) / 2})

        # Points down the vertical centre-line of the image
        for z in np.arange(0.15, 0.85, 0.1):
            portion_centres.append({'x': 0.5, 'y': z})

        # Points along the horizontal centre-line of the image
        for z in np.arange(0.15, 0.85, 0.1):
            portion_centres.append({'x': z, 'y': 0.5})

        # Fetch the pixel dimensions of the image we are working on
        d = image_dimensions("{}/{}_tmp.png".format(tmp0, img_name))

        @dask.delayed
        def analyse_image_portion(image_portion):

            # Make a temporary directory to store files in.
            # This is necessary as astrometry.net spams the cwd with lots of temporary junk
            tmp = "/tmp/dcf21_calibrateLens_{}_{}".format(
                item['repositoryFname'], image_portion['index'])
            # logging.info("Created temporary directory <{}>".format(tmp))
            os.system("mkdir {}".format(tmp))

            # Use ImageMagick to crop out each small piece of the image
            command = """
cd {6} ; \
rm -f {5}_tmp3.png ; \
convert {0}_tmp.png -colorspace sRGB -define png:format=png24 -crop {1:d}x{2:d}+{3:d}+{4:d} +repage {5}_tmp3.png
            """.format(os.path.join(tmp0, img_name), int(fraction_x * d[0]),
                       int(fraction_y * d[1]),
                       int((image_portion['x'] - fraction_x / 2) * d[0]),
                       int((image_portion['y'] - fraction_y / 2) * d[1]),
                       img_name, tmp)
            # logging.info(command)
            os.system(command)

            # Check that we've not run out of time
            if utc_must_stop and (time.time() > utc_must_stop):
                logging.info("We have run out of time! Aborting.")
                os.system("rm -Rf {}".format(tmp))
                return None

            # How long should we allow astrometry.net to run for?
            timeout = "40s"

            # Run astrometry.net; the --no-plots option is included to speed things up.
            # logging.info("Running astrometry.net")
            estimated_width = 2 * math.atan(
                math.tan(estimated_image_scale / 2 * deg) * fraction_x) * rad
            astrometry_output = os.path.join(tmp, "txt")
            command = """
cd {5} ; \
timeout {0} solve-field --no-plots --crpix-center --scale-low {1:.1f} \
        --scale-high {2:.1f} --overwrite {3}_tmp3.png > {4} 2> /dev/null \
            """.format(timeout, estimated_width * 0.6, estimated_width * 1.2,
                       img_name, astrometry_output, tmp)
            # logging.info(command)
            os.system(command)

            # Parse the output from astrometry.net
            assert os.path.exists(
                astrometry_output), "Path <{}> doesn't exist".format(
                    astrometry_output)
            fit_text = open(astrometry_output).read()
            # logging.info(fit_text)

            # Clean up
            # logging.info("Removing temporary directory <{}>".format(tmp))
            os.system("rm -Rf {}".format(tmp))

            # Extract celestial coordinates of the centre of the frame from astrometry.net output
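            # The solver output is expected to contain a line of roughly this form (hypothetical example),
            # from which the field centre is extracted:
            #   Field center: (RA H:M:S, Dec D:M:S) = (05:35:17.300, +22:00:52.21)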
            test = re.search(
                r"\(RA H:M:S, Dec D:M:S\) = \(([\d-]*):(\d\d):([\d.]*), [+]?([\d-]*):(\d\d):([\d\.]*)\)",
                fit_text)
            if not test:
                logging.info("FAIL(POS): Point ({:.2f},{:.2f}).".format(
                    image_portion['x'], image_portion['y']))
                return None

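            # The sign is carried by the hours/degrees field, so take its absolute value first and re-apply
            # the sign after adding the minutes and seconds terms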
            ra_sign = sgn(float(test.group(1)))
            ra = abs(float(test.group(1))) + float(test.group(2)) / 60 + float(
                test.group(3)) / 3600
            if ra_sign < 0:
                ra *= -1
            dec_sign = sgn(float(test.group(4)))
            dec = abs(float(test.group(4))) + float(
                test.group(5)) / 60 + float(test.group(6)) / 3600
            if dec_sign < 0:
                dec *= -1

            # If astrometry.net achieved a fit, then we report it to the user
            logging.info(
                "FIT: RA: {:7.2f}h. Dec {:7.2f} deg. Point ({:.2f},{:.2f}).".
                format(ra, dec, image_portion['x'], image_portion['y']))

            # Also, populate <fit_list> with a list of the central points of the image fragments, and their (RA, Dec)
            # coordinates.
            return {
                'ra': ra * pi / 12,
                'dec': dec * pi / 180,
                'x': image_portion['x'],
                'y': image_portion['y'],
                'radius': hypot(image_portion['x'] - 0.5,
                                image_portion['y'] - 0.5)
            }

        # Analyse each small portion of the image in turn
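        # analyse_image_portion is wrapped with @dask.delayed, so each call below only builds a task graph;
        # dask.compute() then executes the tasks (in parallel where workers are available) and returns the
        # results in the original order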
        dask_tasks = []
        for index, image_portion in enumerate(portion_centres):
            image_portion['index'] = index
            dask_tasks.append(
                analyse_image_portion(image_portion=image_portion))
        fit_list = dask.compute(*dask_tasks)

        # Remove fits which returned None
        fit_list = [i for i in fit_list if i is not None]

        # Clean up
        os.system("rm -Rf {}".format(tmp0))
        os.system("rm -Rf /tmp/tmp.*")

        # Make histogram of fits as a function of radius
        radius_histogram = [0] * 10
        for fit in fit_list:
            radius_histogram[int(fit['radius'] * 10)] += 1

        logging.info("Fit histogram vs radius: {}".format(radius_histogram))

        # Reject this image if there are insufficient fits from astrometry.net
        if min(radius_histogram[:5]) < 2:
            logging.info("Insufficient fits to continue")
            continue

        # Fit a gnomonic projection to the image, with barrel correction, to fit all the celestial positions of the
        # image fragments.

        # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/> for more information
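        # The objective function <mismatch> is defined elsewhere in this module. It is assumed to rescale
        # the trial parameters by <parameter_scales>, project each fitted (RA, Dec) back into fractional
        # (x, y) image coordinates via a gnomonic projection with barrel correction, and return the summed
        # squared offset from the measured positions. A minimal sketch of that idea (hypothetical;
        # <gnomonic_project> is not defined in this excerpt):
        #
        #   def mismatch(params_scaled):
        #       params = [params_scaled[i] * parameter_scales[i] for i in range(len(params_scaled))]
        #       offset = 0
        #       for point in fit_list:
        #           x_model, y_model = gnomonic_project(point['ra'], point['dec'], *params)
        #           offset += hypot(x_model - point['x'], y_model - point['y']) ** 2
        #       return offset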

        ra0 = fit_list[0]['ra']
        dec0 = fit_list[0]['dec']
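        # Rescale the fit parameters so that the optimiser works with quantities of order unity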
        parameter_scales = [
            pi / 4, pi / 4, pi / 4, pi / 4, pi / 4, 5e-2, 5e-6  # one scale per fitted parameter
        ]
        parameters_default = [
            ra0, dec0, pi / 4, pi / 4, 0, lens_props.barrel_parameters[2], 0
        ]
        parameters_initial = [
            parameters_default[i] / parameter_scales[i]
            for i in range(len(parameters_default))
        ]
        fitting_result = scipy.optimize.minimize(mismatch,
                                                 parameters_initial,
                                                 method='nelder-mead',
                                                 options={
                                                     'xtol': 1e-8,
                                                     'disp': True,
                                                     'maxiter': 1e8,
                                                     'maxfev': 1e8
                                                 })
        parameters_optimal = fitting_result.x
        parameters_final = [
            parameters_optimal[i] * parameter_scales[i]
            for i in range(len(parameters_default))
        ]

        # Display best fit numbers
        headings = [["Central RA / hr", 12 / pi],
                    ["Central Decl / deg", 180 / pi],
                    ["Image width / deg", 180 / pi],
                    ["Image height / deg", 180 / pi],
                    ["Position angle / deg", 180 / pi], ["barrel_k1", 1],
                    ["barrel_k2", 1]]

        logging.info(
            "Fit achieved to {:d} points with offset of {:.5f}. Best fit parameters were:"
            .format(len(fit_list), fitting_result.fun))
        for i in range(len(parameters_default)):
            logging.info("{0:30s} : {1}".format(
                headings[i][0], parameters_final[i] * headings[i][1]))

        # Reject fit if objective function too large
        if fitting_result.fun > 1e-4:
            logging.info("Rejecting fit as chi-squared too large.")
            continue

        # Reject fit if k1/k2 values are too extreme
        if (abs(parameters_final[5]) > 0.3) or (abs(parameters_final[6]) >
                                                0.1):
            logging.info("Rejecting fit as parameters seem extreme.")
            continue

        # Update observation status
        successful_fits += 1
        user = settings['pigazingUser']
        timestamp = time.time()
        barrel_parameters = [
            parameters_final[2] * 180 / pi, parameters_final[3] * 180 / pi,
            parameters_final[5], parameters_final[6], 0
        ]
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="calibration:lens_barrel_parameters",
                         value=json.dumps(barrel_parameters)))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="calibration:chi_squared",
                                                 value=fitting_result.fun))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="calibration:point_count",
                                                 value=str(radius_histogram)))

    # Commit metadata changes
    db.commit()
    db0.commit()

    # Report how many fits we achieved
    logging.info(
        "Total of {:d} images successfully fitted.".format(successful_fits))

    if successful_fits > 0:
        # Now determine the median lens calibration for each day
        logging.info("Averaging daily fits within period {} to {}".format(
            date_string(utc_min), date_string(utc_max)))
        block_size = 86400
        utc_min = (floor(utc_min / block_size + 0.5) -
                   0.5) * block_size  # Make sure that blocks start at noon
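        # e.g. an observation at 18:00 UTC rounds down to 12:00 UTC the same day, and one at 06:00 UTC
        # rounds down to 12:00 UTC the previous day, so each day-long block runs from noon to noon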
        time_blocks = list(
            np.arange(start=utc_min,
                      stop=utc_max + block_size,
                      step=block_size))

        # Start new block whenever we have a hardware refresh
        conn.execute(
            """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
        results = conn.fetchall()
        for item in results:
            time_blocks.append(item['time'])

        # Make sure that start points for time blocks are in order
        time_blocks.sort()

        for block_index, utc_block_min in enumerate(time_blocks[:-1]):
            utc_block_max = time_blocks[block_index + 1]

            # Select observations with calibration fits
            conn.execute(
                """
SELECT am1.stringValue AS barrel_parameters
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_block_min, utc_block_max))
            results = conn.fetchall()

            logging.info(
                "Averaging fits within period {} to {}: Found {} fits.".format(
                    date_string(utc_block_min), date_string(utc_block_max),
                    len(results)))

            # Average the fits we found
            if len(results) < 3:
                logging.info("Insufficient images to reliably average.")
                continue

            # Pick the median fit
            value_list = {
                'scale_x': [],
                'scale_y': [],
                'barrel_k1': [],
                'barrel_k2': [],
                'barrel_k3': []
            }
            for item in results:
                barrel_parameters = json.loads(item['barrel_parameters'])
                value_list['scale_x'].append(barrel_parameters[0])
                value_list['scale_y'].append(barrel_parameters[1])
                value_list['barrel_k1'].append(barrel_parameters[2])
                value_list['barrel_k2'].append(barrel_parameters[3])
                value_list['barrel_k3'].append(barrel_parameters[4])

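            # Taking the middle element of each sorted list (the upper median for an even count) keeps the
            # estimate robust against occasional outlying fits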
            median_values = {}
            for key, values in value_list.items():
                values.sort()
                median_values[key] = values[len(values) // 2]

            # Print fit information
            logging.info("""\
CALIBRATION FIT from {:2d} images: {}. \
""".format(
                len(results), "; ".join([
                    "{}: {}".format(key, median)
                    for key, median in median_values.items()
                ])))

            # Flush any previous observation status
            flush_calibration(obstory_id=obstory_id,
                              utc_min=utc_block_min - 1,
                              utc_max=utc_block_min + 1)

            # Update observatory status
            user = settings['pigazingUser']
            timestamp = time.time()
            barrel_parameters = [
                median_values['scale_x'], median_values['scale_y'],
                median_values['barrel_k1'], median_values['barrel_k2'],
                median_values['barrel_k3']
            ]
            db.register_obstory_metadata(
                obstory_id=obstory_id,
                key="calibration:lens_barrel_parameters",
                value=json.dumps(barrel_parameters),
                time_created=timestamp,
                metadata_time=utc_block_min,
                user_created=user)
            db.commit()

    # Clean up and exit
    db.commit()
    db.close_db()
    db0.commit()
    conn.close()
    db0.close()
    return
Ejemplo n.º 19
0
# If we're called as a script, run the method constellation_data()
if __name__ == "__main__":
    logging.basicConfig(
        level=logging.INFO,
        stream=sys.stdout,
        format='[%(asctime)s] %(levelname)s:%(filename)s:%(message)s',
        datefmt='%d/%m/%Y %H:%M:%S')
    logger = logging.getLogger(__name__)
    logger.info(__doc__.strip())

    # Open a connection to the database
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Fetch database connection handle
    c = db.con

    c.execute("BEGIN;")

    # Add constellation descriptions, and central positions
    constellation_data(c_=c, logger=logger)

    # Commit changes
    c.execute("COMMIT;")
    db.commit()
Ejemplo n.º 20
0
    def execute_tasks(self):
        global observatories_seen

        # We should select the best image from every N seconds of observing
        period = 1800

        # This makes sure that we have a valid task list
        self.fetch_job_list()

        # Open connection to the database
        db = obsarchive_db.ObservationDatabase(
            file_store_path=settings['dbFilestore'],
            db_host=installation_info['mysqlHost'],
            db_user=installation_info['mysqlUser'],
            db_password=installation_info['mysqlPassword'],
            db_name=installation_info['mysqlDatabase'],
            obstory_id=installation_info['observatoryId'])

        # Select best images for each observatory in turn
        for obstory_id in observatories_seen:
            utc_start = floor(
                observatories_seen[obstory_id]['utc_min'] / period) * period
            utc_end = ceil(
                observatories_seen[obstory_id]['utc_max'] / period) * period

            # Remove featured flag from any time-lapse images that are already highlighted
            db.con.execute(
                """
UPDATE archive_observations SET featured=0
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:timelapse/')
      AND obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_start - 1, utc_end + 1))

            # Loop over each block of <period> seconds within the time period for which we have new observations
            for hour in numpy.arange(utc_start, utc_end - 1, period):

                # Select the time-lapse image with the best sky clarity within each block
                db.con.execute(
                    """
SELECT o.uid
FROM archive_files f
INNER JOIN archive_observations o ON f.observationId = o.uid
INNER JOIN archive_metadata m ON f.uid = m.fileId AND
           m.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='pigazing:skyClarity')
WHERE o.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:timelapse/')
      AND obsTime BETWEEN %s AND %s
ORDER BY m.floatValue DESC LIMIT 1;
""", (obstory_id, hour, hour + period))

                # Feature that image
                for item in db.con.fetchall():
                    db.con.execute(
                        "UPDATE archive_observations SET featured=1 WHERE uid=%s;",
                        (item['uid'], ))

        # Close connection to the database
        db.commit()
        db.close_db()
        del db
Ejemplo n.º 21
0
    def fetch_job_list_by_time_stamp(self):
        """
        Fetch list of input files we need to operate on, and sort them by time stamp. Files with the same time stamp
        correspond to the same "observation" and so will be grouped together in the database.

        :return:
            None
        """

        global observatories_seen

        # Open connection to the database
        db = obsarchive_db.ObservationDatabase(
            file_store_path=settings['dbFilestore'],
            db_host=installation_info['mysqlHost'],
            db_user=installation_info['mysqlUser'],
            db_password=installation_info['mysqlPassword'],
            db_name=installation_info['mysqlDatabase'],
            obstory_id=installation_info['observatoryId'])

        # Path to data directory
        data_dir = os.path.join(settings['pythonPath'], '../datadir/')

        # Fetch list of input files, and sort them by time stamp
        jobs_by_time = {}
        for glob_pattern in self.glob_patterns():
            for input_file in sorted(
                    glob.glob(os.path.join(data_dir,
                                           glob_pattern['wildcard']))):

                # Collect metadata associated with this input file
                try:
                    input_metadata_file, obstory_info, input_metadata = metadata_file_to_dict(
                        db_handle=db,
                        product_filename=input_file,
                        required=True)
                except AssertionError:
                    logging.error(
                        "Invalid metadata for file <{}>".format(input_file))
                    continue

                # Properties that specify what command to run to complete this task, and what output it produces
                job_descriptor = {
                    'input_file':
                    input_file,
                    'input_file_without_extension':
                    os.path.splitext(os.path.split(input_file)[1])[0],
                    'input_metadata_filename':
                    input_metadata_file,
                    'input_metadata':
                    input_metadata,
                    'metadata_fields_to_propagate':
                    self.propagate_metadata_to_observation(
                        metadata=input_metadata),
                    'shell_command':
                    self.shell_command(),
                    'data_dir':
                    data_dir,
                    'h264_encoder':
                    'openmax' if settings['i_am_a_rpi'] else 'libav',
                    'output_file_wildcards':
                    self.output_file_wildcards(input_file)
                }

                # Work out the time stamp of this job from the input file's filename
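                # The timestamp is zero-padded to a fixed width so that sorting the keys lexicographically
                # also orders them chronologically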
                obstory_id = input_metadata['obstoryId']
                utc = input_metadata['utc']
                time_stamp_string = "{:014.1f}_{}".format(
                    filename_to_utc(filename=input_file), obstory_id)

                # If we haven't had any jobs at the time stamp before, create an empty list for this time stamp
                if time_stamp_string not in jobs_by_time:
                    jobs_by_time[time_stamp_string] = {
                        'obs_id': obstory_id,
                        'utc': utc,
                        'obs_type': glob_pattern['obs_type'],
                        'user_id': obstory_info['userId'],
                        'must_quit_by': self.must_quit_by,
                        'job_list': []
                    }

                # Append this job to list of others with the same time stamp
                jobs_by_time[time_stamp_string]['job_list'].append(
                    job_descriptor)

                # Record that we've seen this observatory at this time
                if obstory_id not in observatories_seen:
                    observatories_seen[obstory_id] = {
                        'utc_min': utc,
                        'utc_max': utc
                    }

                if utc < observatories_seen[obstory_id]['utc_min']:
                    observatories_seen[obstory_id]['utc_min'] = utc
                if utc > observatories_seen[obstory_id]['utc_max']:
                    observatories_seen[obstory_id]['utc_max'] = utc

        # Close database connection
        db.commit()
        db.close_db()

        return jobs_by_time
Ejemplo n.º 22
0
def execute_shell_command(arguments):
    """
    Run a shell command to complete some task. Import the resulting file products into the database, and then delete
    them.

    :param arguments:
        Dictionary of the arguments associated with each command we are to run
    :return:
        None
    """

    # If we have run out of time, exit immediately
    if (arguments['must_quit_by']
            is not None) and (time.time() > arguments['must_quit_by']):
        return

    # Compile a list of all of the output files we have generated
    file_inputs = []
    file_products = []

    # Loop over all the input files associated with this time stamp
    for job in arguments['job_list']:

        # If this job requires a clipping mask, we create that now
        if 'mask_file' in job['shell_command']:
            # Open connection to the database
            db = obsarchive_db.ObservationDatabase(
                file_store_path=settings['dbFilestore'],
                db_host=installation_info['mysqlHost'],
                db_user=installation_info['mysqlUser'],
                db_password=installation_info['mysqlPassword'],
                db_name=installation_info['mysqlDatabase'],
                obstory_id=installation_info['observatoryId'])

            # Fetch observatory status
            obstory_status = db.get_obstory_status(
                obstory_id=arguments['obs_id'], time=arguments['utc'])

            # Close database connection
            db.close_db()
            del db

            # Export the clipping mask to a plain-text file (one polygon per blank-line-separated block)
            mask_file = "/tmp/mask_{}_{}.txt".format(os.getpid(),
                                                     str(uuid.uuid4()))
            with open(mask_file, "w") as f:
                f.write("\n\n".join([
                    "\n".join([("%d %d" % tuple(p)) for p in point_list])
                    for point_list in json.loads(obstory_status['clipping_region'])
                ]))
            job['mask_file'] = mask_file

        # Make settings available as string substitutions
        job['settings'] = settings

        # Make sure that output directories exist
        for output_file_wildcard in job['output_file_wildcards']:
            output_path = os.path.join(
                job['data_dir'],
                os.path.split(output_file_wildcard['wildcard'])[0])
            os.system("mkdir -p {}".format(output_path))

        # Run the shell command
        command = job['shell_command'].format(**job)
        print(command)
        result = subprocess.run(command, shell=True, stderr=subprocess.PIPE)
        errors = result.stderr.decode('utf-8').strip()

        # Check for errors
        if errors:
            logging.error("Error processing file <{}>: <{}>".format(
                job['input_file'], errors))

        # Compile list of all the input files we have processed
        for item in (job['input_file'], job['input_metadata_filename']):
            if item is not None:
                file_inputs.append(item)

        # Fetch list of output files we have created
        for output_file_wildcard in job['output_file_wildcards']:
            output_path = os.path.join(job['data_dir'],
                                       output_file_wildcard['wildcard'])
            for output_file in glob.glob(output_path):
                file_products.append({
                    'filename':
                    output_file,
                    'mime_type':
                    output_file_wildcard['mime_type'],
                    'input_file_metadata':
                    job['input_metadata'],
                    'propagate_metadata':
                    job['metadata_fields_to_propagate'],
                    'metadata_files': []
                })

    # Only add anything to the database if we created some output files
    if len(file_products) > 0:
        # Open connection to the database
        db = obsarchive_db.ObservationDatabase(
            file_store_path=settings['dbFilestore'],
            db_host=installation_info['mysqlHost'],
            db_user=installation_info['mysqlUser'],
            db_password=installation_info['mysqlPassword'],
            db_name=installation_info['mysqlDatabase'],
            obstory_id=installation_info['observatoryId'])

        # Collect metadata associated with this observation
        observation_metadata = {}
        for output_file in file_products:

            # Collect metadata associated with this output file
            try:
                product_metadata_file, obstory_info, product_metadata = metadata_file_to_dict(
                    db_handle=db,
                    product_filename=output_file['filename'],
                    input_metadata=output_file['input_file_metadata'],
                    required=False)
            except AssertionError:
                logging.error("Invalid metadata for file <{}>".format(
                    output_file['filename']))
                continue

            # Store metadata associated with this file
            metadata_objs = metadata_to_object_list(
                db_handle=db,
                obs_time=arguments['utc'],
                obs_id=arguments['obs_id'],
                user_id=arguments['user_id'],
                meta_dict=product_metadata)

            if product_metadata_file is not None:
                output_file['metadata_files'].append(product_metadata_file)

            output_file['product_metadata'] = product_metadata
            output_file['metadata_objs'] = metadata_objs
            output_file['obstory_info'] = obstory_info

            # Check which fields this file propagates to its parent observation
            for field_to_propagate in output_file['propagate_metadata']:
                if field_to_propagate in product_metadata:
                    observation_metadata[
                        field_to_propagate] = product_metadata[
                            field_to_propagate]

        # Turn metadata associated with this observation into database metadata objects
        metadata_objs = metadata_to_object_list(db_handle=db,
                                                obs_time=arguments['utc'],
                                                obs_id=arguments['obs_id'],
                                                user_id=arguments['user_id'],
                                                meta_dict=observation_metadata)

        # Import file products into the database
        obs_obj = db.register_observation(obstory_id=arguments['obs_id'],
                                          random_id=False,
                                          obs_time=arguments['utc'],
                                          creation_time=time.time(),
                                          obs_type=arguments['obs_type'],
                                          user_id=arguments['user_id'],
                                          obs_meta=metadata_objs,
                                          published=1,
                                          moderated=1,
                                          featured=0,
                                          ra=-999,
                                          dec=-999,
                                          field_width=None,
                                          field_height=None,
                                          position_angle=None,
                                          central_constellation=None,
                                          altitude=-999,
                                          azimuth=-999,
                                          alt_az_pa=None,
                                          astrometry_processed=None,
                                          astrometry_processing_time=None,
                                          astrometry_source=None)
        obs_id = obs_obj.id

        for output_file in file_products:
            # The semantic types which we should make the primary images of their parent observations
            primary_image_type_list = (
                'pigazing:movingObject/maximumBrightness',
                'pigazing:timelapse/backgroundSubtracted')

            db.register_file(
                file_path=output_file['filename'],
                user_id=output_file['obstory_info']['userId'],
                mime_type=output_file['mime_type'],
                semantic_type=output_file['product_metadata']['semanticType'],
                primary_image=output_file['product_metadata']['semanticType']
                in primary_image_type_list,
                file_time=arguments['utc'],
                file_meta=output_file['metadata_objs'],
                observation_id=obs_id,
                random_id=False)

        # Close connection to the database
        db.commit()
        db.close_db()
        del db

    # Delete input files
    for item in file_inputs:
        if os.path.exists(item):
            os.unlink(item)

    # Delete output files
    for item in file_products:
        if os.path.exists(item['filename']):
            os.unlink(item['filename'])
        for metadata_filename in item['metadata_files']:
            if os.path.exists(metadata_filename):
                os.unlink(metadata_filename)
Ejemplo n.º 23
0
def add_observatory_status(metadata):
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Make sure that observatory exists in known_observatories list
    assert metadata['obstory_id'] in known_observatories

    hw = hardware_properties.HardwareProps(
        path=os.path.join(settings['pythonPath'], "..", "configuration_global",
                          "camera_properties"))

    obstory_id_list = db.get_obstory_ids()

    metadata_existing = {}

    # If observatory already exists, get existing metadata fields
    if metadata['obstory_id'] in obstory_id_list:
        metadata_existing = db.get_obstory_status(
            obstory_id=metadata['obstory_id'], time=metadata['utc'])

    metadata = {**metadata_existing, **metadata}

    # If input parameters have not been supplied, read the defaults from configuration file
    if "latitude" not in metadata:
        metadata['latitude'] = known_observatories[
            metadata['obstory_id']]['latitude']
    if "longitude" not in metadata:
        metadata['longitude'] = known_observatories[
            metadata['obstory_id']]['longitude']
    if "obstory_name" not in metadata:
        metadata['obstory_name'] = known_observatories[
            metadata['obstory_id']]['observatoryName']
    if "camera" not in metadata:
        metadata['camera'] = known_observatories[
            metadata['obstory_id']]['defaultCamera']
    if "lens" not in metadata:
        metadata['lens'] = known_observatories[
            metadata['obstory_id']]['defaultLens']
    if "software_version" not in metadata:
        metadata['software_version'] = settings['softwareVersion']
    if ("username" not in metadata) or (metadata['username'] is None):
        metadata['username'] = known_observatories[
            metadata['obstory_id']]['owner']

    # If observatory doesn't exist yet, create a new observatory
    if metadata['obstory_id'] not in obstory_id_list:
        # Create new observatory
        db.register_obstory(obstory_id=metadata['obstory_id'],
                            obstory_name=metadata['obstory_name'],
                            latitude=metadata['latitude'],
                            longitude=metadata['longitude'],
                            owner=metadata['username'])

    for item, value in metadata.items():
        if item not in [
                "obstory_id", "username", "utc", "latitude", "longitude",
                "name"
        ]:

            # Camera and lens updates are handled via the hardware properties database
            if item == "camera":
                hw.update_camera(db=db,
                                 obstory_id=metadata['obstory_id'],
                                 utc=metadata['utc'],
                                 name=value)

            elif item == "lens":
                hw.update_lens(db=db,
                               obstory_id=metadata['obstory_id'],
                               utc=metadata['utc'],
                               name=value)

            # Register arbitrary metadata
            else:
                db.register_obstory_metadata(obstory_id=metadata['obstory_id'],
                                             key=item,
                                             value=value,
                                             metadata_time=metadata['utc'],
                                             time_created=time.time(),
                                             user_created=metadata['username'])

    # Commit changes to database
    db.commit()
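
A minimal sketch of how this routine might be invoked, assuming the surrounding script has already loaded known_observatories and settings; the observatory ID and camera name below are hypothetical placeholders.

# Hypothetical invocation: record a new status, valid from now onwards, for an
# observatory that is already listed in known_observatories.
example_status = {
    'obstory_id': 'example_observatory',  # hypothetical ID; must appear in known_observatories
    'utc': time.time(),                   # time from which this status applies
    'camera': 'example_camera_model'      # hypothetical camera name known to HardwareProps
}
add_observatory_status(metadata=example_status)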
Ejemplo n.º 24
0
def new_image(image, username, observatory, title, semantic_type='Original', time_offset=0):
    """
    Insert an image file into the database.

    :param image:
        The filename of the image to be inserted
    :param username:
        The username of the user who is to own this image
    :param observatory:
        The observatory from which this observation was made
    :param title:
        The title of this image
    :param semantic_type:
        The semantic type of this image file, e.g. "Original"
    :param time_offset:
        Time offset to apply to image, seconds (positive means we move time forwards).
    :type time_offset:
        int
    :return:
        None
    """
    our_path = os_path.split(os_path.abspath(__file__))[0]

    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Fetch user ID
    conn.execute('SELECT userId FROM pigazing_users WHERE username=%s;', (username,))
    results = conn.fetchall()
    assert len(results) > 0, "No such user <{}>".format(username)

    # Fetch observatory ID
    conn.execute('SELECT uid FROM archive_observatories WHERE publicId=%s;', (observatory,))
    results = conn.fetchall()
    assert len(results) > 0, "No such observatory <{}>".format(observatory)

    # Look up image EXIF metadata
    metadata = fetch_exif_metadata(input_path=image, time_offset=time_offset)

    # Create observation object for this image
    utc = time.time()
    obs_obj = db.register_observation(obstory_id=observatory,
                                      random_id=True,
                                      obs_time=metadata['Time'],
                                      creation_time=utc,
                                      obs_type="image", user_id=username,
                                      obs_meta=[],
                                      published=1, moderated=1, featured=0,
                                      ra=-999, dec=-999,
                                      field_width=None, field_height=None,
                                      position_angle=None, central_constellation=None,
                                      altitude=-999, azimuth=-999, alt_az_pa=None,
                                      astrometry_processed=None, astrometry_processing_time=None,
                                      astrometry_source=None)

    # Create metadata about image
    obs_id = obs_obj.id
    db.set_observation_metadata(username, obs_id, obsarchive_model.Meta("Observer", username))
    db.set_observation_metadata(username, obs_id, obsarchive_model.Meta("Caption", title))

    for key, value in metadata.items():
        db.set_observation_metadata(user_id=username,
                                    observation_id=obs_id,
                                    meta=obsarchive_model.Meta(key, value))

    db.commit()

    # Make copy of file
    tmp_file_path = os_path.join(our_path, "../auto/tmp/dss_images")
    os.system("mkdir -p {}".format(tmp_file_path))
    img_name = os_path.split(image)[1]
    tmp_filename = os_path.join(tmp_file_path, img_name)
    os.system("cp '{}' '{}'".format(image, tmp_filename))
    os.system("chmod 644 '{}'".format(tmp_filename))

    # Create file object for this image
    file_obj = db.register_file(file_path=tmp_filename, user_id=username, mime_type="image/png",
                                semantic_type=semantic_type, primary_image=True,
                                file_time=metadata['Time'], file_meta=[],
                                observation_id=obs_id,
                                random_id=True)
    db.commit()
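
For illustration, a hedged example of inserting a single image with this helper; the file path, username and observatory ID are hypothetical and must already exist in the database.

# Hypothetical call: insert one PNG owned by an existing user into an existing observatory
new_image(image="/home/pi/example_photos/night_sky.png",  # hypothetical path
          username="example_user",                        # must exist in pigazing_users
          observatory="example_observatory",              # must exist in archive_observatories
          title="Example wide-field view")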
Ejemplo n.º 25
0
def frame_drop_detection(utc_min, utc_max):
    """
    Detect video frame drop events between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should search for video frame drop (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should search for video frame drop (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Starting video frame drop detection.")

    # Count the outcomes of the videos we analyse
    outcomes = {
        'frame_drop_events': 0,
        'non_frame_drop_events': 0,
        'error_records': 0,
        'rescued_records': 0
    }

    # Status update
    logging.info("Searching for frame drops within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for moving objects within this time period
    conn.execute(
        """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory, am6.stringValue AS type
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
LEFT OUTER JOIN archive_metadata am6 ON ao.uid = am6.observationId AND
    am6.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s
ORDER BY ao.obsTime
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Log how many videos we are going to work on
    logging.info("Searching for dropped frames within {:d} videos.".format(
        len(results)))

    # Analyse each video in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {item.key: item.value for item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if ('pigazing:path' not in all_metadata) or ('pigazing:videoStart'
                                                     not in all_metadata):
            logging.info(
                "Cannot process <{}> due to inadequate metadata.".format(
                    item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}/{type:16s}]".format(
            date=date_string(utc=item['obsTime']),
            obs=item['observationId'],
            type=item['type'] if item['type'] is not None else '')

        # Read path of the moving object in pixel coordinates
        path_json = all_metadata['pigazing:path']
        try:
            path_x_y = json.loads(path_json)
        except json.decoder.JSONDecodeError:
            # Attempt JSON repair; sometimes JSON content gets truncated
            original_json = path_json
            fixed_json = "],[".join(original_json.split("],[")[:-1]) + "]]"
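            # Worked example of this repair on a hypothetical truncated path:
            #   original_json = '[[12,34,8,1600000000.0],[13,35,8,1600000000.1],[14,3'
            #   splitting on "],[", dropping the broken final chunk and re-closing gives
            #   fixed_json    = '[[12,34,8,1600000000.0],[13,35,8,1600000000.1]]'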
            try:
                path_x_y = json.loads(fixed_json)

                # logging.info("{prefix} -- RESCUE: In: {detections:.0f} / {duration:.1f} sec; "
                #              "Rescued: {count:d} / {json_span:.1f} sec".format(
                #     prefix=logging_prefix,
                #     detections=all_metadata['pigazing:detections'],
                #     duration=all_metadata['pigazing:duration'],
                #     count=len(path_x_y),
                #     json_span=path_x_y[-1][3] - path_x_y[0][3]
                # ))
                outcomes['rescued_records'] += 1
            except json.decoder.JSONDecodeError:
                logging.info(
                    "{prefix} -- !!! JSON error".format(prefix=logging_prefix))
                outcomes['error_records'] += 1
                continue

        # Check number of points in path
        path_len = len(path_x_y)

        # Make list of object speed at each point
        path_speed = []  # pixels/sec
        path_distance = []
        for i in range(path_len - 1):
            pixel_distance = hypot(path_x_y[i + 1][0] - path_x_y[i][0],
                                   path_x_y[i + 1][1] - path_x_y[i][1])
            time_interval = (path_x_y[i + 1][3] - path_x_y[i][3]) + 1e-8
            speed = pixel_distance / time_interval
            path_speed.append(speed)
            path_distance.append(pixel_distance)

        # Start making a list of frame-drop events
        frame_drop_points = []

        # Scan through for points with anomalously high speed
        scan_half_window = 4
        for i in range(len(path_speed)):
            scan_min = max(0, i - scan_half_window)
            scan_max = min(scan_min + 2 * scan_half_window,
                           len(path_speed) - 1)
            median_speed = max(np.median(path_speed[scan_min:scan_max]), 1)
            if (path_distance[i] > 16) and (path_speed[i] > 4 * median_speed):
                break_time = np.mean([path_x_y[i + 1][3], path_x_y[i][3]])
                video_time = break_time - all_metadata['pigazing:videoStart']
                break_distance = path_distance[i]
                # significance = path_speed[i]/median_speed
                frame_drop_points.append([
                    i + 1,
                    float("%.4f" % break_time),
                    float("%.1f" % video_time),
                    round(break_distance)
                ])

        # Report result
        if len(frame_drop_points) > 0:
            logging.info("{prefix} -- {x}".format(prefix=logging_prefix,
                                                  x=frame_drop_points))

        # Store frame-drop list
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="frame_drop:list",
                                        value=json.dumps(frame_drop_points)))

        # Video successfully analysed
        if len(frame_drop_points) == 0:
            outcomes['non_frame_drop_events'] += 1
        else:
            outcomes['frame_drop_events'] += 1

        # Update database
        db.commit()

    # Report how many videos we analysed
    logging.info("{:d} videos with frame-drop.".format(
        outcomes['frame_drop_events']))
    logging.info("{:d} videos without frame-drop.".format(
        outcomes['non_frame_drop_events']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
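
A minimal sketch of a command-line wrapper for this routine, in the style of the other scripts in this collection; the argument names and defaults are illustrative rather than taken from the original script.

# Hypothetical command-line wrapper (argument names are illustrative)
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--utc-min', dest='utc_min', type=float, default=time.time() - 86400,
                        help="Only search for frame drops after this unix time")
    parser.add_argument('--utc-max', dest='utc_max', type=float, default=time.time(),
                        help="Only search for frame drops before this unix time")
    args = parser.parse_args()

    frame_drop_detection(utc_min=args.utc_min, utc_max=args.utc_max)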
Ejemplo n.º 26
0
def satellite_determination(utc_min, utc_max):
    """
    Estimate the identity of spacecraft observed between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should determine the identity of spacecraft (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the identity of spacecraft (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Starting satellite identification.")

    # Count how many moving objects we manage to successfully identify
    outcomes = {
        'successful_fits': 0,
        'unsuccessful_fits': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Status update
    logging.info("Searching for satellites within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for satellites within this time period
    conn.execute(
        """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
INNER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s AND
      (am2.stringValue='Plane' OR am2.stringValue='Satellite' OR am2.stringValue='Junk')
ORDER BY ao.obsTime
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Log how many spacecraft we are going to work on
    logging.info("Estimating the identity of {:d} spacecraft.".format(
        len(results)))

    # Analyse each spacecraft in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {item.key: item.value for item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if 'pigazing:path' not in all_metadata:
            logging.info(
                "Cannot process <{}> due to inadequate metadata.".format(
                    item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}]".format(
            date=date_string(utc=item['obsTime']), obs=item['observationId'])

        # Project path from (x,y) coordinates into (RA, Dec)
        projector = PathProjection(db=db,
                                   obstory_id=item['observatory'],
                                   time=item['obsTime'],
                                   logging_prefix=logging_prefix)

        path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list_this = projector.ra_dec_from_x_y(
            path_json=all_metadata['pigazing:path'],
            path_bezier_json=all_metadata['pigazing:pathBezier'],
            detections=all_metadata['pigazing:detectionCount'],
            duration=all_metadata['pigazing:duration'])

        # Check for error
        if projector.error is not None:
            if projector.error in outcomes:
                outcomes[projector.error] += 1
            continue

        # Check for notifications
        for notification in projector.notifications:
            if notification in outcomes:
                outcomes[notification] += 1

        # Check number of points in path
        path_len = len(path_x_y)

        # Look up list of satellite orbital elements at the time of this sighting
        spacecraft_list = fetch_satellites(utc=item['obsTime'])

        # List of candidate satellites this object might be
        candidate_satellites = []

        # Check that we found a list of spacecraft
        if spacecraft_list is None:
            logging.info(
                "{date} [{obs}] -- No spacecraft records found.".format(
                    date=date_string(utc=item['obsTime']),
                    obs=item['observationId']))
            outcomes['insufficient_information'] += 1
            continue

        # Logging message about how many spacecraft we're testing
        # logging.info("{date} [{obs}] -- Matching against {count:7d} spacecraft.".format(
        #     date=date_string(utc=item['obsTime']),
        #     obs=item['observationId'],
        #     count=len(spacecraft_list)
        # ))

        # Test for each candidate satellite in turn
        for spacecraft in spacecraft_list:
            # Unit scaling
            deg2rad = pi / 180.0  # 0.0174532925199433
            xpdotp = 1440.0 / (2.0 * pi)  # 229.1831180523293

            # Model the path of this spacecraft
            model = Satrec()
            model.sgp4init(
                # whichconst: gravity model
                WGS72,

                # opsmode: 'a' = old AFSPC mode, 'i' = improved mode
                'i',

                # satnum: Satellite number
                spacecraft['noradId'],

                # epoch: days since 1949 December 31 00:00 UT
                jd_from_unix(spacecraft['epoch']) - 2433281.5,

                # bstar: drag coefficient (/earth radii)
                spacecraft['bStar'],

                # ndot (NOT USED): ballistic coefficient (revs/day)
                spacecraft['meanMotionDot'] / (xpdotp * 1440.0),

                # nddot (NOT USED): mean motion 2nd derivative (revs/day^3)
                spacecraft['meanMotionDotDot'] / (xpdotp * 1440.0 * 1440),

                # ecco: eccentricity
                spacecraft['ecc'],

                # argpo: argument of perigee (radians)
                spacecraft['argPeri'] * deg2rad,

                # inclo: inclination (radians)
                spacecraft['incl'] * deg2rad,

                # mo: mean anomaly (radians)
                spacecraft['meanAnom'] * deg2rad,

                # no_kozai: mean motion (radians/minute)
                spacecraft['meanMotion'] / xpdotp,

                # nodeo: right ascension of ascending node (radians)
                spacecraft['RAasc'] * deg2rad)

            # Wrap within skyfield to convert to topocentric coordinates
            ts = load.timescale()
            sat = EarthSatellite.from_satrec(model, ts)

            # Fetch spacecraft position at each time point along trajectory
            ang_mismatch_list = []
            distance_list = []

            # e, r, v = model.sgp4(jd_from_unix(utc=item['obsTime']), 0)
            # logging.info("{} {} {}".format(str(e), str(r), str(v)))
            tai_utc_offset = 39  # seconds

            def satellite_angular_offset(index, clock_offset):
                # Fetch observed position of object at this time point
                pt_utc = path_x_y[index][3]
                pt_alt = path_alt_az[index][0]
                pt_az = path_alt_az[index][1]

                # Project position of this satellite in space at this time point
                t = ts.tai_jd(jd=jd_from_unix(utc=pt_utc + tai_utc_offset +
                                              clock_offset))

                # Project position of this satellite in the observer's sky
                sight_line = sat - observer
                topocentric = sight_line.at(t)
                sat_alt, sat_az, sat_distance = topocentric.altaz()

                # Work out offset of satellite's position from observed moving object
                ang_mismatch = ang_dist(ra0=pt_az * pi / 180,
                                        dec0=pt_alt * pi / 180,
                                        ra1=sat_az.radians,
                                        dec1=sat_alt.radians) * 180 / pi

                return ang_mismatch, sat_distance

            def time_offset_objective(p):
                """
                Objective function that we minimise in order to find the best fit clock offset between the observed
                and model paths.

                :param p:
                    Vector with a single component: the clock offset
                :return:
                    Metric to minimise
                """

                # Turn input parameters into a time offset
                clock_offset = p[0]

                # Look up angular offset
                ang_mismatch, sat_distance = satellite_angular_offset(
                    index=0, clock_offset=clock_offset)

                # Return metric to minimise
                return ang_mismatch * exp(clock_offset / 8)

            # First, chuck out satellites with large angular offsets
            observer = wgs84.latlon(
                latitude_degrees=projector.obstory_info['latitude'],
                longitude_degrees=projector.obstory_info['longitude'],
                elevation_m=0)

            ang_mismatch, sat_distance = satellite_angular_offset(
                index=0, clock_offset=0)

            # Check angular offset is reasonable
            if ang_mismatch > global_settings['max_angular_mismatch']:
                continue

            # Work out the optimum time offset between the satellite's path and the observed path
            # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/>
            # for more information about how this works
            parameters_initial = [0]
            parameters_optimised = scipy.optimize.minimize(
                time_offset_objective,
                np.asarray(parameters_initial),
                options={
                    'disp': False,
                    'maxiter': 100
                }).x

            # Construct best-fit linear trajectory for best-fitting parameters
            clock_offset = float(parameters_optimised[0])

            # Check clock offset is reasonable
            if abs(clock_offset) > global_settings['max_clock_offset']:
                continue

            # Measure the offset between the satellite's position and the observed position at each time point
            for index in range(path_len):
                # Look up angular mismatch at this time point
                ang_mismatch, sat_distance = satellite_angular_offset(
                    index=index, clock_offset=clock_offset)

                # Keep list of the offsets at each recorded time point along the trajectory
                ang_mismatch_list.append(ang_mismatch)
                distance_list.append(sat_distance.km)

            # Consider adding this satellite to list of candidates
            mean_ang_mismatch = np.mean(np.asarray(ang_mismatch_list))
            distance_mean = np.mean(np.asarray(distance_list))

            if mean_ang_mismatch < global_settings['max_mean_angular_mismatch']:
                candidate_satellites.append({
                    'name': spacecraft['name'],  # string
                    'noradId': spacecraft['noradId'],  # int
                    'distance': distance_mean,  # km
                    'clock_offset': clock_offset,  # seconds
                    'offset': mean_ang_mismatch,  # degrees
                    'absolute_magnitude': spacecraft['mag']
                })

        # Add model possibility for null satellite
        candidate_satellites.append({
            'name': "Unidentified",
            'noradId': 0,
            # Nothing is visible beyond 25% of the geostationary orbit distance
            'distance': 35.7e3 * 0.25,
            'clock_offset': 0,
            'offset': 0,
            'absolute_magnitude': None
        })

        # Sort candidates by score - use absolute mag = 20 for satellites with no mag
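        # The score combines distance (in thousands of km), fitted clock offset (seconds) and
        # absolute magnitude in quadrature; the candidate with the smallest score is treated as
        # the most likely identification.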
        for candidate in candidate_satellites:
            candidate['score'] = hypot(
                candidate['distance'] / 1e3,
                candidate['clock_offset'],
                (20 if candidate['absolute_magnitude'] is None else
                 candidate['absolute_magnitude']),
            )
        candidate_satellites.sort(key=itemgetter('score'))

        # Report possible satellite identifications
        logging.info("{prefix} -- {satellites}".format(
            prefix=logging_prefix,
            satellites=", ".join([
                "{} ({:.1f} deg offset; clock offset {:.1f} sec)".format(
                    satellite['name'], satellite['offset'],
                    satellite['clock_offset'])
                for satellite in candidate_satellites
            ])))

        # Identify most likely satellite
        most_likely_satellite = candidate_satellites[0]

        # Store satellite identification
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="satellite:name",
                                        value=most_likely_satellite['name']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="satellite:norad_id",
                         value=most_likely_satellite['noradId']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="satellite:clock_offset",
                         value=most_likely_satellite['clock_offset']))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="satellite:angular_offset",
                                        value=most_likely_satellite['offset']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="satellite:path_length",
                         value=ang_dist(ra0=path_ra_dec_at_epoch[0][0],
                                        dec0=path_ra_dec_at_epoch[0][1],
                                        ra1=path_ra_dec_at_epoch[-1][0],
                                        dec1=path_ra_dec_at_epoch[-1][1]) *
                         180 / pi))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(
                key="satellite:path_ra_dec",
                value="[[{:.3f},{:.3f}],[{:.3f},{:.3f}],[{:.3f},{:.3f}]]".
                format(
                    path_ra_dec_at_epoch[0][0] * 12 / pi,
                    path_ra_dec_at_epoch[0][1] * 180 / pi,
                    path_ra_dec_at_epoch[int(path_len / 2)][0] * 12 / pi,
                    path_ra_dec_at_epoch[int(path_len / 2)][1] * 180 / pi,
                    path_ra_dec_at_epoch[-1][0] * 12 / pi,
                    path_ra_dec_at_epoch[-1][1] * 180 / pi,
                )))

        # Satellite successfully identified
        if most_likely_satellite['name'] == "Unidentified":
            outcomes['unsuccessful_fits'] += 1
        else:
            outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} satellites successfully identified.".format(
        outcomes['successful_fits']))
    logging.info("{:d} satellites not identified.".format(
        outcomes['unsuccessful_fits']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))
    logging.info("{:d} satellites with incomplete data.".format(
        outcomes['insufficient_information']))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
Ejemplo n.º 27
0
def list_trigger_rate(utc_min, utc_max, obstory):
    """
    Compile a histogram of the rate of camera triggers, and the rate of time lapse images, over time.

    :param utc_min:
        The unix time at which to start searching
    :param utc_max:
        The unix time at which to end searching
    :param obstory:
        The publicId of the observatory we are to make the histogram for
    :return:
        None
    """

    # Create a Pi Gazing database handle
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    # Check that requested observatory exists
    try:
        obstory_info = db.get_obstory_from_id(obstory_id=obstory)
    except ValueError:
        print("Unknown observatory <{}>. Run ./listObservatories.py to see a list of available observatories.".
              format(obstory))
        sys.exit(0)

    # Search for time-lapse images from this observatory
    search = obsarchive_model.FileRecordSearch(obstory_ids=[obstory],
                                               semantic_type="pigazing:timelapse/backgroundSubtracted",
                                               time_min=utc_min, time_max=utc_max,
                                               limit=1000000)
    files = db.search_files(search)
    files = files['files']
    files.sort(key=lambda x: x.file_time)

    # Search for moving objects seen by this observatory
    search = obsarchive_model.ObservationSearch(obstory_ids=[obstory],
                                                observation_type="pigazing:movingObject/",
                                                time_min=utc_min, time_max=utc_max,
                                                limit=1000000)
    events = db.search_observations(search)
    events = events['obs']

    # Convert list of events and images into a histogram
    histogram = {}

    # Loop over time-lapse images populating histogram
    for f in files:
        utc = f.file_time
        hour_start = math.floor(utc / 3600) * 3600
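        # e.g. utc = 1600000123.4 rounds down to hour_start = 1599998400 (the start of that UTC hour)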
        if hour_start not in histogram:
            histogram[hour_start] = {'events': [], 'images': []}
        histogram[hour_start]['images'].append(f)

    # Loop over moving objects populating histogram
    for e in events:
        utc = e.obs_time
        hour_start = math.floor(utc / 3600) * 3600
        if hour_start not in histogram:
            histogram[hour_start] = {'events': [], 'images': []}
        histogram[hour_start]['events'].append(e)

    # Find time bounds of data
    keys = list(histogram.keys())
    keys.sort()
    if len(keys) == 0:
        print("No results found for observatory <{}>".format(obstory))
        sys.exit(0)
    utc_min = keys[0]
    utc_max = keys[-1]

    # Render quick and dirty table
    out = sys.stdout
    hour_start = utc_min
    printed_blank_line = True
    out.write("# {:12s} {:4s} {:2s} {:2s} {:2s} {:12s} {:12s} {:12s} {:12s}\n".format("UTC", "Year", "M", "D", "hr",
                                                                                      "N_images", "N_events",
                                                                                      "SkyClarity", "SunAltitude"))

    # Loop over histogram, hour by hour
    while hour_start <= utc_max:
        # If we have data in this hour, print a column in the table
        if hour_start in histogram:
            # Write date and time at start of line
            [year, month, day, h, m, s] = dcf_ast.inv_julian_day(dcf_ast.jd_from_unix(hour_start + 1))
            out.write("  {:12d} {:04d} {:02d} {:02d} {:02d} ".format(hour_start, year, month, day, h))

            d = histogram[hour_start]
            sun_alt = "---"
            sky_clarity = "---"

            # If we have any images, then use them to calculate the mean Sun altitude and sky clarity
            if d['images']:
                # Calculate the mean altitude of the Sun within this time interval
                sun_alt = "{:.1f}".format(sum(get_file_metadata(db, i.id, 'pigazing:sunAlt') for i in d['images']) /
                                          len(d['images']))
                # Calculate the mean sky clarity measurement within this time interval
                sky_clarity = "{:.1f}".format(
                    sum(get_file_metadata(db, i.id, 'pigazing:skyClarity') for i in d['images']) /
                    len(d['images']))

            # Write output line
            if d['images'] or d['events']:
                out.write(
                    "{:12d} {:12d} {:12s} {:12s}\n".format(len(d['images']), len(d['events']), sky_clarity, sun_alt))
                printed_blank_line = False

        # If there is no data in this hour, separate it from previous line with a blank line
        else:
            if not printed_blank_line:
                out.write("\n")
            printed_blank_line = True

        # Move onto the next hour
        hour_start += 3600
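
As a usage illustration, the report might be generated for the previous 24 hours like this (the observatory ID is a hypothetical placeholder):

# Hypothetical invocation: tabulate trigger rates for the last 24 hours
now = time.time()
list_trigger_rate(utc_min=now - 86400, utc_max=now, obstory="example_observatory")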
Ejemplo n.º 28
0
def search_simultaneous_detections(utc_min, utc_max, utc_must_stop):
    # Count how many simultaneous detections we discover
    simultaneous_detections_by_type = {}

    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    # Search for moving objects within time span
    search = mp.ObservationSearch(observation_type="pigazing:movingObject/",
                                  time_min=utc_min,
                                  time_max=utc_max,
                                  limit=1000000)
    events_raw = db.search_observations(search)

    # Use only event descriptors, not other returned fields
    events = events_raw['obs']

    # Make a list of which events are already members of groups
    events_used = [False] * len(events)

    # Look up the categorisation of each event
    for event in events:
        event.category = db.get_observation_metadata(event.id, "web:category")

    # Throw out junk events and unclassified events
    events = [x for x in events if x.category is not None and x.category not in ('Junk', 'Bin')]

    # Look up which pre-existing observation groups each event is in
    for index, event in enumerate(events):
        db.con.execute("""
SELECT COUNT(*)
FROM archive_obs_groups grp
WHERE grp.semanticType = (SELECT y.uid FROM archive_semanticTypes y WHERE y.name=%s) AND
      EXISTS (SELECT 1 FROM archive_obs_group_members x
              WHERE x.groupId=grp.uid AND
                    x.childObservation=(SELECT z.uid FROM archive_observations z WHERE z.publicId=%s));
""", (simultaneous_event_type, event.id))

        if db.con.fetchone()['COUNT(*)'] > 0:
            events_used[index] = True

    # Sort event descriptors into chronological order
    events.sort(key=lambda x: x.obs_time)

    # Look up the duration of each event, and calculate its end time
    for event in events:
        duration = 0
        for meta in event.meta:
            if meta.key == "pigazing:duration":
                duration = meta.value
        event.duration = duration
        event.obs_time_end = event.obs_time + duration

    # Compile list of simultaneous object detections
    groups = []

    # Search for simultaneous object detections
    for index in range(len(events)):
        # If we have already put this event in another simultaneous detection, don't add it to others
        if events_used[index]:
            continue

        # Look up time span of event
        event = events[index]
        obstory_id_list = [event.obstory_id]  # List of all observatories which saw this event
        utc_min = event.obs_time  # Earliest start time of any of the events in this group
        utc_max = event.obs_time_end  # Latest end time of any of the events in this group
        events_used[index] = True
        prev_group_size = -1
        group_members = [index]

        # Most events must be seen within a maximum offset of 1 second at different stations.
        # Planes are allowed an offset of up to 30 seconds due to their large parallax
        search_margin = 60
        match_margin = 30 if event.category == "Plane" else 1

        # Search for other events which fall within the same time span
        # Do this iteratively, as a preceding event can expand the end time of the group, and vice versa
        while len(group_members) > prev_group_size:
            prev_group_size = len(group_members)
            # Search for events at earlier times, and then at later times
            for search_direction in (-1, 1):
                # Start from the reference event
                candidate_index = index

                # Step through other events, providing they're within range
                while ((candidate_index >= 0) and
                       (candidate_index < len(events))):
                    # Fetch event record
                    candidate = events[candidate_index]

                    # Stop search if we've gone out of time range
                    if ((candidate.obs_time_end < utc_min - search_margin) or
                            (candidate.obs_time > utc_max + search_margin)):
                        break

                    # Check whether this is a simultaneous detection, with same categorisation
                    if ((not events_used[candidate_index]) and
                            (candidate.category == event.category) and
                            (candidate.obs_time < utc_max + match_margin) and
                            (candidate.obs_time_end > utc_min - match_margin)):
                        # Add this event to the group, and update time span of event
                        group_members.append(candidate_index)
                        utc_min = min(utc_min, candidate.obs_time)
                        utc_max = max(utc_max, candidate.obs_time_end)

                        # Compile a list of all the observatories which saw this event
                        if candidate.obstory_id not in obstory_id_list:
                            obstory_id_list.append(candidate.obstory_id)

                        # Record that we have added this event to a group
                        events_used[candidate_index] = True

                    # Step on to the next candidate event to add into group
                    candidate_index += search_direction

        # We have found a coincident detection only if multiple observatories saw an event at the same time
        if len(obstory_id_list) < 2:
            continue

        # Update tally of events by type
        if event.category not in simultaneous_detections_by_type:
            simultaneous_detections_by_type[event.category] = 0
        simultaneous_detections_by_type[event.category] += 1

        # Initialise maximum baseline between the stations which saw this object
        maximum_obstory_spacing = 0

        # Work out locations of all observatories which saw this event
        obstory_locs = []
        for obstory_id in obstory_id_list:
            obstory_info = db.get_obstory_from_id(obstory_id)
            obstory_loc = Point.from_lat_lng(lat=obstory_info['latitude'],
                                             lng=obstory_info['longitude'],
                                             alt=0,
                                             utc=(utc_min + utc_max) / 2
                                             )
            obstory_locs.append(obstory_loc)

        # Check the distances between all pairs of observatories
        pairs = [[obstory_locs[i], obstory_locs[j]]
                 for i in range(len(obstory_id_list))
                 for j in range(i + 1, len(obstory_id_list))
                 ]

        # Work out maximum baseline between the stations which saw this object
        for pair in pairs:
            maximum_obstory_spacing = max(maximum_obstory_spacing,
                                          abs(pair[0].displacement_vector_from(pair[1])))

        # Create information about this simultaneous detection
        groups.append({'time': (utc_min + utc_max) / 2,
                       'obstory_list': obstory_id_list,
                       'time_spread': utc_max - utc_min,
                       'geographic_spacing': maximum_obstory_spacing,
                       'category': event.category,
                       'observations': [{'obs': events[x]} for x in group_members],
                       'ids': [events[x].id for x in group_members]})

    # Report individual events we found
    for item in groups:
        logging.info("""
{time} -- {count:3d} stations; max baseline {baseline:5.0f} m; time spread {spread:4.1f} sec; type <{category}>
""".format(time=dcf_ast.date_string(item['time']),
           count=len(item['obstory_list']),
           baseline=item['geographic_spacing'],
           spread=item['time_spread'],
           category=item['category']).strip())

    # Report statistics on events we found
    logging.info("{:6d} moving objects seen within this time period".
                 format(len(events_raw['obs'])))
    logging.info("{:6d} moving objects rejected because they were unclassified".
                 format(len(events_raw['obs']) - len(events)))
    logging.info("{:6d} simultaneous detections found.".
                 format(len(groups)))

    # Report statistics by event type
    logging.info("Tally of simultaneous detections by type:")
    for event_type in sorted(simultaneous_detections_by_type.keys()):
        logging.info("    * {:32s}: {:6d}".format(event_type, simultaneous_detections_by_type[event_type]))

    # Record simultaneous event detections into the database
    for item in groups:
        # Create new observation group
        group = db.register_obsgroup(title="Multi-station detection", user_id="system",
                                     semantic_type=simultaneous_event_type,
                                     obs_time=item['time'], set_time=time.time(),
                                     obs=item['ids'])

        # logging.info("Simultaneous detection at {time} by {count:3d} stations (time spread {spread:.1f} sec)".
        #              format(time=dcf_ast.date_string(item['time']),
        #                     count=len(item['obstory_list']),
        #                     spread=item['time_spread']))
        # logging.info("Observation IDs: %s" % item['ids'])

        # Register group metadata
        timestamp = time.time()
        db.set_obsgroup_metadata(user_id="system", group_id=group.id, utc=timestamp,
                                 meta=mp.Meta(key="web:category", value=item['category']))
        db.set_obsgroup_metadata(user_id="system", group_id=group.id, utc=timestamp,
                                 meta=mp.Meta(key="simultaneous:time_spread", value=item['time_spread']))
        db.set_obsgroup_metadata(user_id="system", group_id=group.id, utc=timestamp,
                                 meta=mp.Meta(key="simulataneous:geographic_spread", value=item['geographic_spacing']))

    # Commit changes
    db.commit()
Ejemplo n.º 29
0
def shower_determination(utc_min, utc_max):
    """
    Estimate the parent showers of all meteors observed between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should determine the parent showers of meteors (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the parent showers of meteors (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Load list of meteor showers
    shower_list = read_shower_list()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    logging.info("Starting meteor shower identification.")

    # Count how many meteors we manage to successfully identify
    outcomes = {
        'successful_fits': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Status update
    logging.info("Searching for meteors within period {} to {}".format(date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for meteors within this time period
    conn.execute("""
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
INNER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s AND
      am2.stringValue = "Meteor"
ORDER BY ao.obsTime;
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Log how many meteors we are going to work on
    logging.info("Estimating the parent showers of {:d} meteors.".format(len(results)))

    # Count how many meteors we find in each shower
    meteor_count_by_shower = {}

    # Analyse each meteor in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {item.key: item.value for item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if 'pigazing:path' not in all_metadata:
            logging.info("Cannot process <{}> due to inadequate metadata.".format(item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}]".format(
            date=date_string(utc=item['obsTime']),
            obs=item['observationId']
        )

        # Project path from (x,y) coordinates into (RA, Dec)
        projector = PathProjection(
            db=db,
            obstory_id=item['observatory'],
            time=item['obsTime'],
            logging_prefix=logging_prefix
        )

        path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list_this = projector.ra_dec_from_x_y(
            path_json=all_metadata['pigazing:path'],
            path_bezier_json=all_metadata['pigazing:pathBezier'],
            detections=all_metadata['pigazing:detectionCount'],
            duration=all_metadata['pigazing:duration']
        )

        # Check for error
        if projector.error is not None:
            if projector.error in outcomes:
                outcomes[projector.error] += 1
            continue

        # Check for notifications
        for notification in projector.notifications:
            if notification in outcomes:
                outcomes[notification] += 1

        # Check number of points in path
        path_len = len(path_x_y)

        # List of candidate showers this meteor might belong to
        candidate_showers = []

        # Test for each candidate meteor shower in turn
        for shower in shower_list:
            # Work out celestial coordinates of shower radiant in RA/Dec in hours/degs of epoch
            radiant_ra_at_epoch, radiant_dec_at_epoch = ra_dec_from_j2000(ra0=shower['RA'],
                                                                          dec0=shower['Decl'],
                                                                          utc_new=item['obsTime'])

            # Work out alt-az of the shower's radiant using known location of camera. Fits returned in degrees.
            alt_az_pos = alt_az(ra=radiant_ra_at_epoch, dec=radiant_dec_at_epoch,
                                utc=item['obsTime'],
                                latitude=projector.obstory_info['latitude'],
                                longitude=projector.obstory_info['longitude'])

            # Work out position of the Sun (J2000)
            sun_ra_j2000, sun_dec_j2000 = sun_pos(utc=item['obsTime'])

            # Work out position of the Sun (RA, Dec of epoch)
            sun_ra_at_epoch, sun_dec_at_epoch = ra_dec_from_j2000(ra0=sun_ra_j2000, dec0=sun_dec_j2000,
                                                                  utc_new=item['obsTime'])

            # Offset from peak of shower
            year = 365.2524
            peak_offset = (sun_ra_at_epoch * 180 / 12. - shower['peak']) * year / 360  # days
            while peak_offset < -year / 2:
                peak_offset += year
            while peak_offset > year / 2:
                peak_offset -= year
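            # The Sun's RA of date (hours) is scaled by 180/12 = 15 to give degrees, the difference
            # from shower['peak'] (assumed here to be quoted on the same degree scale) is converted
            # from degrees around the year into days, and the result is wrapped into roughly plus or
            # minus half a year. For example, if the Sun's RA of date is 9.6 hours (144 deg) and
            # shower['peak'] is 140, then peak_offset = (144 - 140) * 365.2524 / 360, i.e. about
            # 4.06 days after the shower maximum.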

            start_offset = peak_offset + shower['start'] - 4
            end_offset = peak_offset + shower['end'] + 4

            # Estimate ZHR of shower at the time the meteor was observed
            zhr = 0
            if abs(peak_offset) < 2:
                zhr = shower['zhr']  # Shower is within 2 days of maximum; use quoted peak ZHR value
            if start_offset < 0 < end_offset:
                zhr = max(zhr, 5)  # Shower is active (within its start/end window); assume a ZHR of at least 5
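            # Illustrative numbers (not from the shower catalogue): a shower quoted with zhr=100 that
            # is observed 1.5 days from maximum gets zhr=100 here; observed 6 days from maximum but
            # still inside its activity window, it gets zhr=5; outside the window, zhr stays 0.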

            # Correct hourly rate for the altitude of the shower radiant
            hourly_rate = zhr * sin(alt_az_pos[0] * pi / 180)

            # If hourly rate is zero, this shower is not active
            if hourly_rate <= 0:
                # logging.info("Meteor shower <{}> has zero rate".format(shower['name']))
                continue

            # Work out angular distance of meteor from radiant (radians)
            path_radiant_sep = [ang_dist(ra0=pt[0], dec0=pt[1],
                                         ra1=radiant_ra_at_epoch * pi / 12, dec1=radiant_dec_at_epoch * pi / 180)
                                for pt in path_ra_dec_at_epoch]
            change_in_radiant_dist = path_radiant_sep[-1] - path_radiant_sep[0]  # radians

            # Reject meteors that travel *towards* the radiant
            if change_in_radiant_dist < 0:
                continue

            # Convert path to Cartesian coordinates on a unit sphere
            path_cartesian = [Vector.from_ra_dec(ra=ra * 12 / pi, dec=dec * 180 / pi)
                              for ra, dec in path_ra_dec_at_epoch]

            # Work out the cross product of the first and last points, which is normal to the great circle containing the meteor's path
            first = path_cartesian[0]
            last = path_cartesian[-1]
            path_normal = first.cross_product(last)

            # Work out angle of path normal to meteor shower radiant
            radiant_cartesian = Vector.from_ra_dec(ra=radiant_ra_at_epoch, dec=radiant_dec_at_epoch)
            theta = path_normal.angle_with(radiant_cartesian)  # degrees

            if theta > 90:
                theta = 180 - theta

            # What is the angular separation of the meteor's path's closest approach to the shower radiant?
            radiant_angle = 90 - theta
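            # For example, if the great circle through the path passes exactly through the radiant,
            # the path normal lies 90 degrees from the radiant, so theta = 90 and radiant_angle = 0;
            # if the path's closest approach to the radiant is 5 degrees, theta = 85 and
            # radiant_angle = 5.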

            # Work out likelihood metric that this meteor belongs to this shower
            radiant_angle_std_dev = 2  # Allow 2 degree mismatch in radiant pos
            likelihood = hourly_rate * scipy.stats.norm(loc=0, scale=radiant_angle_std_dev).pdf(radiant_angle)
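            # With scale=2, the Gaussian pdf is roughly 0.199 at 0 degrees offset, 0.121 at
            # 2 degrees and 0.027 at 4 degrees, so well-aligned meteors are weighted much more
            # strongly than poorly-aligned ones for a given hourly rate.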

            # Store information about the likelihood this meteor belongs to this shower
            candidate_showers.append({
                'name': shower['name'],
                'likelihood': likelihood,
                'offset': radiant_angle,
                'change_radiant_dist': change_in_radiant_dist,
                'shower_rate': hourly_rate
            })

        # Add the possibility that this is a sporadic meteor, not associated with any shower
        hourly_rate = 5
        likelihood = hourly_rate * (1. / 90.)  # Mean value of Gaussian in range 0-90 degs
        candidate_showers.append({
            'name': "Sporadic",
            'likelihood': likelihood,
            'offset': 0,
            'shower_rate': hourly_rate
        })

        # Renormalise likelihoods into percentages which sum to 100
        sum_likelihood = sum(shower['likelihood'] for shower in candidate_showers)
        for shower in candidate_showers:
            shower['likelihood'] *= 100 / sum_likelihood

        # Sort candidates by likelihood
        candidate_showers.sort(key=itemgetter('likelihood'), reverse=True)

        # Report possible meteor shower identifications
        logging.info("{date} [{obs}] -- {showers}".format(
            date=date_string(utc=item['obsTime']),
            obs=item['observationId'],
            showers=", ".join([
                "{} {:.1f}% ({:.1f} deg offset)".format(shower['name'], shower['likelihood'], shower['offset'])
                for shower in candidate_showers
            ])
        ))

        # Identify most likely shower
        most_likely_shower = candidate_showers[0]['name']

        # Update tally of meteors
        if most_likely_shower not in meteor_count_by_shower:
            meteor_count_by_shower[most_likely_shower] = 0
        meteor_count_by_shower[most_likely_shower] += 1

        # Store meteor identification
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:name", value=most_likely_shower))
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:radiant_offset", value=candidate_showers[0]['offset']))
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:path_length",
                                                 value=ang_dist(ra0=path_ra_dec_at_epoch[0][0],
                                                                dec0=path_ra_dec_at_epoch[0][1],
                                                                ra1=path_ra_dec_at_epoch[-1][0],
                                                                dec1=path_ra_dec_at_epoch[-1][1]
                                                                ) * 180 / pi
                                                 ))
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:path_ra_dec",
                                                 value="[[{:.3f},{:.3f}],[{:.3f},{:.3f}],[{:.3f},{:.3f}]]".format(
                                                     path_ra_dec_at_epoch[0][0] * 12 / pi,
                                                     path_ra_dec_at_epoch[0][1] * 180 / pi,
                                                     path_ra_dec_at_epoch[int(path_len / 2)][0] * 12 / pi,
                                                     path_ra_dec_at_epoch[int(path_len / 2)][1] * 180 / pi,
                                                     path_ra_dec_at_epoch[-1][0] * 12 / pi,
                                                     path_ra_dec_at_epoch[-1][1] * 180 / pi,
                                                 )
                                                 ))

        # Meteor successfully identified
        outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} meteors successfully identified.".format(outcomes['successful_fits']))
    logging.info("{:d} malformed database records.".format(outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(outcomes['rescued_records']))
    logging.info("{:d} meteors with incomplete data.".format(outcomes['insufficient_information']))

    # Report tally of meteors
    logging.info("Tally of meteors by shower:")
    for shower in sorted(meteor_count_by_shower.keys()):
        logging.info("    * {:32s}: {:6d}".format(shower, meteor_count_by_shower[shower]))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
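
The shower-association test above boils down to spherical geometry: convert the first and last points of the meteor track into unit vectors, take their cross product to obtain the pole of the great circle containing the track, and measure how far that great circle passes from the shower radiant. Below is a minimal self-contained sketch of that geometry; it uses plain Python and the math module rather than the pigazing Vector class, and the coordinate values are made up purely for illustration.

from math import sin, cos, acos, sqrt, degrees, radians


def unit_vector(ra_hours, dec_degrees):
    """Convert (RA in hours, Dec in degrees) into a Cartesian unit vector."""
    ra = radians(ra_hours * 15)
    dec = radians(dec_degrees)
    return (cos(dec) * cos(ra), cos(dec) * sin(ra), sin(dec))


def cross(a, b):
    """Cross product of two 3-vectors."""
    return (a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0])


def angle_between(a, b):
    """Angle between two 3-vectors, in degrees."""
    dot = sum(x * y for x, y in zip(a, b))
    norm = sqrt(sum(x * x for x in a)) * sqrt(sum(x * x for x in b))
    return degrees(acos(max(-1.0, min(1.0, dot / norm))))


# Made-up track end points and radiant position (RA in hours, Dec in degrees)
track_start = unit_vector(3.0, 50.0)
track_end = unit_vector(3.5, 45.0)
radiant = unit_vector(3.3, 58.0)

# Pole of the great circle containing the track
pole = cross(track_start, track_end)

# Fold the pole-to-radiant angle into the range 0-90 degrees, as in the function above
theta = angle_between(pole, radiant)
if theta > 90:
    theta = 180 - theta

# Closest approach of the track's great circle to the radiant
radiant_offset = 90 - theta
print("Radiant offset: {:.2f} degrees".format(radiant_offset))

The function above additionally rejects tracks whose angular distance from the radiant decreases along the path, since shower meteors travel away from their radiant.
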
Example No. 30
0
def list_observatory_status(utc_min, utc_max, obstory):
    """
    List all the metadata updates posted by a particular observatory between two given unix times.

    :param utc_min:
        Only list metadata updates after the specified unix time
    :param utc_max:
        Only list metadata updates before the specified unix time
    :param obstory:
        ID of the observatory we are to list events from
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    try:
        obstory_info = db.get_obstory_from_id(obstory_id=obstory)
    except ValueError:
        print("Unknown observatory <{}>. Run ./listObservatories.py to see a list of available observatories.".
              format(obstory))
        sys.exit(0)

    title = "Observatory <{}>".format(obstory)
    print("\n\n{}\n{}".format(title, "-" * len(title)))

    search = mp.ObservatoryMetadataSearch(obstory_ids=[obstory], time_min=utc_min, time_max=utc_max)
    data = db.search_obstory_metadata(search)
    data = data['items']
    data.sort(key=lambda x: x.time)
    print("  * {:d} matching metadata items in time range {} --> {}".format(len(data),
                                                                            dcf_ast.date_string(utc_min),
                                                                            dcf_ast.date_string(utc_max)))

    # Check which items remain current
    refreshed = False
    data.reverse()
    keys_seen = []
    for item in data:
        # The magic metadata keyword "refresh" causes all older metadata to be superseded
        if item.key == "refresh" and not refreshed:
            item.still_current = True
            refreshed = True
        # If we don't have a later metadata update for the same keyword, then this metadata remains current
        elif item.key not in keys_seen and not refreshed:
            item.still_current = True
            keys_seen.append(item.key)
        # This metadata item has been superseded
        else:
            item.still_current = False
    data.reverse()

    # Display list of items
    for item in data:
        if item.still_current:
            current_flag = "+"
        else:
            current_flag = " "
        print("  * {} [ID {}] {} -- {:16s} = {}".format(current_flag, item.id, dcf_ast.date_string(item.time),
                                                        item.key, item.value))
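
The command-line wrapper that parses arguments for this function is not included in this listing; below is a minimal sketch of how it might be invoked, assuming the function is importable from this module. The time range and observatory ID are placeholders.

from datetime import datetime, timezone


def unix_time(iso_string):
    """Convert an ISO 8601 timestamp (treated as UTC) into a unix time."""
    return datetime.fromisoformat(iso_string).replace(tzinfo=timezone.utc).timestamp()


# Placeholder time range and observatory ID
list_observatory_status(utc_min=unix_time("2020-08-01T00:00:00"),
                        utc_max=unix_time("2020-08-31T23:59:59"),
                        obstory="obs_example")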