Example #1
def list_triangulations(utc_min=None, utc_max=None):
    """
    Display a list of all the trajectories of moving objects registered in the database.

    :param utc_min:
        Only show observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only show observations made before the specified time stamp.

    :type utc_max:
        float

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Compile search criteria for observation groups
    where = ["g.semanticType = (SELECT uid FROM archive_semanticTypes WHERE name=\"{}\")".
                 format(simultaneous_event_type)
             ]
    args = []

    if utc_min is not None:
        where.append("g.time>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("g.time<=%s")
        args.append(utc_max)

    # Search for observation groups containing groups of simultaneous detections
    conn.execute("""
SELECT g.publicId AS groupId, g.time AS time, am.stringValue AS objectType,
       am2.floatValue AS speed, am3.floatValue AS mean_altitude, am4.floatValue AS max_angular_offset,
       am5.floatValue AS max_baseline, am6.stringValue AS radiant_direction, am7.floatValue AS sight_line_count,
       am8.stringValue AS path
FROM archive_obs_groups g
INNER JOIN archive_metadata am ON g.uid = am.groupId AND
    am.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
INNER JOIN archive_metadata am2 ON g.uid = am2.groupId AND
    am2.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:speed")
INNER JOIN archive_metadata am3 ON g.uid = am3.groupId AND
    am3.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:mean_altitude")
INNER JOIN archive_metadata am4 ON g.uid = am4.groupId AND
    am4.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:max_angular_offset")
INNER JOIN archive_metadata am5 ON g.uid = am5.groupId AND
    am5.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:max_baseline")
INNER JOIN archive_metadata am6 ON g.uid = am6.groupId AND
    am6.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:radiant_direction")
INNER JOIN archive_metadata am7 ON g.uid = am7.groupId AND
    am7.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:sight_line_count")
INNER JOIN archive_metadata am8 ON g.uid = am8.groupId AND
    am8.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:path")
WHERE """ + " AND ".join(where) + """
ORDER BY g.time;
""", args)
    results = conn.fetchall()

    # Count how many simultaneous detections we find by type
    detections_by_type = {}

    # Compile tally by type
    for item in results:
        # Add this triangulation to tally
        if item['objectType'] not in detections_by_type:
            detections_by_type[item['objectType']] = 0
        detections_by_type[item['objectType']] += 1

    # List information about each observation in turn
    print("{:16s} {:20s} {:20s} {:8s} {:10s}".format("GroupId", "Time", "Object type", "Speed", "Altitude"))
    for item in results:
        # Print triangulation information
        print("{:16s} {:20s} {:20s} {:8.0f} {:10.0f}".format(item['groupId'],
                                                             date_string(item['time']),
                                                             item['objectType'],
                                                             item['speed'],
                                                             item['mean_altitude']
                                                             ))

    # Report tally of events
    print("\nTally of events by type:")
    for event_type in sorted(detections_by_type.keys()):
        print("    * {:26s}: {:6d}".format(event_type, detections_by_type[event_type]))
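
# A minimal usage sketch for the function above (a hypothetical __main__ block;
# the argument names and help strings are illustrative, not taken from the
# original script):
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="List triangulated object trajectories.")
    parser.add_argument('--utc-min', dest='utc_min', type=float, default=None,
                        help="Only list events after this unix time")
    parser.add_argument('--utc-max', dest='utc_max', type=float, default=None,
                        help="Only list events before this unix time")
    args = parser.parse_args()

    list_triangulations(utc_min=args.utc_min, utc_max=args.utc_max)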
Example #2
def do_triangulation(utc_min, utc_max, utc_must_stop):
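    """
    Triangulate the trajectories of moving objects which were detected simultaneously by multiple observatories.

    :param utc_min:
        The start of the time period in which we should triangulate simultaneous detections (unix time).
    :param utc_max:
        The end of the time period in which we should triangulate simultaneous detections (unix time).
    :param utc_must_stop:
        The time by which we must finish work.
    :return:
        None
    """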
    # We need to share the list of sight lines to each moving object with the objective function that we minimise
    global sight_line_list, time_span, seed_position

    # Start triangulation process
    logging.info(
        "Triangulating simultaneous object detections between <{}> and <{}>.".
        format(date_string(utc_min), date_string(utc_max)))

    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Count how many objects we manage to successfully fit
    outcomes = {
        'successful_fits': 0,
        'failed_fits': 0,
        'inadequate_baseline': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Compile search criteria for observation groups
    where = [
        "g.semanticType = (SELECT uid FROM archive_semanticTypes WHERE name=\"{}\")"
        .format(simultaneous_event_type)
    ]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)

    # Open direct connection to database
    conn = db.con

    # Search for observation groups containing groups of simultaneous detections
    conn.execute(
        """
SELECT g.publicId AS groupId, o.publicId AS observationId, o.obsTime, f.repositoryFname,
       am.stringValue AS objectType, l.publicId AS observatory
FROM archive_obs_groups g
INNER JOIN archive_obs_group_members m on g.uid = m.groupId
INNER JOIN archive_observations o ON m.childObservation = o.uid
INNER JOIN archive_observatories l ON o.observatory = l.uid
LEFT OUTER JOIN archive_files f on o.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video")
INNER JOIN archive_metadata am ON g.uid = am.groupId AND
    am.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE """ + " AND ".join(where) + """
ORDER BY o.obsTime;
""", args)
    results = conn.fetchall()

    # Compile list of events into list of groups
    obs_groups = {}
    obs_group_ids = []
    for item in results:
        key = item['groupId']
        if key not in obs_groups:
            obs_groups[key] = []
            obs_group_ids.append({
                'groupId': key,
                'time': item['obsTime'],
                'type': item['objectType']
            })
        obs_groups[key].append(item)

    # Loop over list of simultaneous event detections
    for group_info in obs_group_ids:
        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}/{type:16s}]".format(
            date=date_string(utc=group_info['time']),
            obs=group_info['groupId'],
            type=group_info['type'])

        # If we've run out of time, stop now
        time_now = time.time()
        if utc_must_stop is not None and time_now > utc_must_stop:
            break

        # Make a list of all our sight-lines to this object, from all observatories
        sight_line_list = []
        observatory_list = {}

        # Fetch information about each observation in turn
        for item in obs_groups[group_info['groupId']]:
            # Fetch metadata about this object, some of which might be on the file, and some on the observation
            obs_obj = db.get_observation(observation_id=item['observationId'])
            obs_metadata = {item.key: item.value for item in obs_obj.meta}
            if item['repositoryFname']:
                file_obj = db.get_file(
                    repository_fname=item['repositoryFname'])
                file_metadata = {
                    item.key: item.value
                    for item in file_obj.meta
                }
            else:
                file_metadata = {}
            all_metadata = {**obs_metadata, **file_metadata}

            # Project path from (x,y) coordinates into (RA, Dec)
            projector = PathProjection(db=db,
                                       obstory_id=item['observatory'],
                                       time=item['obsTime'],
                                       logging_prefix=logging_prefix)

            path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list_this = projector.ra_dec_from_x_y(
                path_json=all_metadata['pigazing:path'],
                path_bezier_json=all_metadata['pigazing:pathBezier'],
                detections=all_metadata['pigazing:detectionCount'],
                duration=all_metadata['pigazing:duration'])

            # Check for error
            if projector.error is not None:
                if projector.error in outcomes:
                    outcomes[projector.error] += 1
                continue

            # Check for notifications
            for notification in projector.notifications:
                if notification in outcomes:
                    outcomes[notification] += 1

            # Add to observatory_list, now that we've checked this observatory has all necessary information
            if item['observatory'] not in observatory_list:
                observatory_list[item['observatory']] = projector.obstory_info

            # Add sight lines from this observatory to list which combines all observatories
            sight_line_list.extend(sight_line_list_this)

        # If we have fewer than four sight lines, don't bother trying to triangulate
        if len(sight_line_list) < 4:
            logging.info(
                "{prefix} -- Giving up triangulation as we only have {x:d} sight lines to object."
                .format(prefix=logging_prefix, x=len(sight_line_list)))
            continue

        # Initialise maximum baseline between the stations which saw this object
        maximum_baseline = 0

        # Check the distances between all pairs of observatories
        obstory_info_list = [
            Point.from_lat_lng(lat=obstory['latitude'],
                               lng=obstory['longitude'],
                               alt=0,
                               utc=None)
            for obstory in observatory_list.values()
        ]

        pairs = [[obstory_info_list[i], obstory_info_list[j]]
                 for i in range(len(obstory_info_list))
                 for j in range(i + 1, len(obstory_info_list))]

        # Work out maximum baseline between the stations which saw this object
        for pair in pairs:
            maximum_baseline = max(
                maximum_baseline,
                abs(pair[0].displacement_vector_from(pair[1])))

        # If we have no baselines of over 1 km, don't bother trying to triangulate
        if maximum_baseline < 1000:
            logging.info(
                "{prefix} -- Giving up triangulation as longest baseline is only {x:.0f} m."
                .format(prefix=logging_prefix, x=maximum_baseline))
            outcomes['inadequate_baseline'] += 1
            continue

        # Set time range of sight lines
        time_span = [
            min(item['utc'] for item in sight_line_list),
            max(item['utc'] for item in sight_line_list)
        ]

        # Create a seed point to start search for object path. We pick a point above the centroid of the observatories
        # that saw the object
        centroid_v = sum(item['obs_position'].to_vector()
                         for item in sight_line_list) / len(sight_line_list)
        centroid_p = Point(x=centroid_v.x, y=centroid_v.y, z=centroid_v.z)
        centroid_lat_lng = centroid_p.to_lat_lng(utc=None)
        seed_position = Point.from_lat_lng(lat=centroid_lat_lng['lat'],
                                           lng=centroid_lat_lng['lng'],
                                           alt=centroid_lat_lng['alt'] * 2e4,
                                           utc=None)

        # Attempt to fit a linear trajectory through all of the sight lines that we have collected
        parameters_initial = [0, 0, 0, 0, 0, 0]

        # Solve the system of equations
        # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/>
        # for more information about how this works
        parameters_optimised = scipy.optimize.minimize(
            angular_mismatch_objective,
            numpy.asarray(parameters_initial),
            options={
                'disp': False,
                'maxiter': 1e8
            }).x

        # Construct best-fit linear trajectory for best-fitting parameters
        best_triangulation = line_from_parameters(parameters_optimised)
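        # For reference, a hedged sketch of what the two helpers used above are assumed
        # to do (they are defined elsewhere in this module; the vector/line classes named
        # below are illustrative, not the exact API):
        #
        #   def line_from_parameters(p):
        #       # The first three parameters offset a point from seed_position; the last
        #       # three define the direction of the candidate trajectory.
        #       return Line(x0=seed_position.offset(p[0], p[1], p[2]),
        #                   direction=Vector(p[3], p[4], p[5]))
        #
        #   def angular_mismatch_objective(p):
        #       # Total angular offset (deg) of all observed sight lines from the candidate
        #       # trajectory; this is the quantity scipy.optimize.minimize drives down.
        #       return sum(sight_line_mismatch_list(trajectory=line_from_parameters(p)))
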
        # logging.info("Best fit path of object is <{}>.".format(best_triangulation))

        # logging.info("Mismatch of observed sight lines from trajectory are {} deg.".format(
        #     ["{:.1f}".format(best_triangulation.find_closest_approach(s['line'])['angular_distance'])
        #      for s in sight_line_list]
        # ))

        # Find sight line with the worst match
        mismatch_list = sight_line_mismatch_list(trajectory=best_triangulation)
        maximum_mismatch = max(mismatch_list)

        # Reject trajectory if it deviates by more than 8 degrees from any observation
        if maximum_mismatch > 8:
            logging.info(
                "{prefix} -- Trajectory mismatch is too great ({x:.1f} deg).".
                format(prefix=logging_prefix, x=maximum_mismatch))
            outcomes['failed_fits'] += 1
            continue

        # Convert start and end points of path into (lat, lng, alt)
        start_point = best_triangulation.point(0).to_lat_lng(utc=None)
        start_point['utc'] = time_span[0]
        end_point = best_triangulation.point(1).to_lat_lng(utc=None)
        end_point['utc'] = time_span[1]

        # Calculate linear speed of object
        speed = abs(best_triangulation.direction) / (
            time_span[1] - time_span[0])  # m/s

        # Calculate radiant direction for this object
        radiant_direction_vector = best_triangulation.direction * -1
        radiant_direction_coordinates = radiant_direction_vector.to_ra_dec(
        )  # hours, degrees
        radiant_greenwich_hour_angle = radiant_direction_coordinates['ra']
        radiant_dec = radiant_direction_coordinates['dec']
        instantaneous_sidereal_time = sidereal_time(utc=(utc_min + utc_max) /
                                                    2)  # hours
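        # Note: to_ra_dec() is assumed to return coordinates in a frame which co-rotates
        # with the Earth, so the 'ra' value is fixed relative to the Greenwich meridian;
        # adding the Greenwich sidereal time rotates it into a true right ascension.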
        radiant_ra = radiant_greenwich_hour_angle + instantaneous_sidereal_time  # hours
        radiant_direction = [radiant_ra, radiant_dec]

        # Store triangulated information in database
        user = settings['pigazingUser']
        timestamp = time.time()
        triangulation_metadata = {
            "triangulation:speed":
            speed,
            "triangulation:mean_altitude":
            (start_point['alt'] + end_point['alt']) / 2 / 1e3,  # km
            "triangulation:max_angular_offset":
            maximum_mismatch,
            "triangulation:max_baseline":
            maximum_baseline,
            "triangulation:radiant_direction":
            json.dumps(radiant_direction),
            "triangulation:sight_line_count":
            len(sight_line_list),
            "triangulation:path":
            json.dumps([start_point, end_point])
        }

        # Set metadata on the observation group
        for metadata_key, metadata_value in triangulation_metadata.items():
            db.set_obsgroup_metadata(user_id=user,
                                     group_id=group_info['groupId'],
                                     utc=timestamp,
                                     meta=mp.Meta(key=metadata_key,
                                                  value=metadata_value))

        # Set metadata on each observation individually
        for item in obs_groups[group_info['groupId']]:
            for metadata_key, metadata_value in triangulation_metadata.items():
                db.set_observation_metadata(
                    user_id=user,
                    observation_id=item['observationId'],
                    utc=timestamp,
                    meta=mp.Meta(key=metadata_key, value=metadata_value))

        # Commit metadata to database
        db.commit()

        # Report outcome
        logging.info(
            "{prefix} -- Success -- {path}; speed {mph:11.1f} mph; {sight_lines:6d} detections."
            .format(
                prefix=logging_prefix,
                path="{:5.1f} {:5.1f} {:10.1f} -> {:5.1f} {:5.1f} {:10.1f}".
                format(
                    start_point['lat'],
                    start_point['lng'],
                    start_point['alt'] / 1e3,  # deg deg km
                    end_point['lat'],
                    end_point['lng'],
                    end_point['alt'] / 1e3),
                mph=speed / 0.44704,  # mph
                sight_lines=len(sight_line_list)))

        # Triangulation successful
        outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} objects successfully triangulated.".format(
        outcomes['successful_fits']))
    logging.info("{:d} objects could not be triangulated.".format(
        outcomes['failed_fits']))
    logging.info("{:d} objects had an inadequate baseline.".format(
        outcomes['inadequate_baseline']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))
    logging.info("{:d} objects with incomplete data.".format(
        outcomes['insufficient_information']))

    # Commit changes
    db.commit()
    db.close_db()
Example #3
def list_images(utc_min=None,
                utc_max=None,
                username=None,
                obstory=None,
                img_type=None,
                obs_type=None,
                stride=1):
    """
    Display a list of all the images registered in the database.

    :param utc_min:
        Only show observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only show observations made before the specified time stamp.

    :type utc_max:
        float

    :param username:
        Optionally specify a username, to show only images recorded by a particular user.

    :type username:
        str

    :param obstory:
        The public ID of the observatory whose observations we are to show.

    :type obstory:
        str

    :param img_type:
        Only show images with this semantic type.

    :type img_type:
        str

    :param obs_type:
        Only show observations with this semantic type.

    :type obs_type:
        str

    :param stride:
        Only show every nth observation matching the search criteria.

    :type stride:
        int

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    where = ["1"]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)
    if username is not None:
        where.append("o.userId=%s")
        args.append(username)
    if obstory is not None:
        where.append("l.publicId=%s")
        args.append(obstory)
    if obs_type is not None:
        where.append("ast.name=%s")
        args.append(obs_type)

    conn.execute(
        """
SELECT o.uid, o.userId, l.name AS place, o.obsTime
FROM archive_observations o
INNER JOIN archive_observatories l ON o.observatory = l.uid
INNER JOIN archive_semanticTypes ast ON o.obsType = ast.uid
WHERE """ + " AND ".join(where) + """
ORDER BY obsTime DESC LIMIT 200;
""", args)
    results = conn.fetchall()

    # List information about each observation in turn
    sys.stdout.write("{:6s} {:10s} {:32s} {:17s} {:20s}\n".format(
        "obsId", "Username", "Observatory", "Time", "Images"))
    for counter, obs in enumerate(results):
        # Only show every nth hit
        if counter % stride != 0:
            continue

        # Print observation information
        sys.stdout.write("{:6d} {:10s} {:32s} {:17s} ".format(
            obs['uid'], obs['userId'], obs['place'],
            date_string(obs['obsTime'])))

        where = ["f.observationId=%s"]
        args = [obs['uid']]

        if img_type is not None:
            where.append("ast.name=%s")
            args.append(img_type)

        # Fetch list of files in this observation
        conn.execute(
            """
SELECT ast.name AS semanticType, repositoryFname, am.floatValue AS skyClarity
FROM archive_files f
INNER JOIN archive_semanticTypes ast ON f.semanticType = ast.uid
LEFT OUTER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
WHERE """ + " AND ".join(where) + """;
""", args)

        files = conn.fetchall()

        for count, item in enumerate(files):
            if count > 0:
                sys.stdout.write("\n{:69s}".format(""))
            if item['skyClarity'] is None:
                item['skyClarity'] = 0
            sys.stdout.write("{:40s} {:32s} {:10.1f}".format(
                item['semanticType'], item['repositoryFname'],
                item['skyClarity']))
        sys.stdout.write("\n")
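
# A minimal usage sketch for the function above (the observatory ID and time window
# below are hypothetical, chosen purely for illustration):
import time

list_images(utc_min=time.time() - 86400,  # last 24 hours
            utc_max=time.time(),
            obstory="my-observatory-id",  # hypothetical public observatory ID
            img_type="pigazing:timelapse/backgroundSubtracted",
            stride=5)  # show every fifth matching observation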
Example #4
def list_orientation_fixes(obstory_id, utc_min, utc_max):
    """
    List all the orientation fixes for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list orientation fixes for.
    :param utc_min:
        The start of the time period in which we should list orientation fixes (unix time).
    :param utc_max:
        The end of the time period in which we should list orientation fixes (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of orientation fixes
    orientation_fixes = []

    # Select observations with orientation fits
    conn.execute(
        """
SELECT am1.floatValue AS altitude, am2.floatValue AS azimuth, am3.floatValue AS tilt,
       am4.floatValue AS width_x_field, am5.floatValue AS width_y_field,
       o.obsTime AS time
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:altitude")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:azimuth")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:tilt")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_x_field")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_y_field")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        orientation_fixes.append({
            'time': item['time'],
            'average': False,
            'fit': item
        })

    # Select observatory orientation fits
    conn.execute(
        """
SELECT am1.floatValue AS altitude, am2.floatValue AS azimuth, am3.floatValue AS tilt,
       am4.floatValue AS width_x_field, am5.floatValue AS width_y_field,
       am1.time AS time
FROM archive_observatories o
INNER JOIN archive_metadata am1 ON o.uid = am1.observatory AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:altitude")
INNER JOIN archive_metadata am2 ON o.uid = am2.observatory AND am2.time=am1.time AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:azimuth")
INNER JOIN archive_metadata am3 ON o.uid = am3.observatory AND am3.time=am1.time AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:tilt")
INNER JOIN archive_metadata am4 ON o.uid = am4.observatory AND am4.time=am1.time AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_x_field")
INNER JOIN archive_metadata am5 ON o.uid = am5.observatory AND am5.time=am1.time AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_y_field")
WHERE
    o.publicId=%s AND
    am1.time BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        orientation_fixes.append({
            'time': item['time'],
            'average': True,
            'fit': item
        })

    # Sort fixes by time
    orientation_fixes.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:1s} {:16s} {:9s} {:9s} {:9s} {:8s} {:8s}\
""".format("", "Time", "Alt", "Az", "Tilt", "FoV X", "FoV Y"))

    # Display fixes
    for item in orientation_fixes:
        print("""\
{:s} {:16s} {:9.4f} {:9.4f} {:9.4f} {:8.3f} {:8.3f} {:s}\
""".format("\n>" if item['average'] else " ", date_string(item['time']),
           item['fit']['altitude'], item['fit']['azimuth'],
           item['fit']['tilt'], item['fit']['width_x_field'],
           item['fit']['width_y_field'], "\n" if item['average'] else ""))

    # Clean up and exit
    return
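
# A minimal usage sketch for the function above (the observatory ID and time window
# are hypothetical):
import time

list_orientation_fixes(obstory_id="my-observatory-id",
                       utc_min=time.time() - 30 * 86400,  # last 30 days
                       utc_max=time.time())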
Example #5
def frame_drop_detection(utc_min, utc_max):
    """
    Detect video frame drop events between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should search for video frame drop (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should search for video frame drop (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Starting video frame drop detection.")

    # Keep track of the outcome of analysing each video
    outcomes = {
        'frame_drop_events': 0,
        'non_frame_drop_events': 0,
        'error_records': 0,
        'rescued_records': 0
    }

    # Status update
    logging.info("Searching for frame drops within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for moving-object videos within this time period
    conn.execute(
        """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory, am6.stringValue AS type
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
LEFT OUTER JOIN archive_metadata am6 ON ao.uid = am6.observationId AND
    am6.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s
ORDER BY ao.obsTime
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Display logging list of the videos we are going to work on
    logging.info("Searching for dropped frames within {:d} videos.".format(
        len(results)))

    # Analyse each video in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {item.key: item.value for item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if ('pigazing:path' not in all_metadata) or ('pigazing:videoStart'
                                                     not in all_metadata):
            logging.info(
                "Cannot process <{}> due to inadequate metadata.".format(
                    item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}/{type:16s}]".format(
            date=date_string(utc=item['obsTime']),
            obs=item['observationId'],
            type=item['type'] if item['type'] is not None else '')

        # Read path of the moving object in pixel coordinates
        path_json = all_metadata['pigazing:path']
        try:
            path_x_y = json.loads(path_json)
        except json.decoder.JSONDecodeError:
            # Attempt JSON repair; sometimes JSON content gets truncated
            original_json = path_json
            fixed_json = "],[".join(original_json.split("],[")[:-1]) + "]]"
            try:
                path_x_y = json.loads(fixed_json)

                # logging.info("{prefix} -- RESCUE: In: {detections:.0f} / {duration:.1f} sec; "
                #              "Rescued: {count:d} / {json_span:.1f} sec".format(
                #     prefix=logging_prefix,
                #     detections=all_metadata['pigazing:detections'],
                #     duration=all_metadata['pigazing:duration'],
                #     count=len(path_x_y),
                #     json_span=path_x_y[-1][3] - path_x_y[0][3]
                # ))
                outcomes['rescued_records'] += 1
            except json.decoder.JSONDecodeError:
                logging.info(
                    "{prefix} -- !!! JSON error".format(prefix=logging_prefix))
                # Only give up on this record if the JSON repair also failed
                outcomes['error_records'] += 1
                continue

        # Check number of points in path
        path_len = len(path_x_y)

        # Make list of object speed at each point
        path_speed = []  # pixels/sec
        path_distance = []
        for i in range(path_len - 1):
            pixel_distance = hypot(path_x_y[i + 1][0] - path_x_y[i][0],
                                   path_x_y[i + 1][1] - path_x_y[i][1])
            time_interval = (path_x_y[i + 1][3] - path_x_y[i][3]) + 1e-8
            speed = pixel_distance / time_interval
            path_speed.append(speed)
            path_distance.append(pixel_distance)

        # Start making a list of frame-drop events
        frame_drop_points = []

        # Scan through for points with anomalously high speed
        scan_half_window = 4
        for i in range(len(path_speed)):
            scan_min = max(0, i - scan_half_window)
            scan_max = min(scan_min + 2 * scan_half_window,
                           len(path_speed) - 1)
            median_speed = max(np.median(path_speed[scan_min:scan_max]), 1)
            if (path_distance[i] > 16) and (path_speed[i] > 4 * median_speed):
                break_time = np.mean([path_x_y[i + 1][3], path_x_y[i][3]])
                video_time = break_time - all_metadata['pigazing:videoStart']
                break_distance = path_distance[i]
                # significance = path_speed[i]/median_speed
                frame_drop_points.append([
                    i + 1,
                    float("%.4f" % break_time),
                    float("%.1f" % video_time),
                    round(break_distance)
                ])

        # Report result
        if len(frame_drop_points) > 0:
            logging.info("{prefix} -- {x}".format(prefix=logging_prefix,
                                                  x=frame_drop_points))

        # Store frame-drop list
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="frame_drop:list",
                                        value=json.dumps(frame_drop_points)))

        # Video successfully analysed
        if len(frame_drop_points) == 0:
            outcomes['non_frame_drop_events'] += 1
        else:
            outcomes['frame_drop_events'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} videos with frame-drop.".format(
        outcomes['frame_drop_events']))
    logging.info("{:d} videos without frame-drop.".format(
        outcomes['non_frame_drop_events']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
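
# A minimal, self-contained sketch of the frame-drop heuristic used above, run on
# synthetic data (the numbers and the path are illustrative; the real code reads
# each object's path from the database):
import numpy as np
from math import hypot

# A steadily moving object sampled at 25 fps, with a large positional jump at
# frame 10 standing in for a run of dropped frames.
path_x_y = [[10 * i + (100 if i >= 10 else 0), 0, 0, i / 25] for i in range(20)]

path_speed, path_distance = [], []
for i in range(len(path_x_y) - 1):
    d = hypot(path_x_y[i + 1][0] - path_x_y[i][0], path_x_y[i + 1][1] - path_x_y[i][1])
    t = (path_x_y[i + 1][3] - path_x_y[i][3]) + 1e-8
    path_speed.append(d / t)
    path_distance.append(d)

# Flag any step which is both a long jump (>16 pixels) and much faster than the
# local median speed (>4x), as in frame_drop_detection() above.
scan_half_window = 4
for i in range(len(path_speed)):
    scan_min = max(0, i - scan_half_window)
    scan_max = min(scan_min + 2 * scan_half_window, len(path_speed) - 1)
    median_speed = max(np.median(path_speed[scan_min:scan_max]), 1)
    if (path_distance[i] > 16) and (path_speed[i] > 4 * median_speed):
        print("Suspected frame drop at step", i + 1)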
Example #6
def satellite_determination(utc_min, utc_max):
    """
    Estimate the identity of spacecraft observed between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should determine the identity of spacecraft (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the identity of spacecraft (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Starting satellite identification.")

    # Keep track of the outcome of attempting to identify each spacecraft
    outcomes = {
        'successful_fits': 0,
        'unsuccessful_fits': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Status update
    logging.info("Searching for satellites within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for satellites within this time period
    conn.execute(
        """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
INNER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s AND
      (am2.stringValue='Plane' OR am2.stringValue='Satellite' OR am2.stringValue='Junk')
ORDER BY ao.obsTime
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Log how many spacecraft detections we are going to work on
    logging.info("Estimating the identity of {:d} spacecraft.".format(
        len(results)))

    # Analyse each spacecraft in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {item.key: item.value for item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {item.key: item.value for item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if 'pigazing:path' not in all_metadata:
            logging.info(
                "Cannot process <{}> due to inadequate metadata.".format(
                    item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}]".format(
            date=date_string(utc=item['obsTime']), obs=item['observationId'])

        # Project path from (x,y) coordinates into (RA, Dec)
        projector = PathProjection(db=db,
                                   obstory_id=item['observatory'],
                                   time=item['obsTime'],
                                   logging_prefix=logging_prefix)

        path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list_this = projector.ra_dec_from_x_y(
            path_json=all_metadata['pigazing:path'],
            path_bezier_json=all_metadata['pigazing:pathBezier'],
            detections=all_metadata['pigazing:detectionCount'],
            duration=all_metadata['pigazing:duration'])

        # Check for error
        if projector.error is not None:
            if projector.error in outcomes:
                outcomes[projector.error] += 1
            continue

        # Check for notifications
        for notification in projector.notifications:
            if notification in outcomes:
                outcomes[notification] += 1

        # Check number of points in path
        path_len = len(path_x_y)

        # Look up list of satellite orbital elements at the time of this sighting
        spacecraft_list = fetch_satellites(utc=item['obsTime'])

        # List of candidate satellites this object might be
        candidate_satellites = []

        # Check that we found a list of spacecraft
        if spacecraft_list is None:
            logging.info(
                "{date} [{obs}] -- No spacecraft records found.".format(
                    date=date_string(utc=item['obsTime']),
                    obs=item['observationId']))
            outcomes['insufficient_information'] += 1
            continue

        # Logging message about how many spacecraft we're testing
        # logging.info("{date} [{obs}] -- Matching against {count:7d} spacecraft.".format(
        #     date=date_string(utc=item['obsTime']),
        #     obs=item['observationId'],
        #     count=len(spacecraft_list)
        # ))

        # Test for each candidate satellite in turn
        for spacecraft in spacecraft_list:
            # Unit scaling
            deg2rad = pi / 180.0  # 0.0174532925199433
            xpdotp = 1440.0 / (2.0 * pi)  # 229.1831180523293
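            # (xpdotp is minutes per day divided by radians per revolution: dividing a
            # mean motion in revolutions/day by it yields radians/minute, as used below.)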

            # Model the path of this spacecraft
            model = Satrec()
            model.sgp4init(
                # whichconst: gravity model
                WGS72,

                # opsmode: 'a' = old AFSPC mode, 'i' = improved mode
                'i',

                # satnum: Satellite number
                spacecraft['noradId'],

                # epoch: days since 1949 December 31 00:00 UT
                jd_from_unix(spacecraft['epoch']) - 2433281.5,

                # bstar: drag coefficient (/earth radii)
                spacecraft['bStar'],

                # ndot (NOT USED): ballistic coefficient (revs/day)
                spacecraft['meanMotionDot'] / (xpdotp * 1440.0),

                # nddot (NOT USED): mean motion 2nd derivative (revs/day^3)
                spacecraft['meanMotionDotDot'] / (xpdotp * 1440.0 * 1440),

                # ecco: eccentricity
                spacecraft['ecc'],

                # argpo: argument of perigee (radians)
                spacecraft['argPeri'] * deg2rad,

                # inclo: inclination (radians)
                spacecraft['incl'] * deg2rad,

                # mo: mean anomaly (radians)
                spacecraft['meanAnom'] * deg2rad,

                # no_kozai: mean motion (radians/minute)
                spacecraft['meanMotion'] / xpdotp,

                # nodeo: right ascension of ascending node (radians)
                spacecraft['RAasc'] * deg2rad)

            # Wrap within skyfield to convert to topocentric coordinates
            ts = load.timescale()
            sat = EarthSatellite.from_satrec(model, ts)

            # Fetch spacecraft position at each time point along trajectory
            ang_mismatch_list = []
            distance_list = []

            # e, r, v = model.sgp4(jd_from_unix(utc=item['obsTime']), 0)
            # logging.info("{} {} {}".format(str(e), str(r), str(v)))
            tai_utc_offset = 39  # seconds

            def satellite_angular_offset(index, clock_offset):
                # Fetch observed position of object at this time point
                pt_utc = path_x_y[index][3]
                pt_alt = path_alt_az[index][0]
                pt_az = path_alt_az[index][1]

                # Project position of this satellite in space at this time point
                t = ts.tai_jd(jd=jd_from_unix(utc=pt_utc + tai_utc_offset +
                                              clock_offset))

                # Project position of this satellite in the observer's sky
                sight_line = sat - observer
                topocentric = sight_line.at(t)
                sat_alt, sat_az, sat_distance = topocentric.altaz()

                # Work out offset of satellite's position from observed moving object
                ang_mismatch = ang_dist(ra0=pt_az * pi / 180,
                                        dec0=pt_alt * pi / 180,
                                        ra1=sat_az.radians,
                                        dec1=sat_alt.radians) * 180 / pi

                return ang_mismatch, sat_distance

            def time_offset_objective(p):
                """
                Objective function that we minimise in order to find the best fit clock offset between the observed
                and model paths.

                :param p:
                    Vector with a single component: the clock offset
                :return:
                    Metric to minimise
                """

                # Turn input parameters into a time offset
                clock_offset = p[0]

                # Look up angular offset
                ang_mismatch, sat_distance = satellite_angular_offset(
                    index=0, clock_offset=clock_offset)

                # Return metric to minimise
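                # (the exp(clock_offset / 8) factor biases the fit towards solutions with
                # small or negative clock offsets -- presumably the intended trade-off
                # between angular accuracy and clock error)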
                return ang_mismatch * exp(clock_offset / 8)

            # First, chuck out satellites with large angular offsets
            observer = wgs84.latlon(
                latitude_degrees=projector.obstory_info['latitude'],
                longitude_degrees=projector.obstory_info['longitude'],
                elevation_m=0)

            ang_mismatch, sat_distance = satellite_angular_offset(
                index=0, clock_offset=0)

            # Check angular offset is reasonable
            if ang_mismatch > global_settings['max_angular_mismatch']:
                continue

            # Work out the optimum time offset between the satellite's path and the observed path
            # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/>
            # for more information about how this works
            parameters_initial = [0]
            parameters_optimised = scipy.optimize.minimize(
                time_offset_objective,
                np.asarray(parameters_initial),
                options={
                    'disp': False,
                    'maxiter': 100
                }).x

            # Construct best-fit linear trajectory for best-fitting parameters
            clock_offset = float(parameters_optimised[0])

            # Check clock offset is reasonable
            if abs(clock_offset) > global_settings['max_clock_offset']:
                continue

            # Measure the offset between the satellite's position and the observed position at each time point
            for index in range(path_len):
                # Look up angular mismatch at this time point
                ang_mismatch, sat_distance = satellite_angular_offset(
                    index=index, clock_offset=clock_offset)

                # Keep list of the offsets at each recorded time point along the trajectory
                ang_mismatch_list.append(ang_mismatch)
                distance_list.append(sat_distance.km)

            # Consider adding this satellite to list of candidates
            mean_ang_mismatch = np.mean(np.asarray(ang_mismatch_list))
            distance_mean = np.mean(np.asarray(distance_list))

            if mean_ang_mismatch < global_settings['max_mean_angular_mismatch']:
                candidate_satellites.append({
                    'name':
                    spacecraft['name'],  # string
                    'noradId':
                    spacecraft['noradId'],  # int
                    'distance':
                    distance_mean,  # km
                    'clock_offset':
                    clock_offset,  # seconds
                    'offset':
                    mean_ang_mismatch,  # degrees
                    'absolute_magnitude':
                    spacecraft['mag']
                })

        # Add model possibility for null satellite
        candidate_satellites.append({
            'name': "Unidentified",
            'noradId': 0,
            'distance': 35.7e3 *
            0.25,  # Nothing is visible beyond 25% of geostationary orbit distance
            'clock_offset': 0,
            'offset': 0,
            'absolute_magnitude': None
        })

        # Sort candidates by score - use absolute mag = 20 for satellites with no mag
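        # The score combines distance (in thousands of km), clock offset (seconds) and
        # absolute magnitude in a single hypot(), so each term contributes on a roughly
        # comparable numerical scale; the candidate with the lowest score wins.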
        for candidate in candidate_satellites:
            candidate['score'] = hypot(
                candidate['distance'] / 1e3,
                candidate['clock_offset'],
                (20 if candidate['absolute_magnitude'] is None else
                 candidate['absolute_magnitude']),
            )
        candidate_satellites.sort(key=itemgetter('score'))

        # Report possible satellite identifications
        logging.info("{prefix} -- {satellites}".format(
            prefix=logging_prefix,
            satellites=", ".join([
                "{} ({:.1f} deg offset; clock offset {:.1f} sec)".format(
                    satellite['name'], satellite['offset'],
                    satellite['clock_offset'])
                for satellite in candidate_satellites
            ])))

        # Identify most likely satellite
        most_likely_satellite = candidate_satellites[0]

        # Store satellite identification
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="satellite:name",
                                        value=most_likely_satellite['name']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="satellite:norad_id",
                         value=most_likely_satellite['noradId']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="satellite:clock_offset",
                         value=most_likely_satellite['clock_offset']))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="satellite:angular_offset",
                                        value=most_likely_satellite['offset']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="satellite:path_length",
                         value=ang_dist(ra0=path_ra_dec_at_epoch[0][0],
                                        dec0=path_ra_dec_at_epoch[0][1],
                                        ra1=path_ra_dec_at_epoch[-1][0],
                                        dec1=path_ra_dec_at_epoch[-1][1]) *
                         180 / pi))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(
                key="satellite:path_ra_dec",
                value="[[{:.3f},{:.3f}],[{:.3f},{:.3f}],[{:.3f},{:.3f}]]".
                format(
                    path_ra_dec_at_epoch[0][0] * 12 / pi,
                    path_ra_dec_at_epoch[0][1] * 180 / pi,
                    path_ra_dec_at_epoch[int(path_len / 2)][0] * 12 / pi,
                    path_ra_dec_at_epoch[int(path_len / 2)][1] * 180 / pi,
                    path_ra_dec_at_epoch[-1][0] * 12 / pi,
                    path_ra_dec_at_epoch[-1][1] * 180 / pi,
                )))

        # Satellite successfully identified
        if most_likely_satellite['name'] == "Unidentified":
            outcomes['unsuccessful_fits'] += 1
        else:
            outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} satellites successfully identified.".format(
        outcomes['successful_fits']))
    logging.info("{:d} satellites not identified.".format(
        outcomes['unsuccessful_fits']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))
    logging.info("{:d} satellites with incomplete data.".format(
        outcomes['insufficient_information']))

    # Clean up and exit
    db.commit()
    db.close_db()
    return


def calibrate_lens(obstory_id, utc_min, utc_max, utc_must_stop=None):
    """
    Use astrometry.net to determine the orientation of a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to determine the orientation for.
    :param utc_min:
        The start of the time period in which we should determine the observatory's orientation.
    :param utc_max:
        The end of the time period in which we should determine the observatory's orientation.
    :param utc_must_stop:
        The time by which we must finish work
    :return:
        None
    """
    global parameter_scales, fit_list

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info(
        "Starting estimation of lens calibration for <{}>".format(obstory_id))

    # Mathematical constants
    deg = pi / 180
    rad = 180 / pi

    # Count how many successful fits we achieve
    successful_fits = 0

    # Read properties of known lenses
    hw = hardware_properties.HardwareProps(
        path=os.path.join(settings['pythonPath'], "..", "configuration_global",
                          "camera_properties"))

    # Reduce time window to where observations are present
    conn.execute(
        """
SELECT obsTime
FROM archive_observations
WHERE obsTime BETWEEN %s AND %s
    AND observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
ORDER BY obsTime ASC LIMIT 1
""", (utc_min, utc_max, obstory_id))
    results = conn.fetchall()

    if len(results) == 0:
        logging.warning("No observations within requested time window.")
        return
    utc_min = results[0]['obsTime'] - 1

    conn.execute(
        """
SELECT obsTime
FROM archive_observations
WHERE obsTime BETWEEN %s AND %s
    AND observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
ORDER BY obsTime DESC LIMIT 1
""", (utc_min, utc_max, obstory_id))
    results = conn.fetchall()
    utc_max = results[0]['obsTime'] + 1

    # Divide up the time interval into blocks of length block_size (one hour)
    logging.info("Searching for images within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))
    block_size = 3600
    minimum_sky_clarity = 1e6 + 1400
    utc_min = (floor(utc_min / block_size + 0.5) -
               0.5) * block_size  # Align block boundaries to half-block offsets
    time_blocks = list(
        np.arange(start=utc_min, stop=utc_max + block_size, step=block_size))

    # Start new block whenever we have a hardware refresh
    conn.execute(
        """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()
    for item in results:
        time_blocks.append(item['time'])

    # Make sure that start points for time blocks are in order
    time_blocks.sort()

    # Build list of images we are to analyse
    images_for_analysis = []

    for block_index, utc_block_min in enumerate(time_blocks[:-1]):
        utc_block_max = time_blocks[block_index + 1]
        logging.info("Calibrating lens within period {} to {}".format(
            date_string(utc_block_min), date_string(utc_block_max)))

        # Search for background-subtracted time lapse image with best sky clarity within this time period
        conn.execute(
            """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, am.floatValue AS skyClarity
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON f.uid = am2.fileId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
    AND am.floatValue > %s
    AND am2.uid IS NULL
    AND ao.astrometryProcessed IS NULL
ORDER BY am.floatValue DESC LIMIT 1
""", (utc_block_min, utc_block_max, obstory_id, minimum_sky_clarity))
        results = conn.fetchall()

        if len(results) > 0:
            images_for_analysis.append({
                'utc':
                results[0]['obsTime'],
                'skyClarity':
                results[0]['skyClarity'],
                'repositoryFname':
                results[0]['repositoryFname'],
                'observationId':
                results[0]['observationId']
            })

    # Sort images into order of sky clarity
    images_for_analysis.sort(key=itemgetter("skyClarity"))
    images_for_analysis.reverse()

    # Display logging list of the images we are going to work on
    logging.info("Estimating the calibration of {:d} images:".format(
        len(images_for_analysis)))
    for item in images_for_analysis:
        logging.info("{:17s} {:04.0f} {:32s}".format(date_string(item['utc']),
                                                     item['skyClarity'],
                                                     item['repositoryFname']))

    # Analyse each image in turn
    for item_index, item in enumerate(images_for_analysis):
        logging.info("Working on image {:32s} ({:4d}/{:4d})".format(
            item['repositoryFname'], item_index + 1, len(images_for_analysis)))

        # Make a temporary directory to store files in.
        # This is necessary as astrometry.net spams the cwd with lots of temporary junk
        tmp0 = "/tmp/dcf21_calibrateLens_{}".format(item['repositoryFname'])
        # logging.info("Created temporary directory <{}>".format(tmp))
        os.system("mkdir {}".format(tmp0))

        # Fetch observatory status
        obstory_info = db.get_obstory_from_id(obstory_id)
        obstory_status = None
        if obstory_info and ('name' in obstory_info):
            obstory_status = db.get_obstory_status(obstory_id=obstory_id,
                                                   time=item['utc'])
        if not obstory_status:
            logging.info("Aborting -- no observatory status available.")
            continue

        # Look up the properties of the lens fitted to this observatory
        lens_name = obstory_status['lens']
        lens_props = hw.lens_data[lens_name]

        # This is an estimate of the *maximum* angular width we expect images to have.
        # It should be within a factor of two of the correct value!
        estimated_image_scale = lens_props.fov

        # Construct the path to this image within the file store
        filename = os.path.join(settings['dbFilestore'],
                                item['repositoryFname'])

        if not os.path.exists(filename):
            logging.info("Error: File <{}> is missing!".format(
                item['repositoryFname']))
            continue

        # 1. Copy image into working directory
        # logging.info("Copying file")
        img_name = item['repositoryFname']
        command = "cp {} {}/{}_tmp.png".format(filename, tmp0, img_name)
        # logging.info(command)
        os.system(command)

        # 2. We estimate the distortion of the image by passing a series of small portions of the image to
        # astrometry.net. We use this to construct a mapping from (x, y) pixel coordinates to (RA, Dec).

        # Define the size of each small portion we pass to astrometry.net
        fraction_x = 0.15
        fraction_y = 0.15
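        # For example (purely illustrative numbers): on a hypothetical 1280x720 frame,
        # a fraction of 0.15 gives portions of int(0.15 * 1280) x int(0.15 * 720)
        # = 192 x 108 pixels.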

        # Create a list of the centres of the portions we send
        fit_list = []
        portion_centres = [{'x': 0.5, 'y': 0.5}]

        # Points along the leading diagonal of the image
        for z in np.arange(0.1, 0.9, 0.1):
            if z != 0.5:
                portion_centres.append({'x': z, 'y': z})
                portion_centres.append({'x': (z + 0.5) / 2, 'y': z})
                portion_centres.append({'x': z, 'y': (z + 0.5) / 2})

        # Points along the trailing diagonal of the image
        for z in np.arange(0.1, 0.9, 0.1):
            if z != 0.5:
                portion_centres.append({'x': z, 'y': 1 - z})
                portion_centres.append({'x': (1.5 - z) / 2, 'y': z})
                portion_centres.append({'x': z, 'y': (1.5 - z) / 2})

        # Points down the vertical centre-line of the image
        for z in np.arange(0.15, 0.85, 0.1):
            portion_centres.append({'x': 0.5, 'y': z})

        # Points along the horizontal centre-line of the image
        for z in np.arange(0.15, 0.85, 0.1):
            portion_centres.append({'x': z, 'y': 0.5})

        # Fetch the pixel dimensions of the image we are working on
        d = image_dimensions("{}/{}_tmp.png".format(tmp0, img_name))

        @dask.delayed
        def analyse_image_portion(image_portion):

            # Make a temporary directory to store files in.
            # This is necessary as astrometry.net spams the cwd with lots of temporary junk
            tmp = "/tmp/dcf21_calibrateLens_{}_{}".format(
                item['repositoryFname'], image_portion['index'])
            # logging.info("Created temporary directory <{}>".format(tmp))
            os.system("mkdir {}".format(tmp))

            # Use ImageMagick to crop out each small piece of the image
            command = """
cd {6} ; \
rm -f {5}_tmp3.png ; \
convert {0}_tmp.png -colorspace sRGB -define png:format=png24 -crop {1:d}x{2:d}+{3:d}+{4:d} +repage {5}_tmp3.png
            """.format(os.path.join(tmp0, img_name), int(fraction_x * d[0]),
                       int(fraction_y * d[1]),
                       int((image_portion['x'] - fraction_x / 2) * d[0]),
                       int((image_portion['y'] - fraction_y / 2) * d[1]),
                       img_name, tmp)
            # logging.info(command)
            os.system(command)

            # Check that we've not run out of time
            if utc_must_stop and (time.time() > utc_must_stop):
                logging.info("We have run out of time! Aborting.")
                os.system("rm -Rf {}".format(tmp))
                return None

            # How long should we allow astrometry.net to run for?
            timeout = "40s"

            # Run astrometry.net. The --no-plots option speeds things up by skipping plot generation.
            # logging.info("Running astrometry.net")
            estimated_width = 2 * math.atan(
                math.tan(estimated_image_scale / 2 * deg) * fraction_x) * rad
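            # Illustrative example, assuming a hypothetical lens with a 70 deg field of
            # view: tan(35 deg) * 0.15 ~ 0.105, so estimated_width ~ 2 * atan(0.105)
            # ~ 12 deg, and solve-field is asked to search scales of roughly 7.2-14.4 deg.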
            astrometry_output = os.path.join(tmp, "txt")
            command = """
cd {5} ; \
timeout {0} solve-field --no-plots --crpix-center --scale-low {1:.1f} \
        --scale-high {2:.1f} --overwrite {3}_tmp3.png > {4} 2> /dev/null \
            """.format(timeout, estimated_width * 0.6, estimated_width * 1.2,
                       img_name, astrometry_output, tmp)
            # logging.info(command)
            os.system(command)

            # Parse the output from astrometry.net
            assert os.path.exists(
                astrometry_output), "Path <{}> doesn't exist".format(
                    astrometry_output)
            fit_text = open(astrometry_output).read()
            # logging.info(fit_text)

            # Clean up
            # logging.info("Removing temporary directory <{}>".format(tmp))
            os.system("rm -Rf {}".format(tmp))

            # Extract celestial coordinates of the centre of the frame from astrometry.net output
            test = re.search(
                r"\(RA H:M:S, Dec D:M:S\) = \(([\d-]*):(\d\d):([\d.]*), [+]?([\d-]*):(\d\d):([\d\.]*)\)",
                fit_text)
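            # For illustration, solve-field typically prints a line of the form
            #   Field center: (RA H:M:S, Dec D:M:S) = (05:35:17.3, +22:00:52.2)
            # which this regex would parse as ra ~ 5.59 hours, dec ~ +22.01 degrees.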
            if not test:
                logging.info("FAIL(POS): Point ({:.2f},{:.2f}).".format(
                    image_portion['x'], image_portion['y']))
                return None

            ra_sign = sgn(float(test.group(1)))
            ra = abs(float(test.group(1))) + float(test.group(2)) / 60 + float(
                test.group(3)) / 3600
            if ra_sign < 0:
                ra *= -1
            dec_sign = sgn(float(test.group(4)))
            dec = abs(float(test.group(4))) + float(
                test.group(5)) / 60 + float(test.group(6)) / 3600
            if dec_sign < 0:
                dec *= -1

            # If astrometry.net achieved a fit, then we report it to the user
            logging.info(
                "FIT: RA: {:7.2f}h. Dec {:7.2f} deg. Point ({:.2f},{:.2f}).".
                format(ra, dec, image_portion['x'], image_portion['y']))

            # Return the central point of this image fragment together with its (RA, Dec) coordinates;
            # the caller collects these results into <fit_list>.
            return {
                'ra': ra * pi / 12,
                'dec': dec * pi / 180,
                'x': image_portion['x'],
                'y': image_portion['y'],
                'radius': hypot(image_portion['x'] - 0.5,
                                image_portion['y'] - 0.5)
            }

        # Analyse each small portion of the image in turn
        dask_tasks = []
        for index, image_portion in enumerate(portion_centres):
            image_portion['index'] = index
            dask_tasks.append(
                analyse_image_portion(image_portion=image_portion))
        fit_list = dask.compute(*dask_tasks)

        # Remove fits which returned None
        fit_list = [i for i in fit_list if i is not None]

        # Clean up
        os.system("rm -Rf {}".format(tmp0))
        os.system("rm -Rf /tmp/tmp.*")

        # Make histogram of fits as a function of radius
        radius_histogram = [0] * 10
        for fit in fit_list:
            radius_histogram[int(fit['radius'] * 10)] += 1
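        # For example, a fit centred at (x, y) = (0.2, 0.3) has radius
        # hypot(-0.3, -0.2) ~ 0.36 and lands in bin 3; no radius can exceed
        # hypot(0.5, 0.5) ~ 0.71, so the ten bins are never overrun.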

        logging.info("Fit histogram vs radius: {}".format(radius_histogram))

        # Reject this image if there are insufficient fits from astrometry.net
        if min(radius_histogram[:5]) < 2:
            logging.info("Insufficient fits to continue")
            continue

        # Fit a gnomonic projection with barrel correction to the image, matching the celestial positions of all the
        # image fragments.

        # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/> for more information

        ra0 = fit_list[0]['ra']
        dec0 = fit_list[0]['dec']
        parameter_scales = [
            pi / 4, pi / 4, pi / 4, pi / 4, pi / 4, pi / 4, 5e-2, 5e-6
        ]
        parameters_default = [
            ra0, dec0, pi / 4, pi / 4, 0, lens_props.barrel_parameters[2], 0
        ]
        parameters_initial = [
            parameters_default[i] / parameter_scales[i]
            for i in range(len(parameters_default))
        ]
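        # Dividing each parameter by a characteristic scale puts them all at order
        # unity, which keeps the Nelder-Mead simplex and its xtol convergence test
        # well conditioned; the scales are multiplied back in once the fit converges.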
        fitting_result = scipy.optimize.minimize(mismatch,
                                                 parameters_initial,
                                                 method='nelder-mead',
                                                 options={
                                                     'xtol': 1e-8,
                                                     'disp': True,
                                                     'maxiter': 1e8,
                                                     'maxfev': 1e8
                                                 })
        parameters_optimal = fitting_result.x
        parameters_final = [
            parameters_optimal[i] * parameter_scales[i]
            for i in range(len(parameters_default))
        ]

        # Display best fit numbers
        headings = [["Central RA / hr", 12 / pi],
                    ["Central Decl / deg", 180 / pi],
                    ["Image width / deg", 180 / pi],
                    ["Image height / deg", 180 / pi],
                    ["Position angle / deg", 180 / pi], ["barrel_k1", 1],
                    ["barrel_k2", 1]]

        logging.info(
            "Fit achieved to {:d} points with offset of {:.5f}. Best fit parameters were:"
            .format(len(fit_list), fitting_result.fun))
        for i in range(len(parameters_default)):
            logging.info("{0:30s} : {1}".format(
                headings[i][0], parameters_final[i] * headings[i][1]))

        # Reject fit if objective function too large
        if fitting_result.fun > 1e-4:
            logging.info("Rejecting fit as chi-squared too large.")
            continue

        # Reject fit if k1/k2 values are too extreme
        if (abs(parameters_final[5]) > 0.3) or (abs(parameters_final[6]) >
                                                0.1):
            logging.info("Rejecting fit as parameters seem extreme.")
            continue

        # Update observation status
        successful_fits += 1
        user = settings['pigazingUser']
        timestamp = time.time()
        barrel_parameters = [
            parameters_final[2] * 180 / pi, parameters_final[3] * 180 / pi,
            parameters_final[5], parameters_final[6], 0
        ]
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="calibration:lens_barrel_parameters",
                         value=json.dumps(barrel_parameters)))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="calibration:chi_squared",
                                                 value=fitting_result.fun))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="calibration:point_count",
                                                 value=str(radius_histogram)))

    # Commit metadata changes
    db.commit()
    db0.commit()

    # Report how many fits we achieved
    logging.info(
        "Total of {:d} images successfully fitted.".format(successful_fits))

    if successful_fits > 0:
        # Now determine mean lens calibration each day
        logging.info("Averaging daily fits within period {} to {}".format(
            date_string(utc_min), date_string(utc_max)))
        block_size = 86400
        utc_min = (floor(utc_min / block_size + 0.5) -
                   0.5) * block_size  # Make sure that blocks start at noon
        time_blocks = list(
            np.arange(start=utc_min,
                      stop=utc_max + block_size,
                      step=block_size))

        # Start new block whenever we have a hardware refresh
        conn.execute(
            """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
        results = conn.fetchall()
        for item in results:
            time_blocks.append(item['time'])

        # Make sure that start points for time blocks are in order
        time_blocks.sort()

        for block_index, utc_block_min in enumerate(time_blocks[:-1]):
            utc_block_max = time_blocks[block_index + 1]

            # Select observations with calibration fits
            conn.execute(
                """
SELECT am1.stringValue AS barrel_parameters
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_block_min, utc_block_max))
            results = conn.fetchall()

            logging.info(
                "Averaging fits within period {} to {}: Found {} fits.".format(
                    date_string(utc_block_min), date_string(utc_block_max),
                    len(results)))

            # Average the fits we found
            if len(results) < 3:
                logging.info("Insufficient images to reliably average.")
                continue

            # Pick the median value of each fitted parameter
            value_list = {
                'scale_x': [],
                'scale_y': [],
                'barrel_k1': [],
                'barrel_k2': [],
                'barrel_k3': []
            }
            for item in results:
                barrel_parameters = json.loads(item['barrel_parameters'])
                value_list['scale_x'].append(barrel_parameters[0])
                value_list['scale_y'].append(barrel_parameters[1])
                value_list['barrel_k1'].append(barrel_parameters[2])
                value_list['barrel_k2'].append(barrel_parameters[3])
                value_list['barrel_k3'].append(barrel_parameters[4])

            median_values = {}
            for key, values in value_list.items():
                values.sort()
                median_values[key] = values[len(values) // 2]
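            # values[len(values) // 2] is the true median for an odd number of fits,
            # and the upper of the two central values for an even count
            # (e.g. four sorted values yield the value at index 2).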

            # Print fit information
            logging.info("""\
CALIBRATION FIT from {:2d} images: {}. \
""".format(
                len(results), "; ".join([
                    "{}: {}".format(key, median)
                    for key, median in median_values.items()
                ])))

            # Flush any previous observation status
            flush_calibration(obstory_id=obstory_id,
                              utc_min=utc_block_min - 1,
                              utc_max=utc_block_min + 1)

            # Update observatory status
            user = settings['pigazingUser']
            timestamp = time.time()
            barrel_parameters = [
                median_values['scale_x'], median_values['scale_y'],
                median_values['barrel_k1'], median_values['barrel_k2'],
                median_values['barrel_k3']
            ]
            db.register_obstory_metadata(
                obstory_id=obstory_id,
                key="calibration:lens_barrel_parameters",
                value=json.dumps(barrel_parameters),
                time_created=timestamp,
                metadata_time=utc_block_min,
                user_created=user)
            db.commit()

    # Clean up and exit
    db.commit()
    db.close_db()
    db0.commit()
    conn.close()
    db0.close()
    return
Example #8
def list_calibration_fixes(obstory_id, utc_min, utc_max):
    """
    List all the calibration fixes for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list calibration fixes for.
    :param utc_min:
        The start of the time period in which we should list calibration fixes (unix time).
    :param utc_max:
        The end of the time period in which we should list calibration fixes (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of calibration fixes
    calibration_fixes = []

    # Select per-observation calibration fits
    conn.execute(
        """
SELECT am1.stringValue AS barrel_parameters,
       am4.floatValue AS chi_squared, am5.stringValue AS point_count,
       o.obsTime AS time
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:chi_squared")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:point_count")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        calibration_fixes.append({
            'time': item['time'],
            'average': False,
            'fit': item
        })

    # Select daily-averaged calibration fits stored in the observatory status
    conn.execute(
        """
SELECT am1.stringValue AS barrel_parameters,
       am3.floatValue AS chi_squared, am4.stringValue AS point_count,
       am1.time AS time
FROM archive_observatories o
INNER JOIN archive_metadata am1 ON o.uid = am1.observatory AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
LEFT OUTER JOIN archive_metadata am3 ON o.uid = am3.observatory AND am3.time=am1.time AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:chi_squared")
LEFT OUTER JOIN archive_metadata am4 ON o.uid = am4.observatory AND am4.time=am1.time AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:point_count")
WHERE
    o.publicId=%s AND
    am1.time BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        calibration_fixes.append({
            'time': item['time'],
            'average': True,
            'fit': item
        })

    # Sort fixes by time
    calibration_fixes.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:1s} {:16s} {:8s} {:8s} {:10s} {:12s} {:6s}\
""".format("", "Time", "barrelK1", "barrelK2", "barrelK3", "chi2", "points"))

    # Display fixes
    for item in calibration_fixes:
        # Deal with missing data
        if item['fit']['chi_squared'] is None:
            item['fit']['chi_squared'] = -1
        if item['fit']['point_count'] is None:
            item['fit']['point_count'] = "-"

        # Display calibration fix
        barrel_parameters = json.loads(item['fit']['barrel_parameters'])
        print("""\
{:s} {:16s} {:8.4f} {:8.4f} {:10.7f} {:12.9f} {:s} {:s}\
""".format("\n>" if item['average'] else " ", date_string(item['time']),
           barrel_parameters[2], barrel_parameters[3], barrel_parameters[4],
           item['fit']['chi_squared'], item['fit']['point_count'],
           "\n" if item['average'] else ""))

    # Clean up and exit
    conn.close()
    db0.close()
    return
Example #9
def list_simultaneous_detections(utc_min=None, utc_max=None):
    """
    Display a list of all the simultaneous object detections registered in the database.

    :param utc_min:
        Only show observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only show observations made before the specified time stamp.

    :type utc_max:
        float

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Compile search criteria for observation groups
    where = [
        "g.semanticType = (SELECT uid FROM archive_semanticTypes WHERE name=\"{}\")"
        .format(simultaneous_event_type)
    ]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)

    # Search for observation groups containing groups of simultaneous detections
    conn.execute(
        """
SELECT g.publicId AS groupId, o.publicId AS obsId, o.obsTime, am.stringValue AS objectType
FROM archive_obs_groups g
INNER JOIN archive_obs_group_members m on g.uid = m.groupId
INNER JOIN archive_observations o ON m.childObservation = o.uid
INNER JOIN archive_metadata am ON g.uid = am.groupId AND
    am.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE """ + " AND ".join(where) + """
ORDER BY o.obsTime;
""", args)
    results = conn.fetchall()

    # Count how many simultaneous detections we find by type
    detections_by_type = {}

    # Compile list of groups
    obs_groups = {}
    obs_group_ids = []
    for item in results:
        key = item['groupId']
        if key not in obs_groups:
            obs_groups[key] = []
            obs_group_ids.append({
                'groupId': key,
                'time': item['obsTime'],
                'type': item['objectType']
            })

            # Add this simultaneous detection to tally
            if item['objectType'] not in detections_by_type:
                detections_by_type[item['objectType']] = 0
            detections_by_type[item['objectType']] += 1
        obs_groups[key].append(item['obsId'])

    # List information about each observation in turn
    print("{:16s} {:20s} {:20s} {:s}".format("Time", "groupId", "Object type",
                                             "Observations"))
    for group_info in obs_group_ids:
        # Print group information
        print("{:16s} {:20s} {:20s} {:s}".format(
            group_info['groupId'], date_string(group_info['time']),
            group_info['type'], " ".join(obs_groups[group_info['groupId']])))

    # Report tally of events
    print("\nTally of events by type:")
    for event_type in sorted(detections_by_type.keys()):
        print("    * {:26s}: {:6d}".format(event_type,
                                           detections_by_type[event_type]))
Example #10
def average_daily_fits(conn, db, obstory_id, utc_max, utc_min):
    """
    Average all of the orientation fixes within a given time period, excluding extreme fits. Update the observatory's
    status with the altitude and azimuth of the average fit, if it has a suitably small error bar.

    :param conn:
        Database connection object.
    :param db:
        Database object.
    :param obstory_id:
        Observatory publicId.
    :param utc_max:
        Unix time of the end of the time period.
    :param utc_min:
        Unix time of the beginning of the time period.
    :return:
        None
    """

    # Divide the time period we are working on into individual nights, and process each night separately
    logging.info("Averaging daily fits within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Each night is an 86400-second period
    daily_block_size = 86400

    # Make sure that blocks start at noon
    utc_min = (floor(utc_min / daily_block_size + 0.5) -
               0.5) * daily_block_size
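    # Worked example: a utc_min of 1577901600 (2020-01-01 18:00 UTC) gives
    # (floor(18262.75 + 0.5) - 0.5) * 86400 = 1577880000, i.e. 12:00 UTC on the
    # same day, so each block runs from one noon to the next.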
    time_blocks = list(
        np.arange(start=utc_min,
                  stop=utc_max + daily_block_size,
                  step=daily_block_size))

    # Start new block whenever we have a hardware refresh, even if it's in the middle of the night!
    conn.execute(
        """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()
    for item in results:
        time_blocks.append(item['time'])

    # Make sure that start points for time blocks are in order
    time_blocks.sort()

    # Work on each time block (i.e. night) in turn
    for block_index, utc_block_min in enumerate(time_blocks[:-1]):
        # End point for this time block
        utc_block_max = time_blocks[block_index + 1]

        # Search for observations with orientation fits
        conn.execute(
            """
SELECT am1.floatValue AS altitude, am2.floatValue AS azimuth, am3.floatValue AS pa, am4.floatValue AS tilt,
       am5.floatValue AS width_x_field, am6.floatValue AS width_y_field,
       am7.stringValue AS fit_quality
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:altitude")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:azimuth")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:pa")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:tilt")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_x_field")
INNER JOIN archive_metadata am6 ON o.uid = am6.observationId AND
    am6.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_y_field")
INNER JOIN archive_metadata am7 ON o.uid = am7.observationId AND
    am7.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_block_min, utc_block_max))
        results = conn.fetchall()

        # Remove results with poor fit
        results_filtered = []
        fit_threshold = 2  # pixels
        for item in results:
            fit_quality = float(json.loads(item['fit_quality'])[0])
            if fit_quality > fit_threshold:
                continue
            item['weight'] = 1 / (fit_quality + 0.1)
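            # For example, a fit quality of 0.4 pixels receives a weight of
            # 1 / (0.4 + 0.1) = 2.0; the 0.1 floor stops a perfect fit from being
            # given infinite weight.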
            results_filtered.append(item)
        results = results_filtered

        # Report how many images we found
        logging.info(
            "Averaging fits within period {} to {}: Found {} fits.".format(
                date_string(utc_block_min), date_string(utc_block_max),
                len(results)))

        # Average the fits we found
        if len(results) < 4:
            logging.info("Insufficient images to reliably average.")
            continue

        # What fraction of the worst fits do we reject?
        rejection_fraction = 0.25

        # Reject the 25% of fits which are furthest from the average
        rejection_count = int(len(results) * rejection_fraction)
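        # e.g. with 12 surviving fits, rejection_count = int(12 * 0.25) = 3, so the
        # three worst outliers are discarded one at a time below.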

        # Convert alt-az fits into radians and average
        # Iteratively remove the point furthest from the mean
        results_filtered = results

        # Iteratively take the average of the fits, reject the furthest outlier, and then take a new average
        for iteration in range(rejection_count):
            # Average the (alt, az) measurements for this observatory by finding their centroid on a sphere
            alt_az_list = [[i['altitude'] * deg, i['azimuth'] * deg]
                           for i in results_filtered]
            weights_list = [i['weight'] for i in results_filtered]
            alt_az_best = mean_angle_2d(pos_list=alt_az_list,
                                        weights=weights_list)[0]

            # Work out the offset of each fit from the average
            fit_offsets = [
                ang_dist(ra0=alt_az_best[1],
                         dec0=alt_az_best[0],
                         ra1=fitted_alt_az[1],
                         dec1=fitted_alt_az[0])
                for fitted_alt_az in alt_az_list
            ]

            # Reject the single fit which is furthest from the average
            fits_with_weights = list(zip(fit_offsets, results_filtered))
            fits_with_weights.sort(key=operator.itemgetter(0))
            fits_with_weights.reverse()

            # Create a new list of orientation fits, with the worst outlier excluded
            results_filtered = [item[1] for item in fits_with_weights[1:]]

        # Convert alt-az fits into radians and average by finding their centroid on a sphere
        alt_az_list = [[i['altitude'] * deg, i['azimuth'] * deg]
                       for i in results_filtered]
        weights_list = [i['weight'] for i in results_filtered]
        [alt_az_best, alt_az_error] = mean_angle_2d(pos_list=alt_az_list,
                                                    weights=weights_list)

        # Average other angles by finding their centroid on a circle
        output_values = {}
        for quantity in ['tilt', 'pa', 'width_x_field', 'width_y_field']:
            # Iteratively remove the point furthest from the mean
            results_filtered = results

            # Iteratively take the average of the values for each parameter, reject the furthest outlier,
            # and then take a new average
            for iteration in range(rejection_count):
                # Average quantity measurements
                quantity_values = [i[quantity] * deg for i in results_filtered]
                weights_list = [i['weight'] for i in results_filtered]
                quantity_mean = mean_angle(angle_list=quantity_values,
                                           weights=weights_list)[0]

                # Work out the offset of each fit from the average
                fit_offsets = []
                for index, quantity_value in enumerate(quantity_values):
                    offset = quantity_value - quantity_mean
                    if offset < -pi:
                        offset += 2 * pi
                    if offset > pi:
                        offset -= 2 * pi
                    fit_offsets.append(abs(offset))

                # Reject the worst fit which is furthest from the average
                fits_with_weights = list(zip(fit_offsets, results_filtered))
                fits_with_weights.sort(key=operator.itemgetter(0))
                fits_with_weights.reverse()
                results_filtered = [item[1] for item in fits_with_weights[1:]]

            # Filtering finished; now convert each fit into radians and average
            values_filtered = [i[quantity] * deg for i in results_filtered]
            weights_list = [i['weight'] for i in results_filtered]
            value_best = mean_angle(angle_list=values_filtered,
                                    weights=weights_list)[0]
            output_values[quantity] = value_best * rad

        # Print fit information
        success = (
            alt_az_error * rad < 0.1
        )  # Only accept determinations with better precision than 0.1 deg
        adjective = "SUCCESSFUL" if success else "REJECTED"
        logging.info("""\
{} ORIENTATION FIT from {:2d} images: Alt: {:.2f} deg. Az: {:.2f} deg. PA: {:.2f} deg. \
ScaleX: {:.2f} deg. ScaleY: {:.2f} deg. Uncertainty: {:.2f} deg.\
""".format(adjective, len(results_filtered), alt_az_best[0] * rad,
           alt_az_best[1] * rad, output_values['tilt'],
           output_values['width_x_field'], output_values['width_y_field'],
           alt_az_error * rad))

        # Update observatory status
        if success:
            # Flush any previous observation status
            flush_orientation(obstory_id=obstory_id,
                              utc_min=utc_block_min - 1,
                              utc_max=utc_block_min + 1)

            user = settings['pigazingUser']
            timestamp = time.time()
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:altitude",
                                         value=alt_az_best[0] * rad,
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:azimuth",
                                         value=alt_az_best[1] * rad,
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:pa",
                                         value=output_values['pa'],
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:tilt",
                                         value=output_values['tilt'],
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:width_x_field",
                                         value=output_values['width_x_field'],
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:width_y_field",
                                         value=output_values['width_y_field'],
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:uncertainty",
                                         value=alt_az_error * rad,
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.register_obstory_metadata(obstory_id=obstory_id,
                                         key="orientation:image_count",
                                         value=len(results),
                                         time_created=timestamp,
                                         metadata_time=utc_block_min,
                                         user_created=user)
            db.commit()
Example #11
def search_simultaneous_detections(utc_min, utc_max, utc_must_stop):
    # Count how many simultaneous detections we discover
    simultaneous_detections_by_type = {}

    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    # Search for moving objects within time span
    search = mp.ObservationSearch(observation_type="pigazing:movingObject/",
                                  time_min=utc_min,
                                  time_max=utc_max,
                                  limit=1000000)
    events_raw = db.search_observations(search)

    # Use only event descriptors, not other returned fields
    events = events_raw['obs']

    # Make a list of which events are already members of groups
    events_used = [False] * len(events)

    # Look up the categorisation of each event
    for event in events:
        event.category = db.get_observation_metadata(event.id, "web:category")

    # Throw out junk events and unclassified events
    events = [x for x in events if x.category is not None and x.category not in ('Junk', 'Bin')]

    # Look up which pre-existing observation groups each event is in
    for index, event in enumerate(events):
        db.con.execute("""
SELECT COUNT(*)
FROM archive_obs_groups grp
WHERE grp.semanticType = (SELECT y.uid FROM archive_semanticTypes y WHERE y.name=%s) AND
      EXISTS (SELECT 1 FROM archive_obs_group_members x
              WHERE x.groupId=grp.uid AND
                    x.childObservation=(SELECT z.uid FROM archive_observations z WHERE z.publicId=%s));
""", (simultaneous_event_type, event.id))

        if db.con.fetchone()['COUNT(*)'] > 0:
            events_used[index] = True

    # Sort event descriptors into chronological order
    events.sort(key=lambda x: x.obs_time)

    # Look up the duration of each event, and calculate its end time
    for event in events:
        duration = 0
        for meta in event.meta:
            if meta.key == "pigazing:duration":
                duration = meta.value
        event.duration = duration
        event.obs_time_end = event.obs_time + duration

    # Compile list of simultaneous object detections
    groups = []

    # Search for simultaneous object detections
    for index in range(len(events)):
        # If we have already put this event in another simultaneous detection, don't add it to others
        if events_used[index]:
            continue

        # Look up time span of event
        event = events[index]
        obstory_id_list = [event.obstory_id]  # List of all observatories which saw this event
        utc_min = event.obs_time  # Earliest start time of any of the events in this group
        utc_max = event.obs_time_end  # Latest end time of any of the events in this group
        events_used[index] = True
        prev_group_size = -1
        group_members = [index]

        # Most events must be seen within a maximum offset of 1 second at different stations.
        # Planes are allowed an offset of up to 30 seconds due to their large parallax
        search_margin = 60
        match_margin = 30 if event.category == "Plane" else 1

        # Search for other events which fall within the same time span
        # Do this iteratively, as a preceding event can expand the end time of the group, and vice versa
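        # For example, a detection spanning 21:00:00-21:00:05 and another spanning
        # 21:00:09-21:00:14 only end up in the same group once an intermediate
        # detection (say 21:00:04-21:00:10) has been added and extended the group's
        # end time.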
        while len(group_members) > prev_group_size:
            prev_group_size = len(group_members)
            # Search for events at earlier times, and then at later times
            for search_direction in (-1, 1):
                # Start from the reference event
                candidate_index = index

                # Step through other events, providing they're within range
                while ((candidate_index >= 0) and
                       (candidate_index < len(events))):
                    # Fetch event record
                    candidate = events[candidate_index]

                    # Stop search if we've gone out of time range
                    if ((candidate.obs_time_end < utc_min - search_margin) or
                            (candidate.obs_time > utc_max + search_margin)):
                        break

                    # Check whether this is a simultaneous detection, with same categorisation
                    if ((not events_used[candidate_index]) and
                            (candidate.category == event.category) and
                            (candidate.obs_time < utc_max + match_margin) and
                            (candidate.obs_time_end > utc_min - match_margin)):
                        # Add this event to the group, and update time span of event
                        group_members.append(candidate_index)
                        utc_min = min(utc_min, candidate.obs_time)
                        utc_max = max(utc_max, candidate.obs_time_end)

                        # Compile a list of all the observatories which saw this event
                        if candidate.obstory_id not in obstory_id_list:
                            obstory_id_list.append(candidate.obstory_id)

                        # Record that we have added this event to a group
                        events_used[candidate_index] = True

                    # Step on to the next candidate event to add into group
                    candidate_index += search_direction

        # We have found a coincident detection only if multiple observatories saw an event at the same time
        if len(obstory_id_list) < 2:
            continue

        # Update tally of events by type
        if event.category not in simultaneous_detections_by_type:
            simultaneous_detections_by_type[event.category] = 0
        simultaneous_detections_by_type[event.category] += 1

        # Initialise maximum baseline between the stations which saw this object
        maximum_obstory_spacing = 0

        # Work out locations of all observatories which saw this event
        obstory_locs = []
        for obstory_id in obstory_id_list:
            obstory_info = db.get_obstory_from_id(obstory_id)
            obstory_loc = Point.from_lat_lng(lat=obstory_info['latitude'],
                                             lng=obstory_info['longitude'],
                                             alt=0,
                                             utc=(utc_min + utc_max) / 2
                                             )
            obstory_locs.append(obstory_loc)

        # Check the distances between all pairs of observatories
        pairs = [[obstory_locs[i], obstory_locs[j]]
                 for i in range(len(obstory_id_list))
                 for j in range(i + 1, len(obstory_id_list))
                 ]

        # Work out maximum baseline between the stations which saw this object
        for pair in pairs:
            maximum_obstory_spacing = max(maximum_obstory_spacing,
                                          abs(pair[0].displacement_vector_from(pair[1])))

        # Create information about this simultaneous detection
        groups.append({'time': (utc_min + utc_max) / 2,
                       'obstory_list': obstory_id_list,
                       'time_spread': utc_max - utc_min,
                       'geographic_spacing': maximum_obstory_spacing,
                       'category': event.category,
                       'observations': [{'obs': events[x]} for x in group_members],
                       'ids': [events[x].id for x in group_members]})

    # Report individual events we found
    for item in groups:
        logging.info("""
{time} -- {count:3d} stations; max baseline {baseline:5.0f} m; time spread {spread:4.1f} sec; type <{category}>
""".format(time=dcf_ast.date_string(item['time']),
           count=len(item['obstory_list']),
           baseline=item['geographic_spacing'],
           spread=item['time_spread'],
           category=item['category']).strip())

    # Report statistics on events we found
    logging.info("{:6d} moving objects seen within this time period".
                 format(len(events_raw['obs'])))
    logging.info("{:6d} moving objects rejected because they were unclassified".
                 format(len(events_raw['obs']) - len(events)))
    logging.info("{:6d} simultaneous detections found.".
                 format(len(groups)))

    # Report statistics by event type
    logging.info("Tally of simultaneous detections by type:")
    for event_type in sorted(simultaneous_detections_by_type.keys()):
        logging.info("    * {:32s}: {:6d}".format(event_type, simultaneous_detections_by_type[event_type]))

    # Record simultaneous event detections into the database
    for item in groups:
        # Create new observation group
        group = db.register_obsgroup(title="Multi-station detection", user_id="system",
                                     semantic_type=simultaneous_event_type,
                                     obs_time=item['time'], set_time=time.time(),
                                     obs=item['ids'])

        # logging.info("Simultaneous detection at {time} by {count:3d} stations (time spread {spread:.1f} sec)".
        #              format(time=dcf_ast.date_string(item['time']),
        #                     count=len(item['obstory_list']),
        #                     spread=item['time_spread']))
        # logging.info("Observation IDs: %s" % item['ids'])

        # Register group metadata
        timestamp = time.time()
        db.set_obsgroup_metadata(user_id="system", group_id=group.id, utc=timestamp,
                                 meta=mp.Meta(key="web:category", value=item['category']))
        db.set_obsgroup_metadata(user_id="system", group_id=group.id, utc=timestamp,
                                 meta=mp.Meta(key="simultaneous:time_spread", value=item['time_spread']))
        db.set_obsgroup_metadata(user_id="system", group_id=group.id, utc=timestamp,
                                 meta=mp.Meta(key="simulataneous:geographic_spread", value=item['geographic_spacing']))

    # Commit changes
    db.commit()
Example #12
def observing_loop():
    obstory_id = installation_info['observatoryId']

    logging.info("Observatory controller launched")

    # Fetch observatory status, e.g. location, etc
    logging.info("Fetching observatory status")
    latitude = known_observatories[obstory_id]['latitude']
    longitude = known_observatories[obstory_id]['longitude']
    altitude = 0
    latest_position_update = 0
    flag_gps = 0

    # Make sure that observatory exists in the database

    # Start main observing loop
    while True:
        # Get a new MySQL connection because the old one may no longer be connected
        db = obsarchive_db.ObservationDatabase(
            file_store_path=settings['dbFilestore'],
            db_host=installation_info['mysqlHost'],
            db_user=installation_info['mysqlUser'],
            db_password=installation_info['mysqlPassword'],
            db_name=installation_info['mysqlDatabase'],
            obstory_id=installation_info['observatoryId'])

        # Get a GPS fix on the current time and our location
        gps_fix = get_gps_fix()
        if gps_fix:
            latitude = gps_fix['latitude']
            longitude = gps_fix['longitude']
            altitude = gps_fix['altitude']
            flag_gps = 1

        # Decide whether we should observe, or do some day-time maintenance tasks
        logging.info("Observation controller considering what to do next.")

        time_now = time.time()

        # How far below the horizon do we require the Sun to be before we start observing?
        angle_below_horizon = settings['sunRequiredAngleBelowHorizon']

        sun_times_yesterday = sunset_times.sun_times(
            unix_time=time_now - 3600 * 24,
            longitude=longitude,
            latitude=latitude,
            angle_below_horizon=angle_below_horizon)
        sun_times_today = sunset_times.sun_times(
            unix_time=time_now,
            longitude=longitude,
            latitude=latitude,
            angle_below_horizon=angle_below_horizon)
        sun_times_tomorrow = sunset_times.sun_times(
            unix_time=time_now + 3600 * 24,
            longitude=longitude,
            latitude=latitude,
            angle_below_horizon=angle_below_horizon)
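        # As used below, element [0] of each tuple returned by sun_times() is the
        # sunrise time and element [2] is the sunset time.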

        logging.info("Sunrise at {}".format(
            dcf_ast.date_string(sun_times_yesterday[0])))
        logging.info("Sunset  at {}".format(
            dcf_ast.date_string(sun_times_yesterday[2])))
        logging.info("Sunrise at {}".format(
            dcf_ast.date_string(sun_times_today[0])))
        logging.info("Sunset  at {}".format(
            dcf_ast.date_string(sun_times_today[2])))
        logging.info("Sunrise at {}".format(
            dcf_ast.date_string(sun_times_tomorrow[0])))
        logging.info("Sunset  at {}".format(
            dcf_ast.date_string(sun_times_tomorrow[2])))

        sun_margin = settings['sunMargin']

        # Calculate whether it's currently night time, and how long until the next sunrise
        is_night_time = False
        seconds_till_sunrise = 0

        # Test whether it is night time: we are between yesterday's sunset and today's sunrise
        if (time_now > sun_times_yesterday[2] + sun_margin) and (
                time_now < sun_times_today[0] - sun_margin):
            logging.info("""
It is night time. We are between yesterday's sunset and today's sunrise.
""".strip())
            is_night_time = True
            seconds_till_sunrise = sun_times_today[0] - time_now

        # We are between yesterday's sunset and today's sunrise, but within the twilight margin of sunset or sunrise
        elif (time_now > sun_times_yesterday[2]) and (time_now <
                                                      sun_times_today[0]):
            next_observing_time = sun_times_yesterday[2] + sun_margin
            next_observing_wait = next_observing_time - time_now
            if next_observing_wait > 0:
                logging.info("""
We are between yesterday's sunset and today's sunrise, but sun has recently set. \
Waiting {:.0f} seconds (until {}) to start observing.
""".format(next_observing_wait,
                dcf_ast.date_string(next_observing_time)).strip())
                db.commit()
                db.close_db()
                del db
                time.sleep(next_observing_wait + 2)
                continue

        # Test whether it is night time: we are between today's sunset and tomorrow's sunrise
        elif (time_now > sun_times_today[2] + sun_margin) and (
                time_now < sun_times_tomorrow[0] - sun_margin):
            logging.info("""
It is night time. We are between today's sunset and tomorrow's sunrise.
""".strip())
            is_night_time = True
            seconds_till_sunrise = sun_times_tomorrow[0] - time_now

        # We are between today's sunset and tomorrow's sunrise, but within the twilight margin of sunset or sunrise
        elif (time_now > sun_times_today[2]) and (time_now <
                                                  sun_times_tomorrow[0]):
            next_observing_time = sun_times_today[2] + sun_margin
            next_observing_wait = next_observing_time - time_now
            if next_observing_wait > 0:
                logging.info("""
We are between today's sunset and tomorrow's sunrise, but sun has recently set. \
Waiting {:.0f} seconds (until {}) to start observing.
""".format(next_observing_wait,
                dcf_ast.date_string(next_observing_time)).strip())
                db.commit()
                db.close_db()
                del db
                time.sleep(next_observing_wait + 2)
                continue

        # Calculate time until the next sunset
        seconds_till_sunset = sun_times_yesterday[2] - time_now
        if seconds_till_sunset < -sun_margin:
            seconds_till_sunset = sun_times_today[2] - time_now
        if seconds_till_sunset < -sun_margin:
            seconds_till_sunset = sun_times_tomorrow[2] - time_now
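        # The cascade above tries yesterday's sunset first and, if that is already
        # more than sun_margin in the past, falls back to today's and then
        # tomorrow's, so we always end up with the next sunset still ahead of us.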

        # If sunset was well in the past, and sunrise is well in the future, we should observe!
        minimum_time_worth_observing = 600
        if is_night_time and (seconds_till_sunrise >
                              (sun_margin + minimum_time_worth_observing)):

            # Check that observatory exists
            check_observatory_exists(db_handle=db,
                                     obs_id=obstory_id,
                                     utc=time.time())

            # Fetch updated observatory status
            obstory_status = db.get_obstory_status(obstory_id=obstory_id)

            # If we've not stored a GPS fix in the database within the past six hours, do so now
            if flag_gps and (time.time() > latest_position_update + 6 * 3600):
                latest_position_update = time.time()
                db.register_obstory_metadata(
                    obstory_id=obstory_id,
                    key="latitude_gps",
                    value=latitude,
                    metadata_time=time.time(),
                    time_created=time.time(),
                    user_created=settings['pigazingUser'])
                db.register_obstory_metadata(
                    obstory_id=obstory_id,
                    key="longitude_gps",
                    value=longitude,
                    metadata_time=time.time(),
                    time_created=time.time(),
                    user_created=settings['pigazingUser'])
                db.register_obstory_metadata(
                    obstory_id=obstory_id,
                    key="altitude_gps",
                    value=altitude,
                    metadata_time=time.time(),
                    time_created=time.time(),
                    user_created=settings['pigazingUser'])

            # Create clipping region mask file
            mask_file = "/tmp/triggermask_%d.txt" % os.getpid()
            open(mask_file, "w").write("\n\n".join([
                "\n".join([("%d %d" % tuple(p)) for p in point_list])
                for point_list in json.loads(obstory_status["clipping_region"])
            ]))
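            # The mask file written above holds one block per clipping polygon: each
            # block lists the polygon's vertices as integer "x y" pixel coordinates,
            # one per line, with blank lines separating the polygons.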

            # Commit updates to the database
            db.commit()
            db.close_db()
            del db

            # Calculate how long to observe for
            observing_duration = seconds_till_sunrise - sun_margin

            # Do not record too much video in one file, otherwise the files become unmanageably large
            if not settings['realTime']:
                observing_duration = min(observing_duration,
                                         settings['videoMaxRecordTime'])

            # Start observing run
            t_stop = time_now + observing_duration
            logging.info("""
Starting observing run until {} (running for {:.0f} seconds).
""".format(dcf_ast.date_string(t_stop), observing_duration).strip())

            # Flick the relay to turn the camera on
            relay_control.camera_on()
            time.sleep(5)
            logging.info("Camera has been turned on.")

            # Observe! We use different binaries depending on whether we're using a webcam-like camera,
            # or a DSLR connected via gphoto2
            time_key = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')

            # Work out which C binary we're using to do observing
            if settings['realTime']:
                output_argument = ""
                if obstory_status["camera_type"] == "gphoto2":
                    binary = "realtimeObserve_dslr"
                else:
                    binary = "realtimeObserve"
            else:
                output_argument = """ --output \"{}/raw_video/{}_{}\" """.format(
                    settings['dataPath'], time_key, obstory_id)
                if settings['i_am_a_rpi']:
                    binary = "recordH264_openmax"
                else:
                    binary = "recordH264_libav"

            binary_full_path = "{path}{debug}/{binary}".format(
                path=settings['binaryPath'],
                debug="/debug" if settings['debug'] else "",
                binary=binary)
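
            # Wrap the observing binary in the coreutils 'timeout' utility, with a 300-second margin
            # beyond the planned observing duration, so a hung process cannot block the loop indefinitely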
            cmd = """
timeout {timeout} \
{binary} --utc-stop {utc_stop:.1f} \
         --obsid \"{obsid}\" \
         --device \"{device}\" \
         --fps {fps} \
         --width {width:d} \
         --height {height:d} \
         --mask \"{mask_file}\" \
         --latitude {latitude} \
         --longitude {longitude} \
         --flag-gps {flag_gps} \
         --flag-upside-down {upside_down} \
         {output_argument}
""".format(timeout=float(observing_duration + 300),
            binary=binary_full_path,
            utc_stop=float(t_stop),
            obsid=obstory_id,
            device=settings['videoDev'],
            width=int(obstory_status['camera_width']),
            height=int(obstory_status['camera_height']),
            fps=float(obstory_status['camera_fps']),
            mask_file=mask_file,
            latitude=float(latitude),
            longitude=float(longitude),
            flag_gps=int(flag_gps),
            upside_down=int(obstory_status['camera_upside_down']),
            output_argument=output_argument).strip()

            logging.info("Running command: {}".format(cmd))
            os.system(cmd)

            # Flick the relay to turn the camera off
            relay_control.camera_off()
            time.sleep(5)
            logging.info("Camera has been turned off.")

            # Snooze for up to 10 minutes; we may rerun observing tasks in a while if they ended prematurely
            if time.time() < t_stop:
                snooze_duration = float(min(t_stop - time.time(), 600))
                logging.info(
                    "Snoozing for {:.0f} seconds".format(snooze_duration))
                time.sleep(snooze_duration)

            continue

        # It is daytime, so consider running daytime tasks

        # First, commit updates to the database
        db.commit()
        db.close_db()
        del db

        # Estimate roughly when we're next going to be able to observe (i.e. shortly after sunset)
        next_observing_wait = seconds_till_sunset + sun_margin

        # If we've got more than an hour, it's worth doing some daytime tasks
        # Do daytime tasks on an RPi only if we are doing real-time observation
        if (next_observing_wait > 3600) and (settings['realTime']
                                             or not settings['i_am_a_rpi']):
            t_stop = time_now + next_observing_wait
            logging.info("""
Starting daytime tasks until {} (running for {:.0f} seconds).
""".format(dcf_ast.date_string(t_stop), next_observing_wait).strip())
            os.system("cd {} ; ./daytimeTasks.py --stop-by {}".format(
                os.path.join(settings['pythonPath'], "observe"), t_stop))

            # Snooze for up to 30 minutes; we may rerun daytime tasks in a while if they ended prematurely
            if time.time() < t_stop:
                snooze_duration = float(min(t_stop - time.time(), 1800))
                logging.info(
                    "Snoozing for {:.0f} seconds".format(snooze_duration))
                time.sleep(snooze_duration)

        else:
            if next_observing_wait < 0:
                next_observing_wait = 0
            next_observing_wait += 30
            t_stop = time_now + next_observing_wait
            logging.info("""
Not time to start observing yet, so sleeping until {} ({:.0f} seconds away).
""".format(dcf_ast.date_string(t_stop), next_observing_wait).strip())
            time.sleep(next_observing_wait)

        # Little snooze to prevent spinning around the loop
        snooze_duration = float(10)
        logging.info("Snoozing for {:.0f} seconds".format(snooze_duration))
        time.sleep(snooze_duration)
def list_planes(obstory_id, utc_min, utc_max):
    """
    List all the plane identifications for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list identifications for.
    :param utc_min:
        The start of the time period in which we should list identifications (unix time).
    :param utc_max:
        The end of the time period in which we should list identifications (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of plane identifications
    plane_identifications = []

    # Select moving objects with plane identifications
    conn.execute(
        """
SELECT am1.stringValue AS call_sign, am2.floatValue AS ang_offset,
       am3.floatValue AS clock_offset, am4.floatValue AS duration, am5.stringValue AS hex_ident,
       am6.floatValue AS distance,
       am7.stringValue AS operator, am8.stringValue AS model, am9.stringValue AS manufacturer,
       o.obsTime AS time, o.publicId AS obsId
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:call_sign")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:angular_offset")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:clock_offset")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:duration")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:hex_ident")
LEFT JOIN archive_metadata am6 ON o.uid = am6.observationId AND
    am6.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:distance")

LEFT JOIN archive_metadata am7 ON o.uid = am7.observationId AND
    am7.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:operator")
LEFT JOIN archive_metadata am8 ON o.uid = am8.observationId AND
    am8.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:model")
LEFT JOIN archive_metadata am9 ON o.uid = am9.observationId AND
    am9.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:manufacturer")


WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        plane_identifications.append({
            'id': item['obsId'],
            'time': item['time'],
            'call_sign': item['call_sign'],
            'ang_offset': item['ang_offset'],
            'clock_offset': item['clock_offset'],
            'duration': item['duration'],
            'hex_ident': item['hex_ident'],
            'distance': item['distance'],
            'operator': item['operator'],
            'model': item['model'],
            'manufacturer': item['manufacturer']
        })

    # Sort identifications by time
    plane_identifications.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:16s} {:18s} {:18s} {:8s} {:10s} {:10s} {:10s} {:30s} {:30s} {:30s}\
""".format("Time", "Call sign", "Hex ident", "Duration", "Ang offset",
           "Clock off", "Distance", "Operator", "Model", "Manufacturer"))

    # Display list of planes
    for item in plane_identifications:
        print("""\
{:16s} {:18s} {:18s} {:8.1f} {:10.1f} {:10.1f} {:10.1f} {:30s} {:30s} {:30s}\
""".format(date_string(item['time']), item['call_sign'], item['hex_ident'],
           item['duration'], item['ang_offset'], item['clock_offset'],
           item['distance'], item['operator'], item['model'],
           item['manufacturer']))

    # Clean up and exit
    return
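
# A minimal usage sketch (not part of the original example): list the plane identifications
# recorded by a hypothetical observatory over a single night. The observatory ID and time
# stamps below are placeholders.
#
#     from datetime import datetime, timezone
#     night_start = datetime(2020, 8, 11, 21, 0, tzinfo=timezone.utc).timestamp()
#     night_end = datetime(2020, 8, 12, 5, 0, tzinfo=timezone.utc).timestamp()
#     list_planes(obstory_id="my_observatory", utc_min=night_start, utc_max=night_end)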
Example #14
def plane_determination(utc_min, utc_max, source):
    """
    Estimate the identity of aircraft observed between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should determine the identity of aircraft (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the identity of aircraft (unix time).
    :type utc_max:
        float
    :param source:
        The source we should use for plane trajectories. Either 'adsb' or 'fr24'.
    :type source:
        str
    :return:
        None
    """

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Starting aircraft identification.")

    # Count how many objects we manage to successfully identify
    outcomes = {
        'successful_fits': 0,
        'unsuccessful_fits': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Status update
    logging.info("Searching for aircraft within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for planes and satellites within this time period
    conn.execute(
        """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
INNER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s AND
      (am2.stringValue='Plane' OR am2.stringValue='Satellite' OR am2.stringValue='Junk')
ORDER BY ao.obsTime
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Log how many objects we are going to work on
    logging.info("Estimating the identity of {:d} aircraft.".format(
        len(results)))

    # Analyse each aircraft in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {meta_item.key: meta_item.value for meta_item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {meta_item.key: meta_item.value for meta_item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}
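        # (where a key appears in both dictionaries, the file-level value takes precedence, since it is unpacked last)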

        # Check we have all required metadata
        if 'pigazing:path' not in all_metadata:
            logging.info(
                "Cannot process <{}> due to inadequate metadata.".format(
                    item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}]".format(
            date=date_string(utc=item['obsTime']), obs=item['observationId'])

        # Project path from (x,y) coordinates into (RA, Dec)
        projector = PathProjection(db=db,
                                   obstory_id=item['observatory'],
                                   time=item['obsTime'],
                                   logging_prefix=logging_prefix)

        path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list = projector.ra_dec_from_x_y(
            path_json=all_metadata['pigazing:path'],
            path_bezier_json=all_metadata['pigazing:pathBezier'],
            detections=all_metadata['pigazing:detectionCount'],
            duration=all_metadata['pigazing:duration'])

        # Check for error
        if projector.error is not None:
            if projector.error in outcomes:
                outcomes[projector.error] += 1
            continue

        # Check for notifications
        for notification in projector.notifications:
            if notification in outcomes:
                outcomes[notification] += 1

        # Check number of points in path
        path_len = len(path_x_y)

        # Look up list of aircraft tracks at the time of this sighting
        if source == 'adsb':
            aircraft_list = fetch_planes_from_adsb(utc=item['obsTime'])
        elif source == 'fr24':
            aircraft_list = fetch_planes_from_fr24(utc=item['obsTime'])
        else:
            raise ValueError("Unknown source <{}>".format(source))

        # List of aircraft this moving object might be
        candidate_aircraft = []

        # Check that we found a list of aircraft
        if aircraft_list is None:
            logging.info("{date} [{obs}] -- No aircraft records found.".format(
                date=date_string(utc=item['obsTime']),
                obs=item['observationId']))
            outcomes['insufficient_information'] += 1
            continue

        # Logging message about how many aircraft we're testing
        # logging.info("{date} [{obs}] -- Matching against {count:7d} aircraft.".format(
        #     date=date_string(utc=item['obsTime']),
        #     obs=item['observationId'],
        #     count=len(aircraft_list)
        # ))

        # Test for each candidate aircraft in turn
        for aircraft in aircraft_list:
            # Fetch aircraft position at each time point along trajectory
            ang_mismatch_list = []
            distance_list = []
            altitude_list = []

            def aircraft_angular_offset(index, clock_offset):
                # Fetch observed position of object at this time point
                pt_utc = sight_line_list[index]['utc']
                observatory_position = sight_line_list[index]['obs_position']
                observed_sight_line = sight_line_list[index]['line'].direction

                # Project position of this aircraft in space at this time point
                aircraft_position = path_interpolate(aircraft=aircraft,
                                                     utc=pt_utc + clock_offset)
                if aircraft_position is None:
                    return np.nan, np.nan, np.nan

                # Convert position to Cartesian coordinates
                aircraft_point = Point.from_lat_lng(
                    lat=aircraft_position['lat'],
                    lng=aircraft_position['lon'],
                    alt=aircraft_position['altitude'] * feet,
                    utc=None)

                # Work out offset of plane's position from observed moving object
                aircraft_sight_line = aircraft_point.to_vector(
                ) - observatory_position.to_vector()
                angular_offset = aircraft_sight_line.angle_with(
                    other=observed_sight_line)  # degrees
                distance = abs(aircraft_sight_line)
                altitude = aircraft_position['altitude'] * feet

                return angular_offset, distance, altitude

            def time_offset_objective(p):
                """
                Objective function that we minimise in order to find the best fit clock offset between the observed
                and model paths.

                :param p:
                    Vector with a single component: the clock offset
                :return:
                    Metric to minimise
                """

                # Turn input parameters into a time offset
                clock_offset = p[0]

                # Look up angular offset
                ang_mismatch, distance, altitude = aircraft_angular_offset(
                    index=0, clock_offset=clock_offset)

                # Return metric to minimise
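                # (the exp(clock_offset / 8) factor penalises fits that need a large positive clock offset increasingly strongly)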
                return ang_mismatch * exp(clock_offset / 8)

            # Work out the optimum time offset between the plane's path and the observed path
            # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/>
            # for more information about how this works
            parameters_initial = [0]
            parameters_optimised = scipy.optimize.minimize(
                time_offset_objective,
                np.asarray(parameters_initial),
                options={
                    'disp': False,
                    'maxiter': 100
                }).x

            # Construct best-fit linear trajectory for best-fitting parameters
            clock_offset = float(parameters_optimised[0])

            # Check clock offset is reasonable
            if abs(clock_offset) > global_settings['max_clock_offset']:
                continue

            # Measure the offset between the plane's position and the observed position at each time point
            for index in range(path_len):
                # Look up angular mismatch at this time point
                ang_mismatch, distance, altitude = aircraft_angular_offset(
                    index=index, clock_offset=clock_offset)

                # Keep list of the offsets at each recorded time point along the trajectory
                ang_mismatch_list.append(ang_mismatch)
                distance_list.append(distance)
                altitude_list.append(altitude)

            # Consider adding this plane to list of candidates
            mean_ang_mismatch = np.mean(
                np.asarray(ang_mismatch_list))  # degrees
            distance_mean = np.mean(np.asarray(distance_list))  # metres
            altitude_mean = np.mean(np.asarray(altitude_list))  # metres

            if mean_ang_mismatch < global_settings['max_mean_angular_mismatch']:
                start_time = sight_line_list[0]['utc']
                end_time = sight_line_list[-1]['utc']
                start_point = path_interpolate(aircraft=aircraft,
                                               utc=start_time + clock_offset)
                end_point = path_interpolate(aircraft=aircraft,
                                             utc=end_time + clock_offset)
                candidate_aircraft.append({
                    'call_sign': aircraft['call_sign'],  # string
                    'hex_ident': aircraft['hex_ident'],  # string
                    'distance': distance_mean / 1e3,  # km
                    'altitude': altitude_mean / 1e3,  # km
                    'clock_offset': clock_offset,  # seconds
                    'offset': mean_ang_mismatch,  # degrees
                    'start_point': start_point,
                    'end_point': end_point
                })

        # Add a fallback candidate representing an unidentified object, in case no aircraft matches
        if len(candidate_aircraft) == 0:
            candidate_aircraft.append({
                'call_sign': "Unidentified",
                'hex_ident': "Unidentified",
                'distance': 0,
                'altitude': 0,
                'clock_offset': 0,
                'offset': 0,
                'start_point': None,
                'end_point': None
            })

        # Sort candidates by score
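        # (the score combines the angular offset, in degrees, and the clock offset, in seconds, in quadrature; the smallest score is taken as the best match)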
        for candidate in candidate_aircraft:
            candidate['score'] = hypot(
                candidate['offset'],
                candidate['clock_offset'],
            )
        candidate_aircraft.sort(key=itemgetter('score'))

        # Report possible aircraft identifications
        logging.info("{prefix} -- {aircraft}".format(
            prefix=logging_prefix,
            aircraft=", ".join([
                "{} ({:.1f} deg offset; clock offset {:.1f} sec; distance {:.1f} km)"
                .format(aircraft['call_sign'], aircraft['offset'],
                        aircraft['clock_offset'], aircraft['distance'])
                for aircraft in candidate_aircraft
            ])))

        # Identify most likely aircraft
        most_likely_aircraft = candidate_aircraft[0]

        # Fetch extra information about plane
        plane_info = fetch_aircraft_data(
            hex_ident=most_likely_aircraft['hex_ident'])

        # Store aircraft identification
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:call_sign",
                         value=most_likely_aircraft['call_sign']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:hex_ident",
                         value=most_likely_aircraft['hex_ident']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:clock_offset",
                         value=most_likely_aircraft['clock_offset']))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(
                                        key="plane:angular_offset",
                                        value=most_likely_aircraft['offset']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:distance",
                         value=most_likely_aircraft['distance']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:mean_altitude",
                         value=most_likely_aircraft['altitude']))
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:path",
                         value=json.dumps([
                             most_likely_aircraft['start_point'],
                             most_likely_aircraft['end_point']
                         ])))
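        # Record the angular length of the observed path, in degrees, computed from its first and last (RA, Dec) points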
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="plane:path_length",
                         value=ang_dist(ra0=path_ra_dec_at_epoch[0][0],
                                        dec0=path_ra_dec_at_epoch[0][1],
                                        ra1=path_ra_dec_at_epoch[-1][0],
                                        dec1=path_ra_dec_at_epoch[-1][1]) *
                         180 / pi))

        aircraft_operator = ""
        if 'operator' in plane_info and plane_info['operator']:
            aircraft_operator = plane_info['operator']
        elif 'owner' in plane_info and plane_info['owner']:
            aircraft_operator = plane_info['owner']

        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="plane:operator",
                                                 value=aircraft_operator))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="plane:model",
                                                 value=plane_info.get(
                                                     'model', '')))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="plane:manufacturer",
                                                 value=plane_info.get(
                                                     'manufacturername', '')))

        # Record whether the aircraft was successfully identified
        if most_likely_aircraft['call_sign'] == "Unidentified":
            outcomes['unsuccessful_fits'] += 1
        else:
            outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} aircraft successfully identified.".format(
        outcomes['successful_fits']))
    logging.info("{:d} aircraft not identified.".format(
        outcomes['unsuccessful_fits']))
    logging.info("{:d} malformed database records.".format(
        outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(
        outcomes['rescued_records']))
    logging.info("{:d} aircraft with incomplete data.".format(
        outcomes['insufficient_information']))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
def list_satellites(obstory_id, utc_min, utc_max):
    """
    List all the satellite identifications for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list identifications for.
    :param utc_min:
        The start of the time period in which we should list identifications (unix time).
    :param utc_max:
        The end of the time period in which we should list identifications (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of satellite identifications
    satellite_identifications = []

    # Select moving objects with satellite identifications
    conn.execute(
        """
SELECT am1.stringValue AS satellite_name, am2.floatValue AS ang_offset,
       am3.floatValue AS clock_offset, am4.floatValue AS duration, am5.floatValue AS norad_id,
       o.obsTime AS time, o.publicId AS obsId
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:name")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:angular_offset")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:clock_offset")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:duration")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:norad_id")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        satellite_identifications.append({
            'id': item['obsId'],
            'time': item['time'],
            'satellite_name': item['satellite_name'],
            'ang_offset': item['ang_offset'],
            'clock_offset': item['clock_offset'],
            'duration': item['duration'],
            'norad_id': int(item['norad_id'])
        })

    # Sort identifications by time
    satellite_identifications.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:16s} {:7s} {:32s} {:26s} {:8s} {:10s} {:10s}\
""".format("Time", "NORAD", "ID", "Satellite", "Duration", "Ang offset",
           "Clock offset"))

    # Display list of satellites
    for item in satellite_identifications:
        print("""\
{:16s} {:7d} {:32s} {:26s} {:8.1f} {:10.1f} {:10.1f}\
""".format(
            date_string(item['time']),
            item['norad_id'],
            item['id'],
            item['satellite_name'],
            item['duration'],
            item['ang_offset'],
            item['clock_offset'],
        ))

    # Clean up and exit
    return
Example #16
def timelapse_movie(utc_min, utc_max, obstory, img_types, stride, label):
    """
    Make a time lapse video of images registered in the database using the command line tool ffmpeg.

    :param utc_min:
        Only return observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only return observations made before the specified time stamp.

    :type utc_max:
        float

    :param obstory:
        The public id of the observatory we are to fetch observations from

    :type obstory:
        str

    :param img_types:
        Only return images with these semantic types

    :type img_types:
        list[str]

    :param stride:
        Only return every nth observation matching the search criteria

    :type stride:
        int

    :param label:
        A caption to annotate onto each frame of the video

    :type label:
        str

    :return:
        None
    """

    # Temporary directory to hold the images we are going to show
    pid = os.getpid()
    tmp = os.path.join("/tmp", "dcf_movie_images_{:d}".format(pid))
    os.system("mkdir -p {}".format(tmp))

    file_list = fetch_images(utc_min=utc_min,
                             utc_max=utc_max,
                             obstory=obstory,
                             img_types=img_types,
                             stride=stride)

    # Report how many files we found
    print("Observatory <{}>".format(obstory))
    print("  * {:d} matching files in time range {} --> {}".format(len(file_list),
                                                                   dcf_ast.date_string(utc_min),
                                                                   dcf_ast.date_string(utc_max)))

    # Make list of the stitched files
    filename_list = []
    filename_format = "frame_{:d}_%08d.jpg".format(pid)

    for counter, file_item in enumerate(file_list):
        # Look up the date of this file
        [year, month, day, h, m, s] = dcf_ast.inv_julian_day(dcf_ast.jd_from_unix(
            utc=file_item['observation']['obsTime']
        ))

        # Filename for stitched image
        fn = filename_format % counter

        # Make list of input files
        input_files = [os.path.join(settings['dbFilestore'],
                                    file_item[semanticType]['repositoryFname'])
                       for semanticType in img_types]

        command = "\
convert {inputs} +append -gravity SouthWest -fill Red -pointsize 26 -font Ubuntu-Bold \
-annotate +16+10 '{date}  -  {label1}  -  {label2}' {output} \
".format(inputs=" ".join(input_files),
         date="{:02d}/{:02d}/{:04d} {:02d}:{:02d}".format(day, month, year, h, m),
         label1="Sky clarity: {}".format(" / ".join(["{:04.0f}".format(file_item[semanticType]['skyClarity'])
                                                    for semanticType in img_types])),
         label2=label,
         output=os.path.join(tmp, fn))
        # print(command)
        os.system(command)
        filename_list.append(fn)

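    # Encode the frames into an H.264 time-lapse at 10 frames per second; ffmpeg substitutes the frame counter into the %08d field of the filename pattern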
    command_line = "cd {} ; ffmpeg -r 10 -i {} -codec:v libx264 {}".format(tmp , filename_format, "timelapse.mp4")
    print(command_line)
    os.system(command_line)
Example #17
def list_meteors(obstory_id, utc_min, utc_max):
    """
    List all the meteor identifications for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list meteor identifications for.
    :param utc_min:
        The start of the time period in which we should list meteor identifications (unix time).
    :param utc_max:
        The end of the time period in which we should list meteor identifications (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of meteor identifications
    meteor_identifications = []

    # Count how many meteors we find in each shower
    meteor_count_by_shower = {}

    # Select observations with orientation fits
    conn.execute("""
SELECT am1.stringValue AS name, am2.floatValue AS radiant_offset,
       o.obsTime AS time, o.publicId AS obsId
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="shower:name")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="shower:radiant_offset")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        meteor_identifications.append({
            'id': item['obsId'],
            'time': item['time'],
            'shower': item['name'],
            'offset': item['radiant_offset']
        })

        # Update tally of meteors
        if item['name'] not in meteor_count_by_shower:
            meteor_count_by_shower[item['name']] = 0
        meteor_count_by_shower[item['name']] += 1

    # Sort meteors by time
    meteor_identifications.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:16s} {:20s} {:20s} {:5s}\
""".format("Time", "ID", "Shower", "Offset"))

    # Display list of meteors
    for item in meteor_identifications:
        print("""\
{:16s} {:20s} {:26s} {:5.1f}\
""".format(date_string(item['time']),
           item['id'],
           item['shower'],
           item['offset']
           ))

    # Report tally of meteors
    logging.info("Tally of meteors by shower:")
    for shower in sorted(meteor_count_by_shower.keys()):
        logging.info("    * {:26s}: {:6d}".format(shower, meteor_count_by_shower[shower]))

    # Clean up and exit
    return
Example #18
def shower_determination(utc_min, utc_max):
    """
    Estimate the parent showers of all meteors observed between the unix times <utc_min> and <utc_max>.

    :param utc_min:
        The start of the time period in which we should determine the parent showers of meteors (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the parent showers of meteors (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Load list of meteor showers
    shower_list = read_shower_list()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    logging.info("Starting meteor shower identification.")

    # Count how many meteors we manage to successfully identify
    outcomes = {
        'successful_fits': 0,
        'error_records': 0,
        'rescued_records': 0,
        'insufficient_information': 0
    }

    # Status update
    logging.info("Searching for meteors within period {} to {}".format(date_string(utc_min), date_string(utc_max)))

    # Open direct connection to database
    conn = db.con

    # Search for meteors within this time period
    conn.execute("""
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, l.publicId AS observatory
FROM archive_observations ao
LEFT OUTER JOIN archive_files f ON (ao.uid = f.observationId AND
    f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:movingObject/video"))
INNER JOIN archive_observatories l ON ao.observatory = l.uid
INNER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE ao.obsType=(SELECT uid FROM archive_semanticTypes WHERE name='pigazing:movingObject/') AND
      ao.obsTime BETWEEN %s AND %s AND
      am2.stringValue = "Meteor"
ORDER BY ao.obsTime;
""", (utc_min, utc_max))
    results = conn.fetchall()

    # Log how many meteors we are going to work on
    logging.info("Estimating the parent showers of {:d} meteors.".format(len(results)))

    # Count how many meteors we find in each shower
    meteor_count_by_shower = {}

    # Analyse each meteor in turn
    for item_index, item in enumerate(results):
        # Fetch metadata about this object, some of which might be on the file, and some on the observation
        obs_obj = db.get_observation(observation_id=item['observationId'])
        obs_metadata = {meta_item.key: meta_item.value for meta_item in obs_obj.meta}
        if item['repositoryFname']:
            file_obj = db.get_file(repository_fname=item['repositoryFname'])
            file_metadata = {meta_item.key: meta_item.value for meta_item in file_obj.meta}
        else:
            file_metadata = {}
        all_metadata = {**obs_metadata, **file_metadata}

        # Check we have all required metadata
        if 'pigazing:path' not in all_metadata:
            logging.info("Cannot process <{}> due to inadequate metadata.".format(item['observationId']))
            continue

        # Make ID string to prefix to all logging messages about this event
        logging_prefix = "{date} [{obs}]".format(
            date=date_string(utc=item['obsTime']),
            obs=item['observationId']
        )

        # Project path from (x,y) coordinates into (RA, Dec)
        projector = PathProjection(
            db=db,
            obstory_id=item['observatory'],
            time=item['obsTime'],
            logging_prefix=logging_prefix
        )

        path_x_y, path_ra_dec_at_epoch, path_alt_az, sight_line_list_this = projector.ra_dec_from_x_y(
            path_json=all_metadata['pigazing:path'],
            path_bezier_json=all_metadata['pigazing:pathBezier'],
            detections=all_metadata['pigazing:detectionCount'],
            duration=all_metadata['pigazing:duration']
        )

        # Check for error
        if projector.error is not None:
            if projector.error in outcomes:
                outcomes[projector.error] += 1
            continue

        # Check for notifications
        for notification in projector.notifications:
            if notification in outcomes:
                outcomes[notification] += 1

        # Check number of points in path
        path_len = len(path_x_y)

        # List of candidate showers this meteor might belong to
        candidate_showers = []

        # Test for each candidate meteor shower in turn
        for shower in shower_list:
            # Work out celestial coordinates of shower radiant in RA/Dec in hours/degs of epoch
            radiant_ra_at_epoch, radiant_dec_at_epoch = ra_dec_from_j2000(ra0=shower['RA'],
                                                                          dec0=shower['Decl'],
                                                                          utc_new=item['obsTime'])

            # Work out alt-az of the shower's radiant using known location of camera. Fits returned in degrees.
            alt_az_pos = alt_az(ra=radiant_ra_at_epoch, dec=radiant_dec_at_epoch,
                                utc=item['obsTime'],
                                latitude=projector.obstory_info['latitude'],
                                longitude=projector.obstory_info['longitude'])

            # Work out position of the Sun (J2000)
            sun_ra_j2000, sun_dec_j2000 = sun_pos(utc=item['obsTime'])

            # Work out position of the Sun (RA, Dec of epoch)
            sun_ra_at_epoch, sun_dec_at_epoch = ra_dec_from_j2000(ra0=sun_ra_j2000, dec0=sun_dec_j2000,
                                                                  utc_new=item['obsTime'])

            # Offset from peak of shower
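            # (the Sun's RA of epoch, converted from hours to degrees, serves as a rough proxy for solar longitude;
            # the offset from the shower's quoted peak, presumably also in degrees, is converted to days and wrapped into +/- half a year)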
            year = 365.2524
            peak_offset = (sun_ra_at_epoch * 180 / 12. - shower['peak']) * year / 360  # days
            while peak_offset < -year / 2:
                peak_offset += year
            while peak_offset > year / 2:
                peak_offset -= year

            start_offset = peak_offset + shower['start'] - 4
            end_offset = peak_offset + shower['end'] + 4

            # Estimate ZHR of shower at the time the meteor was observed
            zhr = 0
            if abs(peak_offset) < 2:
                zhr = shower['zhr']  # Shower is within 2 days of maximum; use quoted peak ZHR value
            if start_offset < 0 < end_offset:
                zhr = max(zhr, 5)  # Shower is not at peak, but is active; assume ZHR=5

            # Correct hourly rate for the altitude of the shower radiant
            hourly_rate = zhr * sin(alt_az_pos[0] * pi / 180)

            # If hourly rate is zero, this shower is not active
            if hourly_rate <= 0:
                # logging.info("Meteor shower <{}> has zero rate".format(shower['name']))
                continue

            # Work out angular distance of meteor from radiant (radians)
            path_radiant_sep = [ang_dist(ra0=pt[0], dec0=pt[1],
                                         ra1=radiant_ra_at_epoch * pi / 12, dec1=radiant_dec_at_epoch * pi / 180)
                                for pt in path_ra_dec_at_epoch]
            change_in_radiant_dist = path_radiant_sep[-1] - path_radiant_sep[0]  # radians

            # Reject meteors that travel *towards* the radiant
            if change_in_radiant_dist < 0:
                continue

            # Convert path to Cartesian coordinates on a unit sphere
            path_cartesian = [Vector.from_ra_dec(ra=ra * 12 / pi, dec=dec * 180 / pi)
                              for ra, dec in path_ra_dec_at_epoch]

            # Work out cross product of first and last point, which is normal to path of meteors
            first = path_cartesian[0]
            last = path_cartesian[-1]
            path_normal = first.cross_product(last)

            # Work out angle of path normal to meteor shower radiant
            radiant_cartesian = Vector.from_ra_dec(ra=radiant_ra_at_epoch, dec=radiant_dec_at_epoch)
            theta = path_normal.angle_with(radiant_cartesian)  # degrees

            if theta > 90:
                theta = 180 - theta

            # What is the angular separation of the meteor's path's closest approach to the shower radiant?
            radiant_angle = 90 - theta
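            # (path_normal is perpendicular to the great circle containing the meteor's path, so the complement
            # of its angle to the radiant is the closest angular approach of that great circle to the radiant)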

            # Work out likelihood metric that this meteor belongs to this shower
            radiant_angle_std_dev = 2  # Allow 2 degree mismatch in radiant pos
            likelihood = hourly_rate * scipy.stats.norm(loc=0, scale=radiant_angle_std_dev).pdf(radiant_angle)

            # Store information about the likelihood this meteor belongs to this shower
            candidate_showers.append({
                'name': shower['name'],
                'likelihood': likelihood,
                'offset': radiant_angle,
                'change_radiant_dist': change_in_radiant_dist,
                'shower_rate': hourly_rate
            })

        # Add a fallback candidate: the meteor may be sporadic, i.e. not associated with any shower
        hourly_rate = 5
        likelihood = hourly_rate * (1. / 90.)  # Mean value of Gaussian in range 0-90 degs
        candidate_showers.append({
            'name': "Sporadic",
            'likelihood': likelihood,
            'offset': 0,
            'shower_rate': hourly_rate
        })

        # Renormalise likelihoods to sum to unity
        sum_likelihood = sum(shower['likelihood'] for shower in candidate_showers)
        for shower in candidate_showers:
            shower['likelihood'] *= 100 / sum_likelihood

        # Sort candidates by likelihood
        candidate_showers.sort(key=itemgetter('likelihood'), reverse=True)

        # Report possible meteor shower identifications
        logging.info("{date} [{obs}] -- {showers}".format(
            date=date_string(utc=item['obsTime']),
            obs=item['observationId'],
            showers=", ".join([
                "{} {:.1f}% ({:.1f} deg offset)".format(shower['name'], shower['likelihood'], shower['offset'])
                for shower in candidate_showers
            ])
        ))

        # Identify most likely shower
        most_likely_shower = candidate_showers[0]['name']

        # Update tally of meteors
        if most_likely_shower not in meteor_count_by_shower:
            meteor_count_by_shower[most_likely_shower] = 0
        meteor_count_by_shower[most_likely_shower] += 1

        # Store meteor identification
        user = settings['pigazingUser']
        timestamp = time.time()
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:name", value=most_likely_shower))
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:radiant_offset", value=candidate_showers[0]['offset']))
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:path_length",
                                                 value=ang_dist(ra0=path_ra_dec_at_epoch[0][0],
                                                                dec0=path_ra_dec_at_epoch[0][1],
                                                                ra1=path_ra_dec_at_epoch[-1][0],
                                                                dec1=path_ra_dec_at_epoch[-1][1]
                                                                ) * 180 / pi
                                                 ))
        db.set_observation_metadata(user_id=user, observation_id=item['observationId'], utc=timestamp,
                                    meta=mp.Meta(key="shower:path_ra_dec",
                                                 value="[[{:.3f},{:.3f}],[{:.3f},{:.3f}],[{:.3f},{:.3f}]]".format(
                                                     path_ra_dec_at_epoch[0][0] * 12 / pi,
                                                     path_ra_dec_at_epoch[0][1] * 180 / pi,
                                                     path_ra_dec_at_epoch[int(path_len / 2)][0] * 12 / pi,
                                                     path_ra_dec_at_epoch[int(path_len / 2)][1] * 180 / pi,
                                                     path_ra_dec_at_epoch[-1][0] * 12 / pi,
                                                     path_ra_dec_at_epoch[-1][1] * 180 / pi,
                                                 )
                                                 ))

        # Meteor successfully identified
        outcomes['successful_fits'] += 1

        # Update database
        db.commit()

    # Report how many fits we achieved
    logging.info("{:d} meteors successfully identified.".format(outcomes['successful_fits']))
    logging.info("{:d} malformed database records.".format(outcomes['error_records']))
    logging.info("{:d} rescued database records.".format(outcomes['rescued_records']))
    logging.info("{:d} meteors with incomplete data.".format(outcomes['insufficient_information']))

    # Report tally of meteors
    logging.info("Tally of meteors by shower:")
    for shower in sorted(meteor_count_by_shower.keys()):
        logging.info("    * {:32s}: {:6d}".format(shower, meteor_count_by_shower[shower]))

    # Clean up and exit
    db.commit()
    db.close_db()
    return
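
# A minimal usage sketch (not part of the original example): classify all meteors recorded
# during the 2020 Perseid peak. The time stamps below are placeholders, and the function
# assumes the pigazing database is configured on the local machine.
#
#     from datetime import datetime, timezone
#     utc_min = datetime(2020, 8, 11, tzinfo=timezone.utc).timestamp()
#     utc_max = datetime(2020, 8, 14, tzinfo=timezone.utc).timestamp()
#     shower_determination(utc_min=utc_min, utc_max=utc_max)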
Example #19
def list_observatory_status(utc_min, utc_max, obstory):
    """
    List all the metadata updates posted by a particular observatory between two given unix times.

    :param utc_min:
        Only list metadata updates after the specified unix time
    :param utc_max:
        Only list metadata updates before the specified unix time
    :param obstory:
        ID of the observatory we are to list events from
    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    try:
        obstory_info = db.get_obstory_from_id(obstory_id=obstory)
    except ValueError:
        print("Unknown observatory <{}>. Run ./listObservatories.py to see a list of available observatories.".
              format(obstory))
        sys.exit(0)

    title = "Observatory <{}>".format(obstory)
    print("\n\n{}\n{}".format(title, "-" * len(title)))

    search = mp.ObservatoryMetadataSearch(obstory_ids=[obstory], time_min=utc_min, time_max=utc_max)
    data = db.search_obstory_metadata(search)
    data = data['items']
    data.sort(key=lambda x: x.time)
    print("  * {:d} matching metadata items in time range {} --> {}".format(len(data),
                                                                            dcf_ast.date_string(utc_min),
                                                                            dcf_ast.date_string(utc_max)))

    # Check which items remain current
    refreshed = False
    data.reverse()
    keys_seen = []
    for item in data:
        # The magic metadata keyword "refresh" causes all older metadata to be superseded
        if item.key == "refresh" and not refreshed:
            item.still_current = True
            refreshed = True
        # If we don't have a later metadata update for the same keyword, then this metadata remains current
        elif item.key not in keys_seen and not refreshed:
            item.still_current = True
            keys_seen.append(item.key)
        # This metadata item has been superseded
        else:
            item.still_current = False
    data.reverse()

    # Display list of items
    for item in data:
        if item.still_current:
            current_flag = "+"
        else:
            current_flag = " "
        print("  * {} [ID {}] {} -- {:16s} = {}".format(current_flag, item.id, dcf_ast.date_string(item.time),
                                                        item.key, item.value))