Code example #1
def flush_triangulation(utc_min, utc_max):
    """
    Remove all pre-existing triangulation metadata within a specified time period.

    :param utc_min:
        The earliest time for which we are to flush observation groups.
    :param utc_max:
        The latest time for which we are to flush observation groups.
    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Delete group metadata fields that start 'triangulation:*'
    conn.execute(
        """
DELETE m
FROM archive_metadata m
INNER JOIN archive_obs_groups o ON m.groupId = o.uid
WHERE
    fieldId IN (SELECT uid FROM archive_metadataFields WHERE metaKey LIKE 'triangulation:%%') AND
    o.time BETWEEN %s AND %s;
""", (utc_min, utc_max))

    # Commit changes to database
    db0.commit()
    conn.close()
    db0.close()
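
The doubled per-cent signs in the LIKE patterns above are not typos: with a MySQLdb/PyMySQL-style driver, the query text is run through %-interpolation to fill the %s placeholders, so a literal % must be written as %%. A minimal, self-contained illustration (query text abbreviated):

# Illustration only: with the DB-API "format" paramstyle, literal per-cent
# signs (such as LIKE wildcards) must be doubled in the query string.
query = "... metaKey LIKE 'triangulation:%%' AND o.time BETWEEN %s AND %s"
print(query % (0, 100))  # roughly what the driver sends to the server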
Code example #2
def flush_identifications(utc_min, utc_max):
    """
    Remove all meteor identifications within a specified time period.

    :param utc_min:
        The earliest time for which we are to flush shower data.
    :param utc_max:
        The latest time for which we are to flush shower data.
    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Delete observation metadata fields that start 'shower:*'
    conn.execute("""
DELETE m
FROM archive_metadata m
INNER JOIN archive_observations o ON m.observationId = o.uid
WHERE
    fieldId IN (SELECT uid FROM archive_metadataFields WHERE metaKey LIKE 'shower:%%') AND
    o.obsTime BETWEEN %s AND %s;
""", (utc_min, utc_max))

    # Commit changes to database
    db0.commit()
    conn.close()
    db0.close()
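
These flush_* helpers all take unix timestamps. A hypothetical call clearing the last 24 hours of shower identifications:

import time

# Hypothetical usage: flush the last day of shower identifications.
now = time.time()
flush_identifications(utc_min=now - 86400, utc_max=now)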
Code example #3
def flush_calibration(obstory_id, utc_min, utc_max):
    """
    Remove all calibration data for a particular observatory within a specified time period.

    :param obstory_id:
        The publicId of the observatory we are to flush.
    :param utc_min:
        The earliest time for which we are to flush calibration data.
    :param utc_max:
        The latest time for which we are to flush calibration data.
    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Delete observatory metadata fields that start 'calibration:*'
    conn.execute(
        """
DELETE m
FROM archive_metadata m
WHERE
    fieldId IN (SELECT uid FROM archive_metadataFields WHERE metaKey LIKE 'calibration:%%') AND
    m.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    m.time BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))

    # Delete observation metadata fields that start 'calibration:*'
    conn.execute(
        """
DELETE m
FROM archive_metadata m
INNER JOIN archive_observations o ON m.observationId = o.uid
WHERE
    fieldId IN (SELECT uid FROM archive_metadataFields WHERE metaKey LIKE 'calibration:%%') AND
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))

    # Clear astrometryProcessed fields
    conn.execute(
        """
UPDATE archive_observations
SET astrometryProcessed=NULL, astrometryProcessingTime=NULL, astrometrySource=NULL,
    fieldWidth=NULL, fieldHeight=NULL, positionAngle=NULL, centralConstellation=NULL,
    skyArea=ST_GEOMFROMTEXT(%s), position=POINT(-999,-999)
WHERE observatory = (SELECT x.uid FROM archive_observatories x WHERE x.publicId=%s) AND
      obsTime BETWEEN %s AND %s;
""", (obsarchive_sky_area.null_polygon, obstory_id, utc_min, utc_max))

    # Commit changes to database
    db0.commit()
    conn.close()
    db0.close()
Code example #4
File: editObservatory.py Project: dcf21/pi-gazing
def edit_observatory(public_id, delete=False, name=None, latitude=None, longitude=None, owner=None):
    """
    Edit an observatory record, or create a new one.

    :param public_id:
        The public id of the observatory we are to edit
    :param delete:
        Boolean flag indicating whether we are to delete the observatory altogether
    :param name:
        The new name of the observatory
    :param latitude:
        The new latitude of the observatory
    :param longitude:
        The new longitude of the observatory
    :param owner:
        The user responsible for this observatory
    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Fetch observatory ID
    obs_id = None
    while True:
        conn.execute('SELECT uid FROM archive_observatories WHERE publicId=%s;', (public_id,))
        results = conn.fetchall()
        if len(results) > 0:
            obs_id = results[0]['uid']
            break
        conn.execute('INSERT INTO archive_observatories (publicId, location) VALUES (%s, POINT(-999,-999));',
                     (public_id,))

    if delete:
        conn.execute('DELETE FROM archive_observatories WHERE uid=%s;', (obs_id,))
        db0.commit()
        db0.close()
        return

    if name:
        conn.execute('UPDATE archive_observatories SET name=%s WHERE uid=%s;', (name, obs_id))

    # Test against None, so that a latitude or longitude of exactly zero is still accepted
    if latitude is not None and longitude is not None:
        conn.execute('UPDATE archive_observatories SET location=POINT(%s,%s) WHERE uid=%s;',
                     (longitude, latitude, obs_id))

    if owner:
        conn.execute('UPDATE archive_observatories SET userId=%s WHERE uid=%s;', (owner, obs_id))

    # Commit changes to database
    db0.commit()
    db0.close()
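
Note that the location is stored as POINT(longitude, latitude): the x coordinate is the longitude, as the parameter tuple above shows. A hypothetical call (the observatory ID and owner are invented):

# Hypothetical usage: register or update an observatory record.
edit_observatory(public_id="my-camera-1", name="Back-garden camera",
                 latitude=52.2, longitude=0.12, owner="admin")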
Code example #5
def flush_orientation(obstory_id, utc_min, utc_max):
    """
    Remove all orientation data for a particular observatory within a specified time period.

    :param obstory_id:
        The publicId of the observatory we are to flush.
    :param utc_min:
        The earliest time for which we are to flush orientation data.
    :param utc_max:
        The latest time for which we are to flush orientation data.
    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Delete observatory metadata fields that start 'orientation:*'
    conn.execute(
        """
DELETE m
FROM archive_metadata m
WHERE
    fieldId IN (SELECT uid FROM archive_metadataFields WHERE metaKey LIKE 'orientation:%%') AND
    m.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    m.time BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))

    # Delete observation metadata field 'orientation:fit_quality_to_daily'
    conn.execute(
        """
DELETE m
FROM archive_metadata m
INNER JOIN archive_observations o ON m.observationId = o.uid
WHERE
    fieldId IN (SELECT uid FROM archive_metadataFields WHERE metaKey LIKE 'orientation:fit_quality_to_daily') AND
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))

    # Commit changes to database
    db0.commit()
    conn.close()
    db0.close()
Code example #6
def list_observatories():
    """
    Display a list of all the observatories registered in the database.

    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Fetch observatory IDs
    obstory_ids = db.get_obstory_ids()
    obstory_ids.sort()

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # List information about each observatory
    print("{:6s} {:32s} {:32s} {:6s} {:6s} {:s}".format(
        "ObsID", "Public ID", "Name", "Lat", "Lng", "Observations"))
    for item in obstory_ids:
        obstory_info = db.get_obstory_from_id(obstory_id=item)

        # Count observations
        conn.execute(
            'SELECT COUNT(*) FROM archive_observations WHERE observatory=%s;',
            (obstory_info['uid'], ))
        results = conn.fetchall()
        obs_count = results[0]["COUNT(*)"]

        print("{:6d} {:32s} {:32s} {:6.1f} {:6.1f} {:7d}".format(
            obstory_info['uid'], obstory_info['publicId'],
            obstory_info['name'], obstory_info['latitude'],
            obstory_info['longitude'], obs_count))
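
The lookup results[0]["COUNT(*)"] relies on the dictionary cursor keying each column by its literal SQL expression. Aliasing the aggregate is sturdier; a sketch of the same query inside the loop above:

# Sketch: alias the aggregate so the result key does not depend on how the
# driver renders the expression "COUNT(*)".
conn.execute('SELECT COUNT(*) AS obs_count FROM archive_observations WHERE observatory=%s;',
             (obstory_info['uid'],))
obs_count = conn.fetchall()[0]['obs_count']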
Code example #7
def list_users():
    """
    Display a list of all user accounts in the database.

    :return:
        None
    """
    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    # Fetch list of users
    user_ids = db.get_users()
    user_ids.sort(key=operator.attrgetter('user_id'))

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # List information about each user in turn
    print("{:32s} {:32s} {:48s} {:s}".format("Username", "Name", "Roles",
                                             "Observations"))
    for user_info in user_ids:
        # Count observations
        conn.execute(
            'SELECT COUNT(*) FROM archive_observations WHERE userId=%s;',
            (user_info.user_id, ))
        results = conn.fetchall()
        obs_count = results[0]["COUNT(*)"]

        # Print user information
        print("{:32s} {:32s} {:48s} {:9d}".format(user_info.user_id,
                                                  user_info.name,
                                                  str(user_info.roles),
                                                  obs_count))
Code example #8
File: deleteData.py Project: dcf21/pi-gazing
def delete_data(utc_min, utc_max, obstory, dry_run):
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Search for observations
    conn.execute(
        """
SELECT o.publicId
FROM archive_observations o
INNER JOIN archive_observatories ao on o.observatory = ao.uid
WHERE (o.obsTime BETWEEN %s AND %s) AND ao.publicId=%s;
""", (utc_min, utc_max, obstory))
    results_observations = conn.fetchall()

    # Delete each observation in turn
    for observation in results_observations:
        command = """
./deleteObservation.py --id {}
""".format(observation['publicId']).strip()

        logging.info(command)

        if not dry_run:
            os.system(command)
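
Because the observation ID is interpolated straight into a shell command, a subprocess-based variant avoids quoting pitfalls. A sketch of the equivalent call inside the loop above (assuming deleteObservation.py sits in the working directory):

import subprocess

# Sketch: list-form arguments bypass the shell, so no quoting is needed.
subprocess.run(["./deleteObservation.py", "--id", observation['publicId']],
               check=True)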
Code example #9
def list_simultaneous_detections(utc_min=None, utc_max=None):
    """
    Display a list of all the simultaneous object detections registered in the database.

    :param utc_min:
        Only show observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only show observations made before the specified time stamp.

    :type utc_max:
        float

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Compile search criteria for observation groups
    where = [
        "g.semanticType = (SELECT uid FROM archive_semanticTypes WHERE name=\"{}\")"
        .format(simultaneous_event_type)
    ]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)

    # Search for observation groups containing groups of simultaneous detections
    conn.execute(
        """
SELECT g.publicId AS groupId, o.publicId AS obsId, o.obsTime, am.stringValue AS objectType
FROM archive_obs_groups g
INNER JOIN archive_obs_group_members m on g.uid = m.groupId
INNER JOIN archive_observations o ON m.childObservation = o.uid
INNER JOIN archive_metadata am ON g.uid = am.groupId AND
    am.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
WHERE """ + " AND ".join(where) + """
ORDER BY o.obsTime;
""", args)
    results = conn.fetchall()

    # Count how many simultaneous detections we find by type
    detections_by_type = {}

    # Compile list of groups
    obs_groups = {}
    obs_group_ids = []
    for item in results:
        key = item['groupId']
        if key not in obs_groups:
            obs_groups[key] = []
            obs_group_ids.append({
                'groupId': key,
                'time': item['obsTime'],
                'type': item['objectType']
            })

            # Add this simultaneous detection to tally
            if item['objectType'] not in detections_by_type:
                detections_by_type[item['objectType']] = 0
            detections_by_type[item['objectType']] += 1
        obs_groups[key].append(item['obsId'])

    # List information about each observation in turn
    print("{:16s} {:20s} {:20s} {:s}".format("Time", "groupId", "Object type",
                                             "Observations"))
    for group_info in obs_group_ids:
        # Print group information
        print("{:16s} {:20s} {:20s} {:s}".format(
            group_info['groupId'], date_string(group_info['time']),
            group_info['type'], " ".join(obs_groups[group_info['groupId']])))

    # Report tally of events
    print("\nTally of events by type:")
    for event_type in sorted(detections_by_type.keys()):
        print("    * {:26s}: {:6d}".format(event_type,
                                           detections_by_type[event_type]))
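
The explicit zero-initialisation of the tally can also be written with collections.defaultdict. A small self-contained equivalent:

from collections import defaultdict

# Equivalent tallying: missing keys start at zero automatically.
detections_by_type = defaultdict(int)
for object_type in ["meteor", "plane", "meteor"]:
    detections_by_type[object_type] += 1
print(dict(detections_by_type))  # {'meteor': 2, 'plane': 1}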
Code example #10
def list_images(utc_min=None,
                utc_max=None,
                username=None,
                obstory=None,
                img_type=None,
                obs_type=None,
                stride=1):
    """
    Display a list of all the images registered in the database.

    :param utc_min:
        Only show observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only show observations made before the specified time stamp.

    :type utc_max:
        float

    :param username:
        Optionally specify a username, to filter only images by a particular user

    :type username:
        str

    :param obstory:
        The public id of the observatory we are to show observations from

    :type obstory:
        str

    :param img_type:
        Only show images with this semantic type

    :type img_type:
        str

    :param obs_type:
        Only show observations with this semantic type

    :type obs_type:
        str

    :param stride:
        Only show every nth observation matching the search criteria

    :type stride:
        int

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    where = ["1"]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)
    if username is not None:
        where.append("o.userId=%s")
        args.append(username)
    if obstory is not None:
        where.append("l.publicId=%s")
        args.append(obstory)
    if obs_type is not None:
        where.append("ast.name=%s")
        args.append(obs_type)

    conn.execute(
        """
SELECT o.uid, o.userId, l.name AS place, o.obsTime
FROM archive_observations o
INNER JOIN archive_observatories l ON o.observatory = l.uid
INNER JOIN archive_semanticTypes ast ON o.obsType = ast.uid
WHERE """ + " AND ".join(where) + """
ORDER BY obsTime DESC LIMIT 200;
""", args)
    results = conn.fetchall()

    # List information about each observation in turn
    sys.stdout.write("{:6s} {:10s} {:32s} {:17s} {:20s}\n".format(
        "obsId", "Username", "Observatory", "Time", "Images"))
    for counter, obs in enumerate(results):
        # Only show every nth hit
        if counter % stride != 0:
            continue

        # Print observation information
        sys.stdout.write("{:6d} {:10s} {:32s} {:17s} ".format(
            obs['uid'], obs['userId'], obs['place'],
            date_string(obs['obsTime'])))

        where = ["f.observationId=%s"]
        args = [obs['uid']]

        if img_type is not None:
            where.append("ast.name=%s")
            args.append(img_type)

        # Fetch list of files in this observation
        conn.execute(
            """
SELECT ast.name AS semanticType, repositoryFname, am.floatValue AS skyClarity
FROM archive_files f
INNER JOIN archive_semanticTypes ast ON f.semanticType = ast.uid
LEFT OUTER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
WHERE """ + " AND ".join(where) + """;
""", args)

        files = conn.fetchall()

        for count, item in enumerate(files):
            if count > 0:
                sys.stdout.write("\n{:69s}".format(""))
            if item['skyClarity'] is None:
                item['skyClarity'] = 0
            sys.stdout.write("{:40s} {:32s} {:10.1f}".format(
                item['semanticType'], item['repositoryFname'],
                item['skyClarity']))
        sys.stdout.write("\n")
Code example #11
def calibrate_lens(obstory_id, utc_min, utc_max, utc_must_stop=None):
    """
    Use astrometry.net to determine the orientation of a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to determine the orientation for.
    :param utc_min:
        The start of the time period in which we should determine the observatory's orientation.
    :param utc_max:
        The end of the time period in which we should determine the observatory's orientation.
    :param utc_must_stop:
        The time by which we must finish work
    :return:
        None
    """
    global parameter_scales, fit_list

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info(
        "Starting estimation of lens calibration for <{}>".format(obstory_id))

    # Mathematical constants
    deg = pi / 180
    rad = 180 / pi

    # Count how many successful fits we achieve
    successful_fits = 0

    # Read properties of known lenses
    hw = hardware_properties.HardwareProps(
        path=os.path.join(settings['pythonPath'], "..", "configuration_global",
                          "camera_properties"))

    # Reduce time window to where observations are present
    conn.execute(
        """
SELECT obsTime
FROM archive_observations
WHERE obsTime BETWEEN %s AND %s
    AND observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
ORDER BY obsTime ASC LIMIT 1
""", (utc_min, utc_max, obstory_id))
    results = conn.fetchall()

    if len(results) == 0:
        logging.warning("No observations within requested time window.")
        return
    utc_min = results[0]['obsTime'] - 1

    conn.execute(
        """
SELECT obsTime
FROM archive_observations
WHERE obsTime BETWEEN %s AND %s
    AND observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
ORDER BY obsTime DESC LIMIT 1
""", (utc_min, utc_max, obstory_id))
    results = conn.fetchall()
    utc_max = results[0]['obsTime'] + 1

    # Divide up time interval into hour-long blocks
    logging.info("Searching for images within period {} to {}".format(
        date_string(utc_min), date_string(utc_max)))
    block_size = 3600
    minimum_sky_clarity = 1e6 + 1400
    utc_min = (floor(utc_min / block_size + 0.5) -
               0.5) * block_size  # Align block boundaries to half-block offsets
    time_blocks = list(
        np.arange(start=utc_min, stop=utc_max + block_size, step=block_size))

    # Start new block whenever we have a hardware refresh
    conn.execute(
        """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()
    for item in results:
        time_blocks.append(item['time'])

    # Make sure that start points for time blocks are in order
    time_blocks.sort()

    # Build list of images we are to analyse
    images_for_analysis = []

    for block_index, utc_block_min in enumerate(time_blocks[:-1]):
        utc_block_max = time_blocks[block_index + 1]
        logging.info("Calibrating lens within period {} to {}".format(
            date_string(utc_block_min), date_string(utc_block_max)))

        # Search for background-subtracted time lapse image with best sky clarity within this time period
        conn.execute(
            """
SELECT ao.obsTime, ao.publicId AS observationId, f.repositoryFname, am.floatValue AS skyClarity
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON f.uid = am2.fileId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
    AND am.floatValue > %s
    AND am2.uid IS NULL
    AND ao.astrometryProcessed IS NULL
ORDER BY am.floatValue DESC LIMIT 1
""", (utc_block_min, utc_block_max, obstory_id, minimum_sky_clarity))
        results = conn.fetchall()

        if len(results) > 0:
            images_for_analysis.append({
                'utc': results[0]['obsTime'],
                'skyClarity': results[0]['skyClarity'],
                'repositoryFname': results[0]['repositoryFname'],
                'observationId': results[0]['observationId']
            })

    # Sort images into order of sky clarity
    images_for_analysis.sort(key=itemgetter("skyClarity"))
    images_for_analysis.reverse()

    # Display logging list of the images we are going to work on
    logging.info("Estimating the calibration of {:d} images:".format(
        len(images_for_analysis)))
    for item in images_for_analysis:
        logging.info("{:17s} {:04.0f} {:32s}".format(date_string(item['utc']),
                                                     item['skyClarity'],
                                                     item['repositoryFname']))

    # Analyse each image in turn
    for item_index, item in enumerate(images_for_analysis):
        logging.info("Working on image {:32s} ({:4d}/{:4d})".format(
            item['repositoryFname'], item_index + 1, len(images_for_analysis)))

        # Make a temporary directory to store files in.
        # This is necessary as astrometry.net spams the cwd with lots of temporary junk
        tmp0 = "/tmp/dcf21_calibrateLens_{}".format(item['repositoryFname'])
        # logging.info("Created temporary directory <{}>".format(tmp))
        os.system("mkdir {}".format(tmp0))

        # Fetch observatory status
        obstory_info = db.get_obstory_from_id(obstory_id)
        obstory_status = None
        if obstory_info and ('name' in obstory_info):
            obstory_status = db.get_obstory_status(obstory_id=obstory_id,
                                                   time=item['utc'])
        if not obstory_status:
            logging.info("Aborting -- no observatory status available.")
            continue

        # Look up the properties of the lens this observatory uses
        lens_name = obstory_status['lens']
        lens_props = hw.lens_data[lens_name]

        # This is an estimate of the *maximum* angular width we expect images to have.
        # It should be within a factor of two of correct!
        estimated_image_scale = lens_props.fov

        # Locate the image file we are to analyse
        filename = os.path.join(settings['dbFilestore'],
                                item['repositoryFname'])

        if not os.path.exists(filename):
            logging.info("Error: File <{}> is missing!".format(
                item['repositoryFname']))
            continue

        # 1. Copy image into working directory
        # logging.info("Copying file")
        img_name = item['repositoryFname']
        command = "cp {} {}/{}_tmp.png".format(filename, tmp0, img_name)
        # logging.info(command)
        os.system(command)

        # 2. We estimate the distortion of the image by passing a series of small portions of the image to
        # astrometry.net. We use this to construct a mapping between (x, y) pixel coordinates to (RA, Dec).

        # Define the size of each small portion we pass to astrometry.net
        fraction_x = 0.15
        fraction_y = 0.15

        # Create a list of the centres of the portions we send
        fit_list = []
        portion_centres = [{'x': 0.5, 'y': 0.5}]

        # Points along the leading diagonal of the image
        for z in np.arange(0.1, 0.9, 0.1):
            if z != 0.5:
                portion_centres.append({'x': z, 'y': z})
                portion_centres.append({'x': (z + 0.5) / 2, 'y': z})
                portion_centres.append({'x': z, 'y': (z + 0.5) / 2})

        # Points along the trailing diagonal of the image
        for z in np.arange(0.1, 0.9, 0.1):
            if z != 0.5:
                portion_centres.append({'x': z, 'y': 1 - z})
                portion_centres.append({'x': (1.5 - z) / 2, 'y': z})
                portion_centres.append({'x': z, 'y': (1.5 - z) / 2})

        # Points down the vertical centre-line of the image
        for z in np.arange(0.15, 0.85, 0.1):
            portion_centres.append({'x': 0.5, 'y': z})

        # Points along the horizontal centre-line of the image
        for z in np.arange(0.15, 0.85, 0.1):
            portion_centres.append({'x': z, 'y': 0.5})

        # Fetch the pixel dimensions of the image we are working on
        d = image_dimensions("{}/{}_tmp.png".format(tmp0, img_name))

        @dask.delayed
        def analyse_image_portion(image_portion):

            # Make a temporary directory to store files in.
            # This is necessary as astrometry.net spams the cwd with lots of temporary junk
            tmp = "/tmp/dcf21_calibrateLens_{}_{}".format(
                item['repositoryFname'], image_portion['index'])
            # logging.info("Created temporary directory <{}>".format(tmp))
            os.system("mkdir {}".format(tmp))

            # Use ImageMagick to crop out each small piece of the image
            command = """
cd {6} ; \
rm -f {5}_tmp3.png ; \
convert {0}_tmp.png -colorspace sRGB -define png:format=png24 -crop {1:d}x{2:d}+{3:d}+{4:d} +repage {5}_tmp3.png
            """.format(os.path.join(tmp0, img_name), int(fraction_x * d[0]),
                       int(fraction_y * d[1]),
                       int((image_portion['x'] - fraction_x / 2) * d[0]),
                       int((image_portion['y'] - fraction_y / 2) * d[1]),
                       img_name, tmp)
            # logging.info(command)
            os.system(command)

            # Check that we've not run out of time
            if utc_must_stop and (time.time() > utc_must_stop):
                logging.info("We have run out of time! Aborting.")
                os.system("rm -Rf {}".format(tmp))
                return None

            # How long should we allow astrometry.net to run for?
            timeout = "40s"

            # Run astrometry.net. Insert --no-plots on the command line to speed things up.
            # logging.info("Running astrometry.net")
            estimated_width = 2 * math.atan(
                math.tan(estimated_image_scale / 2 * deg) * fraction_x) * rad
            astrometry_output = os.path.join(tmp, "txt")
            command = """
cd {5} ; \
timeout {0} solve-field --no-plots --crpix-center --scale-low {1:.1f} \
        --scale-high {2:.1f} --overwrite {3}_tmp3.png > {4} 2> /dev/null \
            """.format(timeout, estimated_width * 0.6, estimated_width * 1.2,
                       img_name, astrometry_output, tmp)
            # logging.info(command)
            os.system(command)

            # Parse the output from astrometry.net
            assert os.path.exists(
                astrometry_output), "Path <{}> doesn't exist".format(
                    astrometry_output)
            fit_text = open(astrometry_output).read()
            # logging.info(fit_text)

            # Clean up
            # logging.info("Removing temporary directory <{}>".format(tmp))
            os.system("rm -Rf {}".format(tmp))

            # Extract celestial coordinates of the centre of the frame from astrometry.net output
            test = re.search(
                r"\(RA H:M:S, Dec D:M:S\) = \(([\d-]*):(\d\d):([\d.]*), [+]?([\d-]*):(\d\d):([\d\.]*)\)",
                fit_text)
            if not test:
                logging.info("FAIL(POS): Point ({:.2f},{:.2f}).".format(
                    image_portion['x'], image_portion['y']))
                return None

            ra_sign = sgn(float(test.group(1)))
            ra = abs(float(test.group(1))) + float(test.group(2)) / 60 + float(
                test.group(3)) / 3600
            if ra_sign < 0:
                ra *= -1
            dec_sign = sgn(float(test.group(4)))
            dec = abs(float(test.group(4))) + float(
                test.group(5)) / 60 + float(test.group(6)) / 3600
            if dec_sign < 0:
                dec *= -1

            # If astrometry.net achieved a fit, then we report it to the user
            logging.info(
                "FIT: RA: {:7.2f}h. Dec {:7.2f} deg. Point ({:.2f},{:.2f}).".
                format(ra, dec, image_portion['x'], image_portion['y']))

            # Also, populate <fit_list> with a list of the central points of the image fragments, and their (RA, Dec)
            # coordinates.
            return {
                'ra': ra * pi / 12,
                'dec': dec * pi / 180,
                'x': image_portion['x'],
                'y': image_portion['y'],
                'radius': hypot(image_portion['x'] - 0.5,
                                image_portion['y'] - 0.5)
            }

        # Analyse each small portion of the image in turn
        dask_tasks = []
        for index, image_portion in enumerate(portion_centres):
            image_portion['index'] = index
            dask_tasks.append(
                analyse_image_portion(image_portion=image_portion))
        fit_list = dask.compute(*dask_tasks)

        # Remove fits which returned None
        fit_list = [i for i in fit_list if i is not None]

        # Clean up
        os.system("rm -Rf {}".format(tmp0))
        os.system("rm -Rf /tmp/tmp.*")

        # Make histogram of fits as a function of radius
        radius_histogram = [0] * 10
        for fit in fit_list:
            radius_histogram[int(fit['radius'] * 10)] += 1

        logging.info("Fit histogram vs radius: {}".format(radius_histogram))

        # Reject this image if there are insufficient fits from astrometry.net
        if min(radius_histogram[:5]) < 2:
            logging.info("Insufficient fits to continue")
            continue

        # Fit a gnomonic projection to the image, with barrel correction, to fit all the celestial positions of the
        # image fragments.

        # See <http://www.scipy-lectures.org/advanced/mathematical_optimization/> for more information

        ra0 = fit_list[0]['ra']
        dec0 = fit_list[0]['dec']
        parameter_scales = [
            pi / 4, pi / 4, pi / 4, pi / 4, pi / 4, pi / 4, 5e-2, 5e-6
        ]
        parameters_default = [
            ra0, dec0, pi / 4, pi / 4, 0, lens_props.barrel_parameters[2], 0
        ]
        parameters_initial = [
            parameters_default[i] / parameter_scales[i]
            for i in range(len(parameters_default))
        ]
        fitting_result = scipy.optimize.minimize(mismatch,
                                                 parameters_initial,
                                                 method='nelder-mead',
                                                 options={
                                                     'xtol': 1e-8,
                                                     'disp': True,
                                                     'maxiter': 1e8,
                                                     'maxfev': 1e8
                                                 })
        parameters_optimal = fitting_result.x
        parameters_final = [
            parameters_optimal[i] * parameter_scales[i]
            for i in range(len(parameters_default))
        ]

        # Display best fit numbers
        headings = [["Central RA / hr", 12 / pi],
                    ["Central Decl / deg", 180 / pi],
                    ["Image width / deg", 180 / pi],
                    ["Image height / deg", 180 / pi],
                    ["Position angle / deg", 180 / pi], ["barrel_k1", 1],
                    ["barrel_k2", 1]]

        logging.info(
            "Fit achieved to {:d} points with offset of {:.5f}. Best fit parameters were:"
            .format(len(fit_list), fitting_result.fun))
        for i in range(len(parameters_default)):
            logging.info("{0:30s} : {1}".format(
                headings[i][0], parameters_final[i] * headings[i][1]))

        # Reject fit if objective function too large
        if fitting_result.fun > 1e-4:
            logging.info("Rejecting fit as chi-squared too large.")
            continue

        # Reject fit if k1/k2 values are too extreme
        if (abs(parameters_final[5]) > 0.3) or (abs(parameters_final[6]) >
                                                0.1):
            logging.info("Rejecting fit as parameters seem extreme.")
            continue

        # Update observation status
        successful_fits += 1
        user = settings['pigazingUser']
        timestamp = time.time()
        barrel_parameters = [
            parameters_final[2] * 180 / pi, parameters_final[3] * 180 / pi,
            parameters_final[5], parameters_final[6], 0
        ]
        db.set_observation_metadata(
            user_id=user,
            observation_id=item['observationId'],
            utc=timestamp,
            meta=mp.Meta(key="calibration:lens_barrel_parameters",
                         value=json.dumps(barrel_parameters)))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="calibration:chi_squared",
                                                 value=fitting_result.fun))
        db.set_observation_metadata(user_id=user,
                                    observation_id=item['observationId'],
                                    utc=timestamp,
                                    meta=mp.Meta(key="calibration:point_count",
                                                 value=str(radius_histogram)))

    # Commit metadata changes
    db.commit()
    db0.commit()

    # Report how many fits we achieved
    logging.info(
        "Total of {:d} images successfully fitted.".format(successful_fits))

    if successful_fits > 0:
        # Now determine mean lens calibration each day
        logging.info("Averaging daily fits within period {} to {}".format(
            date_string(utc_min), date_string(utc_max)))
        block_size = 86400
        utc_min = (floor(utc_min / block_size + 0.5) -
                   0.5) * block_size  # Make sure that blocks start at noon
        time_blocks = list(
            np.arange(start=utc_min,
                      stop=utc_max + block_size,
                      step=block_size))

        # Start new block whenever we have a hardware refresh
        conn.execute(
            """
SELECT time FROM archive_metadata
WHERE observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
      AND fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey='refresh')
      AND time BETWEEN %s AND %s
""", (obstory_id, utc_min, utc_max))
        results = conn.fetchall()
        for item in results:
            time_blocks.append(item['time'])

        # Make sure that start points for time blocks are in order
        time_blocks.sort()

        for block_index, utc_block_min in enumerate(time_blocks[:-1]):
            utc_block_max = time_blocks[block_index + 1]

            # Select observations with calibration fits
            conn.execute(
                """
SELECT am1.stringValue AS barrel_parameters
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_block_min, utc_block_max))
            results = conn.fetchall()

            logging.info(
                "Averaging fits within period {} to {}: Found {} fits.".format(
                    date_string(utc_block_min), date_string(utc_block_max),
                    len(results)))

            # Average the fits we found
            if len(results) < 3:
                logging.info("Insufficient images to reliably average.")
                continue

            # Pick the median fit
            value_list = {
                'scale_x': [],
                'scale_y': [],
                'barrel_k1': [],
                'barrel_k2': [],
                'barrel_k3': []
            }
            for item in results:
                barrel_parameters = json.loads(item['barrel_parameters'])
                value_list['scale_x'].append(barrel_parameters[0])
                value_list['scale_y'].append(barrel_parameters[1])
                value_list['barrel_k1'].append(barrel_parameters[2])
                value_list['barrel_k2'].append(barrel_parameters[3])
                value_list['barrel_k3'].append(barrel_parameters[4])

            median_values = {}
            for key, values in value_list.items():
                values.sort()
                median_values[key] = values[len(values) // 2]

            # Print fit information
            logging.info("""\
CALIBRATION FIT from {:2d} images: %s. \
""".format(
                len(results), "; ".join([
                    "{}: {}".format(key, median)
                    for key, median in median_values.items()
                ])))

            # Flush any previous observation status
            flush_calibration(obstory_id=obstory_id,
                              utc_min=utc_block_min - 1,
                              utc_max=utc_block_min + 1)

            # Update observatory status
            user = settings['pigazingUser']
            timestamp = time.time()
            barrel_parameters = [
                median_values['scale_x'], median_values['scale_y'],
                median_values['barrel_k1'], median_values['barrel_k2'],
                median_values['barrel_k3']
            ]
            db.register_obstory_metadata(
                obstory_id=obstory_id,
                key="calibration:lens_barrel_parameters",
                value=json.dumps(barrel_parameters),
                time_created=timestamp,
                metadata_time=utc_block_min,
                user_created=user)
            db.commit()

    # Clean up and exit
    db.commit()
    db.close_db()
    db0.commit()
    conn.close()
    db0.close()
    return
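
The fit above divides every parameter by a characteristic scale before handing it to Nelder-Mead, so the optimiser works with quantities of order unity. A self-contained sketch of the same trick on a toy objective (the scales and target values are invented):

import numpy as np
import scipy.optimize

# Toy objective: recover a = 42 (order 100) and b = 0.003 (order 0.01).
scales = np.array([100.0, 0.01])  # assumed characteristic sizes

def mismatch_scaled(p_scaled):
    a, b = p_scaled * scales      # restore physical units
    return (a - 42.0) ** 2 + (b - 0.003) ** 2

result = scipy.optimize.minimize(mismatch_scaled, x0=np.array([1.0, 1.0]),
                                 method='Nelder-Mead')
print(result.x * scales)          # approximately [42, 0.003]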
Code example #12
def orientation_calc(obstory_id, utc_min, utc_max):
    """
    Use astrometry.net to determine the orientation of a particular observatory for each night within the time
    period between the unix times <utc_min> and <utc_max>.

    :param obstory_id:
        The ID of the observatory we want to determine the orientation for.
    :type obstory_id:
        str
    :param utc_min:
        The start of the time period in which we should determine the observatory's orientation (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should determine the observatory's orientation (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info(
        "Starting calculation of camera alignment for <{}>".format(obstory_id))

    # Reduce time window we are searching to the interval in which observations are present (to save time)
    utc_max, utc_min = reduce_time_window(conn=conn,
                                          obstory_id=obstory_id,
                                          utc_max=utc_max,
                                          utc_min=utc_min)

    # Try to average the fits within each night to determine the sigma-clipped mean orientation
    average_daily_fits(conn=conn,
                       db=db,
                       obstory_id=obstory_id,
                       utc_max=utc_max,
                       utc_min=utc_min)
    measure_fit_quality_to_daily_fits(conn=conn,
                                      db=db,
                                      obstory_id=obstory_id,
                                      utc_max=utc_max,
                                      utc_min=utc_min)

    # Clean up and exit
    db.commit()
    db.close_db()
    db0.commit()
    conn.close()
    db0.close()
    return
Code example #13
File: plot_orientation.py Project: dcf21/pi-gazing
def plot_orientation(obstory_ids, utc_min, utc_max):
    """
    Plot the orientation of a particular observatory within the time period between the unix times
    <utc_min> and <utc_max>.

    :param obstory_ids:
        The IDs of the observatories we want to plot the orientation for.
    :type obstory_ids:
        list<str>
    :param utc_min:
        The start of the time period in which we should plot the observatory's orientation (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should plot the observatory's orientation (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Plotting camera alignment for <{}>".format(obstory_ids))

    # Data filename stem
    filename_stem = "/tmp/orientation_plot_{:.1f}".format(utc_min)

    # Make data file for each observatory in turn
    for counter, obstory_id in enumerate(obstory_ids):
        # Search for background-subtracted time lapse image with best sky clarity, and no existing orientation fit,
        # within this time period
        conn.execute(
            """
SELECT ao.obsTime, ao.publicId AS observationId,
       am.floatValue AS skyClarity, am2.stringValue AS fitQuality, am3.stringValue AS fitQualityToAverage
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality")
LEFT OUTER JOIN archive_metadata am3 ON ao.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality_to_daily")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
ORDER BY ao.obsTime
""", (utc_min, utc_max, obstory_id))
        results = conn.fetchall()

        # Data filename
        filename = "{}_{:02d}".format(filename_stem, counter)

        # Loop over results and write to data file
        with open("{}.dat".format(filename), "w") as f:
            for item in results:
                utc = float(item['obsTime'])
                sky_clarity = float(item['skyClarity'])
                fit_quality = -99
                fit_quality_to_average = -99

                if item['fitQuality'] is not None:
                    fit_quality = float(json.loads(item['fitQuality'])[0])

                if item['fitQualityToAverage'] is not None:
                    fit_quality_to_average = float(
                        json.loads(item['fitQualityToAverage'])[0])

                f.write("{:.1f} {:6.1f} {:6.3f} {:6.3f}\n".format(
                    utc, sky_clarity, fit_quality, fit_quality_to_average))

    # Write pyxplot command file
    with open("{}.ppl".format(filename_stem), "w") as ppl:
        plot_settings = {
            "x_min": utc_min,
            "x_max": utc_max,
            "width": 18,
            "spacing": 4,
            "pt": 17,
            "filename": filename_stem
        }
        ppl.write("""
set width {width}
set multiplot ; set nodisplay
set key below
set xlabel 'Time / hour' ; set xrange [{x_min}:{x_max}]
set xformat "%.1f"%((x/3600) % 24)
set ylabel 'Fit quality' ; set yrange [0:6]
set y2label 'Sky clarity' ; set y2range [0:1000]
    """.format(**plot_settings))

        for counter, obstory_id in enumerate(obstory_ids):
            ppl.write("""
            set origin ({width}+{spacing})*{counter}, 0
plot '{filename}_{counter:02d}.dat' title 'Fit quality' using 1:3 axes x1y1 with p col green pt {pt}, \
     '{filename}_{counter:02d}.dat' title 'Fit quality (to daily average)' using 1:4 axes x1y1 with p col red pt {pt}, \
     '{filename}_{counter:02d}.dat' title 'Sky clarity' using 1:2 axes x1y2 with p col blue pt {pt}
    """.format(**plot_settings, counter=counter))

        ppl.write("""
set term png ; set output '{filename}.png'
set term dpi 100
set display ; refresh
    """.format(**plot_settings))

    os.system("pyxplot {}.ppl".format(filename_stem))

    # Close database handles
    db.close_db()
    conn.close()
    db0.close()
    return
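
A hypothetical call plotting the last week of fits for two cameras (the IDs are invented):

import time

# Hypothetical usage: plot a week of orientation data for two observatories.
now = time.time()
plot_orientation(obstory_ids=["my-camera-1", "my-camera-2"],
                 utc_min=now - 7 * 86400, utc_max=now)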
Code example #14
def list_triangulations(utc_min=None, utc_max=None):
    """
    Display a list of all the trajectories of moving objects registered in the database.

    :param utc_min:
        Only show observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only show observations made before the specified time stamp.

    :type utc_max:
        float

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Compile search criteria for observation groups
    where = ["g.semanticType = (SELECT uid FROM archive_semanticTypes WHERE name=\"{}\")".
                 format(simultaneous_event_type)
             ]
    args = []

    if utc_min is not None:
        where.append("g.time>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("g.time<=%s")
        args.append(utc_max)

    # Search for observation groups containing groups of simultaneous detections
    conn.execute("""
SELECT g.publicId AS groupId, g.time AS time, am.stringValue AS objectType,
       am2.floatValue AS speed, am3.floatValue AS mean_altitude, am4.floatValue AS max_angular_offset,
       am5.floatValue AS max_baseline, am6.stringValue AS radiant_direction, am7.floatValue AS sight_line_count,
       am8.stringValue AS path
FROM archive_obs_groups g
INNER JOIN archive_metadata am ON g.uid = am.groupId AND
    am.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="web:category")
INNER JOIN archive_metadata am2 ON g.uid = am2.groupId AND
    am2.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:speed")
INNER JOIN archive_metadata am3 ON g.uid = am3.groupId AND
    am3.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:mean_altitude")
INNER JOIN archive_metadata am4 ON g.uid = am4.groupId AND
    am4.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:max_angular_offset")
INNER JOIN archive_metadata am5 ON g.uid = am5.groupId AND
    am5.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:max_baseline")
INNER JOIN archive_metadata am6 ON g.uid = am6.groupId AND
    am6.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:radiant_direction")
INNER JOIN archive_metadata am7 ON g.uid = am7.groupId AND
    am7.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:sight_line_count")
INNER JOIN archive_metadata am8 ON g.uid = am8.groupId AND
    am8.fieldId = (SELECT uid FROM archive_metadataFields WHERE metaKey="triangulation:path")
WHERE """ + " AND ".join(where) + """
ORDER BY g.time;
""", args)
    results = conn.fetchall()

    # Count how many simultaneous detections we find by type
    detections_by_type = {}

    # Compile tally by type
    for item in results:
        # Add this triangulation to tally
        if item['objectType'] not in detections_by_type:
            detections_by_type[item['objectType']] = 0
        detections_by_type[item['objectType']] += 1

    # List information about each observation in turn
    print("{:16s} {:20s} {:20s} {:8s} {:10s}".format("GroupId", "Time", "Object type", "Speed", "Altitude"))
    for item in results:
        # Print triangulation information
        print("{:16s} {:20s} {:20s} {:8.0f} {:10.0f}".format(item['groupId'],
                                                             date_string(item['time']),
                                                             item['objectType'],
                                                             item['speed'],
                                                             item['mean_altitude']
                                                             ))

    # Report tally of events
    print("\nTally of events by type:")
    for event_type in sorted(detections_by_type.keys()):
        print("    * {:26s}: {:6d}".format(event_type, detections_by_type[event_type]))
Code example #15
def new_image(image, username, observatory, title, semantic_type='Original', time_offset=0):
    """
    Insert an image file into the database.

    :param image:
        The filename of the image to be inserted
    :param username:
        The username of the user who is to own this image
    :param observatory:
        The observatory from which this observation was made
    :param title:
        The title of this image
    :param semantic_type:
        The semantic type of this image file, e.g. "Original"
    :param time_offset:
        Time offset to apply to image, seconds (positive means we move time forwards).
    :type time_offset:
        int
    :return:
        None
    """
    our_path = os_path.split(os_path.abspath(__file__))[0]

    db = obsarchive_db.ObservationDatabase(file_store_path=settings['dbFilestore'],
                                           db_host=installation_info['mysqlHost'],
                                           db_user=installation_info['mysqlUser'],
                                           db_password=installation_info['mysqlPassword'],
                                           db_name=installation_info['mysqlDatabase'],
                                           obstory_id=installation_info['observatoryId'])

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Fetch user ID
    conn.execute('SELECT userId FROM pigazing_users WHERE username=%s;', (username,))
    results = conn.fetchall()
    assert len(results) > 0, "No such user <{}>".format(username)

    # Fetch observatory ID
    conn.execute('SELECT uid FROM archive_observatories WHERE publicId=%s;', (observatory,))
    results = conn.fetchall()
    assert len(results) > 0, "No such observatory <{}>".format(observatory)

    # Look up image EXIF metadata
    metadata = fetch_exif_metadata(input_path=image, time_offset=time_offset)

    # Create observation object for this image
    utc = time.time()
    obs_obj = db.register_observation(obstory_id=observatory,
                                      random_id=True,
                                      obs_time=metadata['Time'],
                                      creation_time=utc,
                                      obs_type="image", user_id=username,
                                      obs_meta=[],
                                      published=1, moderated=1, featured=0,
                                      ra=-999, dec=-999,
                                      field_width=None, field_height=None,
                                      position_angle=None, central_constellation=None,
                                      altitude=-999, azimuth=-999, alt_az_pa=None,
                                      astrometry_processed=None, astrometry_processing_time=None,
                                      astrometry_source=None)

    # Create metadata about image
    obs_id = obs_obj.id
    db.set_observation_metadata(username, obs_id, obsarchive_model.Meta("Observer", username))
    db.set_observation_metadata(username, obs_id, obsarchive_model.Meta("Caption", title))

    for key, value in metadata.items():
        db.set_observation_metadata(user_id=username,
                                    observation_id=obs_id,
                                    meta=obsarchive_model.Meta(key, value))

    db.commit()

    # Make copy of file
    tmp_file_path = os_path.join(our_path, "../auto/tmp/dss_images")
    os.system("mkdir -p {}".format(tmp_file_path))
    img_name = os_path.split(image)[1]
    tmp_filename = os_path.join(tmp_file_path, img_name)
    os.system("cp '{}' '{}'".format(image, tmp_filename))
    os.system("chmod 644 '{}'".format(tmp_filename))

    # Create file object for this image
    file_obj = db.register_file(file_path=tmp_filename, user_id=username, mime_type="image/png",
                                semantic_type=semantic_type, primary_image=True,
                                file_time=metadata['Time'], file_meta=[],
                                observation_id=obs_id,
                                random_id=True)
    db.commit()
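
A hypothetical call (the path, user and observatory IDs are invented):

# Hypothetical usage: ingest a photograph under a named user and observatory.
new_image(image="/home/pi/photos/meteor.png", username="admin",
          observatory="my-camera-1", title="Bright meteor over the garden")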
Code example #16
File: editUser.py Project: dcf21/pi-gazing
def edit_user(username,
              delete=False,
              password=None,
              name=None,
              job=None,
              email=None,
              join_date=None,
              profile_pic=None,
              profile_text=None,
              add_roles=None,
              remove_roles=None):
    """
    Edit a user's account details.

    :param username:
        Username for the user account that we are to edit
    :param delete:
        Boolean flag indicating whether we are to delete this user account altogether
    :param password:
        New password for this user
    :param name:
        New real name for this user
    :param job:
        New job description for this user
    :param email:
        New email address for this user
    :param join_date:
        New join date for this user
    :param profile_pic:
        New profile picture for this user
    :param profile_text:
        New profile text for this user
    :param add_roles:
        New list of roles for this user
    :type add_roles:
        list
    :param remove_roles:
        List of roles to remove from this user
    :type remove_roles:
        list
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Fetch user ID
    user_id = None
    while True:
        conn.execute('SELECT userId FROM pigazing_users WHERE username=%s;',
                     (username, ))
        results = conn.fetchall()
        if len(results) > 0:
            user_id = results[0]['userId']
            break
        conn.execute('INSERT INTO pigazing_users (username) VALUES (%s);',
                     (username, ))

    if delete:
        conn.execute('DELETE FROM pigazing_users WHERE userId=%s;',
                     (user_id, ))
        db0.commit()
        db0.close()
        return

    if password:
        # bcrypt.hash() supersedes the deprecated bcrypt.encrypt()
        conn.execute('UPDATE pigazing_users SET password=%s WHERE userId=%s;',
                     (passlib.hash.bcrypt.hash(password), user_id))

    if name is not None:
        conn.execute('UPDATE pigazing_users SET name = %s WHERE userId = %s',
                     (name, user_id))
    if job is not None:
        conn.execute('UPDATE pigazing_users SET job = %s WHERE userId = %s',
                     (job, user_id))
    if email is not None:
        conn.execute('UPDATE pigazing_users SET email = %s WHERE userId = %s',
                     (email, user_id))
    if join_date is not None:
        conn.execute(
            'UPDATE pigazing_users SET joinDate = %s WHERE userId = %s',
            (join_date, user_id))
    if profile_pic is not None:
        conn.execute(
            'UPDATE pigazing_users SET profilePic = %s WHERE userId = %s',
            (profile_pic, user_id))
    if profile_text is not None:
        conn.execute(
            'UPDATE pigazing_users SET profileText = %s WHERE userId = %s',
            (profile_text, user_id))

    if add_roles:
        for role in add_roles:
            conn.execute("SELECT roleId FROM pigazing_roles WHERE name=%s;",
                         (role, ))
            results = conn.fetchall()
            if len(results) < 1:
                conn.execute("INSERT INTO pigazing_roles (name) VALUES (%s);",
                             (role, ))
                conn.execute(
                    "SELECT roleId FROM pigazing_roles WHERE name=%s;",
                    (role, ))
                results = conn.fetchall()

            conn.execute(
                'REPLACE INTO pigazing_user_roles (userId, roleId) VALUES '
                '((SELECT u.userId FROM pigazing_users u WHERE u.userId=%s),'
                '%s)', (user_id, results[0]['roleId']))

    if remove_roles:
        for role in remove_roles:
            conn.execute(
                'DELETE FROM pigazing_user_roles WHERE userId=%s AND '
                'roleId=(SELECT roleId FROM pigazing_roles WHERE name=%s);',
                (user_id, role))

    # Commit changes to database
    db0.commit()
    db0.close()
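
A minimal usage sketch for edit_user (the username, password and role name below are hypothetical):

# Create or update a user account, set a password, and grant a role
edit_user(username="jsmith",
          password="s3cret-passphrase",
          name="Jane Smith",
          add_roles=["moderator"])

# Later: revoke the role, then delete the account entirely
edit_user(username="jsmith", remove_roles=["moderator"])
edit_user(username="jsmith", delete=True)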
Code example #17
def list_orientations(obstory_id, utc_min, utc_max):
    """
    List the worst orientation fits of a particular observatory within the time period between the unix times
    <utc_min> and <utc_max>.

    :param obstory_id:
        The ID of the observatory whose worst orientation fits we want to list.
    :type obstory_id:
        str
    :param utc_min:
        The start of the time period in which we should search for orientation fits (unix time).
    :type utc_min:
        float
    :param utc_max:
        The end of the time period in which we should search for orientation fits (unix time).
    :type utc_max:
        float
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Open connection to image archive
    db = obsarchive_db.ObservationDatabase(
        file_store_path=settings['dbFilestore'],
        db_host=installation_info['mysqlHost'],
        db_user=installation_info['mysqlUser'],
        db_password=installation_info['mysqlPassword'],
        db_name=installation_info['mysqlDatabase'],
        obstory_id=installation_info['observatoryId'])

    logging.info("Plotting camera alignment for <{}>".format(obstory_id))

    # Search for background-subtracted time-lapse images with sky clarity above the minimum threshold,
    # together with any existing orientation fit-quality metadata, within this time period
    conn.execute(
        """
SELECT ao.obsTime, f.repositoryFname AS observationId,
       am.floatValue AS skyClarity, am2.stringValue AS fitQuality, am3.stringValue AS fitQualityToAverage
FROM archive_files f
INNER JOIN archive_observations ao on f.observationId = ao.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON ao.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality")
LEFT OUTER JOIN archive_metadata am3 ON ao.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:fit_quality_to_daily")
WHERE ao.obsTime BETWEEN %s AND %s
    AND ao.observatory=(SELECT uid FROM archive_observatories WHERE publicId=%s)
    AND f.semanticType=(SELECT uid FROM archive_semanticTypes WHERE name="pigazing:timelapse/backgroundSubtracted")
    AND am.floatValue > %s
ORDER BY ao.obsTime
""", (utc_min, utc_max, obstory_id, minimum_sky_clarity))
    results = conn.fetchall()

    # Data filename
    filename = "/tmp/worst_orientations"

    # Loop over results
    data = []
    for item in results:
        utc = float(item['obsTime'])
        sky_clarity = float(item['skyClarity'])
        fit_quality = -99
        fit_quality_to_average = -99

        if item['fitQuality'] is not None:
            fit_quality = float(json.loads(item['fitQuality'])[0])

        if item['fitQualityToAverage'] is not None:
            fit_quality_to_average = float(
                json.loads(item['fitQualityToAverage'])[0])

        data.append({
            'uid': item['observationId'],
            'utc': utc,
            'sky_clarity': sky_clarity,
            'fit_quality': fit_quality,
            'fit_quality_to_average': fit_quality_to_average
        })

    # Sort on fit quality relative to the daily average, worst (largest) first
    data.sort(key=itemgetter('fit_quality_to_average'), reverse=True)

    # Limit to 1000 worst points
    data = data[:1000]

    # Write to data file
    with open("{}.dat".format(filename), "w") as f:
        for item in data:
            f.write("{} {:6.1f} {:6.3f} {:6.3f}\n".format(
                item['uid'], item['sky_clarity'], item['fit_quality'],
                item['fit_quality_to_average']))

    # Close database handles
    db.close_db()
    conn.close()
    db0.close()
    return
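
A usage sketch for list_orientations, covering the last 30 days (the observatory ID is hypothetical):

import time

list_orientations(obstory_id="eddington0",
                  utc_min=time.time() - 30 * 86400,
                  utc_max=time.time())
# The worst fits are written to /tmp/worst_orientations.dat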
Code example #18
def list_satellites(obstory_id, utc_min, utc_max):
    """
    List all the satellite identifications for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list identifications for.
    :param utc_min:
        The start of the time period in which we should list identifications (unix time).
    :param utc_max:
        The end of the time period in which we should list identifications (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of satellite identifications
    satellite_identifications = []

    # Select moving objects with satellite identifications
    conn.execute(
        """
SELECT am1.stringValue AS satellite_name, am2.floatValue AS ang_offset,
       am3.floatValue AS clock_offset, am4.floatValue AS duration, am5.floatValue AS norad_id,
       o.obsTime AS time, o.publicId AS obsId
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:name")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:angular_offset")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:clock_offset")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:duration")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="satellite:norad_id")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        satellite_identifications.append({
            'id': item['obsId'],
            'time': item['time'],
            'satellite_name': item['satellite_name'],
            'ang_offset': item['ang_offset'],
            'clock_offset': item['clock_offset'],
            'duration': item['duration'],
            'norad_id': int(item['norad_id'])
        })

    # Sort identifications by time
    satellite_identifications.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:16s} {:7s} {:32s} {:26s} {:8s} {:10s} {:10s}\
""".format("Time", "NORAD", "ID", "Satellite", "Duration", "Ang offset",
           "Clock offset"))

    # Display list of satellite identifications
    for item in satellite_identifications:
        print("""\
{:16s} {:7d} {:32s} {:26s} {:8.1f} {:10.1f} {:10.1f}\
""".format(
            date_string(item['time']),
            item['norad_id'],
            item['id'],
            item['satellite_name'],
            item['duration'],
            item['ang_offset'],
            item['clock_offset'],
        ))

    # Clean up and exit
    conn.close()
    db0.close()
    return
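
A usage sketch for list_satellites, printing the past week's identifications (the observatory ID is hypothetical):

import time

list_satellites(obstory_id="eddington0",
                utc_min=time.time() - 7 * 86400,
                utc_max=time.time())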
Code example #19
def delete_observation(id, dry_run):
    """
    Delete an observation and all associated files.

    :param id:
        The publicId of the observation to delete

    :param dry_run:
        Boolean indicating whether we should do a dry run, without actually deleting anything

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Search for observation
    conn.execute(
        """
SELECT o.uid
FROM archive_observations o
WHERE o.publicId=%s;
""", (id, ))
    results_observations = conn.fetchall()

    # Delete each observation in turn
    for observation in results_observations:
        # Search for files
        conn.execute(
            """
    SELECT f.uid, f.repositoryFname
    FROM archive_files f
    WHERE f.observationId=%s
    """, (observation['uid'], ))
        results_files = conn.fetchall()

        # Delete each file in turn
        for file in results_files:

            logging.info("Deleting file <{}>".format(file['repositoryFname']))

            # Delete files
            if not dry_run:
                os.unlink(
                    os.path.join(settings['dbFilestore'],
                                 file['repositoryFname']))

            # Delete file record
            if not dry_run:
                conn.execute("DELETE FROM archive_files WHERE uid=%s",
                             (file['uid'], ))

        # Delete observation
        if not dry_run:
            conn.execute("DELETE FROM archive_observations WHERE uid=%s",
                         (observation['uid'], ))

    # Commit changes to database
    db0.commit()
    conn.close()
    db0.close()
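
A usage sketch for delete_observation (the publicId is hypothetical); a dry run first logs which files would be removed:

delete_observation(id="20190801190235_e5f2a4", dry_run=True)   # log only
delete_observation(id="20190801190235_e5f2a4", dry_run=False)  # actually delete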
Code example #20
def update_sky_clarity(utc_min=None,
                       utc_max=None,
                       username=None,
                       obstory=None):
    """
    Update the sky clarity measurements in the database. This is useful if the algorithm has changed.

    :param utc_min:
        Only update observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only update observations made before the specified time stamp.

    :type utc_max:
        float

    :param username:
        Optionally specify a username, to filter only images by a particular user

    :type username:
        str

    :param obstory:
        The public id of the observatory we are to update observations from

    :type obstory:
        str

    :return:
        None
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    where = ["1"]
    args = []

    if utc_min is not None:
        where.append("o.obsTime>=%s")
        args.append(utc_min)
    if utc_max is not None:
        where.append("o.obsTime<=%s")
        args.append(utc_max)
    if username is not None:
        where.append("o.userId=%s")
        args.append(username)
    if obstory is not None:
        where.append("l.publicId=%s")
        args.append(obstory)

    conn.execute(
        """
SELECT o.uid, o.userId, l.name AS place, o.obsTime, am.uid AS skyClarityUid
FROM archive_observations o
INNER JOIN archive_observatories l ON o.observatory = l.uid
INNER JOIN archive_semanticTypes ast ON o.obsType = ast.uid
LEFT OUTER JOIN archive_metadata am ON o.uid = am.observationId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
WHERE """ + " AND ".join(where) + """
ORDER BY obsTime ASC;
""", args)
    results = conn.fetchall()

    # Keep track of how many sky clarity values we have updated
    values_unchanged = 0
    values_updated = 0

    # Update each observation in turn
    for counter, obs in enumerate(results):

        # Fetch list of files in this observation
        conn.execute(
            """
SELECT ast.name AS semanticType, repositoryFname,
am.floatValue AS skyClarity, am.uid AS skyClarityUid, am2.floatValue AS noiseLevel
FROM archive_files f
INNER JOIN archive_semanticTypes ast ON f.semanticType = ast.uid
INNER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
LEFT OUTER JOIN archive_metadata am2 ON f.uid = am2.fileId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:stackNoiseLevel")
WHERE f.observationId=%s;
""", (obs['uid'], ))

        files = conn.fetchall()

        for item in files:
            # The stack noise level comes from a LEFT OUTER JOIN and may be absent;
            # assume zero noise in that case, rather than crashing on float(None)
            noise_level = (float(item['noiseLevel'])
                           if item['noiseLevel'] is not None else 0.0)

            # Run sky clarity calculator
            p = subprocess.Popen(
                args=[
                    os.path.join(settings['imageProcessorPath'], "skyClarity"),
                    '--input',
                    os.path.join(settings['dbFilestore'], item['repositoryFname']),
                    '--noise', str(noise_level)
                ],
                stdout=subprocess.PIPE,
                stdin=subprocess.PIPE,
                stderr=subprocess.STDOUT)

            # Extract new sky clarity measurement
            new_sky_clarity = float(
                p.communicate(input=bytes("", 'utf-8'))[0].decode('utf-8'))

            if new_sky_clarity == item['skyClarity']:
                values_unchanged += 1
                continue

            # Print new measurement
            values_updated += 1
            logging.info("Updating {} from {:.0f} to {:.0f}".format(
                item['repositoryFname'], item['skyClarity'], new_sky_clarity))

            # Commit to database
            conn.execute(
                "UPDATE archive_metadata SET floatValue=%s WHERE uid=%s",
                (new_sky_clarity, item['skyClarityUid']))

            # Update the observation with the background-subtracted image's sky clarity
            if ((item['semanticType']
                 == 'pigazing:timelapse/backgroundSubtracted')
                    and (obs['skyClarityUid'] is not None)):
                conn.execute(
                    "UPDATE archive_metadata SET floatValue=%s WHERE uid=%s",
                    (new_sky_clarity, obs['skyClarityUid']))

    # Commit changes to database
    db0.commit()
    conn.close()
    db0.close()

    # Report how many values we changed
    logging.info("Updated {:d} images. Left {:d} images unchanged".format(
        values_updated, values_unchanged))
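
A usage sketch for update_sky_clarity, recomputing one observatory's measurements over a single month (the observatory ID is hypothetical):

update_sky_clarity(utc_min=1561939200,   # 2019-07-01 00:00 UTC
                   utc_max=1564617600,   # 2019-08-01 00:00 UTC
                   obstory="eddington0")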
Code example #21
def list_meteors(obstory_id, utc_min, utc_max):
    """
    List all the meteor identifications for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list meteor identifications for.
    :param utc_min:
        The start of the time period in which we should list meteor identifications (unix time).
    :param utc_max:
        The end of the time period in which we should list meteor identifications (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of meteor identifications
    meteor_identifications = []

    # Count how many meteors we find in each shower
    meteor_count_by_shower = {}

    # Select observations with meteor shower identifications
    conn.execute("""
SELECT am1.stringValue AS name, am2.floatValue AS radiant_offset,
       o.obsTime AS time, o.publicId AS obsId
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="shower:name")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="shower:radiant_offset")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        meteor_identifications.append({
            'id': item['obsId'],
            'time': item['time'],
            'shower': item['name'],
            'offset': item['radiant_offset']
        })

        # Update tally of meteors
        if item['name'] not in meteor_count_by_shower:
            meteor_count_by_shower[item['name']] = 0
        meteor_count_by_shower[item['name']] += 1

    # Sort meteors by time
    meteor_identifications.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:16s} {:20s} {:20s} {:5s}\
""".format("Time", "ID", "Shower", "Offset"))

    # Display list of meteors
    for item in meteor_identifications:
        print("""\
{:16s} {:20s} {:26s} {:5.1f}\
""".format(date_string(item['time']),
           item['id'],
           item['shower'],
           item['offset']
           ))

    # Report tally of meteors
    logging.info("Tally of meteors by shower:")
    for shower in sorted(meteor_count_by_shower.keys()):
        logging.info("    * {:26s}: {:6d}".format(shower, meteor_count_by_shower[shower]))

    # Clean up and exit
    conn.close()
    db0.close()
    return
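
A usage sketch for list_meteors, covering the 2019 Perseid peak (the observatory ID is hypothetical):

list_meteors(obstory_id="eddington0",
             utc_min=1565395200,   # 2019-08-10 00:00 UTC
             utc_max=1565827200)   # 2019-08-15 00:00 UTC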
Code example #22
def list_calibration_fixes(obstory_id, utc_min, utc_max):
    """
    List all the calibration fixes for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list calibration fixes for.
    :param utc_min:
        The start of the time period in which we should list calibration fixes (unix time).
    :param utc_max:
        The end of the time period in which we should list calibration fixes (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of calibration fixes
    calibration_fixes = []

    # Select calibration fits attached to individual observations
    conn.execute(
        """
SELECT am1.stringValue AS barrel_parameters,
       am4.floatValue AS chi_squared, am5.stringValue AS point_count,
       o.obsTime AS time
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:chi_squared")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:point_count")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        calibration_fixes.append({
            'time': item['time'],
            'average': False,
            'fit': item
        })

    # Select observatory-wide (averaged) calibration fits
    conn.execute(
        """
SELECT am1.stringValue AS barrel_parameters,
       am3.floatValue AS chi_squared, am4.stringValue AS point_count,
       am1.time AS time
FROM archive_observatories o
INNER JOIN archive_metadata am1 ON o.uid = am1.observatory AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:lens_barrel_parameters")
LEFT OUTER JOIN archive_metadata am3 ON o.uid = am3.observatory AND am3.time=am1.time AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:chi_squared")
LEFT OUTER JOIN archive_metadata am4 ON o.uid = am4.observatory AND am4.time=am1.time AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="calibration:point_count")
WHERE
    o.publicId=%s AND
    am1.time BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        calibration_fixes.append({
            'time': item['time'],
            'average': True,
            'fit': item
        })

    # Sort fixes by time
    calibration_fixes.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:1s} {:16s} {:8s} {:8s} {:10s} {:12s} {:6s}\
""".format("", "Time", "barrelK1", "barrelK2", "barrelK3", "chi2", "points"))

    # Display fixes
    for item in calibration_fixes:
        # Deal with missing data
        if item['fit']['chi_squared'] is None:
            item['fit']['chi_squared'] = -1
        if item['fit']['point_count'] is None:
            item['fit']['point_count'] = "-"

        # Display calibration fix
        barrel_parameters = json.loads(item['fit']['barrel_parameters'])
        print("""\
{:s} {:16s} {:8.4f} {:8.4f} {:10.7f} {:12.9f} {:s} {:s}\
""".format("\n>" if item['average'] else " ", date_string(item['time']),
           barrel_parameters[2], barrel_parameters[3], barrel_parameters[4],
           item['fit']['chi_squared'], item['fit']['point_count'],
           "\n" if item['average'] else ""))

    # Clean up and exit
    conn.close()
    db0.close()
    return
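
A usage sketch for list_calibration_fixes over the last 90 days (hypothetical observatory ID); observatory-wide averaged fits are flagged with '>' in the output:

import time

list_calibration_fixes(obstory_id="eddington0",
                       utc_min=time.time() - 90 * 86400,
                       utc_max=time.time())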
Code example #23
def list_orientation_fixes(obstory_id, utc_min, utc_max):
    """
    List all the orientation fixes for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list orientation fixes for.
    :param utc_min:
        The start of the time period in which we should list orientation fixes (unix time).
    :param utc_max:
        The end of the time period in which we should list orientation fixes (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of orientation fixes
    orientation_fixes = []

    # Select observations with orientation fits
    conn.execute(
        """
SELECT am1.floatValue AS altitude, am2.floatValue AS azimuth, am3.floatValue AS tilt,
       am4.floatValue AS width_x_field, am5.floatValue AS width_y_field,
       o.obsTime AS time
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:altitude")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:azimuth")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:tilt")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_x_field")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_y_field")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        orientation_fixes.append({
            'time': item['time'],
            'average': False,
            'fit': item
        })

    # Select observatory orientation fits
    conn.execute(
        """
SELECT am1.floatValue AS altitude, am2.floatValue AS azimuth, am3.floatValue AS tilt,
       am4.floatValue AS width_x_field, am5.floatValue AS width_y_field,
       am1.time AS time
FROM archive_observatories o
INNER JOIN archive_metadata am1 ON o.uid = am1.observatory AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:altitude")
INNER JOIN archive_metadata am2 ON o.uid = am2.observatory AND am2.time=am1.time AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:azimuth")
INNER JOIN archive_metadata am3 ON o.uid = am3.observatory AND am3.time=am1.time AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:tilt")
INNER JOIN archive_metadata am4 ON o.uid = am4.observatory AND am4.time=am1.time AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_x_field")
INNER JOIN archive_metadata am5 ON o.uid = am5.observatory AND am5.time=am1.time AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="orientation:width_y_field")
WHERE
    o.publicId=%s AND
    am1.time BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        orientation_fixes.append({
            'time': item['time'],
            'average': True,
            'fit': item
        })

    # Sort fixes by time
    orientation_fixes.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:1s} {:16s} {:9s} {:9s} {:9s} {:8s} {:8s}\
""".format("", "Time", "Alt", "Az", "Tilt", "FoV X", "FoV Y"))

    # Display fixes
    for item in orientation_fixes:
        print("""\
{:s} {:16s} {:9.4f} {:9.4f} {:9.4f} {:8.3f} {:8.3f} {:s}\
""".format("\n>" if item['average'] else " ", date_string(item['time']),
           item['fit']['altitude'], item['fit']['azimuth'],
           item['fit']['tilt'], item['fit']['width_x_field'],
           item['fit']['width_y_field'], "\n" if item['average'] else ""))

    # Clean up and exit
    conn.close()
    db0.close()
    return
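
A usage sketch for list_orientation_fixes, building the time window from calendar dates (hypothetical observatory ID):

from datetime import datetime, timezone

list_orientation_fixes(obstory_id="eddington0",
                       utc_min=datetime(2019, 7, 1, tzinfo=timezone.utc).timestamp(),
                       utc_max=datetime(2019, 8, 1, tzinfo=timezone.utc).timestamp())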
Code example #24
def fetch_images(utc_min, utc_max, obstory, img_types, stride):
    """
    Fetch a list of images registered in the database, filtered by observatory, time period and semantic type.

    :param utc_min:
        Only return observations made after the specified time stamp.

    :type utc_min:
        float

    :param utc_max:
        Only return observations made before the specified time stamp.

    :type utc_max:
        float

    :param obstory:
        The public id of the observatory we are to fetch observations from

    :type obstory:
        str

    :param img_types:
        Only return images with these semantic types

    :type img_types:
        list[str]

    :param stride:
        Only return every nth observation matching the search criteria

    :type stride:
        int

    :return:
        List of dictionaries, one per matching observation, mapping each file's semantic type to its database record.
    """
    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Get list of observations
    conn.execute(
        """
SELECT o.uid, o.userId, l.name AS place, o.obsTime
FROM archive_observations o
INNER JOIN archive_observatories l ON o.observatory = l.uid
INNER JOIN archive_semanticTypes ast ON o.obsType = ast.uid
WHERE o.obsTime BETWEEN %s AND %s AND l.publicId=%s
ORDER BY obsTime ASC;
""", (utc_min, utc_max, obstory))
    results = conn.fetchall()

    # Show each observation in turn
    file_list = []
    for counter, obs in enumerate(results):
        # Only show every nth hit
        if counter % stride != 0:
            continue

        # Fetch list of files in this observation
        conn.execute(
            """
SELECT ast.name AS semanticType, repositoryFname, am.floatValue AS skyClarity
FROM archive_files f
INNER JOIN archive_semanticTypes ast ON f.semanticType = ast.uid
LEFT OUTER JOIN archive_metadata am ON f.uid = am.fileId AND
    am.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:skyClarity")
WHERE f.observationId=%s;
""", (obs['uid'], ))

        files = conn.fetchall()

        # Make dictionary of files by semantic type
        files_by_type = {'observation': obs}
        for file in files:
            files_by_type[file['semanticType']] = file

        # Check that we have all the requested types of file
        if not all(img_type in files_by_type for img_type in img_types):
            continue

        file_list.append(files_by_type)

    # Close database handles
    conn.close()
    db0.close()
    return file_list
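
A usage sketch for fetch_images (hypothetical observatory ID), keeping every tenth observation from the last day that has a background-subtracted time-lapse frame:

import time

images = fetch_images(utc_min=time.time() - 86400,
                      utc_max=time.time(),
                      obstory="eddington0",
                      img_types=["pigazing:timelapse/backgroundSubtracted"],
                      stride=10)
for entry in images:
    frame = entry["pigazing:timelapse/backgroundSubtracted"]
    print(entry['observation']['obsTime'], frame['repositoryFname'])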
Code example #25
def list_planes(obstory_id, utc_min, utc_max):
    """
    List all the plane identifications for a particular observatory.

    :param obstory_id:
        The ID of the observatory we want to list identifications for.
    :param utc_min:
        The start of the time period in which we should list identifications (unix time).
    :param utc_max:
        The end of the time period in which we should list identifications (unix time).
    :return:
        None
    """

    # Open connection to database
    [db0, conn] = connect_db.connect_db()

    # Start compiling list of plane identifications
    plane_identifications = []

    # Select moving objects with plane identifications
    conn.execute(
        """
SELECT am1.stringValue AS call_sign, am2.floatValue AS ang_offset,
       am3.floatValue AS clock_offset, am4.floatValue AS duration, am5.stringValue AS hex_ident,
       am6.floatValue AS distance,
       am7.stringValue AS operator, am8.stringValue AS model, am9.stringValue AS manufacturer,
       o.obsTime AS time, o.publicId AS obsId
FROM archive_observations o
INNER JOIN archive_metadata am1 ON o.uid = am1.observationId AND
    am1.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:call_sign")
INNER JOIN archive_metadata am2 ON o.uid = am2.observationId AND
    am2.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:angular_offset")
INNER JOIN archive_metadata am3 ON o.uid = am3.observationId AND
    am3.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:clock_offset")
INNER JOIN archive_metadata am4 ON o.uid = am4.observationId AND
    am4.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="pigazing:duration")
INNER JOIN archive_metadata am5 ON o.uid = am5.observationId AND
    am5.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:hex_ident")
LEFT JOIN archive_metadata am6 ON o.uid = am6.observationId AND
    am6.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:distance")
LEFT JOIN archive_metadata am7 ON o.uid = am7.observationId AND
    am7.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:operator")
LEFT JOIN archive_metadata am8 ON o.uid = am8.observationId AND
    am8.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:model")
LEFT JOIN archive_metadata am9 ON o.uid = am9.observationId AND
    am9.fieldId=(SELECT uid FROM archive_metadataFields WHERE metaKey="plane:manufacturer")
WHERE
    o.observatory = (SELECT uid FROM archive_observatories WHERE publicId=%s) AND
    o.obsTime BETWEEN %s AND %s;
""", (obstory_id, utc_min, utc_max))
    results = conn.fetchall()

    for item in results:
        plane_identifications.append({
            'id': item['obsId'],
            'time': item['time'],
            'call_sign': item['call_sign'],
            'ang_offset': item['ang_offset'],
            'clock_offset': item['clock_offset'],
            'duration': item['duration'],
            'hex_ident': item['hex_ident'],
            'distance': item['distance'],
            'operator': item['operator'],
            'model': item['model'],
            'manufacturer': item['manufacturer']
        })

    # Sort identifications by time
    plane_identifications.sort(key=itemgetter('time'))

    # Display column headings
    print("""\
{:16s} {:18s} {:18s} {:8s} {:10s} {:10s} {:10s} {:30s} {:30s} {:30s}\
""".format("Time", "Call sign", "Hex ident", "Duration", "Ang offset",
           "Clock off", "Distance", "Operator", "Model", "Manufacturer"))

    # Display list of plane identifications
    for item in plane_identifications:
        # Fields fetched through LEFT JOINs may be missing; substitute placeholders
        if item['distance'] is None:
            item['distance'] = -1
        for key in ('operator', 'model', 'manufacturer'):
            if item[key] is None:
                item[key] = "-"
        print("""\
{:16s} {:18s} {:18s} {:8.1f} {:10.1f} {:10.1f} {:10.1f} {:30s} {:30s} {:30s}\
""".format(date_string(item['time']), item['call_sign'], item['hex_ident'],
           item['duration'], item['ang_offset'], item['clock_offset'],
           item['distance'], item['operator'], item['model'],
           item['manufacturer']))

    # Clean up and exit
    conn.close()
    db0.close()
    return
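
A usage sketch for list_planes over the past week (hypothetical observatory ID):

import time

list_planes(obstory_id="eddington0",
            utc_min=time.time() - 7 * 86400,
            utc_max=time.time())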