Example #1
def reprocess_all_data(obstory_id):
    db = meteorpi_db.MeteorDatabase(mod_settings.settings['dbFilestore'])
    db.con.execute("SELECT m.time FROM archive_metadata m "
                   "INNER JOIN archive_observatories l ON m.observatory = l.uid "
                   "AND l.publicId = %s AND m.time>0 "
                   "ORDER BY m.time ASC LIMIT 1",
                   (obstory_id,))
    first_seen = 0
    results = db.con.fetchall()
    if results:
        first_seen = results[0]['time']
    db.con.execute("SELECT m.time FROM archive_metadata m "
                   "INNER JOIN archive_observatories l ON m.observatory = l.uid "
                   "AND l.publicId = %s AND m.time>0 "
                   "ORDER BY m.time DESC LIMIT 1",
                   (obstory_id,))
    last_seen = 0
    results = db.con.fetchall()
    if results:
        last_seen = results[0]['time']
    day = 86400
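    # Sample 12:00 UTC on each day between the first and last observations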
    utc = math.floor(first_seen / day) * day + day / 2
    while utc < last_seen:
        orientation_calc(obstory_id=obstory_id,
                         utc_to_study=utc,
                         utc_now=mod_log.get_utc(),
                         utc_must_stop=0)
        utc += day
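
The day-stepping idiom above (round down to a day boundary, then offset by half a day) can be isolated into a small generator. A minimal sketch; the helper name iterate_middays is hypothetical and not part of the codebase:

import math

DAY = 86400  # seconds per day

def iterate_middays(first_seen, last_seen):
    """Yield the unix time of 12:00 UTC on each day between two timestamps."""
    utc = math.floor(first_seen / DAY) * DAY + DAY / 2
    while utc < last_seen:
        yield utc
        utc += DAY

# Example: enumerate the middays spanned by a two-and-a-half-day interval
print(list(iterate_middays(1000000000, 1000000000 + 2.5 * DAY)))
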
Example #2
def get_gps_fix():
    log_txt("Waiting for GPS link")

    # Run gpsFix.py, which returns JSON output to stdout
    cmd_ = os.path.join(mod_settings.settings['pythonPath'], "gpsFix.py")
    gps_process = subprocess.Popen(cmd_, shell=True, stdout=subprocess.PIPE)
    gps_fix_json = gps_process.stdout.read()
    try:
        gps_result = json.loads(gps_fix_json)
    except ValueError:
        log_txt("Could not read valid JSON response from gpsFix.py")
        gps_result = False

    # On success, we get a structure with fields "offset", "latitude", "longitude" and "altitude"
    if isinstance(gps_result, dict):
        t_offset = gps_result['offset']
        gps_latitude = gps_result['latitude']
        gps_longitude = gps_result['longitude']
        gps_altitude = gps_result['altitude']
        log_txt("GPS link achieved")
        log_txt(
            "Longitude = %.6f ; Latitude = %.6f ; Altitude = %.6f ; Clock offset: %.2f sec behind."
            % (gps_longitude, gps_latitude, gps_altitude, t_offset))
        set_utc_offset(t_offset)

        # Use the date shell command to update the system clock (requires root access)
        log_txt("Trying to update system clock")
        utc_now = get_utc()
        os.system("date -s @%d" % utc_now)

        # Because the above may fail if we don't have root access, as a fallback we recalculate the clock offset
        t_offset = utc_now - time.time()
        set_utc_offset(t_offset)
        log_txt(
            "Revised clock offset after trying to set the system clock: %.2f sec behind."
            % t_offset)

        return {
            'latitude': gps_latitude,
            'longitude': gps_longitude,
            'altitude': gps_altitude
        }

    # If false, we didn't manage to establish a GPS link
    else:
        log_txt("Gave up waiting for a GPS link")
        return None
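
The pattern above (spawn a helper script, read one JSON document from its stdout, and treat anything that is not a JSON object as failure) can be wrapped into a reusable function. A minimal sketch, assuming only that the helper prints either a JSON object or the literal false; the name read_json_helper is hypothetical:

import json
import subprocess

def read_json_helper(cmd):
    """Run cmd in a shell and parse its stdout as JSON.

    Returns the parsed dict, or None if the helper printed anything
    that is not a JSON object (e.g. the literal false, or junk).
    """
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output = process.stdout.read()
    try:
        result = json.loads(output)
    except ValueError:
        return None
    return result if isinstance(result, dict) else None
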
Example #3
def export_data(db, utc_now, utc_must_stop=0):
    log_txt("Starting export of images and events")

    # Work out how long we can do exporting for
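    # (utc_must_stop - utc_now) is the remaining time budget in the caller's clock frame;
    # adding it to our own get_utc() translates the deadline into the local clock frame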
    utc_stop = get_utc() + (utc_must_stop - utc_now)

    # Search for items which need exporting
    for export_config in db.get_export_configurations():
        if export_config.enabled:
            db.mark_entities_to_export(export_config)
    db.commit()

    # Create an exporter instance
    exporter = MeteorExporter(db=db)

    # Loop until either we run out of time, or we run out of files to export
    max_failures = 4
    fail_count = 0
    while ((not utc_must_stop) or
           (time.time() < utc_stop)) and (fail_count < max_failures):
        state = exporter.handle_next_export()
        db.commit()
        if not state:
            log_txt("Finished export of images and events")
            break
        print "Export status: %s" % state.state
        if state.state == "failed":
            log_txt("Backing off, because an export failed")
            time.sleep([30, 300, 600, 1200, 2400][fail_count])
            fail_count += 1
        else:
            fail_count = 0

    # Exit
    if fail_count >= max_failures:
        log_txt("Exceeded maximum allowed number of failures: giving up.")
Example #4

# Fetch observatory status, e.g. location
time_now = get_utc()
log_txt("Fetching observatory status")
latitude = installation_info.local_conf['latitude']
longitude = installation_info.local_conf['longitude']
altitude = 0
latest_position_update = 0
flag_gps = 0
obstory_status = None

# If this observatory doesn't exist in the database, create it now with information from installation_info
if not db.has_obstory_id(obstory_id):
    log_txt("Observatory '%s' is not set up. Using default settings." %
            obstory_id)
    db.register_obstory(
        obstory_id=installation_info.local_conf['observatoryId'],
        obstory_name=installation_info.local_conf['observatoryName'],
Example #5
            hwm_new[obstory_id][hwm_output] = hwm_old[obstory_id][hwm_output]

        # Some tasks produce output files with different timestamps to the input file. Specifically, non-live
        # observing produces output over the entire length of a video. hwm_margin is the maximum size of this span
        hwm_margin = (mod_settings.settings['videoMaxRecordTime'] - 5) if hwm_output == "rawvideo" else 0.1
        job_list = []

        # Loop over each of the input file search patterns that this task group is to be applied to
        for task in task_list:
            [in_dir, out_dirs, in_ext, out_ext, cmd] = task

            # Loop recursively over all files in the input directory
            for dir_name, subdir_list, file_list in os.walk(in_dir):
                for f in file_list:
                    if quit_time and (get_utc() > quit_time):
                        raise TimeOut
                    input_file = os.path.join(dir_name, f)

                    # Input files must have correct extension and non-zero size to be processed
                    if f.endswith(".%s" % in_ext) and os.path.getsize(input_file) > 0:

                        # Extract observation time from image filename. If this fails, reject the file.
                        utc = mod_log.filename_to_utc(f)
                        if utc < 0:
                            continue

                        # Add this job to our list of things to do
                        job_counter += 1
                        mask_file = "/tmp/triggermask_%d_%d.txt" % (
Example #6
    cwd = os.getcwd()
    os.chdir(tmp_dir)

    # Run the C program camfit, which attempts to stack together all of the images used by orientationCalc using
    # different barrel correction coefficients. If there are no lens distortions, the images should overlay each
    # other perfectly when they are stacked together. Otherwise, they won't overlay each other, because the
    # gnomonic transformations won't have properly de-rotated the sky. Iteratively try different barrel corrections
    # until we find a set which works well.
    os.system("%s/bin/camfit %s > camFitOutput" % (mod_settings.settings['stackerPath'], image_list_filename))

    # The last line of output from camfit will contain the barrel distortion correction parameters a, b, c
    # separated by spaces
    camfit_output = open("camFitOutput").readlines()
    camfit_last_line = camfit_output[-1]

    print "Best fitting barrel distortion parameters were:\n%s\n\n" % camfit_last_line

    # Change back into the working directory
    os.chdir(cwd)


# If we're called as a script, run the function lens_fit()
if __name__ == "__main__":
    _obstory_name = installation_info.local_conf['observatoryName']
    _utc_now = mod_log.get_utc()
    if len(sys.argv) > 1:
        _obstory_name = sys.argv[1]
    if len(sys.argv) > 2:
        _utc_now = float(sys.argv[2])
    lens_fit(_obstory_name, _utc_now)
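
The snippet reads camfit's final line of output, but the parsing happens elsewhere. A sketch of extracting the coefficients, assuming (as the comment above states) that the last line holds the three parameters a, b, c separated by spaces; parse_barrel_coefficients is a hypothetical name:

def parse_barrel_coefficients(last_line):
    """Split a line like '0.01 -0.002 0.0003' into the floats (a, b, c)."""
    a, b, c = [float(x) for x in last_line.split()[:3]]
    return a, b, c
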
Example #7
def orientation_calc(obstory_id, utc_to_study, utc_now, utc_must_stop=0):
    log_prefix = "[%12s %s]" % (obstory_id, mod_astro.time_print(utc_to_study))

    log_txt("%s Starting calculation of camera alignment" % log_prefix)

    # Mathematical constants
    deg = math.pi / 180
    rad = 180 / math.pi

    # This is an estimate of the *maximum* angular width we expect images to have.
    # It should be within a factor of two of correct!
    estimated_image_scale = installation_info.local_conf['estimated_image_scale']

    # When passing images to astrometry.net, only work on the central portion, as this is least affected by distortion
    fraction_x = 0.4
    fraction_y = 0.4

    # Path to the binary barrel-correction tool
    barrel_correct = os.path.join(mod_settings.settings['stackerPath'], "barrel")

    # Calculate time span to use images from
    utc_min = utc_to_study
    utc_max = utc_to_study + 3600 * 24
    db = meteorpi_db.MeteorDatabase(mod_settings.settings['dbFilestore'])

    # Fetch observatory status
    obstory_info = db.get_obstory_from_id(obstory_id)
    obstory_status = None
    if obstory_info and ('name' in obstory_info):
        obstory_status = db.get_obstory_status(obstory_name=obstory_info['name'], time=utc_now)
    if not obstory_status:
        log_txt("%s Aborting -- no observatory status available." % log_prefix)
        db.close_db()
        return
    obstory_name = obstory_info['name']

    # Search for background-subtracted time lapse photography within this range
    search = mp.FileRecordSearch(obstory_ids=[obstory_id], semantic_type="meteorpi:timelapse/frame/bgrdSub",
                                 time_min=utc_min, time_max=utc_max, limit=1000000)
    files = db.search_files(search)
    files = files['files']

    # Keep only files where the sky clarity is good and the Sun is well below the horizon
    acceptable_files = []
    for f in files:
        if db.get_file_metadata(f.id, 'meteorpi:skyClarity') < 27:
            continue
        if db.get_file_metadata(f.id, 'meteorpi:sunAlt') > -4:
            continue
        acceptable_files.append(f)

    log_msg = ("%s %d still images in search period. %d meet sky quality requirements." %
               (log_prefix, len(files), len(acceptable_files)))

    # If we don't have enough images, we can't proceed to get a secure orientation fit
    if len(acceptable_files) < 6:
        log_txt("%s Not enough suitable images." % log_msg)
        db.close_db()
        return
    log_txt(log_msg)

    # We can't afford to run astrometry.net on too many images, so pick the 20 best ones
    acceptable_files.sort(key=lambda f: db.get_file_metadata(f.id, 'meteorpi:skyClarity'))
    acceptable_files.reverse()
    acceptable_files = acceptable_files[0:20]

    # Make a temporary directory to store files in.
    # This is necessary as astrometry.net spams the cwd with lots of temporary junk
    cwd = os.getcwd()
    pid = os.getpid()
    tmp = "/tmp/dcf21_orientationCalc_%d" % pid
    # log_txt("Created temporary directory <%s>" % tmp)
    os.system("mkdir %s" % tmp)
    os.chdir(tmp)

    # Loop over selected images and use astrometry.net to find their orientation
    fits = []
    fit_list = []
    alt_az_list = []
    count = 0
    for f in acceptable_files:
        img_name = f.file_name
        fit_obj = {'f': f, 'i': count, 'fit': False}
        fits.append(fit_obj)
        filename = db.file_path_for_id(f.id)

        if not os.path.exists(filename):
            log_txt("%s Error! File <%s> is missing!" % (log_prefix, filename))
            continue

        # 1. Copy image into working directory
        os.system("cp %s %s_tmp.png" % (filename, img_name))

        # 2. Barrel-correct image
        os.system("%s %s_tmp.png %.6f %.6f %.6f %s_tmp2.png" % (barrel_correct, img_name,
                                                                obstory_status['lens_barrel_a'],
                                                                obstory_status['lens_barrel_b'],
                                                                obstory_status['lens_barrel_c'],
                                                                img_name))

        # 3. Pass only central portion of image to astrometry.net. It's not very reliable with wide-field images
        d = image_dimensions("%s_tmp2.png" % img_name)
        os.system(
                "convert %s_tmp2.png -colorspace sRGB -define png:format=png24 -crop %dx%d+%d+%d +repage %s_tmp3.png"
                % (img_name,
                   fraction_x * d[0], fraction_y * d[1],
                   (1 - fraction_x) * d[0] / 2, (1 - fraction_y) * d[1] / 2,
                   img_name))

        fit_obj['fname_processed'] = '%s_tmp3.png' % img_name
        fit_obj['fname_original'] = '%s_tmp.png' % img_name
        fit_obj['dims'] = d  # Dimensions of *original* image

        count += 1

    # Now pass processed image to astrometry.net for alignment
    for fit in fits:
        f = fit['f']

        # Check that we've not run out of time
        if utc_must_stop and (mod_log.get_utc() > utc_must_stop):
            log_txt("%s We have run out of time! Aborting." % log_prefix)
            continue

        log_msg = ("Processed image <%s> from time <%s> -- skyClarity=%.1f. " %
                   (f.id, mod_astro.time_print(f.file_time),
                    db.get_file_metadata(f.id, 'meteorpi:skyClarity')))

        # How long should we allow astrometry.net to run for?
        if mod_settings.settings['i_am_a_rpi']:
            timeout = "6m"
        else:
            timeout = "50s"

        # Run astrometry.net. Insert --no-plots on the command line to speed things up.
        astrometry_start_time = mod_log.get_utc()
        estimated_width = 2 * math.atan(math.tan(estimated_image_scale / 2 * deg) * fraction_x) * rad
        os.system("timeout %s /usr/local/astrometry/bin/solve-field --no-plots --crpix-center --scale-low %.1f "
                  "--scale-high %.1f --odds-to-tune-up 1e4 --odds-to-solve 1e7 --overwrite %s > txt"
                  % (timeout,
                     estimated_width * 0.6,
                     estimated_width * 1.2,
                     fit['fname_processed']))
        astrometry_time_taken = mod_log.get_utc() - astrometry_start_time
        log_msg += ("Astrometry.net took %d sec. " % astrometry_time_taken)

        # Parse the output from astrometry.net
        fit_text = open("txt").read()
        # log_txt(fit_text)
        test = re.search(r"\(RA H:M:S, Dec D:M:S\) = \(([\d-]*):(\d\d):([\d.]*), [+]?([\d-]*):(\d\d):([\d\.]*)\)",
                         fit_text)
        if not test:
            log_txt("%s FAIL(POS): %s" % (log_prefix, log_msg))
            continue

        ra_sign = sgn(float(test.group(1)))
        ra = abs(float(test.group(1))) + float(test.group(2)) / 60 + float(test.group(3)) / 3600
        if ra_sign < 0:
            ra *= -1
        dec_sign = sgn(float(test.group(4)))
        dec = abs(float(test.group(4))) + float(test.group(5)) / 60 + float(test.group(6)) / 3600
        if dec_sign < 0:
            dec *= -1
        test = re.search(r"up is [+]?([-\d\.]*) degrees (.) of N", fit_text)
        if not test:
            log_txt("%s FAIL(PA ): %s" % (log_prefix, log_msg))
            continue

        # celestial_pa is the position angle of the upward vector in the centre of the image, counterclockwise
        #  from celestial north.
        # * It is zero if the pole star is vertical above the centre of the image.
        # * If the pole star is in the top-right of an image, expect it to be around -45 degrees.
        celestial_pa = float(test.group(1))
        # * This 180 degree rotation appears because when astrometry.net says "up" it means the bottom of the image!
        celestial_pa += 180
        if test.group(2) == "W":
            celestial_pa *= -1
        while celestial_pa > 180:
            celestial_pa -= 360
        while celestial_pa < -180:
            celestial_pa += 360
        test = re.search(r"Field size: ([\d\.]*) x ([\d\.]*) deg", fit_text)
        if not test:
            log_txt("%s FAIL(SIZ): %s" % (log_prefix, log_msg))
            continue

        # Expand reported size of image to whole image, not just the central tile we sent to astrometry.net
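        # The tan/atan round trip is needed because a gnomonic image is linear in tan(angle), not in the angle itself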
        scale_x = 2 * math.atan(math.tan(float(test.group(1)) / 2 * deg) * (1 / fraction_x)) * rad
        scale_y = 2 * math.atan(math.tan(float(test.group(2)) / 2 * deg) * (1 / fraction_y)) * rad

        # Work out alt-az of reported (RA,Dec) using known location of camera. Fits returned in degrees.
        alt_az = mod_astro.alt_az(ra, dec, fit['f'].file_time,
                                  obstory_status['latitude'], obstory_status['longitude'])

        # Get celestial coordinates of the local zenith
        ra_dec_zenith = mod_astro.get_zenith_position(obstory_status['latitude'],
                                                      obstory_status['longitude'],
                                                      fit['f'].file_time)
        ra_zenith = ra_dec_zenith['ra']
        dec_zenith = ra_dec_zenith['dec']

        # Work out the position angle of the zenith, counterclockwise from north, as measured at centre of frame
        zenith_pa = mod_gnomonic.position_angle(ra, dec, ra_zenith, dec_zenith)

        # Calculate the position angle of the zenith, clockwise from vertical, at the centre of the frame
        # If the camera is roughly upright, this ought to be close to zero!
        camera_tilt = zenith_pa - celestial_pa
        while camera_tilt < -180:
            camera_tilt += 360
        while camera_tilt > 180:
            camera_tilt -= 360

        log_txt("%s PASS     : %s" % (log_prefix, log_msg))
        log_txt("%s FIT      : RA: %7.2fh. Dec %7.2f deg. PA %6.1f deg. ScaleX %6.1f. ScaleY %6.1f. "
                "Zenith at (%.2f h,%.2f deg). PA Zenith %.2f deg. "
                "Alt: %7.2f deg. Az: %7.2f deg. Tilt: %7.2f deg." %
                (log_prefix, ra, dec, celestial_pa, scale_x, scale_y, ra_zenith, dec_zenith, zenith_pa,
                 alt_az[0], alt_az[1], camera_tilt))

        # Store information about fit
        fit.update({'fit': True, 'ra': ra, 'dec': dec, 'pa': celestial_pa, 'sx': scale_x, 'sy': scale_y,
                    'camera_tilt': camera_tilt})
        fit_list.append(fit)
        alt_az_list.append(alt_az)

    # Average the resulting fits
    if len(fit_list) < 4:
        log_txt("%s ABORT    : astrometry.net only managed to fit %2d images." % (log_prefix, len(fit_list)))
        db.close_db()
        os.chdir(cwd)
        os.system("rm -Rf %s" % tmp)
        return

    pa_list = [i['camera_tilt'] * deg for i in fits if i['fit']]
    pa_best = mod_astro.mean_angle(pa_list)[0]
    scale_x_list = [i['sx'] * deg for i in fits if i['fit']]
    scale_x_best = mod_astro.mean_angle(scale_x_list)[0]
    scale_y_list = [i['sy'] * deg for i in fits if i['fit']]
    scale_y_best = mod_astro.mean_angle(scale_y_list)[0]

    # Convert alt-az fits into radians
    alt_az_list_r = [[i * deg for i in j] for j in alt_az_list]
    [alt_az_best, alt_az_error] = mod_astro.mean_angle_2d(alt_az_list_r)

    # Print fit information
    success = (alt_az_error * rad < 0.6)
    if success:
        adjective = "SUCCESSFUL"
    else:
        adjective = "REJECTED"
    log_txt("%s %s ORIENTATION FIT (from %2d images). "
            "Alt: %.2f deg. Az: %.2f deg. PA: %.2f deg. ScaleX: %.2f deg. ScaleY: %.2f deg. "
            "Uncertainty: %.2f deg." % (log_prefix, adjective, len(fit_list),
                                        alt_az_best[0] * rad,
                                        alt_az_best[1] * rad,
                                        pa_best * rad,
                                        scale_x_best * rad,
                                        scale_y_best * rad,
                                        alt_az_error * rad))

    # Update observatory status
    if success:
        user = mod_settings.settings['meteorpiUser']
        utc = utc_to_study
        db.register_obstory_metadata(obstory_name, "orientation_altitude", alt_az_best[0] * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_azimuth", alt_az_best[1] * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_error", alt_az_error * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_pa", pa_best * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_width_x_field", scale_x_best * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_width_y_field", scale_y_best * rad, utc, user)
    db.commit()
    db.close_db()

    # Clean up and exit
    os.chdir(cwd)
    os.system("rm -Rf %s" % tmp)
    return
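
Two reusable pieces of circular arithmetic appear above: wrapping angles into the range (-180, 180], and averaging a set of angles. A self-contained sketch of both follows; mod_astro.mean_angle also returns an uncertainty estimate, and its actual implementation may differ, but the standard recipe is to average unit vectors and take atan2:

import math

def wrap_degrees(angle):
    """Wrap an angle into [-180, 180), matching the while-loops above up to the boundary case."""
    return (angle + 180) % 360 - 180

def circular_mean(angles_rad):
    """Circular mean of a list of angles in radians."""
    s = sum(math.sin(a) for a in angles_rad)
    c = sum(math.cos(a) for a in angles_rad)
    return math.atan2(s, c)
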
Example #8
def database_import(db):
    # Change into the directory where data files are kept
    cwd = os.getcwd()
    os.chdir(mod_settings.settings['dataPath'])

    # Lists of high water marks, showing where we've previously got up to in importing observations
    hwm_old = {}  # hwm_old[obstory_id] = old "import" high water mark
    hwm_new = {}  # hwm_new[obstory_id] = new "import" high water mark

    # A list of the trigger observation IDs we've created
    trigger_obs_list = {}  # trigger_obs_list[obstory_id][utc] = observation_id

    # A list of the still image observation IDs we've created
    still_img_obs_list = {}

    # Loop over all of the video files and images we've created locally. For each one, we create a new observation
    # object if there are no other files from the same observatory with the same time stamp.

    # We ignore trigger images if there's no video file with the same time stamp.
    for [glob_pattern, observation_list, mime_type, obs_type, create_new_observations] in [
            ["triggers_vid_processed/*/*.mp4", trigger_obs_list, "video/mp4", "movingObject", True],
            ["timelapse_img_processed/*/*.png", still_img_obs_list, "image/png", "timelapse", True],
            ["triggers_img_processed/*/*.png", trigger_obs_list, "image/png", "", False]]:

        # Create a list of all the files which match this particular wildcard
        file_list = glob.glob(glob_pattern)
        file_list.sort()
        log_txt(
            "Registering files which match the wildcard <%s> -- %d files." %
            (glob_pattern, len(file_list)))

        # Loop over all the files
        for file_name in file_list:
            file_stub = file_name[:-4]
            utc = mod_log.filename_to_utc(file_name) + 0.01

            # Local images and video all have metadata in a file with a .txt extension
            meta_file = "%s.txt" % file_stub  # File containing metadata
            meta_dict = mod_daytimejobs.file_to_dict(meta_file)  # Dictionary of image metadata
            assert "obstoryId" in meta_dict, "File <%s> does not have an obstoryId set." % file_name

            # Get the ID and name of the observatory that is responsible for this file
            obstory_id = meta_dict["obstoryId"]
            obstory_name = get_obstory_name_from_id(db=db, obstory_id=obstory_id)
            if obstory_id not in hwm_old:
                hwm_old[obstory_id] = db.get_high_water_mark(mark_type="import", obstory_name=obstory_name)
                if hwm_old[obstory_id] is None:
                    hwm_old[obstory_id] = 0
                hwm_new[obstory_id] = hwm_old[obstory_id]

            # If this file is older than the pre-existing high water mark for files we've imported, ignore it
            # We've probably already imported it before
            if utc < hwm_old[obstory_id]:
                continue

            print "Registering file <%s>, with obstoryId <%s>." % (file_name,
                                                                   obstory_id)

            # See if we already have an observation with this time stamp. If not, create one
            created_new_observation = False
            if not ((obstory_id in observation_list) and
                    (utc in observation_list[obstory_id])):
                if not create_new_observations:
                    continue
                obs_obj = db.register_observation(obstory_name=obstory_name,
                                                  obs_time=utc,
                                                  obs_type=obs_type,
                                                  user_id=user,
                                                  obs_meta=[])
                obs_id = obs_obj.id
                dict_tree_append(observation_list, [obstory_id, utc], obs_id)
                created_new_observation = True
                print "Created new observation with ID <%s>." % obs_id
            else:
                obs_id = observation_list[obstory_id][utc]

            # Compile a list of metadata objects to associate with this file
            metadata_objs = metadata_to_object_list(db, utc, obs_id, meta_dict)

            # If we've newly created an observation object for this file, we transfer the file's metadata
            # to the observation as well
            if created_new_observation:
                for metadata_obj in metadata_objs:
                    db.set_observation_metadata(user, obs_id, metadata_obj)

            # Import the file itself into the database
            semantic_type = local_filename_to_semantic_type(file_name)
            db.register_file(file_path=file_name,
                             user_id=user,
                             mime_type=mime_type,
                             semantic_type=semantic_type,
                             file_time=utc,
                             file_meta=metadata_objs,
                             observation_id=obs_id)

            # Update this observatory's "import" high water mark to the time of the file just imported
            hwm_new[obstory_id] = max(hwm_new[obstory_id], utc)

    os.chdir(cwd)

    # Now do some housekeeping tasks on the local database

    # Create a status log file for this observatory (so the health of this system can be checked remotely)

    # Use a file in /tmp to record the latest time we created a log file. It contains a unix time.
    last_update_filename = "/tmp/obstoryStatus_last"
    last_update_time = 0
    try:
        last_update_time = float(open(last_update_filename, "r").read())
    except (IOError, OSError, ValueError):
        pass

    # Only create a new log file if we haven't created one within the past 12 hours
    if mod_log.get_utc() - last_update_time > 12 * 3600:
        # Give the log file a human-readable filename
        log_file_name = "/tmp/obstoryStatus_" + time.strftime("%Y%m%d", time.gmtime(get_utc())) + ".log"
        os.system("./observatoryStatusLog.sh > %s" % log_file_name)

        # Create an observation object to associate with this log file
        logfile_obs = db.register_observation(
            obstory_name=installation_info.local_conf['observatoryName'],
            user_id=user,
            obs_time=mod_log.get_utc(),
            obs_type="logging",
            obs_meta=[])

        # Register the log file in the database and associate it with the observation above
        db.register_file(file_path=log_file_name,
                         user_id=user,
                         mime_type="text/plain",
                         semantic_type="logfile",
                         file_time=get_utc(),
                         file_meta=[],
                         observation_id=logfile_obs.id)

        # Update the local record of when we last created a log file observation
        open(last_update_filename, "w").write("%s" % mod_log.get_utc())

    # Remove old data from the local database, if it is older than the local data lifetime
    db.clear_database(
        obstory_names=[installation_info.local_conf['observatoryName']],
        tmin=0,
        tmax=get_utc() - 24 * 3600 * installation_info.local_conf['dataLocalLifetime'])

    # Update the "import" high water marks for each obstory_name
    for obstory_id in hwm_new.keys():
        obstory_name = get_obstory_name_from_id(db=db, obstory_id=obstory_id)
        db.set_high_water_mark(obstory_name=obstory_name,
                               mark_type="import",
                               time=hwm_new[obstory_id])

    # Commit our changes to the database
    db.commit()
    os.chdir(cwd)
    return
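
dict_tree_append is called above but not defined in this snippet. A plausible reconstruction, consistent with the call dict_tree_append(observation_list, [obstory_id, utc], obs_id): walk (creating as needed) nested dicts along the key path, then store the value at the leaf. Treat this as a sketch, not the project's actual helper:

def dict_tree_append(tree, key_path, value):
    """Create nested dicts along key_path and set the final key to value."""
    node = tree
    for key in key_path[:-1]:
        node = node.setdefault(key, {})
    node[key_path[-1]] = value
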