Example #1
import RPi.GPIO as GPIO

import installation_info
from mod_log import log_txt


def camera_on():
    log_txt("Turning camera on.")
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(installation_info.local_conf['gpioPinRelay'], GPIO.OUT)
    GPIO.output(installation_info.local_conf['gpioPinRelay'],
                installation_info.local_conf['relayOnGPIOState'])
Example #2
import os
import glob

import mod_daytimejobs
from mod_log import log_txt


def run_job_group(job_group):
    if len(job_group) < 1:
        return

    # Run shell commands associated with this group of jobs
    shell_cmds = []
    for job in job_group:
        try:
            shell_cmd = " ".join((job['cmd'] % job['params']).split())
        except KeyError:
            log_txt("Key Error prevented job from running: %s" % job)
            shell_cmd = "true"
        shell_cmds.append(shell_cmd)
    for cmd in shell_cmds:
        print "Running command: %s" % cmd
    if len(shell_cmds) == 1:
        cmd = shell_cmds[0]
    else:
        cmd = " & ".join(shell_cmds) + " & wait"
    os.system(cmd)

    # Cascade metadata from input files to output files
    for job in job_group:
        m = job['params']  # Dictionary of metadata
        products = glob.glob("%(filename_out)s*%(outExt)s" % m)
        for product in products:
            stub = product[:-len(m['outExt'])]
            # Metadata that was associated with the input file
            metadata = m['metadata']
            metadata.update(mod_daytimejobs.file_to_dict(in_filename="%stxt" % stub))
            mod_daytimejobs.dict_to_file(out_filename="%stxt" % stub, in_dict=metadata)
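
A minimal usage sketch for run_job_group. The job descriptor and filenames below are invented for illustration (the real jobs are built elsewhere in the project), but they show the expected shape: a 'cmd' shell template, a 'params' dictionary substituted into it, and a 'metadata' dictionary cascaded to the command's output files:

example_job = {
    'cmd': "convert %(filename_in)s -resize 50%% %(filename_out)s.%(outExt)s",
    'params': {
        'filename_in': "frame.png",            # hypothetical input image
        'filename_out': "frame_small",         # stub used to glob for output files
        'outExt': "png",                       # extension of the output files
        'metadata': {'obstoryId': "example"}   # metadata to cascade to outputs
    }
}
run_job_group([example_job])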
Example #3
import RPi.GPIO as GPIO

import installation_info
from mod_log import log_txt


def camera_off():
    log_txt("Turning camera off.")
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(installation_info.local_conf['gpioPinRelay'], GPIO.OUT)
    GPIO.output(installation_info.local_conf['gpioPinRelay'],
                not installation_info.local_conf['relayOnGPIOState'])

    # Some relays need 5V, and the 3.3V generated by a Pi isn't enough to switch them off.
    # Setting the pin as an input does the trick...
    GPIO.setup(installation_info.local_conf['gpioPinRelay'], GPIO.IN)
Example #4
import os

import mod_log
import mod_settings
import mod_daytimejobs
from mod_log import log_txt


def day_time_jobs_clean(db):
    log_txt("Running daytimeJobsClean")
    cwd = os.getcwd()
    os.chdir(mod_settings.settings['dataPath'])

    obstory_list = db.get_obstory_names()

    for obstory_name in obstory_list:
        log_txt("Working on observatory <%s>" % obstory_name)

        # Clean up any file products which are newer than the high water mark
        # Work on each task group in turn
        for taskGroup in mod_daytimejobs.dayTimeTasks:
            hwm_output = taskGroup[0]
            log_txt("Cleaning up products of task group <%s>" % hwm_output)
            task_list = taskGroup[2]
            if db.get_high_water_mark(obstory_name=obstory_name,
                                      mark_type=hwm_output) is None:
                db.set_high_water_mark(obstory_name=obstory_name,
                                       mark_type=hwm_output,
                                       time=0)
            high_water = db.get_high_water_mark(obstory_name=obstory_name,
                                                mark_type=hwm_output)
            for task in task_list:
                out_dirs = task[1]

                # Remove any output which is newer than HWM
                for out_dir in out_dirs:
                    for dir_name, subdir_list, file_list in os.walk(out_dir):
                        for f in file_list:
                            utc = mod_log.filename_to_utc(f)
                            if utc < 0:
                                continue
                            if utc > high_water:
                                os.system("rm -f %s" %
                                          os.path.join(dir_name, f))

    os.chdir(cwd)
    log_txt("Finished daytimeJobsClean")
Example #5
def export_data(db, utc_now, utc_must_stop=0):
    log_txt("Starting export of images and events")

    # Work out how long we can do exporting for
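    # (utc_must_stop - utc_now) is the length of the working window the caller gave us;
    # adding it to the current clock reading turns it into an absolute stop time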
    utc_stop = get_utc() + (utc_must_stop - utc_now)

    # Search for items which need exporting
    for export_config in db.get_export_configurations():
        if export_config.enabled:
            db.mark_entities_to_export(export_config)
    db.commit()

    # Create an exporter instance
    exporter = MeteorExporter(db=db)

    # Loop until either we run out of time, or we run out of files to export
    max_failures = 4
    fail_count = 0
    while ((not utc_must_stop) or
           (time.time() < utc_stop)) and (fail_count < max_failures):
        state = exporter.handle_next_export()
        db.commit()
        if not state:
            log_txt("Finished export of images and events")
            break
        print "Export status: %s" % state.state
        if state.state == "failed":
            log_txt("Backing off, because an export failed")
            time.sleep([30, 300, 600, 1200, 2400][fail_count])
            fail_count += 1
        else:
            fail_count = 0

    # Exit
    if fail_count >= max_failures:
        log_txt("Exceeded maximum allowed number of failures: giving up.")
Example #6
import os
import json
import time
import subprocess

import mod_settings
from mod_log import log_txt, get_utc, set_utc_offset


def get_gps_fix():
    log_txt("Waiting for GPS link")

    # Run gpsFix.py, which returns JSON output to stdout
    cmd_ = os.path.join(mod_settings.settings['pythonPath'], "gpsFix.py")
    gps_process = subprocess.Popen(cmd_, shell=True, stdout=subprocess.PIPE)
    gps_fix_json = gps_process.stdout.read()
    try:
        gps_result = json.loads(gps_fix_json)
    except ValueError:
        log_txt("Could not read valid JSON response from gpsFix.py")
        gps_result = False

    # On success, we get a dict with fields "offset", "latitude", "longitude" and "altitude"
    if isinstance(gps_result, dict):
        t_offset = gps_result['offset']
        gps_latitude = gps_result['latitude']
        gps_longitude = gps_result['longitude']
        gps_altitude = gps_result['altitude']
        log_txt("GPS link achieved")
        log_txt(
            "Longitude = %.6f ; Latitude = %.6f ; Altitude = %.6f ; Clock offset: %.2f sec behind."
            % (gps_longitude, gps_latitude, gps_altitude, t_offset))
        set_utc_offset(t_offset)

        # Use the date shell command to update the system clock (requires root access)
        log_txt("Trying to update system clock")
        utc_now = get_utc()
        os.system("date -s @%d" % utc_now)

        # The above may fail if we don't have root access, so as a fallback we recalculate the clock offset
        t_offset = utc_now - time.time()
        set_utc_offset(t_offset)
        log_txt(
            "Revised clock offset after trying to set the system clock: %.2f sec behind."
            % t_offset)

        return {
            'latitude': gps_latitude,
            'longitude': gps_longitude,
            'altitude': gps_altitude
        }

    # Otherwise, we didn't manage to establish a GPS link
    else:
        log_txt("Gave up waiting for a GPS link")
        return None
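
gpsFix.py itself is not included in these excerpts; JSON output of the following hypothetical shape would satisfy the parser above:

{"offset": 0.37, "latitude": 52.205, "longitude": 0.119, "altitude": 22.0}

Anything that fails to parse as JSON, or parses to something other than a dict (e.g. the bare value false), falls through to the "gave up" branch and the function returns None.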
Example #7
import os
import json
import subprocess

import meteorpi_db

from mod_log import log_txt, get_utc, get_utc_offset, set_utc_offset
import mod_settings
import installation_info
import mod_hardwareProps

if mod_settings.settings['i_am_a_rpi']:
    import mod_relay

obstory_id = installation_info.local_conf['observatoryId']

db = meteorpi_db.MeteorDatabase(mod_settings.settings['dbFilestore'])
hw = mod_hardwareProps.HardwareProps(
    os.path.join(mod_settings.settings['pythonPath'], "..",
                 "sensorProperties"))

log_txt("Camera controller launched")

# Make sure we have created the directory structure where observations live
os.system("mkdir -p %s/rawvideo" % mod_settings.settings['dataPath'])


# Spawn a separate process and run <gpsFix.py>. If we have a USB GPS dongle attached, this may tell us the time
# and our location. If it does, return this, otherwise return None
def get_gps_fix():
    log_txt("Waiting for GPS link")

    # Run gpsFix.py, which returns JSON output to stdout
    cmd_ = os.path.join(mod_settings.settings['pythonPath'], "gpsFix.py")
    gps_process = subprocess.Popen(cmd_, shell=True, stdout=subprocess.PIPE)
    gps_fix_json = gps_process.stdout.read()
    try:
        gps_result = json.loads(gps_fix_json)
    except ValueError:
        log_txt("Could not read valid JSON response from gpsFix.py")
        gps_result = False
Example #8
import os
import sys

import meteorpi_db

import mod_astro
import mod_log
import mod_settings
import daytimeJobsClean
import orientationCalc
from mod_log import log_txt, get_utc

pid = os.getpid()
db = meteorpi_db.MeteorDatabase(mod_settings.settings['dbFilestore'])

# The user should supply a clock offset, and the unix time at which we must stop work, on the command line
if len(sys.argv) != 3:
    print "Need to call daytimeJobs.py with clock offset, and an end time to tell it when it needs to quit by."
    sys.exit(1)
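
# Example invocation (values are hypothetical): python daytimeJobs.py 0.5 1478000000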

utc_offset = float(sys.argv[1])
quit_time = float(sys.argv[2])
mod_log.set_utc_offset(utc_offset)

log_txt("Running daytimeJobs. Need to quit at %s." %
        mod_astro.time_print(quit_time))

# Clean up any output files which are ahead of high water marks
log_txt("Cleaning up any output files which are ahead of high water marks")
daytimeJobsClean.day_time_jobs_clean(db)

# Change into the directory where data files are kept
cwd = os.getcwd()
os.chdir(mod_settings.settings['dataPath'])


# Run a list of shell commands in parallel
# Pass a list of job descriptor dictionaries, each having a cmd template, and a dictionary of params to substitute
def run_job_group(job_group):
    if len(job_group) < 1:
        return
Example #9
def orientation_calc(obstory_id, utc_to_study, utc_now, utc_must_stop=0):
    log_prefix = "[%12s %s]" % (obstory_id, mod_astro.time_print(utc_to_study))

    log_txt("%s Starting calculation of camera alignment" % log_prefix)

    # Mathematical constants
    deg = math.pi / 180
    rad = 180 / math.pi

    # This is an estimate of the *maximum* angular width we expect images to have.
    # It should be within a factor of two of correct!
    estimated_image_scale = installation_info.local_conf['estimated_image_scale']

    # When passing images to astrometry.net, only work on the central portion, as this will have least bad distortion
    fraction_x = 0.4
    fraction_y = 0.4

    # Path to the binary barrel-correction tool
    barrel_correct = os.path.join(mod_settings.settings['stackerPath'], "barrel")

    # Calculate time span to use images from
    utc_min = utc_to_study
    utc_max = utc_to_study + 3600 * 24
    db = meteorpi_db.MeteorDatabase(mod_settings.settings['dbFilestore'])

    # Fetch observatory status
    obstory_info = db.get_obstory_from_id(obstory_id)
    obstory_status = None
    if obstory_info and ('name' in obstory_info):
        obstory_status = db.get_obstory_status(obstory_name=obstory_info['name'], time=utc_now)
    if not obstory_status:
        log_txt("%s Aborting -- no observatory status available." % log_prefix)
        db.close_db()
        return
    obstory_name = obstory_info['name']

    # Search for background-subtracted time lapse photography within this range
    search = mp.FileRecordSearch(obstory_ids=[obstory_id], semantic_type="meteorpi:timelapse/frame/bgrdSub",
                                 time_min=utc_min, time_max=utc_max, limit=1000000)
    files = db.search_files(search)
    files = files['files']

    # Keep only files where the sky clarity is good and the Sun is well below the horizon
    acceptable_files = []
    for f in files:
        if db.get_file_metadata(f.id, 'meteorpi:skyClarity') < 27:
            continue
        if db.get_file_metadata(f.id, 'meteorpi:sunAlt') > -4:
            continue
        acceptable_files.append(f)

    log_msg = ("%s %d still images in search period. %d meet sky quality requirements." %
               (log_prefix, len(files), len(acceptable_files)))

    # If we don't have enough images, we can't proceed to get a secure orientation fit
    if len(acceptable_files) < 6:
        log_txt("%s Not enough suitable images." % log_msg)
        db.close_db()
        return
    log_txt(log_msg)

    # We can't afford to run astrometry.net on too many images, so pick the 20 best ones
    acceptable_files.sort(key=lambda f: db.get_file_metadata(f.id, 'meteorpi:skyClarity'))
    acceptable_files.reverse()
    acceptable_files = acceptable_files[0:20]

    # Make a temporary directory to store files in.
    # This is necessary as astrometry.net spams the cwd with lots of temporary junk
    cwd = os.getcwd()
    pid = os.getpid()
    tmp = "/tmp/dcf21_orientationCalc_%d" % pid
    # log_txt("Created temporary directory <%s>" % tmp)
    os.system("mkdir %s" % tmp)
    os.chdir(tmp)

    # Loop over selected images and use astrometry.net to find their orientation
    fits = []
    fit_list = []
    alt_az_list = []
    count = 0
    for f in acceptable_files:
        img_name = f.file_name
        fit_obj = {'f': f, 'i': count, 'fit': False}
        fits.append(fit_obj)
        filename = db.file_path_for_id(f.id)

        if not os.path.exists(filename):
            log_txt("%s Error! File <%s> is missing!" % (log_prefix, filename))
            continue

        # 1. Copy image into working directory
        os.system("cp %s %s_tmp.png" % (filename, img_name))

        # 2. Barrel-correct image
        os.system("%s %s_tmp.png %.6f %.6f %.6f %s_tmp2.png" % (barrel_correct, img_name,
                                                                obstory_status['lens_barrel_a'],
                                                                obstory_status['lens_barrel_b'],
                                                                obstory_status['lens_barrel_c'],
                                                                img_name))

        # 3. Pass only central portion of image to astrometry.net. It's not very reliable with wide-field images
        d = image_dimensions("%s_tmp2.png" % img_name)
        os.system(
                "convert %s_tmp2.png -colorspace sRGB -define png:format=png24 -crop %dx%d+%d+%d +repage %s_tmp3.png"
                % (img_name,
                   fraction_x * d[0], fraction_y * d[1],
                   (1 - fraction_x) * d[0] / 2, (1 - fraction_y) * d[1] / 2,
                   img_name))

        fit_obj['fname_processed'] = '%s_tmp3.png' % img_name
        fit_obj['fname_original'] = '%s_tmp.png' % img_name
        fit_obj['dims'] = d  # Dimensions of *original* image

        count += 1

    # Now pass processed image to astrometry.net for alignment
    for fit in fits:
        f = fit['f']

        # Check that we've not run out of time
        if utc_must_stop and (mod_log.get_utc() > utc_must_stop):
            log_txt("%s We have run out of time! Aborting." % log_prefix)
            continue

        log_msg = ("Processed image <%s> from time <%s> -- skyClarity=%.1f. " %
                   (f.id, mod_astro.time_print(f.file_time),
                    db.get_file_metadata(f.id, 'meteorpi:skyClarity')))

        # How long should we allow astrometry.net to run for?
        if mod_settings.settings['i_am_a_rpi']:
            timeout = "6m"
        else:
            timeout = "50s"

        # Run astrometry.net. Insert --no-plots on the command line to speed things up.
        astrometry_start_time = mod_log.get_utc()
        estimated_width = 2 * math.atan(math.tan(estimated_image_scale / 2 * deg) * fraction_x) * rad
        os.system("timeout %s /usr/local/astrometry/bin/solve-field --no-plots --crpix-center --scale-low %.1f "
                  "--scale-high %.1f --odds-to-tune-up 1e4 --odds-to-solve 1e7 --overwrite %s > txt"
                  % (timeout,
                     estimated_width * 0.6,
                     estimated_width * 1.2,
                     fit['fname_processed']))
        astrometry_time_taken = mod_log.get_utc() - astrometry_start_time
        log_msg += ("Astrometry.net took %d sec. " % astrometry_time_taken)

        # Parse the output from astrometry.net
        fit_text = open("txt").read()
        # log_txt(fit_text)
        test = re.search(r"\(RA H:M:S, Dec D:M:S\) = \(([\d-]*):(\d\d):([\d.]*), [+]?([\d-]*):(\d\d):([\d\.]*)\)",
                         fit_text)
        if not test:
            log_txt("%s FAIL(POS): %s" % (log_prefix, log_msg))
            continue

        ra_sign = sgn(float(test.group(1)))
        ra = abs(float(test.group(1))) + float(test.group(2)) / 60 + float(test.group(3)) / 3600
        if ra_sign < 0:
            ra *= -1
        dec_sign = sgn(float(test.group(4)))
        dec = abs(float(test.group(4))) + float(test.group(5)) / 60 + float(test.group(6)) / 3600
        if dec_sign < 0:
            dec *= -1
        test = re.search(r"up is [+]?([-\d\.]*) degrees (.) of N", fit_text)
        if not test:
            log_txt("%s FAIL(PA ): %s" % (log_prefix, log_msg))
            continue

        # celestial_pa is the position angle of the upward vector in the centre of the image, counterclockwise
        #  from celestial north.
        # * It is zero if the pole star is vertical above the centre of the image.
        # * If the pole star is in the top-right of an image, expect it to be around -45 degrees.
        celestial_pa = float(test.group(1))
        # * This 180 degree rotation appears because when astrometry.net says "up" it means the bottom of the image!
        celestial_pa += 180
        if test.group(2) == "W":
            celestial_pa *= -1
        while celestial_pa > 180:
            celestial_pa -= 360
        while celestial_pa < -180:
            celestial_pa += 360
        test = re.search(r"Field size: ([\d\.]*) x ([\d\.]*) deg", fit_text)
        if not test:
            log_txt("%s FAIL(SIZ): %s" % (log_prefix, log_msg))
            continue

        # Expand reported size of image to whole image, not just the central tile we sent to astrometry.net
        scale_x = 2 * math.atan(math.tan(float(test.group(1)) / 2 * deg) * (1 / fraction_x)) * rad
        scale_y = 2 * math.atan(math.tan(float(test.group(2)) / 2 * deg) * (1 / fraction_y)) * rad

        # Work out alt-az of reported (RA,Dec) using known location of camera. Fits returned in degrees.
        alt_az = mod_astro.alt_az(ra, dec, fit['f'].file_time,
                                  obstory_status['latitude'], obstory_status['longitude'])

        # Get celestial coordinates of the local zenith
        ra_dec_zenith = mod_astro.get_zenith_position(obstory_status['latitude'],
                                                      obstory_status['longitude'],
                                                      fit['f'].file_time)
        ra_zenith = ra_dec_zenith['ra']
        dec_zenith = ra_dec_zenith['dec']

        # Work out the position angle of the zenith, counterclockwise from north, as measured at centre of frame
        zenith_pa = mod_gnomonic.position_angle(ra, dec, ra_zenith, dec_zenith)

        # Calculate the position angle of the zenith, clockwise from vertical, at the centre of the frame
        # If the camera is roughly upright, this ought to be close to zero!
        camera_tilt = zenith_pa - celestial_pa
        while camera_tilt < -180:
            camera_tilt += 360
        while camera_tilt > 180:
            camera_tilt -= 360

        log_txt("%s PASS     : %s" % (log_prefix, log_msg))
        log_txt("%s FIT      : RA: %7.2fh. Dec %7.2f deg. PA %6.1f deg. ScaleX %6.1f. ScaleY %6.1f. "
                "Zenith at (%.2f h,%.2f deg). PA Zenith %.2f deg. "
                "Alt: %7.2f deg. Az: %7.2f deg. Tilt: %7.2f deg." %
                (log_prefix, ra, dec, celestial_pa, scale_x, scale_y, ra_zenith, dec_zenith, zenith_pa,
                 alt_az[0], alt_az[1], camera_tilt))

        # Store information about fit
        fit.update({'fit': True, 'ra': ra, 'dec': dec, 'pa': celestial_pa, 'sx': scale_x, 'sy': scale_y,
                    'camera_tilt': camera_tilt})
        fit_list.append(fit)
        alt_az_list.append(alt_az)

    # Average the resulting fits
    if len(fit_list) < 4:
        log_txt("%s ABORT    : astrometry.net only managed to fit %2d images." % (log_prefix, len(fit_list)))
        db.close_db()
        os.chdir(cwd)
        os.system("rm -Rf %s" % tmp)
        return

    pa_list = [i['camera_tilt'] * deg for i in fits if i['fit']]
    pa_best = mod_astro.mean_angle(pa_list)[0]
    scale_x_list = [i['sx'] * deg for i in fits if i['fit']]
    scale_x_best = mod_astro.mean_angle(scale_x_list)[0]
    scale_y_list = [i['sy'] * deg for i in fits if i['fit']]
    scale_y_best = mod_astro.mean_angle(scale_y_list)[0]

    # Convert alt-az fits into radians
    alt_az_list_r = [[i * deg for i in j] for j in alt_az_list]
    [alt_az_best, alt_az_error] = mod_astro.mean_angle_2d(alt_az_list_r)

    # Print fit information
    success = (alt_az_error * rad < 0.6)
    if success:
        adjective = "SUCCESSFUL"
    else:
        adjective = "REJECTED"
    log_txt("%s %s ORIENTATION FIT (from %2d images). "
            "Alt: %.2f deg. Az: %.2f deg. PA: %.2f deg. ScaleX: %.2f deg. ScaleY: %.2f deg. "
            "Uncertainty: %.2f deg." % (log_prefix, adjective, len(fit_list),
                                        alt_az_best[0] * rad,
                                        alt_az_best[1] * rad,
                                        pa_best * rad,
                                        scale_x_best * rad,
                                        scale_y_best * rad,
                                        alt_az_error * rad))

    # Update observatory status
    if success:
        user = mod_settings.settings['meteorpiUser']
        utc = utc_to_study
        db.register_obstory_metadata(obstory_name, "orientation_altitude", alt_az_best[0] * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_azimuth", alt_az_best[1] * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_error", alt_az_error * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_pa", pa_best * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_width_x_field", scale_x_best * rad, utc, user)
        db.register_obstory_metadata(obstory_name, "orientation_width_y_field", scale_y_best * rad, utc, user)
    db.commit()
    db.close_db()

    # Clean up and exit
    os.chdir(cwd)
    os.system("rm -Rf %s" % tmp)
    return
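
A quick numeric check of the crop-and-rescale geometry used above. Only the central fraction_x of the frame is passed to astrometry.net, and the solved field size is widened back to the full frame through the gnomonic (tangent-plane) relation; the 60-degree frame width below is a hypothetical value:

import math

deg = math.pi / 180
rad = 180 / math.pi
fraction_x = 0.4

full_width = 60.0  # hypothetical full-frame angular width, degrees
# Angular width of the central 40% crop under a gnomonic projection
crop_width = 2 * math.atan(math.tan(full_width / 2 * deg) * fraction_x) * rad
# Inverting the relation, as the code above does, recovers the full width
recovered = 2 * math.atan(math.tan(crop_width / 2 * deg) / fraction_x) * rad
print("crop = %.1f deg, recovered full width = %.1f deg" % (crop_width, recovered))
# -> crop = 26.0 deg, recovered full width = 60.0 deg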
Example #10
def database_import(db):
    # Change into the directory where data files are kept
    cwd = os.getcwd()
    os.chdir(mod_settings.settings['dataPath'])

    # Lists of high water marks, showing where we've previously got up to in importing observations
    hwm_old = {}  # hwm_old[obstory_id] = old "import" high water mark
    hwm_new = {}  # hwm_new[obstory_id] = new "import" high water mark

    # A list of the trigger observation IDs we've created
    trigger_obs_list = {}  # trigger_obs_list[obstory_id][utc] = observation_id

    # A list of the still image observation IDs we've created
    still_img_obs_list = {}

    # Loop over all of the video files and images we've created locally. For each one, we create a new observation
    # object if there are no other files from the same observatory with the same time stamp.

    # We ignore trigger images if there's no video file with the same time stamp.
    for [glob_pattern, observation_list, mime_type, obs_type, create_new_observations] in [
        ["triggers_vid_processed/*/*.mp4", trigger_obs_list, "video/mp4", "movingObject", True],
        ["timelapse_img_processed/*/*.png", still_img_obs_list, "image/png", "timelapse", True],
        ["triggers_img_processed/*/*.png", trigger_obs_list, "image/png", "", False]
    ]:

        # Create a list of all the files which match this particular wildcard
        file_list = glob.glob(glob_pattern)
        file_list.sort()
        log_txt(
            "Registering files which match the wildcard <%s> -- %d files." %
            (glob_pattern, len(file_list)))

        # Loop over all the files
        for file_name in file_list:
            file_stub = file_name[:-4]
            utc = mod_log.filename_to_utc(file_name) + 0.01

            # Local images and videos all have metadata in a file with a .txt extension
            meta_file = "%s.txt" % file_stub  # File containing metadata
            meta_dict = mod_daytimejobs.file_to_dict(meta_file)  # Dictionary of image metadata
            assert "obstoryId" in meta_dict, "File <%s> does not have an obstoryId set." % file_name

            # Get the ID and name of the observatory that is responsible for this file
            obstory_id = meta_dict["obstoryId"]
            obstory_name = get_obstory_name_from_id(db=db,
                                                    obstory_id=obstory_id)
            if obstory_id not in hwm_old:
                hwm_old[obstory_id] = db.get_high_water_mark(
                    mark_type="import", obstory_name=obstory_name)
                if hwm_old[obstory_id] is None:
                    hwm_old[obstory_id] = 0
                hwm_new[obstory_id] = hwm_old[obstory_id]

            # If this file is older than the pre-existing high water mark for files we've imported, ignore it
            # We've probably already imported it before
            if utc < hwm_old[obstory_id]:
                continue

            print "Registering file <%s>, with obstoryId <%s>." % (file_name,
                                                                   obstory_id)

            # See if we already have an observation with this time stamp. If not, create one
            created_new_observation = False
            if not ((obstory_id in observation_list) and
                    (utc in observation_list[obstory_id])):
                if not create_new_observations:
                    continue
                obs_obj = db.register_observation(obstory_name=obstory_name,
                                                  obs_time=utc,
                                                  obs_type=obs_type,
                                                  user_id=user,
                                                  obs_meta=[])
                obs_id = obs_obj.id
                dict_tree_append(observation_list, [obstory_id, utc], obs_id)
                created_new_observation = True
                print "Created new observation with ID <%s>." % obs_id
            else:
                obs_id = observation_list[obstory_id][utc]

            # Compile a list of metadata objects to associate with this file
            metadata_objs = metadata_to_object_list(db, utc, obs_id, meta_dict)

            # If we've newly created an observation object for this file, we transfer the file's metadata
            # to the observation as well
            if created_new_observation:
                for metadata_obj in metadata_objs:
                    db.set_observation_metadata(user, obs_id, metadata_obj)

            # Import the file itself into the database
            semantic_type = local_filename_to_semantic_type(file_name)
            db.register_file(file_path=file_name,
                             user_id=user,
                             mime_type=mime_type,
                             semantic_type=semantic_type,
                             file_time=utc,
                             file_meta=metadata_objs,
                             observation_id=obs_id)

            # Update this observatory's "import" high water mark to the time of the file just imported
            hwm_new[obstory_id] = max(hwm_new[obstory_id], utc)

    os.chdir(cwd)

    # Now do some housekeeping tasks on the local database

    # Create a status log file for this observatory (so the health of this system can be checked remotely)

    # Use a file in /tmp to record the latest time we created a log file. It contains a unix time.
    last_update_filename = "/tmp/obstoryStatus_last"
    last_update_time = 0
    try:
        last_update_time = float(open(last_update_filename, "r").read())
    except (IOError, OSError, ValueError):
        pass

    # Only create a new log file if we haven't created one within the past 12 hours
    if mod_log.get_utc() - last_update_time > 12 * 3600:
        # Give the log file a human-readable filename
        log_file_name = "/tmp/obstoryStatus_" + time.strftime(
            "%Y%m%d", time.gmtime(get_utc())) + ".log"
        os.system("./observatoryStatusLog.sh > %s" % log_file_name)

        # Create an observation object to associate with this log file
        logfile_obs = db.register_observation(
            obstory_name=installation_info.local_conf['observatoryName'],
            user_id=user,
            obs_time=mod_log.get_utc(),
            obs_type="logging",
            obs_meta=[])

        # Register the log file in the database and associate it with the observation above
        db.register_file(file_path=log_file_name,
                         user_id=user,
                         mime_type="text/plain",
                         semantic_type="logfile",
                         file_time=get_utc(),
                         file_meta=[],
                         observation_id=logfile_obs.id)

        # Update the local record of when we last created a log file observation
        open(last_update_filename, "w").write("%s" % mod_log.get_utc())

    # Remove data from the local database once it is older than the local data lifetime (in days)
    db.clear_database(
        obstory_names=[installation_info.local_conf['observatoryName']],
        tmin=0,
        tmax=get_utc() - 24 * 3600 * installation_info.local_conf['dataLocalLifetime'])

    # Update the "import" high water marks for each obstory_name
    for obstory_id in hwm_new.keys():
        obstory_name = get_obstory_name_from_id(db=db, obstory_id=obstory_id)
        db.set_high_water_mark(obstory_name=obstory_name,
                               mark_type="import",
                               time=hwm_new[obstory_id])

    # Commit our changes to the database
    db.commit()
    os.chdir(cwd)
    return
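
dict_tree_append is a project helper not included in these excerpts. A minimal sketch consistent with how it is called above (observation_list[obstory_id][utc] = obs_id, creating the nested dictionaries on demand) would be:

def dict_tree_append(tree, key_path, value):
    # Walk (and create, where missing) nested dictionaries for all but the last key...
    for key in key_path[:-1]:
        if key not in tree:
            tree[key] = {}
        tree = tree[key]
    # ...then store the value under the final key
    tree[key_path[-1]] = value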