Example #1
# Assumed imports for this snippet; read_config() and log_utilities come from
# the surrounding project code.
from os import path
import subprocess

def backup_database():
    """Function to automatically back up the TOM database"""

    config = read_config()

    log = log_utilities.start_day_log(config, config['log_root_name'])

    filetest = path.isfile(config['db_location'])
    dirtest = path.isdir(config['backup_dir'])
    log.info('Checking the DB file is accessible with status ' +
             repr(filetest))
    log.info('Checking the backup directory is accessible with status ' +
             repr(dirtest))

    log.info('Executing rsync command: ' + config['rsync_command'])
    child = subprocess.Popen(config['rsync_command'].split(' '),
                             shell=False,
                             stderr=subprocess.PIPE)
    while True:
        # readlines() blocks until rsync closes stderr, so this loop
        # effectively drains the stream and waits for the process to exit:
        err = child.stderr.readlines()
        for e in err:
            log.info(e)
        if child.poll() is not None:
            break

    log.info('Completed DB back-up')
    log_utilities.end_day_log(log)
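
# For reference, a minimal sketch of the configuration backup_database()
# assumes.  The key names are taken from the function body above; every
# value shown here is hypothetical.
config = {
    'log_root_name': 'tom_db_backup',
    'db_location': '/path/to/tom.db',       # checked with path.isfile()
    'backup_dir': '/path/to/backups/',      # checked with path.isdir()
    'rsync_command': 'rsync -av /path/to/tom.db /path/to/backups/',
}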
Example #2
def sync_surveys():
    '''Driver function to subscribe to alerts of microlensing events produced
    by the OGLE, MOA and KMTNet surveys.  The function downloads their model
    parameters and data wherever available.
    '''

    # Read script configuration:
    config_file_path = '/home/robouser/.robonet_site/surveys_sync.xml'
    config = config_parser.read_config(config_file_path)
    
    log = log_utilities.start_day_log( config, __name__ )
    log.info( 'Started sync of survey data')

    # Harvest parameters of lenses detected by OGLE
    ogle_data = get_ogle_parameters(config, log)


    # Sync against database

    # Harvest MOA information
    moa_data = get_moa_parameters(config, log)

    # Sync against database

    # Harvest KMTNet information
    # KMTNet are not producing alerts yet
    #get_kmtnet_parameters(config)
    
    log_utilities.end_day_log( log )
Example #3
def sync_artemis():
    '''Driver function to maintain an up-to-date copy of the data on all
    microlensing events from the ARTEMiS server at Univ. of St. Andrews.
    '''
    
    # Read configuration:
    config_file_path = '/home/robouser/.robonet_site/artemis_sync.xml'
    config = config_parser.read_config(config_file_path)
    log = log_utilities.start_day_log( config, __name__ )
    log.info('Started sync with ARTEMiS server')
    
    # Sync the results of ARTEMiS' own model fits for all events:
    sync_artemis_data_db(config,'model',log)

    # Sync the event parameters published by the surveys from the ARTEMiS server:
    sync_artemis_data_db(config,'pubpars',log)

    # Sync the event photometry data from the ARTEMiS server:
    sync_artemis_data_db(config,'data',log)

    # Sync ARTEMiS' internal fileset, to gain access to the anomaly indicators:
    rsync_internal_data(config)

    # Tidy up and finish:
    log_utilities.end_day_log( log )
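
# The three sync_artemis_data_db() calls above differ only in the data type
# requested, so an equivalent loop (same function, same arguments) would be:
for data_type in ['model', 'pubpars', 'data']:
    sync_artemis_data_db(config, data_type, log)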
Example #4
def test_build_cadence_request(simulate=False):
    """Function to verify that a cadence ObsRequest can be built for the
    LCO network"""
    config = {'log_root_name': 'test_lco_interface', 'log_dir': '.'}
    log = log_utilities.start_day_log(config, 'test_lco_interface')

    obs = get_test_obs(simulate=simulate)

    ur = obs.build_cadence_request(log=log, debug=True)

    assert isinstance(ur, dict)
    ur_keys = [
        'group_id', 'observation_type', 'operator', 'ipp_value', 'requests'
    ]
    for key in ur_keys:
        assert (key in ur.keys())
    assert (len(ur['requests']) > 0)


    molecule_keys = ['exposure_time', 'exposure_count', 'defocus', 'filter',
                     'instrument_name', 'bin_x', 'bin_y']
    window_keys = ['start', 'end']
    location_keys = ['telescope_class', 'site', 'observatory']
    target_keys = ['name', 'ra', 'dec']
    constraints_keys = ['max_airmass', 'min_lunar_distance']

    def check_keys_present(component, expect_keys):
        status = True
        for key in expect_keys:
            if key not in component.keys() or component[key] is None:
                status = False
                # component.get() avoids a KeyError when the key is absent:
                print(component, key, component.get(key), status,
                      key not in component.keys(), component.get(key) is None)
        return status

    for req in ur['requests']:
        assert len(req['molecules']) > 0
        for mole in req['molecules']:
            assert check_keys_present(mole, molecule_keys)

        assert len(req['windows']) > 0
        for window in req['windows']:
            assert check_keys_present(window, window_keys)

        assert check_keys_present(req['location'], location_keys)

        assert check_keys_present(req['target'], target_keys)

        assert check_keys_present(req['constraints'], constraints_keys)

    log_utilities.end_day_log(log)
    print('Successful test of observation request build')
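
# The shape the assertions above imply for a user request, reconstructed from
# the key lists in the test; every value below is hypothetical.
example_ur = {
    'group_id': 'test_group',
    'observation_type': 'NORMAL',
    'operator': 'SINGLE',
    'ipp_value': 1.0,
    'requests': [{
        'molecules': [{'exposure_time': 30.0, 'exposure_count': 1,
                       'defocus': 0.0, 'filter': 'ip',
                       'instrument_name': '1M0-SCICAM-SINISTRO',
                       'bin_x': 1, 'bin_y': 1}],
        'windows': [{'start': '2017-01-01T00:00:00',
                     'end': '2017-01-02T00:00:00'}],
        'location': {'telescope_class': '1m0', 'site': 'cpt',
                     'observatory': 'doma'},
        'target': {'name': 'test_target', 'ra': 268.5, 'dec': -29.0},
        'constraints': {'max_airmass': 2.0, 'min_lunar_distance': 30.0},
    }],
}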
Example #5
def test_obs_submission(simulate=True):
    """Function to verify that an ObsRequest can be submitted to the 
    LCO network"""
    config = {'log_root_name': 'test_lco_interface', 'log_dir': '.'}

    log = log_utilities.start_day_log(config, 'test_lco_interface')

    obs = get_test_obs(simulate=simulate)
    obs_list = lco_interface.submit_obs_requests([obs], log)

    log_utilities.end_day_log(log)
    print('Successful test of observation submission')
    return [obs]
Example #6
def run_survey():
    """Driver function for the Sinistro survey package"""
    
    # Parse script configuration
    fconfig = 'survey_config.xml'
    lconfigsdir = '.survey'
    (iexec, script_config) = config_parser.readxmlconfig(fconfig,lconfigsdir)
    
    # Start logging
    log = log_utilities.start_day_log( script_config, 'sinistro_survey_obs' )

    # Check for clashing ongoing processes whose logs might become
    # corrupted if this script runs at the same time.
    # If none is present, create a lock to prevent such clashes.
    lock( script_config, 'check', log )
    lock( script_config, 'lock', log )
    
    # Read targetlist and observation configurations
    target_fields = read_target_list( script_config, log )
    
    # Check the logs for any pre-existing and still live obs requests:
    existing_obs = log_utilities.read_active_survey_obs( script_config, log )
    
    # Build observing requests and submit, excluding any fields for which
    # live observation requests should already be in the scheduler:
    obsrecord = log_utilities.start_obs_record( script_config )
    for target_name, field in target_fields.items():
        
        if field.name not in existing_obs.keys():
            field.build_odin_request( script_config, log=log, debug=False )
            log.info('Built observation request ' + field.group_id)
            
            field.submit_request(script_config, log=log, debug=False)
            
            log.info('    => Status: ' + repr(field.submit_status) + \
                                ': ' + repr(field.submit_response))
            obsrecord.write( field.obs_record( script_config ) )
            existing_obs[field.name] = field
        else:
            log.info('Existing live observation request for field ' + \
                field.name + ' - no additional request made')
    obsrecord.close()
    
    # Record active obs groups in the ActiveSurvey log:
    log_utilities.write_active_survey_obs( existing_obs, script_config, log )
    
    # Tidy up and finish:
    log.info('Finished requesting observations')
    lock( script_config, 'unlock', log )
    log_utilities.end_day_log( log )
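
# A minimal sketch of what the lock() helper used above might do, inferred only
# from its call signature lock(config, action, log); the 'lock_file' config key
# and the file-based mechanism are assumptions, not the project's actual code.
from os import path, remove

def lock(config, action, log):
    lock_file = config.get('lock_file', '.survey.lock')  # hypothetical key
    if action == 'check' and path.isfile(lock_file):
        log.info('Lock file present; another instance may be running - exiting')
        raise SystemExit
    elif action == 'lock':
        open(lock_file, 'w').close()
        log.info('Created lock file ' + lock_file)
    elif action == 'unlock' and path.isfile(lock_file):
        remove(lock_file)
        log.info('Removed lock file ' + lock_file)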
Example #7
# Assumed standard-library imports for this snippet; on Python 2,
# getstatusoutput lives in the `commands` module rather than `subprocess`.
from datetime import datetime
from os import environ, chdir
from subprocess import getstatusoutput

def test_cron():
    
    fileobj = open('/home/robouser/cron_test.dat','w')
    t = datetime.utcnow()
    fileobj.write( t.strftime("%Y-%m-%dT%H:%M:%S") + '\n' )
    fileobj.write('Completed imports\n')
    fileobj.flush()
    
    config = { 'k2_footprint_data': \
                '/home/robouser/Software/robonet_site/data/k2-footprint.json',
               'xsuperstamp_target_data': \
               '/home/robouser/Software/robonet_site/data/xsuperstamp_targets.json', 
               'ddt_target_data': \
               '/home/robouser/Software/robonet_site/data/c9-ddt-targets-preliminary.csv',
               'tmp_location': \
               '/science/robonet/rob/Operations/ExoFOP', \
               'k2_campaign': 9, \
               'k2_year': 2016, \
               'log_directory': '/science/robonet/rob/Operations/ExoFOP', \
               'log_root_name': 'test_cron'
              }
    fileobj.write('Set up config\n')
    log = log_utilities.start_day_log( config, __name__, console=False )
    fileobj.write('Started logging\n')
    log.info('Started logging')
    environ['DISPLAY'] = ':99'
    environ['PWD'] = '/science/robonet/rob/Operations/ExoFOP/'
    chdir(environ['PWD'])
    log.info(str(environ.items()))
    fileobj.write( str(environ.items())+ '\n')
    
    # Previous direct invocation, kept for reference:
    # pkg_path = '/opt/anaconda.2.5.0/bin/K2onSilicon'
    # chdir('/science/robonet/rob/Operations/ExoFOP')
    # target_file = 'target.csv'
    # output_file = '/science/robonet/rob/Operations/ExoFOP/targets_siliconFlag.cav'
    # (iexec, coutput) = getstatusoutput(pkg_path + ' ' + target_file +
    #                                    ' ' + str(config['k2_campaign']))
    comm = '/opt/anaconda.2.5.0/bin/K2onSilicon /science/robonet/rob/Operations/ExoFOP/target.csv 9'
    ( iexec, coutput ) = getstatusoutput( comm )
    log.info(coutput + '\n')
    log.info('Loaded K2 data')
    
    fileobj.write(coutput + '\n')
    fileobj.write('Loaded K2 Campaign data\n')   
    fileobj.flush() 
    
    fileobj.close()
    log_utilities.end_day_log( log )
Example #8
def rtmodel_subscriber(log=None, renamed=None):
    """Function to download the parameters of events modeled by RTModel"""
    
    # Read configuration:
    config_file_path = path.join(path.expanduser('~'),
                                 '.robonet_site', 'rtmodel_sync.xml')
    config = config_parser.read_config(config_file_path)
    
    if log is None:
        # No log was supplied, so this function creates its own and must
        # close it before returning:
        use_given_log = False
        log = log_utilities.start_day_log( config, __name__ )
        log.info('Started sync with RTmodel server')
    else:
        use_given_log = True
        
    # Scrape the list of events which have been modeled from the 
    # top level site:
    events_list = get_events_list( config, log )
    
    # Loop over all events, harvest the parameters of the best fit
    # for each one:
    rtmodels = {}
    for event_id in events_list:
        model = get_event_params( config, event_id, log )
        if renamed is not None and model.event_name in renamed.keys():
            model.event_name = renamed[model.event_name]
            log.info('-> Switched name of event renamed by ARTEMiS to '+\
                             model.event_name)
        rtmodels[model.event_name] = model
        log.info( model.summary() )
        
    # Tidy up and finish:
    if not use_given_log:
        log_utilities.end_day_log( log )
    
    return rtmodels
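
# A hypothetical invocation of rtmodel_subscriber(), illustrating the shape of
# the optional `renamed` mapping (ARTEMiS name -> preferred name); the event
# names used here are made up.
rtmodels = rtmodel_subscriber(renamed={'OB160001': 'OGLE-2016-BLG-0001'})
for event_name, model in rtmodels.items():
    print(event_name, model.summary())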
Example #9
def simulate_grid_models( params ):
    """Function to drive a simulation of a grid of microlensing models
    spanning user-defined ranges in u0, tE, phi, Vbase, rho).
    
    For each grid point, the simulation generates two lightcurves: 
    one FSPL including annual parallax
    one FSPL including annual + satellite parallax
    with datapoints which reflect the photometric precision likely from 
    a 1m telescope on Earth and the Swift satellite.
    """
    
    log = log_utilities.start_day_log( params, 'grid_sim' )
    log.info( 'Starting grid simulation' )
    
    grid = construct_grid( params, log )
    n_grid = str(len(grid))
    log.info( 'Processing grid of ' + n_grid + ' grid points' )
    par_grid = start_par_grid_log( params )
    
    # Grid point parameter list: [ml,dl,um,te,phi,Vbase,rho]
    for g, grid_point in enumerate(grid):
        event = mulens_class.MicrolensingEvent()
        event.u_min = 0.0
        event.u_offset = grid_point[2]
        event.t_E = TimeDelta((grid_point[3] * 24.0 * 3600.0),format='sec')
        event.phi = ( grid_point[4] * np.pi ) / 180.0
        event.mag_base = grid_point[5]
        event.rho = grid_point[6]
        event.M_L = constants.M_sun * grid_point[0]
        event.D_L = constants.pc * grid_point[1]
        event.D_S = constants.pc * params['source_distance']
        event.RA = '17:57:34.0'
        event.Dec = '-29:13:15.0'
        event.t_o = Time('2015-06-15T15:00:00', format='isot', scale='utc')
        event.get_earth_perihelion()
        
        log.info( 'Computing for grid point parameters (' + str(g+1) + \
                ' out of ' + n_grid + '):' )
        log.info( 'M_L='+str(grid_point[0])+' D_L='+str(grid_point[1])+\
                  ' u_offset='+str(grid_point[2])+' tE='+str(grid_point[3])+\
                  ' phi='+str(grid_point[4])+' Vbase='+str(grid_point[5])+\
                  ' rho='+str(grid_point[6]) )
        log.info( '-> Time of Earth perihelion for event year: ' + event.t_p.value )
        
        # Compute lens essential parameters, initially with a uniform cadence
        # lightcurve, to ensure that data are taken at the point of closest 
        # approach.  This is then used to determine u_o.  
        event.calc_D_lens_source()
        log.info( '-> calculated the projected separation of lens and source' )
        event.calc_einstein_radius()
        log.info( '-> computed the Einstein radius' )
        event.gen_event_timeline()
        log.info( '-> generated the event timeline' )
        event.calc_source_lens_rel_motion()
        event.calc_proj_observer_pos(parallax=True,satellite=False)
        event.calc_parallax_impact_param(set_uo=True)
        log.info( '-> ' + event.summary(inc_uo=True) )
        log.info( '-> built lensing event object' )
        
        # Re-generate the event time line etc using the cadence
        # requested for the Earth-based lightcurve:
        event.gen_event_timeline(cadence=params['cadence'], \
                                    lc_length=params['lc_length'])                                
        event.calc_source_lens_rel_motion()
        event.calc_proj_observer_pos(parallax=True,satellite=False)
        event.calc_parallax_impact_param()
        log.info( '-> generated the event timeline with observing cadence' )
        #event.plot_lens_plane_motion(params)
        
        # For ease of handling later, a copy of the basic event
        # is taken and will be used to compute the same event
        # as seen from Swift:
        swift_event = copy.copy( event )
        swift_event_force_obs = copy.copy( event )
        log.info( '-> copied to Swift event objects' )
        
        # Ground-based observer:        
        # Calculate the model lightcurve and datapoints for an FSPL 
        # event including annual parallax:
        event.calc_proj_observer_pos(parallax=True,satellite=False)
        log.info( '-> calculated the projected observer position' )
        event.calc_parallax_impact_param()
        log.info( '-> calculated the PSPL impact parameter' )
        log.info(' -> u_o = ' + str(event.u_o) )
        event.calc_magnification(model='fspl')
        log.info( '-> calculated the magnification as a function of time' )
        event.simulate_data_points(model='fspl', phot_precision='1m')
        log.info( '-> Simulated ground-based model and data' )
        
        # Swift observer - coincident observation timing: 
        swift_event.swift_t = event.t[0]
        swift_event.calc_proj_observer_pos(parallax=True,satellite=True,debug=True)
        log.info( '-> calculated the projected observer position' )
        swift_event.calc_parallax_impact_param()
        log.info( '-> calculated the PSPL impact parameter' )
        swift_event.calc_magnification(model='fspl')
        log.info( '-> calculated the magnification as a function of time' )
        swift_event.simulate_data_points(model='fspl', \
                            phot_precision='swift', window=0.83, interval=1.6,log=log)
        log.info( '-> Simulated Swift model and data' )
        
        # Swift observer - forced observations around the peak: 
        swift_event_force_obs.swift_t = event.t[0]
        swift_event_force_obs.calc_proj_observer_pos(parallax=True,satellite=True)
        log.info( '-> calculated the projected observer position' )
        swift_event_force_obs.calc_parallax_impact_param()
        log.info( '-> calculated the PSPL impact parameter' )
        swift_event_force_obs.calc_magnification(model='fspl')
        log.info( '-> calculated the magnification as a function of time' )
        swift_event_force_obs.simulate_data_points(model='fspl', \
                            phot_precision='swift', window=0.83, interval=1.6,\
                            force_t0_obs=True,log=log)
        log.info( '-> Simulated Swift model and data' )
        
        # Record information on the current grid point:
        par_grid.write(str(grid_point[0])+' '+str(grid_point[1])+' '+\
                    str(grid_point[3])+' '+str(grid_point[4])+' '+\
                    str(grid_point[6])+' '+str(grid_point[5])+' '+\
                    str(event.u_o)+' '+str(grid_point[2])+' '+\
                    str(event.u_min)+' '+str(event.u_t.min())+' '+\
                    str(swift_event.u_t.min())+' '+\
                    str(swift_event_force_obs.u_t.min())+'\n')
        par_grid.flush()
        
        # Output data lightcurves:
        file_path = path.join( params['output_path'], \
                        event.root_file_name()+'_earth.dat' )
        event.output_data( file_path )
        file_path = path.join( params['output_path'], \
                    event.root_file_name()+'_swift.dat' )
        swift_event.output_data( file_path )
        file_path = path.join( params['output_path'], \
                    event.root_file_name()+'_swift_forced_obs.dat' )
        swift_event_force_obs.output_data( file_path )
        
        # Output model lightcurves:
        file_path = path.join(params['output_path'], \
                        event.root_file_name()+'_earth.model' )
        event.output_model( file_path, model='fspl' )
        file_path = path.join( params['output_path'], \
                    event.root_file_name()+'_swift.model' )
        swift_event.output_model( file_path, model='fspl' )
        file_path = path.join( params['output_path'], \
                    event.root_file_name()+'_swift_forced_obs.model' )
        swift_event_force_obs.output_model( file_path, model='fspl' )
        log.info( '-> Completed output' )
        
        #else:
        #    log.info( '-> u_o too large to be interesting, skipping' )
        
    log.info( 'Completed simulation' )
    par_grid.close()
    log_utilities.end_day_log( log )
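
# One worked grid point in the [ml, dl, um, te, phi, Vbase, rho] order used
# above, with the unit conventions the loop applies; the values themselves
# are hypothetical.
grid_point = [0.3,     # ml: lens mass in solar masses (scaled by constants.M_sun)
              6000.0,  # dl: lens distance in pc (scaled by constants.pc)
              0.1,     # um: impact-parameter offset u_offset
              30.0,    # te: Einstein crossing time in days (converted to seconds)
              45.0,    # phi: trajectory angle in degrees (converted to radians)
              18.0,    # Vbase: baseline magnitude
              0.001]   # rho: source angular size in Einstein radii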
Example #10


import os

def decompress_data(output_directory):
    """Funpack all compressed frames in the directory, then delete the originals"""

    os.system('funpack ' + output_directory + '*.fz')
    os.system('rm ' + output_directory + '*.fz')


if __name__ == '__main__':

    config = {
        'log_directory': '/data/romerea/data/logs/2017',
        'log_root_name': 'data_harvester'
    }
    logger = log_utilities.start_day_log(config,
                                         'data_collection',
                                         console=False)

    download_needed_data('/data/romerea/data/images/incoming/Scripts/',
                         'Harverst_Conf.txt',
                         '/data/romerea/data/images/incoming/Scripts/',
                         'Already_Download_List.txt',
                         '/data/romerea/data/images/incoming/', logger)
    logger.info('Downloaded data successfully')
    decompress_data('/data/romerea/data/images/incoming/')
    logger.info('Decompressed data successfully')
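
# A safer sketch of the same decompression step, avoiding shell wildcard
# expansion via os.system; it assumes, as above, that funpack is on the PATH.
import glob
import subprocess

def decompress_data_safe(output_directory):
    for fz_file in glob.glob(os.path.join(output_directory, '*.fz')):
        subprocess.check_call(['funpack', fz_file])  # raises on failure
        os.remove(fz_file)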