def main():
    """Assemble survey datasets and rock priors, then write the solver config."""
    # Petrophysical prior moments for the rock layers.
    rockpriormu, rockpriorcov = compute_rock_priors(config_petrofn)

    # Gravity readings in the standard sensor format.
    # Hugo's data arrive in um/s^2; scale the values down to mgal.
    grav_df = pd.read_csv(gravdata_fn, names=sensor_colnames,
                          dtype=float, skiprows=1)
    grav_df.val /= 10.0

    # Magnetic readings, already in the expected units.
    mag_df = pd.read_csv(magdata_fn, names=sensor_colnames,
                         dtype=float, skiprows=1)

    # Ground-truth field observations, converted to the standard format.
    field_df = convert_ground_truth(fielddata_fn)

    # Attach the datasets and priors to the shared parameter dict,
    # then emit the final configuration.
    config_params.update({
        'grav_data': grav_df,
        'mag_data': mag_df,
        'field_data': field_df,
        'rockpriormu': rockpriormu,
        'rockpriorcov': rockpriorcov,
    })
    write_config(**config_params)
def main():
    """The main config-writer routine (synthetic-data variant).

    Generates rock priors, dumps demeaned synthetic grav/mag readings from
    the prospector NPZ to CSV, reads the survey datasets, fills the shared
    ``config_params`` dict, and writes the final configuration.
    """
    # Generate rock priors from the petrophysical source tables.
    generate_rock_data(layer_rockprops, source_petrofn, config_petrofn)
    rockpriormu, rockpriorcov = compute_rock_priors(config_petrofn)

    # Read prospector NPZ and export demeaned synthetic readings.
    prospect = np.load('synthetic.npz')

    gravsynth = prospect['gravReadings'][0]
    gravsynth -= np.mean(gravsynth)
    # Optional measurement noise, kept disabled as in the original:
    # gravsynth += 0.05*np.std(gravsynth)*np.random.normal(size=gravsynth.shape)
    pd.DataFrame(gravsynth).to_csv('gravsynth.csv', index=False, header=False)

    magsynth = prospect['magReadings'][0]
    magsynth -= np.mean(magsynth)
    # magsynth += 0.05*np.std(magsynth)*np.random.normal(size=magsynth.shape)
    pd.DataFrame(magsynth).to_csv('magsynth.csv', index=False, header=False)

    # Read in grav data and convert to the standard format.
    # For Hugo's data, divide by 10 to convert from um/s^2 to mgal.
    gravdata = pd.read_csv(gravdata_fn, names=sensor_colnames,
                           dtype=float, skiprows=1)
    gravdata.val /= 10.0

    # Read in mag data and convert to the standard format.
    magdata = pd.read_csv(magdata_fn, names=sensor_colnames,
                          dtype=float, skiprows=1)

    # Read in field observation data and convert to the standard format.
    fielddata = generate_field_data(layer_rockprops, fielddata_fn, fieldsrc_fn)

    # BUG FIX: a bare `return` here made everything below unreachable, so the
    # config was never written. It looks like a debugging leftover — removed.
    # NOTE(review): confirm the early exit was not intentional for this variant.

    # Fill remaining fields in config_params and write the config out.
    config_params.update({
        'grav_data': gravdata,
        'mag_data': magdata,
        'field_data': fielddata,
        'rockpriormu': rockpriormu,
        'rockpriorcov': rockpriorcov,
    })
    write_config(**config_params)
session_id = out['s'][0] if plus_one: session_id += 1 return '{}_{}'.format(out['sessionHost'][0], session_id) with open(weights_list_path, 'r') as fn: weights_list = json.load(fn) config_mapping = (dict() if not os.path.exists(config_mapping_path) else load_pickles(config_mapping_path)) for weights in weights_list: logger.info('Running with weights: {}'.format(weights)) update_cfg_path = os.path.join(sims_fbs_config_path, 'updated.cfg') write_config(weights, update_cfg_path) next_session_id = get_latest_sessionid() logger.info('Running opsim with session ID: {}'.format(next_session_id)) repo.git.checkout('-b', 'weights/{}'.format(next_session_id)) try: execute(['./run_opsim.sh {} "{}"'.format(run_dir, opsim_flags)]) except Exception as e: repo.git.branch('-d', 'weights/{}'.format(next_session_id)) repo.git.checkout('batchrun') logger.error('Error occurred in running feature based scheduler, ' 'please check opsim log files') logger.error(e) if os.path.exists(update_cfg_path): logger.error('Error occurred, removing updated.cfg') os.remove(update_cfg_path)
def test_write_config():
    """Smoke-test that write_config accepts the prepared config_params."""
    write_config(**config_params)