def setUp(self):
    project_name = 'Test project'
    project_dir = './'
    prj = OQtProject(project_name, project_dir)
    model_id = 'model01'
    model = OQtModel(model_id=model_id, name='Test model')
    prj.add_model(model)
def setUp(self):
    #
    # set logging settings
    fname = './testcomputedoubletruncatedmfd.log'
    logging.basicConfig(filename=fname, level=logging.DEBUG)
    #
    # clear directory
    folder = os.path.join(BASE_DATA_PATH, './../../tmp/project_test')
    delete_and_create_project_dir(folder)
    #
    # set environment variable
    self.prj_path = os.path.join(BASE_DATA_PATH,
                                 './../../tmp/project_test/test.oqmbtp')
    os.environ["OQMBT_PROJECT"] = self.prj_path
    #
    # create the project
    path = './../data/project.ini'
    inifile = os.path.join(BASE_DATA_PATH, path)
    project_create([inifile, os.path.dirname(self.prj_path)])
    #
    # add to the model the name of the shapefile - the path is relative to
    # the position of the project file
    oqtkp = OQtProject.load_from_file(self.prj_path)
    model_id = 'model01'
    oqtkp.active_model_id = model_id
    model = oqtkp.models[model_id]
    path = './../../data/wf01/shapefiles/test_faults.shp'
    model.faults_shp_filename = path
    oqtkp.models[model_id] = model
    oqtkp.save()
def test_load_fault_01(self):
    """
    Load fault data from a shapefile (only simple faults for the time
    being)
    """
    #
    # running the notebook that loads the fault data
    nb_name = 'load_data_from_shapefile_fmg.ipynb'
    nb_path = './../../../notebooks/sources_shallow_fault/'
    tmp = os.path.join(BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    nb.run(nb_full_path, '')
    #
    # loading the project
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    #
    # count the number of simple fault sources
    cnt = 0
    for key in list(model.sources.keys()):
        src = model.sources[key]
        if re.match('SimpleFaultSource', src.source_type):
            cnt += 1
    # checking the number of sources
    assert cnt == 6
    # checking slip rate values
    self.assertAlmostEqual(0.01, model.sources['sf395'].slip_rate,
                           delta=0.01)
    self.assertAlmostEqual(0.06, model.sources['sf400'].slip_rate,
                           delta=0.01)
def xml_vs_mfd(source_id, source_model_fname, model_id, oqmbt_project_fname):
    """
    :param str source_id:
        The ID of the source to be analysed
    :param str source_model_fname:
        The name of the .xml file containing the source model
    :param str model_id:
        The model ID
    :param str oqmbt_project_fname:
        The name of the pickle file containing the oqmbt project
    """
    #
    # read the source model
    src_model, info = read(source_model_fname)
    #
    # compute the total mfd of the sources
    tmfd = get_total_mfd(src_model)
    #
    # read the project
    oqtkp = OQtProject.load_from_file(oqmbt_project_fname)
    model_id = oqtkp.active_model_id
    model = oqtkp.models[model_id]
    #
    # get the source mfd
    src = model.sources[source_id]
    mfd = src.mfd
    if isinstance(src.mfd, TruncatedGRMFD):
        mfd = get_evenlyDiscretizedMFD_from_truncatedGRMFD(mfd)
    #
    # plot the total mfd and the source mfd
    plt.figure(figsize=(10, 8))
    plot_mfd_cumulative(tmfd)
    plot_mfd_cumulative(mfd, title=source_model_fname)
def clean_project_component(model_id, source_type):
    """
    Removes from a model all the sources of a given type, together with
    the corresponding reports.

    :param model_id:
        The ID of the model to be cleaned
    :param source_type:
        The type of the sources to be removed (e.g. 'SimpleFaultSource')
    """
    #
    # load project
    project_pickle_filename = os.environ.get('OQMBT_PROJECT')
    oqtkp = OQtProject.load_from_file(project_pickle_filename)
    print('Project: {:s}'.format(oqtkp.name))
    #
    # set model
    model = oqtkp.models[model_id]
    #
    # delete the fault shapefile name, if set
    if hasattr(model, 'faults_shp_filename'):
        del model.faults_shp_filename
    # directories:
    # - reports
    # - focal_mech
    # - hypo_depths
    # - nrml
    folder_reports = os.path.join(oqtkp.directory, 'reports')
    #
    # hdf5 files:
    # - completeness.hdf5
    # - eqk_rates.hdf5
    # - focal_mechanisms.hdf5
    # - hypo_close_to_faults.hdf5 - NOT USED
    # - hypo_depths.hdf5
    # - <model>_hypo_dist.hdf5
    # - <model>_nodal_plane_dist.hdf5
    #
    # deleting sources
    keys = list(model.sources.keys())
    for key in keys:
        stype = model.sources[key].source_type
        if stype == source_type:
            del model.sources[key]
            #
            # delete reports
            _delete_reports(folder_reports, key)
    #
    # remove information about faults
    if source_type == 'SimpleFaultSource':
        for src_key in model.sources:
            if (model.sources[src_key].source_type == 'AreaSource' and
                    hasattr(model.sources[src_key], 'ids_faults_inside')):
                del model.sources[src_key].ids_faults_inside
    #
    # saving model and project
    oqtkp.models[model_id] = model
    oqtkp.save()
def project_create(argv):
    """
    This creates a new `oqmbt` project.

    :parameter argv:
        A list. The first element is the path to the .ini file describing
        the project. The second element, when present, is the folder where
        the project will be created (this overrides the `directory`
        parameter in the `project` section of the .ini file).
    """
    ini_filename = argv[0]
    print('Reading project information from: \n{:s}'.format(ini_filename))
    assert os.path.exists(ini_filename)
    #
    # reading the .ini file
    config = ConfigParser(dict_type=AttrDict)
    config.read(ini_filename)
    #
    # set project dir and name
    if len(argv) > 1:
        project_dir = argv[1]
        config._sections.project.directory = project_dir
    else:
        project_dir = os.path.abspath(config._sections.project.directory)
    project_name = config._sections.project.name
    #
    # info
    print('Project directory : {:s}'.format(project_dir))
    print('Project name      : {:s}'.format(project_name))
    #
    # create a clean project directory
    delete_and_create_project_dir(project_dir)
    #
    # create the project
    prj = OQtProject(project_name, project_dir)
    #
    # create default files
    create_default_files(prj, project_dir)
    #
    # add standard subfolders
    add_subfolders(project_dir)
    #
    # load information for the various models
    for key in config._sections.keys():
        #
        # search for sections containing model information
        if re.search('^model', key):
            model = load_model_info(config, key)
            prj.add_model(model)
    #
    # save the project
    prj.save()
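
# Minimal usage sketch (an illustration, not part of the original module; the
# paths below are assumptions): the argv-style interface mirrors the way the
# tests call `project_create`, with the .ini file first and an optional target
# folder that overrides the `directory` value of the `[project]` section.
def _example_project_create():  # hypothetical helper, for illustration only
    inifile = './../data/project.ini'
    target_folder = './../../tmp/project_example'
    project_create([inifile, target_folder])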
def handle_change_model(sender):
    """
    Updates the sources widget when the selected model changes.
    """
    global model
    oqmbtp = OQtProject.load_from_file(project_filename)
    model_id = w_model.value
    model = oqmbtp.models[model_id]
    del oqmbtp
    if len(model.sources.keys()):
        w_sources.options = model.sources.keys()
    else:
        w_sources.options = []
def read_faults(faults_xml_filename=None):
    """
    Reads the information on faults included in an .xml file

    :parameter faults_xml_filename:
        The name of the .xml file with the faults
    """
    #
    # loading the project
    project_pickle_filename = os.environ.get('OQMBT_PROJECT')
    oqtkp = OQtProject.load_from_file(project_pickle_filename)
    model_id = oqtkp.active_model_id
    model = oqtkp.models[model_id]
    if faults_xml_filename is None:
        fname = getattr(model, 'faults_xml_filename')
        faults_xml_filename = os.path.join(oqtkp.directory, fname)
    #
    # read the .xml file content
    sources, _ = read(faults_xml_filename)
    #
    # save the information
    for f in sources:
        #
        # fixing the id of the fault source
        sid = str(f.source_id)
        if not re.search('^fs_', sid):
            sid = 'fs_{:s}'.format(sid)
        if isinstance(f, SimpleFaultSource):
            src = OQtSource(sid, 'SimpleFaultSource')
            src.trace = f.fault_trace
            src.msr = f.magnitude_scaling_relationship
            src.mfd = f.mfd
            src.rupture_aspect_ratio = f.rupture_aspect_ratio
            src.trt = f.tectonic_region_type
            src.dip = f.dip
            src.upper_seismogenic_depth = f.upper_seismogenic_depth
            src.lower_seismogenic_depth = f.lower_seismogenic_depth
            src.name = f.name
            src.rake = f.rake
            model.sources[sid] = src
        else:
            raise ValueError('Unsupported fault type')
    #
    # save the project
    oqtkp.models[model_id] = model
    oqtkp.save()
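
# Minimal usage sketch (illustrative only; the path is an assumption): with
# the OQMBT_PROJECT environment variable pointing to a project pickle, load
# the simple fault sources defined in a NRML .xml file into the active model.
# When no path is given, the model attribute `faults_xml_filename` is used.
def _example_read_faults():  # hypothetical helper, for illustration only
    read_faults('./../data/wf01/faults.xml')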
def test_01(self):
    #
    # running the notebook
    nb_name = 'load_geometry_from_shapefile.ipynb'
    nb_path = './../../../notebooks/sources_area/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    nb.run(nb_full_path, '')
    #
    # loading the project
    oqtkp = OQtProject.load_from_file(self.prj_path)
    model_id = 'model01'
    oqtkp.active_model_id = model_id
    model = oqtkp.models[model_id]
    #
    # checking the number and the type of the sources loaded
    self.assertEqual(len(model.sources), 2)
    keys = list(model.sources.keys())
    self.assertEqual(model.sources[keys[0]].source_type, 'AreaSource')
    self.assertEqual(model.sources[keys[1]].source_type, 'AreaSource')
def set_completeness_for_sources(completeness_table, dataset_list):
    """
    Stores a completeness table in the .hdf5 completeness file of the
    project, one dataset per source.

    :parameter completeness_table:
        A :class:`numpy.ndarray` with one (year, magnitude) row per
        completeness threshold
    :parameter dataset_list:
        A list with the IDs of the datasets (i.e. sources) to be
        updated or created
    """
    #
    # load the project
    project_pickle_filename = os.environ.get('OQMBT_PROJECT')
    oqtkp = OQtProject.load_from_file(project_pickle_filename)
    oqtkp.directory = os.path.dirname(project_pickle_filename)
    model_id = oqtkp.active_model_id
    #
    # open the completeness .hdf5 file
    filename = os.path.join(oqtkp.directory, oqtkp.compl_hdf5_filename)
    fhdf5 = h5py.File(filename, 'a')
    print('Updating {:s}'.format(filename))
    #
    # update/create the group (i.e. model) containing the completeness table
    if model_id in fhdf5.keys():
        print('    Group {:s} exists'.format(model_id))
        grp = fhdf5[model_id]
    else:
        print('    Creating group: %s' % (model_id))
        grp = fhdf5.create_group(model_id)
    #
    # update/create the datasets containing the completeness table
    for dataset_name in dataset_list:
        if dataset_name in grp:
            del fhdf5[model_id][dataset_name]
            print('    Updating dataset: %s' % (dataset_name))
        else:
            print('    Creating dataset: %s' % (dataset_name))
        grp.create_dataset(dataset_name, data=completeness_table)
    #
    # close the .hdf5 completeness file
    fhdf5.close()
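
# Minimal usage sketch (illustrative only; the table values and dataset IDs
# below are assumptions): completeness tables are numpy arrays with one
# (year, magnitude) row per threshold, and the dataset names are source IDs
# of the active model. OQMBT_PROJECT must point to an existing project pickle.
def _example_set_completeness():  # hypothetical helper, for illustration only
    import numpy as np
    table = np.array([[1970., 5.0],
                      [1933., 6.0]])
    set_completeness_for_sources(table, ['1', '2'])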
def setUp(self):
    #
    # set logging settings
    fname = './testcomputedoubletruncatedGRfromseismicity.log'
    logging.basicConfig(filename=fname, level=logging.DEBUG)
    #
    # clear directory
    folder = os.path.join(self.BASE_DATA_PATH, './../../tmp/project_test')
    delete_and_create_project_dir(folder)
    #
    # set environment variable
    self.prj_path = os.path.join(self.BASE_DATA_PATH,
                                 './../../tmp/project_test/test.oqmbtp')
    os.environ["OQMBT_PROJECT"] = self.prj_path
    #
    # create the project
    path = './../data/project.ini'
    inifile = os.path.join(self.BASE_DATA_PATH, path)
    project_create([inifile, os.path.dirname(self.prj_path)])
    #
    # add to the model the name of the shapefile - the path is relative to
    # the position of the project file
    oqtkp = OQtProject.load_from_file(self.prj_path)
    model_id = 'model01'
    oqtkp.active_model_id = model_id
    model = oqtkp.models[model_id]
    #
    # set the shapefile with the geometry of the area sources
    path = './../../notebooks/data/shapefiles/area_sources.shp'
    model.area_shapefile_filename = path
    #
    # set the catalogue name
    path = './../../notebooks/data/catalogue.csv'
    model.catalogue_csv_filename = path
    #
    # saving the project
    oqtkp.models[model_id] = model
    oqtkp.save()
def test_01(self): """ This implements a workflow similar to the one used with FMG """ reports_folder = os.path.join('..', 'tmp', 'project_test', 'reports') # # oqtkp = OQtProject.load_from_file(self.prj_path) model = oqtkp.models['model01'] get_src_ids = GetSourceIDs(model) # # AREA SOURCES # ..................................................................... # running the first notebook that loads the geometry of the sources # from the shapefile nb_name = 'load_geometry_from_shapefile.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'sources_area') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # ..................................................................... # catalogue pre-processing nb_name = 'catalogue_pre_processing.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'catalogue') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # checking the creation of the pickled version of the catalogue file_name = 'model01_catalogue.pkl' file_path = os.path.join('..', 'tmp', 'project_test') tmp = os.path.join(self.BASE_DATA_PATH, file_path, file_name) nb_full_path = os.path.abspath(tmp) assert os.path.exists(nb_full_path) # # checking that .hdf5 file exists and contains updated information file_name = 'completeness.hdf5' file_path = os.path.join('..', 'tmp', 'project_test') tmp = os.path.join(self.BASE_DATA_PATH, file_path, file_name) nb_full_path = os.path.abspath(tmp) assert os.path.exists(nb_full_path) # # this is clearly non completely consistent. We should remove the # duplicated thresholds and keep only the ones with the smaller # magnitude f = h5py.File(nb_full_path, 'r') grp = f['/model01'] computed = grp['whole_catalogue'][:] expected = np.array([[1998., 3.5], [1989., 4.0], [1977., 4.5], [1970., 5.0], [1933., 5.5], [1933., 6.0], [1905., 6.5], [1905., 7.0]]) np.testing.assert_equal(expected, computed) f.close() # # ..................................................................... # assign default completeness to all the sources nb_name = 'set_completeness_to_all_area_sources.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'sources_area') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # checking that the .hdf5 contains the completeness tables for all the # sources file_name = 'completeness.hdf5' file_path = os.path.join('..', 'tmp', 'project_test') tmp = os.path.join(self.BASE_DATA_PATH, file_path, file_name) nb_full_path = os.path.abspath(tmp) f = h5py.File(nb_full_path, 'r') grp = f['/model01'] computed = grp['1'][:] np.testing.assert_equal(expected, computed) computed = grp['2'][:] np.testing.assert_equal(expected, computed) computed = grp['3'][:] np.testing.assert_equal(expected, computed) f.close() # # ..................................................................... 
    # calculate GR parameters for all the area sources
    #
    # loading the project
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    #
    # running notebook
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'compute_double_truncated_GR_from_seismicity.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_area')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys,
                  reports_folder=reports_folder)
    #
    # loading the project
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    #
    # check the a and b values computed
    self.assertAlmostEqual(model.sources['1'].a_gr, 3.7243511906)
    self.assertAlmostEqual(model.sources['1'].b_gr, 0.636452331875)
    self.assertAlmostEqual(model.sources['2'].a_gr, 3.69438318983)
    self.assertAlmostEqual(model.sources['2'].b_gr, 0.674434277192)
    self.assertAlmostEqual(model.sources['3'].a_gr, 3.32936780717)
    self.assertAlmostEqual(model.sources['3'].b_gr, 0.6336174742)
    #
    # .....................................................................
    # upload hypocentral depths
    #
    # loading the project
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    #
    # running notebook
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'load_hypocentral_depth_distribution_from_csv.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_area')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # checking that the .hdf5 file with the hypocentral depth distributions
    # exists and contains the expected values for all the sources
    file_name = 'model01_hypo_dist.hdf5'
    file_path = os.path.join('..', 'tmp', 'project_test')
    tmp = os.path.join(self.BASE_DATA_PATH, file_path, file_name)
    nb_full_path = os.path.abspath(tmp)
    assert os.path.exists(nb_full_path)
    # checking values
    expected = np.zeros(2, dtype=[('depth', 'f4'), ('wei', 'f4')])
    expected[0] = (10.0, 0.6)
    expected[1] = (20.0, 0.4)
    f = h5py.File(nb_full_path, 'r')
    computed = f['1'][:]
    np.testing.assert_array_equal(expected, computed)
    computed = f['2'][:]
    np.testing.assert_array_equal(expected, computed)
    computed = f['3'][:]
    np.testing.assert_array_equal(expected, computed)
    f.close()
    #
    # .....................................................................
    # upload focal mechanism distribution
    #
    # loading the project
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    #
    # running notebook
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'load_nodal_plane_distribution_from_csv.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_area')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # checking that the .hdf5 file with the nodal plane distributions
    # exists and contains the expected values for all the sources
    file_name = 'model01_focal_mech_dist.hdf5'
    file_path = './../tmp/project_test/'
    tmp = os.path.join(self.BASE_DATA_PATH, file_path, file_name)
    nb_full_path = os.path.abspath(tmp)
    assert os.path.exists(nb_full_path)
    # checking values
    expected = np.zeros(1, dtype=[('strike', 'f4'), ('dip', 'f4'),
                                  ('rake', 'f4'), ('wei', 'f4')])
    expected[0] = (0.00, 90.00, 0.00, 1.00)
    f = h5py.File(nb_full_path, 'r')
    computed = f['1'][:]
    np.testing.assert_array_equal(expected, computed)
    computed = f['2'][:]
    np.testing.assert_array_equal(expected, computed)
    computed = f['3'][:]
    np.testing.assert_array_equal(expected, computed)
    f.close()
    #
    # STRAIN ANALYSIS
    # .....................................................................
    # Computing moment from strain
    nb_name = 'compute_mo_from_strain.ipynb'
    nb_path = './../../notebooks/sources_area/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    nb.run(nb_full_path, '')
    #
    # computing corner magnitude
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'compute_mc_from_mo.ipynb'
    nb_path = './../../notebooks/tectonics/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # checking
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    thrs = 1e7
    self.assertTrue(model.sources['1'].mo_mcs / 8.2392996092e+15 < thrs)
    self.assertTrue(model.sources['2'].mo_mcs / 1.99901877766e+16 < thrs)
    self.assertTrue(model.sources['3'].mo_mcs / 1.99901877766e+16 < thrs)
    self.assertTrue(
        model.sources['1'].mo_strain / 7.86150975109e+16 < thrs)
    self.assertTrue(
        model.sources['2'].mo_strain / 5.29894154843e+16 < thrs)
    self.assertTrue(
        model.sources['3'].mo_strain / 8.33252270107e+16 < thrs)
    #
    # fixing the MFD for all the area sources
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'set_mfd_tapered_GR.ipynb'
    nb_path = './../../notebooks/sources_area/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # FAULT SOURCES
    # .....................................................................
    # running the notebook that loads the fault data from the shapefile
    nb_name = 'load_data_from_shapefile_fmg.ipynb'
    nb_path = './../../notebooks/sources_shallow_fault/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    nb.run(nb_full_path, '')
    #
    # checking the number of fault sources loaded
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    cnt = 0
    for key in list(model.sources.keys()):
        src = model.sources[key]
        if src.source_type == 'SimpleFaultSource':
            cnt += 1
    assert cnt == 6
    #
    # compute the mfd from slip rate
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['SimpleFaultSource'])
    nb_name = 'mfd_double_truncated_from_slip_rate_SRC.ipynb'
    nb_path = './../../notebooks/sources_shallow_fault/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # checking that each fault has an MFD
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    for key in list(model.sources.keys()):
        src = model.sources[key]
        if src.source_type == 'SimpleFaultSource':
            assert hasattr(src, 'mfd')
    #
    # .....................................................................
    # find the faults inside each area source
    get_src_ids.reset()
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'find_faults_within_area_source.ipynb'
    nb_path = './../../notebooks/sources_area/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # checking the fraction of each fault contained in the area sources
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    src = model.sources['1']
    self.assertAlmostEqual(src.ids_faults_inside['sf400'], 0.695494958, 2)
    self.assertAlmostEqual(src.ids_faults_inside['sf399'], 1.0)
    src = model.sources['2']
    self.assertAlmostEqual(src.ids_faults_inside['sf398'], 1.0)
    self.assertAlmostEqual(src.ids_faults_inside['sf396'], 1.0)
    src = model.sources['3']
    self.assertAlmostEqual(src.ids_faults_inside['sf397'], 1.0)
    self.assertAlmostEqual(src.ids_faults_inside['sf400'], 0.3045975665, 2)
    self.assertAlmostEqual(src.ids_faults_inside['sf395'], 0.2386801966, 2)
    #
    # .....................................................................
    # compute the moment from the mfd
    nb_name = 'compute_mo_from_mfd.ipynb'
    nb_path = './../../notebooks/sources/'
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    nb.run(nb_full_path, '')
    # checking
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    for key in list(model.sources.keys()):
        src = model.sources[key]
        self.assertTrue(hasattr(src, 'mo_from_mfd'))
def runipy_for_sources(project_filename, model_id, notebook_path, src_id_list,
                       wdg_progress=None, report=False, create_pdf=False):
    """
    Runs a notebook with `runipy` for each source in `src_id_list` and,
    optionally, creates the corresponding .html (and .pdf) reports.
    """
    opt = ''
    if report:
        opt = ' -o'
    rpt_table = ReportTable()
    # Running
    for idx, elem in enumerate(sorted(src_id_list)):
        # Saving project
        oqmbtp = OQtProject.load_from_file(project_filename)
        oqmbtp.active_model_id = model_id
        oqmbtp.active_source_id = [elem]
        oqmbtp.save()
        outdir = oqmbtp.directory
        del oqmbtp
        # Preparing the report folder
        cmd_str = 'runipy %s %s' % (opt, notebook_path)
        if len(opt) > 0:
            nb_name = os.path.split(notebook_path)[1]
            rpt_path, rpt_name = _get_report_path_name(outdir, model_id,
                                                       elem, nb_name)
            if not os.path.exists(rpt_path):
                os.makedirs(rpt_path)
        # Running notebook
        out = subprocess.call(cmd_str, shell=True)
        if out:
            print(cmd_str)
            print('src ID:', elem)
            print('Error in processing the notebook')
            return 1
        if len(opt) > 0:
            # Converting notebook
            cmd_str = 'ipython nbconvert --to html %s' % (notebook_path)
            out = subprocess.call(cmd_str, shell=True)
            if out:
                print(cmd_str)
                print('Error in creating the html report')
                return 1
            # Moving report to folder and rename it
            path = os.path.dirname(notebook_path)
            html_filename = os.path.join(rpt_path, rpt_name)
            in_html = os.path.join(path, os.path.splitext(nb_name)[0])
            cmd_str = 'mv %s.html %s' % (in_html, html_filename)
            out = subprocess.call(cmd_str, shell=True)
            if out:
                print('Error in moving the report to the final folder')
                print('from: %s.html' % (os.path.splitext(nb_name)[0]))
                print('to:', html_filename)
                return 1
            else:
                print('Created %s' % (rpt_name))
            if create_pdf:
                pdf_filename = '%s.pdf' % (html_filename.rsplit('.', 1)[0])
                cmd_str = 'wkhtmltopdf %s %s' % (html_filename, pdf_filename)
                out = subprocess.call(cmd_str, shell=True)
                if out:
                    print('Error in creating the .pdf file')
                    return 1
            # Update reports table
            path = os.path.relpath(os.path.join(rpt_path, rpt_name))
            rpt_table.append_item(path, rpt_name)
        if wdg_progress is not None:
            wdg_progress.value = idx
    if wdg_progress is not None:
        return rpt_table
    else:
        return 0
def automator_gui(filename):
    """
    :parameter filename:
        The name of a pickle file containing an OQtProject
    """
    global w_model, w_sources, w_nb_name, w_nb_type, w_repo, w_progress
    global project_filename, model
    global project_dir
    wdg_list = []
    margin = 5
    project_filename = filename
    oqmbtp = OQtProject.load_from_file(project_filename)
    models = list(oqmbtp.models.keys())
    project_dir = oqmbtp.directory
    w_title = widgets.HTML(value="<h3>Automator<h3>")
    tmp_str = "Name : %s <br>" % (oqmbtp.name)
    tmp_str += "Stored in: %s <br><br>" % (project_dir)
    w_text = widgets.HTML(value=tmp_str)
    wdg_list.append(w_title)
    wdg_list.append(w_text)
    tmp_str = "Warning: the model does not contain sources"
    w_warn = widgets.HTML(value=tmp_str, visible=False)
    if len(models):
        model_id = models[0]
        model = oqmbtp.models[model_id]
        w_model = widgets.Dropdown(options=models, description='Model',
                                   value=model_id, width=400, margin=margin)
        if len(model.sources.keys()):
            # Sources drop down menu
            tmp_list = sorted(model.sources.keys())
            tmp_list.insert(0, 'All')
            tmp_str = 'Sources'
            w_sources = widgets.SelectMultiple(options=tmp_list,
                                               description=tmp_str,
                                               width=200, margin=margin)
        else:
            w_sources = widgets.Dropdown(options=[], description='Source')
        wdg_list.append(w_model)
        wdg_list.append(w_sources)
    else:
        w_warn.visible = True
    # Notebook type
    w_nb_type = widgets.Dropdown(options=NB_TYPES,
                                 description='Notebook type',
                                 width=400, margin=margin)
    wdg_list.append(w_nb_type)
    # Notebook name
    w_nb_name = widgets.Dropdown(options=[], description='Notebook name',
                                 width=400, margin=margin)
    wdg_list.append(w_nb_name)
    # Report checkbox
    w_repo = widgets.Checkbox(description='Generate report', value=False)
    wdg_list.append(w_repo)
    # Warning
    wdg_list.append(w_warn)
    # Button
    w_butt = widgets.Button(description='Run', width=100, border_color='red')
    wdg_list.append(w_butt)
    # Progress bar
    w_progress = widgets.FloatProgress(value=0.0, min=0.0, step=1,
                                       visible=False,
                                       description='Processing:')
    wdg_list.append(w_progress)
    w_model.on_trait_change(handle_change_model)
    w_nb_type.on_trait_change(handle_change_nb_type, 'value')
    w_butt.on_click(handle_run)
    # Clean variables
    del oqmbtp
    return widgets.VBox(children=wdg_list)
def test_01(self): """ This implements a workflow creating area sources without faults """ reports_folder = os.path.join('..', 'tmp', 'project_test', 'reports') # # oqtkp = OQtProject.load_from_file(self.prj_path) model = oqtkp.models['model01'] get_src_ids = GetSourceIDs(model) # # AREA SOURCES # ..................................................................... # running the first notebook that loads the geometry of the sources # from the shapefile nb_name = 'load_geometry_from_shapefile.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'sources_area') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') del oqtkp oqtkp = OQtProject.load_from_file(self.prj_path) model = oqtkp.models['model01'] # # ..................................................................... # set tectonic-region oqtkp.set_tectonic_region('model01', 'Active Shallow Crust') oqtkp.save() # checking del oqtkp oqtkp = OQtProject.load_from_file(self.prj_path) oqtkp.active_model_id = 'model01' model = oqtkp.models['model01'] for key in sorted(model.sources): assert model.sources[key].tectonic_region_type == \ 'Active Shallow Crust' oqtkp = OQtProject.load_from_file(self.prj_path) oqtkp.active_model_id = 'model01' model = oqtkp.models['model01'] # # ..................................................................... # catalogue pre-processing nb_name = 'catalogue_pre_processing.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'catalogue') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # ..................................................................... # assign default completeness to all the sources nb_name = 'set_completeness_to_all_area_sources.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'sources_area') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # ..................................................................... # calculate GR parameters for all the area sources # # loading the project oqtkp = OQtProject.load_from_file(self.prj_path) oqtkp.active_model_id = 'model01' model = oqtkp.models['model01'] # # running notebook get_src_ids = GetSourceIDs(model) get_src_ids.keep_equal_to('source_type', ['AreaSource']) nb_name = 'compute_double_truncated_GR_from_seismicity.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'sources_area') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys, reports_folder=reports_folder) # # ..................................................................... # upload hypocentral depths # # loading the project del oqtkp oqtkp = OQtProject.load_from_file(self.prj_path) oqtkp.active_model_id = 'model01' model = oqtkp.models['model01'] # # running notebook get_src_ids = GetSourceIDs(model) get_src_ids.keep_equal_to('source_type', ['AreaSource']) nb_name = 'load_hypocentral_depth_distribution_from_csv.ipynb' nb_path = os.path.join('..', '..', 'notebooks', 'sources_area') tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys) # # ..................................................................... 
    # upload focal mechanism distribution
    #
    # loading the project
    del oqtkp
    oqtkp = OQtProject.load_from_file(self.prj_path)
    oqtkp.active_model_id = 'model01'
    model = oqtkp.models['model01']
    #
    # running notebook
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'load_nodal_plane_distribution_from_csv.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_area')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # .....................................................................
    # Setting the mmax
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'set_mem_from_seismicity_max_obs_plus_delta.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_area')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # .....................................................................
    # Setting the MFD
    get_src_ids = GetSourceIDs(model)
    get_src_ids.keep_equal_to('source_type', ['AreaSource'])
    nb_name = 'set_mfd_double_truncated_GR.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_area')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
    #
    # .....................................................................
    # Creating sources
    nb_name = 'create_sources_no_faults.ipynb'
    nb_path = os.path.join('..', '..', 'notebooks', 'sources_distributed_s')
    tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name)
    nb_full_path = os.path.abspath(tmp)
    automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys)
def run(project_filename, model_id, notebook_path, src_id_list,
        reports_folder=''):
    """
    :parameter project_filename:
        The name of the file (a pickle file) containing the project
    :parameter model_id:
        A string identifying the ID of the model with the sources to be
        processed
    :parameter notebook_path:
        The path to the notebook to be executed
    :parameter src_id_list:
        A list containing the IDs of the sources to be processed
    :parameter reports_folder:
        The name of the folder where to create the .html report. An empty
        string does not trigger the creation of a report.
    :returns:
        True if the notebook completed successfully for all the sources,
        False otherwise
    """
    #
    # check that the output folder exists
    if len(reports_folder) and not os.path.exists(reports_folder):
        os.makedirs(reports_folder)
    #
    # running the notebook for each source
    prc = []
    for idx, elem in enumerate(sorted(src_id_list)):
        #
        # saving the project with the active model and source set
        oqmbtp = OQtProject.load_from_file(project_filename)
        oqmbtp.active_model_id = model_id
        oqmbtp.active_source_id = [elem]
        oqmbtp.save()
        #
        # output directory
        outdir = oqmbtp.directory
        del oqmbtp
        #
        # report name
        rpt_name = None
        if len(reports_folder):
            nb_name = os.path.split(notebook_path)[1]
            rpt_path, rpt_name = _get_report_path_name(outdir, model_id,
                                                       elem, nb_name)
        #
        # info
        msg = 'Running {:s}'.format(os.path.basename(notebook_path))
        msg += ' for source with ID {:s}'.format(elem)
        print(msg)
        #
        # running the notebook
        out = notebook.run(notebook_path, '', reports_folder=reports_folder,
                           key=rpt_name)
        prc.append(out)
    ok = all(prc)
    return ok
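
# Minimal usage sketch (illustrative only; the notebook path and source IDs
# below are assumptions, not values from an actual project): run one notebook
# for two sources of 'model01', writing .html reports into a local folder.
def _example_automator_run(project_filename):  # hypothetical helper
    notebook_path = os.path.abspath(
        './notebooks/sources_area/'
        'compute_double_truncated_GR_from_seismicity.ipynb')
    return run(project_filename, 'model01', notebook_path, ['1', '2'],
               reports_folder='reports')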
def handle_run(sender):
    """
    Runs the selected notebook for the selected sources and, optionally,
    creates the .html reports.
    """
    # Case all
    if w_sources.value[0] == 'All':
        tlist = sorted(model.sources.keys())
    else:
        tlist = w_sources.value
    # Update progress bar
    w_progress.max = len(tlist) - 1
    w_progress.visible = True
    w_progress.value = 0
    # Option
    opt = ' -o' if w_repo.value else ''
    if len(opt) > 0:
        rpt_table = ReportTable()
    # Running
    for idx, elem in enumerate(tlist):
        # Saving project
        oqmbtp = OQtProject.load_from_file(project_filename)
        oqmbtp.active_model_id = w_model.value
        oqmbtp.active_source_id = [elem]
        oqmbtp.save()
        del oqmbtp
        # Preparing the report folder
        nb_str = os.path.join('./%s/%s' % (w_nb_type.value, w_nb_name.value))
        cmd_str = 'runipy %s %s' % (opt, nb_str)
        if len(opt) > 0:
            rpt_path, rpt_name = _get_report_path_name(project_dir,
                                                       w_model.value, elem,
                                                       w_nb_name.value)
            if not os.path.exists(rpt_path):
                os.makedirs(rpt_path)
        # Running notebook
        subprocess.call(cmd_str, shell=True)
        if len(opt) > 0:
            # Converting notebook
            cmd_str = 'ipython nbconvert --to html %s' % (nb_str)
            out = subprocess.call(cmd_str, shell=True)
            if out:
                print('Error in creating the html report')
            # Moving report to folder and rename it
            cmd_str = 'mv %s.html %s' % (os.path.splitext(w_nb_name.value)[0],
                                         os.path.join(rpt_path, rpt_name))
            out = subprocess.call(cmd_str, shell=True)
            if out:
                print('Error in moving the report to the final folder')
            # Update reports table
            path = os.path.relpath(os.path.join(rpt_path, rpt_name))
            rpt_table.append_item(path, rpt_name)
        w_progress.value = idx
    if len(opt) > 0:
        display(HTML(rpt_table.get_report_table()))
def test_01(self): """ Read geometry from a shapefile and compute a MFD for each source """ # # oqtkp = OQtProject.load_from_file(self.prj_path) model = oqtkp.models['model01'] get_src_ids = GetSourceIDs(model) # # running the first notebook that loads the geometry of the sources # from the shapefile nb_name = 'load_geometry_from_shapefile.ipynb' nb_path = './../../../notebooks/sources_area/' tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # create the pickled catalogue nb_name = 'catalogue_create_pickle.ipynb' # we miss the completeness t nb_path = './../../../notebooks/catalogue/' tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) nb.run(nb_full_path, '') # # set the completeness table for the to sources set_completeness_for_sources(TABLE, ['02', '03']) # # checking the creation of the pickled version of the catalogue file_name = 'model01_catalogue.pkl' file_path = './../../tmp/project_test/' tmp = os.path.join(self.BASE_DATA_PATH, file_path, file_name) nb_full_path = os.path.abspath(tmp) assert os.path.exists(nb_full_path) # # loading the project oqtkp = OQtProject.load_from_file(self.prj_path) oqtkp.active_model_id = 'model01' model = oqtkp.models['model01'] # # running the notebook that computes the GR parameters get_src_ids = GetSourceIDs(model) get_src_ids.keep_equal_to('source_type', ['AreaSource']) nb_name = 'compute_double_truncated_GR_from_seismicity.ipynb' nb_path = './../../../notebooks/sources_area/' tmp = os.path.join(self.BASE_DATA_PATH, nb_path, nb_name) nb_full_path = os.path.abspath(tmp) reports_folder = './../tmp/project_test/reports/' automator.run(self.prj_path, 'model01', nb_full_path, get_src_ids.keys, reports_folder=reports_folder) # # loading the project oqtkp = OQtProject.load_from_file(self.prj_path) oqtkp.active_model_id = 'model01' model = oqtkp.models['model01'] # # check the a and b values computed self.assertAlmostEqual(model.sources['02'].a_gr, 5.824285578226533) self.assertAlmostEqual(model.sources['02'].b_gr, 1.1421442004454874) self.assertAlmostEqual(model.sources['03'].a_gr, 3.8036622760645868) self.assertAlmostEqual(model.sources['03'].b_gr, 0.8832991033938602)
def setUp(self):
    #
    # set logging settings
    fname = './wf01.log'
    # logging.basicConfig(filename=fname, level=logging.DEBUG)
    logging.basicConfig(filename=fname, level=logging.WARN)
    #
    # clear directory where the project will be created
    folder = os.path.join(self.BASE_DATA_PATH, '..', 'tmp', 'project_test')
    delete_and_create_project_dir(folder)
    #
    # set environment variable
    self.prj_path = os.path.join(folder, 'test.oqmbtp')
    os.environ["OQMBT_PROJECT"] = self.prj_path
    #
    # create the project
    inifile = os.path.join(self.BASE_DATA_PATH, '..', 'data', 'wf01',
                           'project.ini')
    project_create([inifile, os.path.dirname(self.prj_path)])
    #
    # load the project just created
    oqtkp = OQtProject.load_from_file(self.prj_path)
    model_id = 'model01'
    oqtkp.active_model_id = model_id
    model = oqtkp.models[model_id]
    #
    # set the shapefile with the geometry of area sources [relative path
    # with origin the project folder]
    model.area_shapefile_filename = os.path.join('.', '..', '..', 'data',
                                                 'wf01', 'shapefiles',
                                                 'test_area.shp')
    #
    # set the shapefile with the geometry of fault sources [relative path
    # with origin the project folder]
    model.faults_shp_filename = os.path.join('.', '..', '..', 'data',
                                             'wf01', 'shapefiles',
                                             'test_faults.shp')
    #
    # set the catalogue name
    path = './../../data/wf01/catalogue.csv'
    model.catalogue_csv_filename = path
    # required by mfd_double_truncated_from_slip_rate_SRC.ipynb
    model.default_bgr = 1.0
    model.strain_pickle_spatial_index_filename = (
        './../../data/wf01/strain/sample_average_strain')
    model.strain_rate_model_hdf5_filename = (
        './../../data/wf01/strain/sample_average_strain.hdf5')
    # required by compute_mo_from_strain.ipynb
    model.shear_modulus = 3.2e10
    # required by compute_mo_from_strain.ipynb
    model.coup_coef = 0.8
    # required by compute_mo_from_strain.ipynb
    model.coup_thick = 15.0
    # required by compute_mo_from_strain.ipynb
    model.strain_cell_dx = 0.250
    model.strain_cell_dy = 0.200
    # required by set_mfd_tapered_GR.ipynb
    model.m_min = 5.0
    # required by set_mfd_tapered_GR.ipynb
    model.bin_width = 0.1
    model.faults_lower_threshold_magnitude = 6.5
    model.msr = 'WC1994'
    #
    # create the hypo files - the folder hypo_depths is created by the
    # 'project_create' script
    folder = os.path.dirname(self.prj_path)
    for i in [1, 2, 3]:
        fname = 'hypo_depths-model01-{:d}.csv'.format(i)
        path = os.path.join(folder, 'hypo_depths', fname)
        f = open(path, 'w')
        f.write('depth,weight\n')
        f.write('10,0.6\n')
        f.write('20,0.4\n')
        f.close()
    model.hypo_dist_filename = 'model01_hypo_dist.hdf5'
    #
    # create the focal mechanism files
    for i in [1, 2, 3]:
        fname = 'focal_mechs-model01-{:d}.csv'.format(i)
        path = os.path.join(folder, 'focal_mechs', fname)
        f = open(path, 'w')
        f.write('strike,dip,rake,weight\n')
        f.write('0.00,90.00,0.00,1.00\n')
        f.close()
    model.nodal_plane_dist_filename = 'model01_focal_mech_dist.hdf5'
    #
    # saving the project
    oqtkp.models[model_id] = model
    oqtkp.save()