def test_alternative_mfds(self):
    """Check conversion of the three alternative MFD kinds in the model."""
    conv = s.SourceConverter(
        investigation_time=1.,
        rupture_mesh_spacing=1,  # km
        complex_fault_mesh_spacing=5,  # km
        width_of_mfd_bin=0.1,  # for Truncated GR MFDs
        area_source_discretization=1.)
    nodes = read_nodes(ALT_MFDS_SRC_MODEL, filter_sources, ValidNode)
    cplx1, sflt1, sflt2 = map(conv.convert_node, nodes)
    # Check the values
    # Arbitrary MFD
    assert_close(cplx1.mfd.magnitudes, [8.6, 8.8, 9.0])
    assert_close(cplx1.mfd.occurrence_rates, [0.0006, 0.0008, 0.0004])
    # Youngs & Coppersmith from characteristic rate
    self.assertAlmostEqual(sflt1.mfd.b_val, 1.0)
    self.assertAlmostEqual(sflt1.mfd.a_val, 3.3877843113)
    self.assertAlmostEqual(sflt1.mfd.char_mag, 7.0)
    self.assertAlmostEqual(sflt1.mfd.char_rate, 0.005)
    self.assertAlmostEqual(sflt1.mfd.min_mag, 5.0)
    # Youngs & Coppersmith from total moment rate
    self.assertAlmostEqual(sflt2.mfd.b_val, 1.0)
    self.assertAlmostEqual(sflt2.mfd.a_val, 5.0800, 3)
    self.assertAlmostEqual(sflt2.mfd.char_mag, 7.0)
    self.assertAlmostEqual(sflt2.mfd.char_rate, 0.24615, 5)
    self.assertAlmostEqual(sflt2.mfd.min_mag, 5.0)
def test_nonparametric_source_ok(self):
    """A valid nonparametric source must convert without raising."""
    conv = s.SourceConverter(
        investigation_time=50.,
        rupture_mesh_spacing=1,  # km
        complex_fault_mesh_spacing=1,  # km
        width_of_mfd_bin=1.,  # for Truncated GR MFDs
        area_source_discretization=1.)
    # exactly one source node is expected in the file
    [src_node] = read_nodes(NONPARAMETRIC_SOURCE, filter_sources, ValidNode)
    conv.convert_node(src_node)
def setUpClass(cls):
    """Parse the mixed source model once and index its collectors by TRT."""
    cls.converter = s.SourceConverter(
        investigation_time=50.,
        rupture_mesh_spacing=1,  # km
        complex_fault_mesh_spacing=1,  # km
        width_of_mfd_bin=1.,  # for Truncated GR MFDs
        area_source_discretization=1.)
    cls.source_collector = {
        sc.trt: sc for sc in parse_source_model(
            MIXED_SRC_MODEL, cls.converter, lambda src: None)}
    cls.sitecol = site.SiteCollection(cls.SITES)
def test_duplicate_id(self):
    """A model with duplicated source IDs must raise DuplicatedID."""
    conv = s.SourceConverter(  # different from self.converter
        investigation_time=50.,
        rupture_mesh_spacing=1,
        complex_fault_mesh_spacing=1,
        width_of_mfd_bin=0.1,
        area_source_discretization=10)
    with self.assertRaises(DuplicatedID):
        parse_source_model(DUPLICATE_ID_SRC_MODEL, conv)
def setUpClass(cls):
    """Parse the mixed source model via the parser and index by TRT."""
    cls.parser = SourceModelParser(s.SourceConverter(
        investigation_time=50.,
        rupture_mesh_spacing=1,  # km
        complex_fault_mesh_spacing=1,  # km
        width_of_mfd_bin=1.,  # for Truncated GR MFDs
        area_source_discretization=1.))
    cls.source_collector = dict(
        (sc.trt, sc)
        for sc in cls.parser.parse_trt_models(MIXED_SRC_MODEL))
    cls.sitecol = site.SiteCollection(cls.SITES)
def test_duplicate_id(self):
    """Parsing a model with duplicated source IDs must raise DuplicatedID."""
    sm_parser = SourceModelParser(s.SourceConverter(
        investigation_time=50.,
        rupture_mesh_spacing=1,
        complex_fault_mesh_spacing=1,
        width_of_mfd_bin=0.1,
        area_source_discretization=10))
    with self.assertRaises(DuplicatedID):
        sm_parser.parse_sources(DUPLICATE_ID_SRC_MODEL)
def setUpClass(cls):
    """Convert each source in the mixed model once, for reuse by tests."""
    cls.converter = s.SourceConverter(
        investigation_time=50.,
        rupture_mesh_spacing=1,  # km
        complex_fault_mesh_spacing=1,  # km
        width_of_mfd_bin=1.,  # for Truncated GR MFDs
        area_source_discretization=1.,  # km
    )
    nodes = read_nodes(MIXED_SRC_MODEL, filter_sources, ValidNode)
    (cls.area, cls.point, cls.simple, cls.cmplx, cls.char_simple,
     cls.char_complex, cls.char_multi) = [
        cls.converter.convert_node(node) for node in nodes]
    # the parameters here would typically be specified in the job .ini
    cls.investigation_time = 50.
    cls.rupture_mesh_spacing = 1  # km
    cls.complex_fault_mesh_spacing = 1  # km
    cls.width_of_mfd_bin = 1.  # for Truncated GR MFDs
    cls.area_source_discretization = 1.  # km
def get_source_models(oqparam, gsim_lt, source_model_lt, in_memory=True):
    """
    Build all the source models generated by the logic tree.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param gsim_lt:
        a :class:`openquake.commonlib.logictree.GsimLogicTree` instance
    :param source_model_lt:
        a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance
    :param in_memory:
        if True, keep in memory the sources, else just collect the TRTs
    :returns:
        an iterator over :class:`openquake.commonlib.source.SourceModel`
        tuples
    """
    # one converter/parser pair is shared across all realizations, so the
    # parser can count how many times each source file is read (see the
    # fname_hits report at the end)
    converter = sourceconverter.SourceConverter(
        oqparam.investigation_time,
        oqparam.rupture_mesh_spacing,
        oqparam.complex_fault_mesh_spacing,
        oqparam.width_of_mfd_bin,
        oqparam.area_source_discretization)
    parser = source.SourceModelParser(converter)

    # consider only the effective realizations
    rlzs = logictree.get_effective_rlzs(source_model_lt)
    samples_by_lt_path = source_model_lt.samples_by_lt_path()
    num_source_models = len(rlzs)
    for i, rlz in enumerate(rlzs):
        sm = rlz.value  # name of the source model
        smpath = rlz.lt_path
        num_samples = samples_by_lt_path[smpath]
        fname = possibly_gunzip(os.path.join(oqparam.base_path, sm))
        if in_memory:
            # fully parse the sources, applying the logic tree uncertainties
            # for this branch path
            apply_unc = source_model_lt.make_apply_uncertainties(smpath)
            try:
                trt_models = parser.parse_trt_models(fname, apply_unc)
            except ValueError as e:
                # these two messages indicate a known kind of obsolete input
                # file that a dedicated tool can fix; re-raise anything else
                if str(e) in ('Surface does not conform with Aki & '
                              'Richards convention',
                              'Edges points are not in the right order'):
                    raise InvalidFile('''\
%s: %s. Probably you are using an obsolete model.
In that case you can fix the file with the command
python -m openquake.engine.tools.correct_complex_sources %s
''' % (fname, e, fname))
                else:
                    raise
        else:  # just collect the TRT models
            smodel = next(
                read_nodes(fname, lambda el: 'sourceModel' in el.tag,
                           source.nodefactory['sourceModel']))
            trt_models = source.TrtModel.collect(smodel)
        trts = [mod.trt for mod in trt_models]
        # side effect: record the TRTs seen so far on the logic tree
        source_model_lt.tectonic_region_types.update(trts)

        gsim_file = oqparam.inputs.get('gsim_logic_tree')
        if gsim_file:  # check TRTs
            # every TRT in the source model must be known to the gsim
            # logic tree, otherwise the input files are inconsistent
            for trt_model in trt_models:
                if trt_model.trt not in gsim_lt.values:
                    raise ValueError(
                        "Found in %r a tectonic region type %r inconsistent "
                        "with the ones in %r" % (sm, trt_model.trt,
                                                 gsim_file))
        else:
            # no gsim logic tree file: build a trivial one from oqparam.gsim
            gsim_lt = logictree.GsimLogicTree.from_(oqparam.gsim)
        weight = rlz.weight / num_samples
        num_gsim_paths = (num_samples
                          if oqparam.number_of_logic_tree_samples
                          else gsim_lt.get_num_paths())
        logging.info('Processed source model %d/%d with %d gsim path(s)',
                     i + 1, num_source_models, num_gsim_paths)
        yield source.SourceModel(
            sm, weight, smpath, trt_models, num_gsim_paths, i, num_samples)

    # log if some source file is being used more than once
    for fname, hits in parser.fname_hits.items():
        if hits > 1:
            logging.info('%s has been considered %d times', fname, hits)