def get_hazard_job(self):
    """Create a simple-fault-demo hazard job and attach a mean PGA
    hazard curve at POINT(1 1) so risk calculators have an input.

    :returns: the created job object
    """
    hazard_imls = [0.001, 0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35,
                   0.4, 0.45, 0.5, 0.55, 0.6, 0.7, 0.8, 0.9, 1.0]
    # probabilities of exceedance, one per IML above
    poes = [0.039861266979, 0.039861266979, 0.0397287574803,
            0.0296134266256, 0.0198273287565, 0.0130622701615,
            0.00865538795, 0.00589852059369, 0.00406169858951,
            0.00281172717953, 0.00199511741778, 0.00135870597285,
            0.000989667841574, 0.000757544444296, 0.000272824002046,
            0.0, 0.0, 0.]
    job = helpers.get_job(
        helpers.get_data_path("simple_fault_demo_hazard/job.ini"),
        intensity_measure_types_and_levels=str({'PGA': hazard_imls}))
    models.HazardSite.objects.create(
        hazard_calculation=job, location="POINT(1 1)")
    # store the curve as a "mean" statistics curve for a 50-year
    # investigation time
    models.HazardCurveData.objects.create(
        hazard_curve=models.HazardCurve.objects.create(
            output=models.Output.objects.create_output(
                job, "Test Hazard curve", "hazard_curve"),
            investigation_time=50,
            imt="PGA", imls=hazard_imls,
            statistics="mean"),
        poes=poes,
        location="POINT(1 1)")
    return job
def get_hazard_job(self):
    """Return a scenario hazard job (1000 GMFs) with ground motion
    fields imported from the test CSV fixture."""
    config = helpers.get_data_path("scenario_hazard/job.ini")
    job = helpers.get_job(config, number_of_ground_motion_fields=1000)
    csv_path = self._test_path('gmf_scenario.csv')
    helpers.create_gmf_from_csv(job, csv_path, 'gmf_scenario')
    return job
def test_del_calc_no_access(self):
    # Deleting a hazard calculation that belongs to a different user is
    # not allowed and must raise a RuntimeError.
    other_users_job = helpers.get_job(
        self.hazard_cfg, username=helpers.random_string())
    self.assertRaises(RuntimeError, engine.del_calc, other_users_job.id)
def test_del_haz_calc(self):
    """Deleting a hazard calculation removes it and all of its outputs."""
    haz_job = helpers.get_job(
        self.hazard_cfg, username=getpass.getuser())
    for curve_name in ('test_curves_1', 'test_curves_2'):
        models.Output.objects.create_output(
            haz_job, curve_name, output_type='hazard_curve')
    # Sanity check: the job and both outputs exist before deletion.
    self.assertEqual(
        1, models.OqJob.objects.filter(id=haz_job.id).count())
    self.assertEqual(
        2, models.Output.objects.filter(oq_job=haz_job.id).count())
    engine.del_haz_calc(haz_job.id)
    # After deletion both the job and its outputs must be gone.
    self.assertEqual(
        0, models.Output.objects.filter(oq_job=haz_job.id).count())
    self.assertEqual(
        0, models.OqJob.objects.filter(id=haz_job.id).count())
def setUpClass(self):
    """Create one fault-based and one source-based probabilistic rupture
    to be shared by the tests of this class."""
    # NOTE(review): conventionally setUpClass takes `cls`, not `self`;
    # left as-is to avoid behavior changes.
    cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    job = helpers.get_job(cfg)
    output = models.Output.objects.create(oq_job=job, display_name='test',
                                          output_type='ses')
    ses_coll = models.SESCollection.create(output=output)
    # 4x4 meshes describing the simple fault surface geometry
    self.mesh_lons = numpy.array([0.1 * x for x in range(16)]).reshape(
        (4, 4))
    self.mesh_lats = numpy.array([0.2 * x for x in range(16)]).reshape(
        (4, 4))
    self.mesh_depths = numpy.array([0.3 * x for x in range(16)]).reshape(
        (4, 4))
    sfs = SimpleFaultSurface(
        Mesh(self.mesh_lons, self.mesh_lats, self.mesh_depths))
    # planar surface defined by strike/dip/rake-like params and 4 corners
    ps = PlanarSurface(10, 20, 30,
                       Point(3.9, 2.2, 10),
                       Point(4.90402718, 3.19634248, 10),
                       Point(5.9, 2.2, 90),
                       Point(4.89746275, 1.20365263, 90))
    self.fault_rupture = models.ProbabilisticRupture.objects.create(
        ses_collection=ses_coll, magnitude=5, rake=0, surface=sfs,
        is_from_fault_source=True, is_multi_surface=False)
    self.source_rupture = models.ProbabilisticRupture.objects.create(
        ses_collection=ses_coll, magnitude=5, rake=0, surface=ps,
        is_from_fault_source=False, is_multi_surface=False)
def test_del_calc(self):
    """Deleting a calculation removes the job and all of its outputs."""
    haz_job = helpers.get_job(
        self.hazard_cfg, username=getpass.getuser())
    for curve_name in ('test_curves_1', 'test_curves_2'):
        models.Output.objects.create_output(
            haz_job, curve_name, output_type='hazard_curve')
    # Sanity check: the job and both outputs exist before deletion.
    self.assertEqual(
        1, models.OqJob.objects.filter(id=haz_job.id).count())
    self.assertEqual(
        2, models.Output.objects.filter(oq_job=haz_job.id).count())
    engine.del_calc(haz_job.id)
    # After deletion both the job and its outputs must be gone.
    self.assertEqual(
        0, models.Output.objects.filter(oq_job=haz_job.id).count())
    self.assertEqual(
        0, models.OqJob.objects.filter(id=haz_job.id).count())
def get_hazard_job(self):
    """Create a simple-fault-demo hazard job and attach a mean PGA
    hazard curve at POINT(1 1); return the job."""
    job = helpers.get_job(
        helpers.get_data_path("simple_fault_demo_hazard/job.ini"))
    # (IML, PoE) pairs defining the mean hazard curve
    hazard_curve = [
        (0.001, 0.0398612669790014), (0.01, 0.039861266979001400),
        (0.05, 0.039728757480298900), (0.10, 0.029613426625612500),
        (0.15, 0.019827328756491600), (0.20, 0.013062270161451900),
        (0.25, 0.008655387950000430), (0.30, 0.005898520593689670),
        (0.35, 0.004061698589511780), (0.40, 0.002811727179526820),
        (0.45, 0.001995117417776690), (0.50, 0.001358705972845710),
        (0.55, 0.000989667841573727), (0.60, 0.000757544444296432),
        (0.70, 0.000272824002045979), (0.80, 0.00), (0.9, 0.00),
        (1.0, 0.00)]
    models.HazardSite.objects.create(
        hazard_calculation=job.hazard_calculation, location="POINT(1 1)")
    # split the pairs into IMLs and PoEs for the curve record
    models.HazardCurveData.objects.create(
        hazard_curve=models.HazardCurve.objects.create(
            output=models.Output.objects.create_output(
                job, "Test Hazard curve", "hazard_curve"),
            investigation_time=50,
            imt="PGA",
            imls=[hz[0] for hz in hazard_curve],
            statistics="mean"),
        poes=[hz[1] for hz in hazard_curve],
        location="POINT(1 1)")
    return job
def test_del_haz_calc_no_access(self):
    # Deleting a hazard calculation owned by another user is not
    # allowed and must raise a RuntimeError.
    foreign_job = helpers.get_job(
        self.hazard_cfg, username=helpers.random_string())
    self.assertRaises(RuntimeError, engine.del_haz_calc, foreign_job.id)
def setUpClass(self):
    """Create one fault-based and one source-based probabilistic rupture
    fixture shared by the tests of this class."""
    # NOTE(review): conventionally setUpClass takes `cls`, not `self`;
    # left as-is to avoid behavior changes.
    cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    job = helpers.get_job(cfg)
    output = models.Output.objects.create(
        oq_job=job, display_name='test', output_type='ses')
    ses_coll = models.SESCollection.create(
        output=output)
    # 4x4 meshes used to build the simple fault surface
    self.mesh_lons = numpy.array(
        [0.1 * x for x in range(16)]).reshape((4, 4))
    self.mesh_lats = numpy.array(
        [0.2 * x for x in range(16)]).reshape((4, 4))
    self.mesh_depths = numpy.array(
        [0.3 * x for x in range(16)]).reshape((4, 4))
    sfs = SimpleFaultSurface(
        Mesh(self.mesh_lons, self.mesh_lats, self.mesh_depths))
    # planar surface with four corner points
    ps = PlanarSurface(
        10, 20, 30,
        Point(3.9, 2.2, 10), Point(4.90402718, 3.19634248, 10),
        Point(5.9, 2.2, 90), Point(4.89746275, 1.20365263, 90))
    self.fault_rupture = models.ProbabilisticRupture.objects.create(
        ses_collection=ses_coll, magnitude=5, rake=0, surface=sfs,
        is_from_fault_source=True, is_multi_surface=False)
    self.source_rupture = models.ProbabilisticRupture.objects.create(
        ses_collection=ses_coll, magnitude=5, rake=0, surface=ps,
        is_from_fault_source=False, is_multi_surface=False)
def get_hazard_job(self):
    """Return a scenario hazard job populated with GMFs read from the
    CSV fixture that lives next to the case_1 test module."""
    job = helpers.get_job(
        helpers.get_data_path("scenario_hazard/job.ini"))
    csv_file = os.path.join(
        os.path.dirname(case_1.__file__), 'gmf_scenario.csv')
    helpers.create_gmf_from_csv(job, csv_file, 'gmf_scenario')
    return job
def get_hazard_job(self):
    """Create a simple-fault-demo hazard job and attach a mean PGA
    hazard curve at POINT(1 1); return the job."""
    hazard_imls = [
        0.001, 0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4,
        0.45, 0.5, 0.55, 0.6, 0.7, 0.8, 0.9, 1.0
    ]
    # probabilities of exceedance, one per IML above
    poes = [
        0.039861266979, 0.039861266979, 0.0397287574803,
        0.0296134266256, 0.0198273287565, 0.0130622701615,
        0.00865538795, 0.00589852059369, 0.00406169858951,
        0.00281172717953, 0.00199511741778, 0.00135870597285,
        0.000989667841574, 0.000757544444296, 0.000272824002046,
        0.0, 0.0, 0.
    ]
    job = helpers.get_job(
        helpers.get_data_path("simple_fault_demo_hazard/job.ini"),
        intensity_measure_types_and_levels=str({'PGA': hazard_imls}))
    models.HazardSite.objects.create(hazard_calculation=job,
                                     location="POINT(1 1)")
    # store the curve as a "mean" statistics curve, 50-year
    # investigation time
    models.HazardCurveData.objects.create(
        hazard_curve=models.HazardCurve.objects.create(
            output=models.Output.objects.create_output(
                job, "Test Hazard curve", "hazard_curve"),
            investigation_time=50,
            imt="PGA", imls=hazard_imls,
            statistics="mean"),
        poes=poes,
        location="POINT(1 1)")
    return job
def test_initialize_site_model(self):
    """Site model records and hazard sites are created on initialization."""
    # this test needs a configuration that includes a site model
    cfg = helpers.get_data_path(
        'simple_fault_demo_hazard/job_with_site_model.ini')
    self.job = helpers.get_job(cfg)
    self.calc = core.ClassicalHazardCalculator(self.job)
    # would raise RuntimeError if the site model did not cover the
    # calculation geometry
    self.calc.initialize_site_model()
    # all site model records should have been imported
    self.assertEqual(
        2601,
        len(models.SiteModel.objects.filter(job=self.job)))
    # `hazard_site` must contain one row per point of interest; for now
    # only the count is checked
    expected_count = len(
        self.job.hazard_calculation.points_to_compute())
    stored_sites = models.HazardSite.objects.filter(
        hazard_calculation=self.job.hazard_calculation)
    self.assertEqual(expected_count, len(stored_sites))
def setUp(self):
    """Prepare an event-based calculator over a 5-site mock collection."""
    self.cfg = helpers.get_data_path('event_based_hazard/job_2.ini')
    self.job = helpers.get_job(self.cfg, username=getpass.getuser())
    self.calc = core.EventBasedHazardCalculator(self.job)
    # replace the real site collection with a small synthetic one
    haz_calc = self.job.hazard_calculation
    haz_calc._site_collection = make_site_coll(0, 0, n=5)
    models.JobStats.objects.create(oq_job=self.job)
def setUpClass(cls):
    """Build the hazard calculator and initialize its site model."""
    config = helpers.get_data_path(
        "calculators/hazard/classical/haz_map_test_job.ini")
    job = helpers.get_job(config)
    models.JobStats.objects.create(oq_job=job)
    hc = job.hazard_calculation
    calc_cls = get_calculator_class("hazard", hc.calculation_mode)
    cls.calc = calc_cls(job)
    cls.calc.initialize_site_model()
    # the fixture is expected to define exactly two sites
    assert len(hc.site_collection) == 2, len(hc.site_collection)
def setUp(self):
    """Create a job plus a random number (1-10) of logic tree
    realizations, each with decreasing weight."""
    config = helpers.get_data_path("simple_fault_demo_hazard/job.ini")
    self.job = helpers.get_job(config, username="******")
    for ordinal in range(0, random.randint(1, 10)):
        source_model = models.LtSourceModel.objects.create(
            hazard_calculation=self.job.hazard_calculation,
            ordinal=ordinal,
            sm_lt_path=[ordinal])
        models.LtRealization(
            lt_model=source_model,
            ordinal=ordinal,
            weight=1 / (ordinal + 1),
            gsim_lt_path=[ordinal]).save()
def setUpClass(cls):
    """Instantiate the calculator and load its two-site collection."""
    config = helpers.get_data_path(
        'calculators/hazard/classical/haz_map_test_job.ini')
    job = helpers.get_job(config)
    models.JobStats.objects.create(oq_job=job)
    cls.calc = calculators(job)
    cls.calc.initialize_site_collection()
    # the fixture is expected to define exactly two sites
    site_count = len(cls.calc.site_collection)
    assert site_count == 2, site_count
def get_hazard_job(self):
    """Return an event-based hazard job (single SES, no grid spacing)
    with ground motion fields loaded from the test CSV."""
    job = helpers.get_job(
        helpers.get_data_path("event_based_hazard/job.ini"),
        region_grid_spacing='0',
        ses_per_logic_tree_path='1')
    job.save()
    gmf_csv = self._test_path('gmf.csv')
    helpers.create_gmf_from_csv(job, gmf_csv)
    return job
def get_hazard_job(self):
    """Return a scenario hazard job with CSV GMFs and the number of
    ground motion fields set explicitly."""
    config = helpers.get_data_path("scenario_hazard/job.ini")
    job = helpers.get_job(config)
    helpers.create_gmf_from_csv(
        job, self._test_path('gmf_scenario.csv'), 'gmf_scenario')
    # the GetterBuilder requires this parameter to be set explicitly
    job.hazard_calculation.number_of_ground_motion_fields = 1000
    job.hazard_calculation.save()
    return job
def test(self):
    """initialize_sources must reject tectonic region types that are
    inconsistent with the GSIM logic tree."""
    job_path = helpers.get_data_path('bad_gsim/job.ini')
    job = helpers.get_job(job_path, username=getpass.getuser())
    calc = core.EventBasedHazardCalculator(job)
    with self.assertRaises(ValueError) as ctxt:
        calc.initialize_sources()
    errmsg = str(ctxt.exception)
    assert errmsg.startswith(
        "Found in 'source_model.xml' a tectonic region type "
        "'Active Shallow Crust' inconsistent with the ones"), errmsg
def test_check_limits_event_based(self):
    # Based on a demo with 2 realizations, 5 ses, 2 imt and 121 sites.
    job = helpers.get_job(
        helpers.get_data_path('event_based_hazard/job.ini'))
    models.JobStats.objects.create(oq_job=job)
    calc = calculators(job)
    input_weight, output_weight = calc.pre_execute()
    # expected weights for this demo configuration
    self.assertEqual(input_weight, 2705.5)
    self.assertAlmostEqual(output_weight, 1210.0)
def setUp(self):
    """Create a job plus a random number (1-10) of logic tree
    realizations, each with decreasing weight."""
    path = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    self.job = helpers.get_job(path, username="******")
    for idx in range(0, random.randint(1, 10)):
        lt_model = models.LtSourceModel.objects.create(
            hazard_calculation=self.job, ordinal=idx, sm_lt_path=[idx])
        models.LtRealization(lt_model=lt_model,
                             ordinal=idx,
                             weight=1 / (idx + 1),
                             gsim_lt_path=[idx]).save()
def test(self):
    """Source initialization must reject tectonic region types that are
    inconsistent with the GSIM logic tree."""
    job_path = helpers.get_data_path('bad_gsim/job.ini')
    job = helpers.get_job(job_path, username=getpass.getuser())
    calc = core.EventBasedHazardCalculator(job)
    with self.assertRaises(ValueError) as ctxt:
        calc.initialize_site_collection()
        calc.initialize_sources()
    errmsg = str(ctxt.exception)
    assert errmsg.startswith(
        "Found in 'source_model.xml' a tectonic region type "
        "'Active Shallow Crust' inconsistent with the ones"), errmsg
def test_check_limits_event_based(self):
    # Based on a demo with 2 realizations, 5 ses, 2 imt and 121 sites.
    config = helpers.get_data_path('event_based_hazard/job.ini')
    job = helpers.get_job(config)
    models.JobStats.objects.create(oq_job=job)
    calc = calculators(job)
    input_weight, output_weight = calc.pre_execute()
    # expected weights for this demo configuration
    self.assertEqual(input_weight, 2705.5)
    self.assertAlmostEqual(output_weight, 1210.0)
def test_check_limits_event_based(self):
    # Based on a demo with 2 realizations, 5 ses, 2 imt and 121 sites.
    config = helpers.get_data_path("event_based_hazard/job.ini")
    job = helpers.get_job(config)
    models.JobStats.objects.create(oq_job=job)
    hc = job.hazard_calculation
    calc_cls = get_calculator_class("hazard", hc.calculation_mode)
    calc = calc_cls(job)
    input_weight, output_weight = calc.pre_execute()
    # expected weights for this demo configuration
    self.assertEqual(input_weight, 1352.75)
    self.assertAlmostEqual(output_weight, 12.1)
def setUp(self):
    """Build source-model and GMPE logic trees from a classical job."""
    # this is an example with number_of_logic_tree_samples = 1
    cfg = helpers.get_data_path('classical_job.ini')
    job = helpers.get_job(cfg)
    hc = job.hazard_calculation
    # seeded RNG so sampling in the tests is reproducible
    self.rnd = random.Random(hc.random_seed)
    self.source_model_lt = logictree.SourceModelLogicTree.from_hc(hc)
    sm = models.LtSourceModel(
        hazard_calculation=hc, ordinal=0, sm_lt_path=[],
        sm_name='sm test', weight=None)
    self.gmpe_lt = sm.make_gsim_lt(
        ['Active Shallow Crust', 'Subduction Interface'])
def test_site_collection_and_ses_collection(self):
    """Check the reference site parameters and the deterministic
    tags/seeds of the scenario SES collection."""
    cfg = helpers.get_data_path('scenario_hazard/job.ini')
    job = helpers.get_job(cfg, username=getpass.getuser())
    models.JobStats.objects.create(oq_job=job)
    calc = scen_core.ScenarioHazardCalculator(job)
    calc.initialize_site_model()
    site_coll = job.hazard_calculation.site_collection
    # all of the parameters should be the same:
    self.assertTrue((site_coll.vs30 == 760).all())
    self.assertTrue((site_coll.vs30measured).all())
    self.assertTrue((site_coll.z1pt0 == 100).all())
    self.assertTrue((site_coll.z2pt5 == 5).all())
    # just for sanity, make sure the meshes are correct (the locations)
    job_mesh = job.hazard_calculation.points_to_compute()
    self.assertTrue((job_mesh.lons == site_coll.mesh.lons).all())
    self.assertTrue((job_mesh.lats == site_coll.mesh.lats).all())
    # test SESCollection
    calc.initialize_sources()
    calc.create_ruptures()
    ses_coll = models.SESCollection.objects.get(
        output__oq_job=job, output__output_type='ses')
    # ruptures get deterministic tags and seeds
    expected_tags = [
        'scenario-0000000000', 'scenario-0000000001',
        'scenario-0000000002', 'scenario-0000000003',
        'scenario-0000000004', 'scenario-0000000005',
        'scenario-0000000006', 'scenario-0000000007',
        'scenario-0000000008', 'scenario-0000000009',
    ]
    expected_seeds = [
        511025145, 1168723362, 794472670, 1296908407, 1343724121,
        140722153, 28278046, 1798451159, 556958504, 503221907]
    for ses in ses_coll:  # there is a single ses
        self.assertEqual(ses.ordinal, 1)
        for ses_rup, tag, seed in zip(ses, expected_tags, expected_seeds):
            self.assertEqual(ses_rup.ses_id, 1)
            self.assertEqual(ses_rup.tag, tag)
            self.assertEqual(ses_rup.seed, seed)
def get_hazard_job(self):
    """Return an event-based hazard job whose hazard calculation is
    replaced by a fresh one (single SES, 50-year investigation time),
    with ground motion fields loaded from CSV."""
    job = helpers.get_job(
        helpers.get_data_path("event_based_hazard/job.ini"))
    # swap in a new HazardCalculation keeping the original truncation,
    # distance and IMT/level settings
    job.hazard_calculation = models.HazardCalculation.objects.create(
        truncation_level=job.hazard_calculation.truncation_level,
        maximum_distance=job.hazard_calculation.maximum_distance,
        intensity_measure_types_and_levels=(
            job.hazard_calculation.intensity_measure_types_and_levels),
        calculation_mode="event_based",
        investigation_time=50,
        ses_per_logic_tree_path=1)
    job.save()
    helpers.create_gmf_from_csv(job, self._test_path('gmf.csv'))
    return job
def setUpClass(self):
    """Create fault-based and source-based ruptures, complete with the
    logic tree and TRT model scaffolding they reference."""
    # NOTE(review): conventionally setUpClass takes `cls`, not `self`;
    # left as-is to avoid behavior changes.
    cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    job = helpers.get_job(cfg)
    lt_model = models.LtSourceModel.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=0, sm_lt_path='foo')
    lt_rlz = models.LtRealization.objects.create(
        lt_model=lt_model, ordinal=0, gsim_lt_path='bar', weight=1)
    output = models.Output.objects.create(
        oq_job=job, display_name='test', output_type='ses')
    ses_coll = models.SESCollection.objects.create(
        output=output, lt_model=lt_rlz.lt_model, ordinal=0)
    # 4x4 meshes defining the simple fault surface
    self.mesh_lons = numpy.array(
        [0.1 * x for x in range(16)]).reshape((4, 4))
    self.mesh_lats = numpy.array(
        [0.2 * x for x in range(16)]).reshape((4, 4))
    self.mesh_depths = numpy.array(
        [0.3 * x for x in range(16)]).reshape((4, 4))
    sfs = SimpleFaultSurface(
        Mesh(self.mesh_lons, self.mesh_lats, self.mesh_depths))
    # planar surface with four corner points
    ps = PlanarSurface(
        10, 20, 30,
        Point(3.9, 2.2, 10), Point(4.90402718, 3.19634248, 10),
        Point(5.9, 2.2, 90), Point(4.89746275, 1.20365263, 90))
    trt = 'Active Shallow Crust'
    trt_model = models.TrtModel.objects.create(
        lt_model=lt_model,
        tectonic_region_type=trt,
        num_sources=0,
        num_ruptures=1,
        min_mag=5,
        max_mag=5,
        gsims=['testGSIM'])
    self.fault_rupture = models.ProbabilisticRupture.objects.create(
        ses_collection=ses_coll, magnitude=5, rake=0, surface=sfs,
        trt_model=trt_model,
        is_from_fault_source=True, is_multi_surface=False)
    self.source_rupture = models.ProbabilisticRupture.objects.create(
        ses_collection=ses_coll, magnitude=5, rake=0, surface=ps,
        trt_model=trt_model,
        is_from_fault_source=False, is_multi_surface=False)
def setUp(self):
    """Monkeypatch BranchSet.apply_uncertainty to record invocations,
    then build source-model and GMPE logic trees for the tests."""
    cfg = helpers.get_data_path('classical_job.ini')
    job = helpers.get_job(cfg)
    self.uncertainties_applied = []

    def apply_uncertainty(branchset, value, source):
        # record the (uncertainty_type, value) pair instead of
        # actually applying the uncertainty
        fingerprint = (branchset.uncertainty_type, value)
        self.uncertainties_applied.append(fingerprint)
    # keep a reference to the original so it can be restored later
    self.original_apply_uncertainty = logictree.BranchSet.apply_uncertainty
    logictree.BranchSet.apply_uncertainty = apply_uncertainty
    hc = job.hazard_calculation
    self.source_model_lt = logictree.SourceModelLogicTree.from_hc(hc)
    sm = models.LtSourceModel(
        hazard_calculation=hc, ordinal=0, sm_lt_path=[],
        sm_name='sm test', weight=None)
    self.gmpe_lt = sm.make_gsim_lt(
        ['Active Shallow Crust', 'Subduction Interface'])
def test_create_risk_calculation(self):
    """A RiskCalculation can be created from a hazard output plus a
    dict of parameters, and its fields round-trip through the db."""
    # we need an hazard output to create a risk calculation
    hazard_cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    hazard_job = helpers.get_job(hazard_cfg, 'openquake')
    hc = hazard_job.get_oqparam()
    lt_model = models.LtSourceModel.objects.create(
        hazard_calculation=hazard_job, ordinal=1, sm_lt_path="test_sm")
    rlz = models.LtRealization.objects.create(
        lt_model=lt_model, ordinal=1, weight=None,
        gsim_lt_path="test_gsim")
    hazard_output = models.HazardCurve.objects.create(
        lt_realization=rlz,
        output=models.Output.objects.create_output(
            hazard_job, "Test Hazard output", "hazard_curve"),
        investigation_time=hc.investigation_time,
        imt="PGA", imls=[0.1, 0.2, 0.3])
    params = {
        'hazard_output_id': hazard_output.output.id,
        'base_path': 'path/to/job.ini',
        'export_dir': '/tmp/xxx',
        'calculation_mode': 'classical_risk',
        # just some sample params
        'lrem_steps_per_interval': 5,
        'conditional_loss_poes': '0.01, 0.02, 0.05',
        'region_constraint': [(-0.5, 0.5), (0.5, 0.5), (0.5, -0.5),
                              (-0.5, -0.5)],
    }
    rc = engine.create_calculation(models.RiskCalculation, params)
    # Normalize/clean fields by fetching a fresh copy from the db.
    rc = models.RiskCalculation.objects.get(id=rc.id)
    self.assertEqual(rc.calculation_mode, 'classical_risk')
    self.assertEqual(rc.lrem_steps_per_interval, 5)
    self.assertEqual(rc.conditional_loss_poes, [0.01, 0.02, 0.05])
    # the region constraint is stored as a closed WKT polygon
    self.assertEqual(
        rc.region_constraint.wkt,
        ('POLYGON ((-0.5000000000000000 0.5000000000000000, '
         '0.5000000000000000 0.5000000000000000, '
         '0.5000000000000000 -0.5000000000000000, '
         '-0.5000000000000000 -0.5000000000000000, '
         '-0.5000000000000000 0.5000000000000000))'))
def test_check_limits_classical(self):
    # Based on a demo with 3 realizations, 2 sites and 4 rlzs.
    job = helpers.get_job(helpers.get_data_path(
        'calculators/hazard/classical/haz_map_test_job.ini'))
    models.JobStats.objects.create(oq_job=job)
    calc = calculators(job)
    input_weight, output_weight = calc.pre_execute()
    self.assertEqual(input_weight, 225)
    self.assertEqual(output_weight, 24)
    # an artificially low input limit must trigger InputWeightLimit
    calc.max_input_weight = 1
    with self.assertRaises(general.InputWeightLimit):
        calc.check_limits(input_weight, output_weight)
    # relax the input limit, constrain the output limit instead
    calc.max_input_weight = 1000
    calc.max_output_weight = 1
    with self.assertRaises(general.OutputWeightLimit):
        calc.check_limits(input_weight, output_weight)
def test_check_limits_classical(self):
    # Based on a demo with 3 realizations, 2 sites and 4 rlzs.
    config = helpers.get_data_path(
        "calculators/hazard/classical/haz_map_test_job.ini")
    job = helpers.get_job(config)
    models.JobStats.objects.create(oq_job=job)
    hc = job.hazard_calculation
    calc_cls = get_calculator_class("hazard", hc.calculation_mode)
    calc = calc_cls(job)
    input_weight, output_weight = calc.pre_execute()
    self.assertEqual(input_weight, 225)
    self.assertEqual(output_weight, 24)
    # an artificially low input limit must trigger InputWeightLimit
    calc.max_input_weight = 1
    with self.assertRaises(general.InputWeightLimit):
        calc.check_limits(input_weight, output_weight)
    # relax the input limit, constrain the output limit instead
    calc.max_input_weight = 1000
    calc.max_output_weight = 1
    with self.assertRaises(general.OutputWeightLimit):
        calc.check_limits(input_weight, output_weight)
def test_get_site_collection_with_reference_parameters(self):
    """Every site must carry the reference parameters from the config."""
    config = helpers.get_data_path('scenario_hazard/job.ini')
    job = helpers.get_job(config, username=getpass.getuser())
    models.JobStats.objects.create(oq_job=job)
    calc = scen_core.ScenarioHazardCalculator(job)
    calc.initialize_site_model()
    site_coll = job.hazard_calculation.site_collection
    # the reference values are uniform across the whole collection
    self.assertTrue((site_coll.vs30 == 760).all())
    self.assertTrue((site_coll.vs30measured).all())
    self.assertTrue((site_coll.z1pt0 == 100).all())
    self.assertTrue((site_coll.z2pt5 == 5).all())
    # sanity: the collection mesh matches the points to compute
    job_mesh = job.hazard_calculation.points_to_compute()
    self.assertTrue((job_mesh.lons == site_coll.mesh.lons).all())
    self.assertTrue((job_mesh.lats == site_coll.mesh.lats).all())
def test_site_collection_and_ses_collection(self):
    """Check the reference site parameters and the deterministic
    tags/seeds of the scenario SES collection."""
    cfg = helpers.get_data_path('scenario_hazard/job.ini')
    job = helpers.get_job(cfg, username=getpass.getuser())
    models.JobStats.objects.create(oq_job=job)
    calc = scen_core.ScenarioHazardCalculator(job)
    calc.initialize_site_collection()
    site_coll = calc.site_collection
    # all of the parameters should be the same:
    self.assertTrue((site_coll.vs30 == 760).all())
    self.assertTrue((site_coll.vs30measured).all())
    self.assertTrue((site_coll.z1pt0 == 100).all())
    self.assertTrue((site_coll.z2pt5 == 5).all())
    # test SESCollection
    calc.create_ruptures()
    ses_coll = models.SESCollection.objects.get(
        output__oq_job=job, output__output_type='ses')
    # ruptures get deterministic tags and seeds
    expected_tags = [
        'scenario-0000000000', 'scenario-0000000001',
        'scenario-0000000002', 'scenario-0000000003',
        'scenario-0000000004', 'scenario-0000000005',
        'scenario-0000000006', 'scenario-0000000007',
        'scenario-0000000008', 'scenario-0000000009',
    ]
    expected_seeds = [
        511025145, 1168723362, 794472670, 1296908407, 1343724121,
        140722153, 28278046, 1798451159, 556958504, 503221907
    ]
    for ses in ses_coll:  # there is a single ses
        self.assertEqual(ses.ordinal, 1)
        for ses_rup, tag, seed in zip(ses, expected_tags, expected_seeds):
            self.assertEqual(ses_rup.ses_id, 1)
            self.assertEqual(ses_rup.tag, tag)
            self.assertEqual(ses_rup.seed, seed)
def test_get_site_collection_with_site_model(self):
    """Per-site attribute arrays must match the collection length."""
    config = helpers.get_data_path(
        'simple_fault_demo_hazard/job_with_site_model.ini')
    job = helpers.get_job(config)
    models.JobStats.objects.create(oq_job=job)
    calc = cls_core.ClassicalHazardCalculator(job)
    # bootstrap the `hazard_site` table
    calc.initialize_site_collection()
    calc.initialize_sources()
    site_coll = calc.site_collection
    # The site model is pretty big, so rather than checking every
    # value, just verify that each per-site attribute has one entry
    # per point of interest in the calculation.
    expected_len = len(site_coll)
    for attribute in (site_coll.vs30, site_coll.vs30measured,
                      site_coll.z1pt0, site_coll.z2pt5):
        self.assertEqual(expected_len, len(attribute))
def setUpClass(cls):
    """Create two source models, two realizations, two SES collections
    and GMF records for the tests of this class."""
    cfg = helpers.get_data_path('event_based_hazard/job.ini')
    job = helpers.get_job(cfg)
    lt_model = models.LtSourceModel.objects.create(
        hazard_calculation=job.hazard_calculation, ordinal=1,
        sm_lt_path="test_sm")
    lt_model_2 = models.LtSourceModel.objects.create(
        hazard_calculation=job.hazard_calculation, ordinal=2,
        sm_lt_path="test_sm_2")
    rlz1 = models.LtRealization.objects.create(
        lt_model=lt_model, ordinal=1, seed=1, weight=None,
        gsim_lt_path="test_gsim")
    rlz2 = models.LtRealization.objects.create(
        lt_model=lt_model, ordinal=2, seed=1, weight=None,
        gsim_lt_path="test_gsim_2")
    ses_coll = models.SESCollection.objects.create(
        output=models.Output.objects.create_output(
            job, "Test SES Collection 1", "ses"),
        lt_model=lt_model, ordinal=1)
    # create a second SESCollection; this is to avoid regressions
    # in models.Gmf.__iter__ which should yield a single
    # GmfSet even if there are several SES collections
    models.SESCollection.objects.create(
        output=models.Output.objects.create_output(
            job, "Test SES Collection 2", "ses"),
        lt_model=lt_model_2, ordinal=2)
    gmf_data1 = helpers.create_gmf_data_records(job, rlz1, ses_coll)[0]
    # explicit (lon, lat) points for the second set of GMF records
    points = [(15.3, 38.22), (15.7, 37.22), (15.4, 38.09),
              (15.56, 38.1), (15.2, 38.2)]
    gmf_data2 = helpers.create_gmf_data_records(
        job, rlz2, ses_coll, points)[0]
    cls.gmf1 = gmf_data1.gmf  # a Gmf instance
    cls.ruptures1 = tuple(get_tags(gmf_data1))
    cls.ruptures2 = tuple(get_tags(gmf_data2))
    cls.investigation_time = job.hazard_calculation.investigation_time
def _setup_a_new_calculator(self):
    """Return a fresh (job, calculator) pair for the disaggregation demo."""
    config = helpers.get_data_path('disaggregation/job.ini')
    new_job = helpers.get_job(config, username=getpass.getuser())
    return new_job, core.DisaggHazardCalculator(new_job)
def get_hazard_job(self):
    """Return a scenario hazard job with GMF data populated from the
    test CSV fixture."""
    job = helpers.get_job(
        helpers.get_data_path("scenario_hazard/job.ini"))
    helpers.populate_gmf_data_from_csv(
        job, self._test_path('gmf_scenario.csv'))
    return job
def get_hazard_job(self):
    """Return a scenario hazard job with GMFs created from the test CSV."""
    config = helpers.get_data_path("scenario_hazard/job.ini")
    job = helpers.get_job(config)
    helpers.create_gmf_from_csv(
        job, self._test_path('gmf_scenario.csv'), 'gmf_scenario')
    return job
def get_hazard_job(self):
    """Return a scenario hazard job (1000 GMFs) with GMFs created from
    the test CSV fixture."""
    config = helpers.get_data_path("scenario_hazard/job.ini")
    job = helpers.get_job(config, number_of_ground_motion_fields=1000)
    helpers.create_gmf_from_csv(
        job, self._test_path('gmf_scenario.csv'), 'gmf_scenario')
    return job
def get_hazard_job(self):
    """Return a scenario hazard job populated with GMFs read from the
    CSV fixture that lives next to the case_1 test module."""
    job = helpers.get_job(helpers.get_data_path("scenario_hazard/job.ini"))
    csv_path = os.path.join(
        os.path.dirname(case_1.__file__), 'gmf_scenario.csv')
    helpers.create_gmf_from_csv(job, csv_path, 'gmf_scenario')
    return job
def get_hazard_job(self):
    """Return an event-based hazard job with GMFs imported from CSV."""
    job = helpers.get_job(
        helpers.get_data_path("event_based_hazard/job.ini"))
    gmf_csv = self._test_path('gmf.csv')
    helpers.create_gmf_from_csv(job, gmf_csv)
    return job
def _setup_a_new_calculator(self):
    """Return a fresh (job, calculator) pair for the classical demo."""
    config = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    new_job = helpers.get_job(config, username=getpass.getuser())
    return new_job, core.ClassicalHazardCalculator(new_job)