def test_initialize_site_model(self):
    """`initialize_site_model` must store the parsed site model records
    and compute per-point `SiteData` for the calculation geometry.
    """
    # we need a slightly different config file for this test
    cfg = helpers.demo_file(
        'simple_fault_demo_hazard/job_with_site_model.ini')
    self.job = helpers.get_hazard_job(cfg)
    self.calc = core.ClassicalHazardCalculator(self.job)

    # If the site model isn't valid for the calculation geometry, a
    # `RuntimeError` should be raised here
    self.calc.initialize_site_model()

    # Okay, it's all good. Now check the count of the site model records.
    [site_model_inp] = models.inputs4hcalc(
        self.job.hazard_calculation.id, input_type='site_model')
    sm_nodes = models.SiteModel.objects.filter(input=site_model_inp)
    self.assertEqual(2601, len(sm_nodes))

    num_pts_to_compute = len(
        self.job.hazard_calculation.points_to_compute())
    [site_data] = models.SiteData.objects.filter(
        hazard_calculation=self.job.hazard_calculation.id)

    # The site model is good. Now test that `site_data` was computed.
    # For now, just test the lengths of the site data collections:
    self.assertEqual(num_pts_to_compute, len(site_data.lons))
    self.assertEqual(num_pts_to_compute, len(site_data.lats))
    self.assertEqual(num_pts_to_compute, len(site_data.vs30s))
    self.assertEqual(num_pts_to_compute, len(site_data.vs30_measured))
    self.assertEqual(num_pts_to_compute, len(site_data.z1pt0s))
    self.assertEqual(num_pts_to_compute, len(site_data.z2pt5s))
def get_hazard_job(self):
    """Build an event-based hazard job and load ground motion fields
    from the ``gmf.csv`` fixture that lives next to this test module.
    """
    cfg = helpers.get_data_path("event_based_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(cfg)
    csv_path = os.path.join(os.path.dirname(__file__), 'gmf.csv')
    helpers.create_gmf_from_csv(hazard_job, csv_path)
    return hazard_job
def setUp(self):
    """Patch the supervising helpers and build a classical hazard job.

    The patches are started here and stopped in ``tearDown`` to avoid
    deeply nested ``with`` statements; see
    http://www.voidspace.org.uk/python/mock/patch.html#patch-methods-start-and-stop
    """
    self.patchers = []

    def start_patch(attr_path):
        # Patch `attr_path` and expose the resulting mock as an
        # attribute named after the last path component
        # (e.g. `self.is_pid_running`).
        _, attr = attr_path.rsplit('.', 1)
        patcher = patch(attr_path)
        self.patchers.append(patcher)
        setattr(self, attr, patcher.start())

    start_patch('openquake.engine.supervising.is_pid_running')
    # Patch the actions taken by the supervisor
    # NOTE(review): the next call relies on a backslash line
    # continuation inside the string literal; the continuation line
    # must stay at column 0 or spaces leak into the patch path.
    start_patch('openquake.engine.supervising.supervisor.\
record_job_stop_time')
    start_patch(
        'openquake.engine.supervising.supervisor.cleanup_after_job')
    start_patch('openquake.engine.supervising.supervisor.terminate_job')
    start_patch('openquake.engine.supervising.supervisor.get_job_status')
    start_patch('openquake.engine.supervising.supervisor'
                '.update_job_status')

    # Silence log noise during the test run.
    logging.root.setLevel(logging.CRITICAL)

    cfg = get_data_path('end-to-end-hazard-risk/job_haz_classical.ini')
    self.job = get_hazard_job(cfg)
def setUp(self):
    """Prepare an event-based hazard calculator whose site collection
    is pre-seeded with a small 5-site collection.
    """
    self.cfg = helpers.get_data_path('event_based_hazard/job_2.ini')
    self.job = helpers.get_hazard_job(
        self.cfg, username=getpass.getuser())
    self.calc = core.EventBasedHazardCalculator(self.job)

    # Seed the site collection cache so the calculator does not have to
    # build it from the configuration.
    calc_id = self.job.hazard_calculation.id
    models.SiteCollection.cache[calc_id] = make_site_coll(0, 0, n=5)
    models.JobStats.objects.create(oq_job=self.job)
def get_hazard_job(self):
    """Build a hazard job with one canned mean PGA hazard curve at
    POINT(1 1) and return it.
    """
    job = helpers.get_hazard_job(
        helpers.get_data_path("simple_fault_demo_hazard/job.ini"))

    # (IML, PoE) pairs making up the canned hazard curve.
    hazard_curve = [
        (0.001, 0.0398612669790014), (0.01, 0.039861266979001400),
        (0.05, 0.039728757480298900), (0.10, 0.029613426625612500),
        (0.15, 0.019827328756491600), (0.20, 0.013062270161451900),
        (0.25, 0.008655387950000430), (0.30, 0.005898520593689670),
        (0.35, 0.004061698589511780), (0.40, 0.002811727179526820),
        (0.45, 0.001995117417776690), (0.50, 0.001358705972845710),
        (0.55, 0.000989667841573727), (0.60, 0.000757544444296432),
        (0.70, 0.000272824002045979), (0.80, 0.00),
        (0.9, 0.00), (1.0, 0.00)]

    models.HazardCurveData.objects.create(
        hazard_curve=models.HazardCurve.objects.create(
            output=models.Output.objects.create_output(
                job, "Test Hazard curve", "hazard_curve"),
            investigation_time=50,
            imt="PGA",
            imls=[hz[0] for hz in hazard_curve],
            statistics="mean"),
        poes=[hz[1] for hz in hazard_curve],
        location="POINT(1 1)")
    return job
def hazard_id(self):
    """Create a scenario GMF output (IMT=PGA) from the
    ``gmf_scenario.csv`` fixture and return its id.
    """
    job = helpers.get_hazard_job(
        helpers.demo_file("scenario_hazard/job.ini"))
    hc = job.hazard_calculation
    # Replace the parsed calculation with a minimal scenario one.
    job.hazard_calculation = models.HazardCalculation.objects.create(
        owner=hc.owner, truncation_level=hc.truncation_level,
        maximum_distance=hc.maximum_distance,
        intensity_measure_types=["PGA"],
        calculation_mode="scenario")
    job.status = "complete"
    job.save()

    output = models.Output.objects.create_output(
        job, "Test Hazard output", "gmf_scenario")
    fname = os.path.join(os.path.dirname(__file__), 'gmf_scenario.csv')
    with open(fname, 'rb') as csvfile:
        gmfreader = csv.reader(csvfile, delimiter=',')
        # First CSV row holds the site locations; the remaining rows
        # hold ground motion values, one row per site.
        locations = gmfreader.next()
        arr = numpy.array([[float(x) for x in row] for row in gmfreader])
        for i, gmvs in enumerate(arr):
            models.GmfScenario.objects.create(
                output=output,
                imt="PGA",
                gmvs=gmvs,
                location="POINT(%s)" % locations[i])
    return output.id
def hazard_id(self):
    """Create a canned mean PGA hazard curve at POINT(1 1) and return
    the id of its output.
    """
    job = helpers.get_hazard_job(
        helpers.demo_file("simple_fault_demo_hazard/job.ini"))

    # (IML, PoE) pairs making up the canned hazard curve.
    hazard_curve = [
        (0.001, 0.0398612669790014), (0.01, 0.039861266979001400),
        (0.05, 0.039728757480298900), (0.10, 0.029613426625612500),
        (0.15, 0.019827328756491600), (0.20, 0.013062270161451900),
        (0.25, 0.008655387950000430), (0.30, 0.005898520593689670),
        (0.35, 0.004061698589511780), (0.40, 0.002811727179526820),
        (0.45, 0.001995117417776690), (0.50, 0.001358705972845710),
        (0.55, 0.000989667841573727), (0.60, 0.000757544444296432),
        (0.70, 0.000272824002045979), (0.80, 0.00),
        (0.9, 0.00), (1.0, 0.00)]

    hd = models.HazardCurveData.objects.create(
        hazard_curve=models.HazardCurve.objects.create(
            output=models.Output.objects.create_output(
                job, "Test Hazard curve", "hazard_curve"),
            investigation_time=50,
            imt="PGA",
            imls=[hz[0] for hz in hazard_curve],
            statistics="mean"),
        poes=[hz[1] for hz in hazard_curve],
        location="POINT(1 1)")
    return hd.hazard_curve.output.id
def test_del_haz_calc(self):
    """Deleting a hazard calculation removes both the calculation and
    all of its outputs from the database.
    """
    hazard_job = helpers.get_hazard_job(
        self.hazard_cfg, username=getpass.getuser())
    hazard_calc = hazard_job.hazard_calculation
    for display_name in ('test_curves_1', 'test_curves_2'):
        models.Output.objects.create_output(
            hazard_job, display_name, output_type='hazard_curve')

    # Sanity check: the calculation and both outputs exist beforehand.
    self.assertEqual(
        1,
        models.HazardCalculation.objects.filter(
            id=hazard_calc.id).count())
    self.assertEqual(
        2, models.Output.objects.filter(oq_job=hazard_job.id).count())

    # Delete the calculation.
    engine.del_haz_calc(hazard_calc.id)

    # The calculation and its outputs must now be gone.
    self.assertEqual(
        0, models.Output.objects.filter(oq_job=hazard_job.id).count())
    self.assertEqual(
        0,
        models.HazardCalculation.objects.filter(
            id=hazard_calc.id).count())
def test_initialize_site_model(self):
    """`initialize_site_model` must store the parsed site model records
    and one `HazardSite` per point of the calculation geometry.
    """
    # we need a slightly different config file for this test
    cfg = helpers.get_data_path(
        'simple_fault_demo_hazard/job_with_site_model.ini')
    self.job = helpers.get_hazard_job(cfg)
    self.calc = core.ClassicalHazardCalculator(self.job)

    # If the site model isn't valid for the calculation geometry, a
    # `RuntimeError` should be raised here
    self.calc.initialize_site_model()

    # Okay, it's all good. Now check the count of the site model records.
    sm_nodes = models.SiteModel.objects.filter(job=self.job)
    self.assertEqual(2601, len(sm_nodes))

    num_pts_to_compute = len(
        self.job.hazard_calculation.points_to_compute())
    hazard_site = models.HazardSite.objects.filter(
        hazard_calculation=self.job.hazard_calculation)

    # The site model is good. Now test that `hazard_site` was computed.
    # For now, just test the length.
    self.assertEqual(num_pts_to_compute, len(hazard_site))
def hazard_id(self):
    """Create a scenario GMF output (IMT=MMI) from the
    ``gmf_scenario.csv`` fixture and return its id.
    """
    job = helpers.get_hazard_job(
        helpers.demo_file("scenario_hazard/job.ini"))
    # Replace the parsed calculation with a minimal scenario one.
    job.hazard_calculation = models.HazardCalculation.objects.create(
        owner=job.hazard_calculation.owner,
        truncation_level=job.hazard_calculation.truncation_level,
        maximum_distance=job.hazard_calculation.maximum_distance,
        intensity_measure_types_and_levels=(
            job.hazard_calculation.intensity_measure_types_and_levels),
        calculation_mode="scenario")
    job.save()

    output = models.Output.objects.create_output(
        job, "Test Hazard output", "gmf_scenario")
    fname = os.path.join(os.path.dirname(__file__), 'gmf_scenario.csv')
    with open(fname, 'rb') as csvfile:
        gmfreader = csv.reader(csvfile, delimiter=',')
        # First CSV row holds the site locations; the remaining rows
        # hold GMVs, one column per location (hence the transpose).
        locations = gmfreader.next()
        arr = numpy.array([map(float, row) for row in gmfreader])
        for i, gmvs in enumerate(arr.transpose()):
            models.GmfScenario.objects.create(
                output=output,
                imt="MMI",
                gmvs=gmvs,
                result_grp_ordinal=1,
                location="POINT(%s)" % locations[i])
    return output.id
def setUpClass(cls):
    """Create two logic-tree realizations, one SES collection each, and
    GMF records for both; cache the data the tests compare against.
    """
    cfg = helpers.get_data_path('event_based_hazard/job.ini')
    job = helpers.get_hazard_job(cfg)
    rlz1 = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=1, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    rlz2 = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=2, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    ses_coll1 = models.SESCollection.objects.create(
        output=models.Output.objects.create_output(
            job, "Test SES Collection 1", "ses"),
        lt_realization=rlz1)
    ses_coll2 = models.SESCollection.objects.create(
        output=models.Output.objects.create_output(
            job, "Test SES Collection 2", "ses"),
        lt_realization=rlz2)
    gmf_data1 = helpers.create_gmf_data_records(job, rlz1, ses_coll1)[0]
    # The second realization gets GMFs at an explicit set of points.
    points = [(15.3, 38.22), (15.7, 37.22), (15.4, 38.09),
              (15.56, 38.1), (15.2, 38.2)]
    gmf_data2 = helpers.create_gmf_data_records(
        job, rlz2, ses_coll2, points)[0]
    cls.gmf_coll1 = gmf_data1.gmf
    cls.ruptures1 = tuple(get_tags(gmf_data1))
    cls.ruptures2 = tuple(get_tags(gmf_data2))
    cls.investigation_time = job.hazard_calculation.investigation_time
def setUp(self):
    """Create a hazard job plus a random number (1-10) of logic-tree
    realizations attached to it.
    """
    cfg = helpers.demo_file('simple_fault_demo_hazard/job.ini')
    self.job = helpers.get_hazard_job(cfg, username="******")

    num_rlzs = random.randint(1, 10)
    for ordinal in range(num_rlzs):
        rlz = models.LtRealization(
            hazard_calculation=self.job.hazard_calculation,
            ordinal=ordinal, seed=None, weight=1 / (ordinal + 1),
            sm_lt_path=[ordinal], gsim_lt_path=[ordinal],
            total_items=0, completed_items=0)
        rlz.save()
def hazard_id(self):
    """Build an event-based GMF collection from the ``gmf.csv`` fixture
    and return the id of its output.
    """
    job = helpers.get_hazard_job(
        helpers.demo_file("event_based_hazard/job.ini"))
    # Replace the parsed calculation with a minimal event-based one.
    job.hazard_calculation = models.HazardCalculation.objects.create(
        owner=job.hazard_calculation.owner,
        truncation_level=job.hazard_calculation.truncation_level,
        maximum_distance=job.hazard_calculation.maximum_distance,
        intensity_measure_types_and_levels=(
            job.hazard_calculation.intensity_measure_types_and_levels),
        calculation_mode="event_based",
        investigation_time=50,
        ses_per_logic_tree_path=1,
    )
    job.save()
    hc = job.hazard_calculation

    lt_realization = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=1,
        seed=1,
        weight=None,
        sm_lt_path="test_sm",
        gsim_lt_path="test_gsim",
        is_complete=False,
        total_items=1,
        completed_items=1,
    )

    gmf_set = models.GmfSet.objects.create(
        gmf_collection=models.GmfCollection.objects.create(
            output=models.Output.objects.create_output(
                job, "Test Hazard output", "gmf"),
            lt_realization=lt_realization,
            complete_logic_tree_gmf=False,
        ),
        investigation_time=hc.investigation_time,
        ses_ordinal=1,
        complete_logic_tree_gmf=False,
    )

    with open(os.path.join(os.path.dirname(__file__), "gmf.csv"),
              "rb") as csvfile:
        gmfreader = csv.reader(csvfile, delimiter=",")
        # First CSV row holds the site locations; the remaining rows
        # hold GMVs, one column per location (hence the transpose).
        locations = gmfreader.next()
        gmv_matrix = numpy.array(
            [[float(x) for x in row] for row in gmfreader]).transpose()
        rupture_ids = helpers.get_rupture_ids(
            job, hc, lt_realization, len(gmv_matrix[0]))
        for i, gmvs in enumerate(gmv_matrix):
            models.Gmf.objects.create(
                gmf_set=gmf_set,
                imt="PGA",
                gmvs=gmvs,
                rupture_ids=map(str, rupture_ids),
                result_grp_ordinal=1,
                location="POINT(%s)" % locations[i],
            )
    return gmf_set.gmf_collection.output.id
def test_del_haz_calc_no_access(self):
    """Deleting a hazard calculation owned by another user must fail.

    Deletion is not allowed in this case and raises a ``RuntimeError``.
    """
    other_users_job = helpers.get_hazard_job(
        self.hazard_cfg, username=helpers.random_string())
    self.assertRaises(
        RuntimeError, engine.del_haz_calc,
        other_users_job.hazard_calculation.id)
def setUpClass(self):
    """Create one fault-sourced and one point-sourced ``SESRupture``
    fixture, stored in the legacy ``old_*`` geometry columns.
    """
    cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    job = helpers.get_hazard_job(cfg)
    lt_rlz = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation, ordinal=0,
        seed=0, sm_lt_path='foo', gsim_lt_path='bar', total_items=0)
    output = models.Output.objects.create(
        oq_job=job, display_name='test', output_type='ses')
    ses_coll = models.SESCollection.objects.create(
        output=output, lt_realization=lt_rlz)
    ses = models.SES.objects.create(
        ses_collection=ses_coll, investigation_time=50.0, ordinal=1)

    # 4x4 rupture mesh for the fault-source rupture.
    self.mesh_lons = numpy.array(
        [0.1 * x for x in range(16)]).reshape((4, 4))
    self.mesh_lats = numpy.array(
        [0.2 * x for x in range(16)]).reshape((4, 4))
    self.mesh_depths = numpy.array(
        [0.3 * x for x in range(16)]).reshape((4, 4))

    # planar surface coords
    self.ps_lons = [1, 3, 5, 7]
    self.ps_lats = [2, 4, 6, 8]
    self.ps_depths = [0.1, 0.2, 0.3, 0.4]

    self.fault_rupture = models.SESRupture.objects.create(
        ses=ses, old_magnitude=5, old_strike=0, old_dip=0, old_rake=0,
        old_tectonic_region_type='Active Shallow Crust',
        old_is_from_fault_source=True, old_lons=self.mesh_lons,
        old_is_multi_surface=False,
        old_lats=self.mesh_lats, old_depths=self.mesh_depths)
    # NOTE(review): this rupture uses `magnitude=` while the fault
    # rupture above uses `old_magnitude=` -- confirm the asymmetry is
    # intentional.
    self.source_rupture = models.SESRupture.objects.create(
        ses=ses, magnitude=5, old_strike=0, old_dip=0, old_rake=0,
        old_tectonic_region_type='Active Shallow Crust',
        old_is_from_fault_source=False, old_lons=self.ps_lons,
        old_is_multi_surface=False,
        old_lats=self.ps_lats, old_depths=self.ps_depths)
def test_create_risk_calculation(self):
    """`create_risk_calculation` must persist a RiskCalculation whose
    fields round-trip cleanly through the database.
    """
    # we need an hazard output to create a risk calculation
    hazard_cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    hazard_job = helpers.get_hazard_job(hazard_cfg, 'openquake')
    hc = hazard_job.hazard_calculation
    rlz = models.LtRealization.objects.create(
        hazard_calculation=hazard_job.hazard_calculation,
        ordinal=1, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    hazard_output = models.HazardCurve.objects.create(
        lt_realization=rlz,
        output=models.Output.objects.create_output(
            hazard_job, "Test Hazard output", "hazard_curve"),
        investigation_time=hc.investigation_time,
        imt="PGA", imls=[0.1, 0.2, 0.3])

    params = {
        'hazard_output_id': hazard_output.output.id,
        'base_path': 'path/to/job.ini',
        'export_dir': '/tmp/xxx',
        'calculation_mode': 'classical',
        # just some sample params
        'lrem_steps_per_interval': 5,
        'conditional_loss_poes': '0.01, 0.02, 0.05',
        # NOTE(review): the last vertex reads '-0.5, -0.5' (comma
        # instead of space) -- presumably tolerated by the parser, as
        # the WKT assertion below expects a closed square; verify.
        'region_constraint': '-0.5 0.5, 0.5 0.5, 0.5 -0.5, -0.5, -0.5',
    }

    owner = helpers.default_user()

    vuln_file = models.Input(digest='123', path='/foo/bar', size=0,
                             input_type='structural_vulnerability',
                             owner=owner)
    vuln_file.save()
    exposure_file = models.Input(digest='456', path='/foo/baz', size=0,
                                 input_type='exposure', owner=owner)
    exposure_file.save()

    files = [vuln_file, exposure_file]

    rc = engine.create_risk_calculation(owner, params, files)
    # Normalize/clean fields by fetching a fresh copy from the db.
    rc = models.RiskCalculation.objects.get(id=rc.id)

    self.assertEqual(rc.calculation_mode, 'classical')
    self.assertEqual(rc.lrem_steps_per_interval, 5)
    self.assertEqual(rc.conditional_loss_poes, [0.01, 0.02, 0.05])
    self.assertEqual(
        rc.region_constraint.wkt,
        ('POLYGON ((-0.5000000000000000 0.5000000000000000, '
         '0.5000000000000000 0.5000000000000000, '
         '0.5000000000000000 -0.5000000000000000, '
         '-0.5000000000000000 -0.5000000000000000, '
         '-0.5000000000000000 0.5000000000000000))'))
def setUpClass(self):
    """Create one fault-sourced and one point-sourced ``SESRupture``
    fixture for the export tests.
    """
    cfg = helpers.demo_file("simple_fault_demo_hazard/job.ini")
    job = helpers.get_hazard_job(cfg)
    lt_rlz = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=0,
        seed=0,
        sm_lt_path="foo",
        gsim_lt_path="bar",
        total_items=0,
    )
    output = models.Output.objects.create(
        oq_job=job, owner=job.owner,
        display_name="test", output_type="ses")
    ses_coll = models.SESCollection.objects.create(
        output=output, lt_realization=lt_rlz)
    ses = models.SES.objects.create(
        ses_collection=ses_coll, investigation_time=50.0, ordinal=1)

    # 4x4 rupture mesh for the fault-source rupture.
    self.mesh_lons = numpy.array(
        [0.1 * x for x in range(16)]).reshape((4, 4))
    self.mesh_lats = numpy.array(
        [0.2 * x for x in range(16)]).reshape((4, 4))
    self.mesh_depths = numpy.array(
        [0.3 * x for x in range(16)]).reshape((4, 4))

    # planar surface coords
    self.ps_lons = [1, 3, 5, 7]
    self.ps_lats = [2, 4, 6, 8]
    self.ps_depths = [0.1, 0.2, 0.3, 0.4]

    self.fault_rupture = models.SESRupture.objects.create(
        ses=ses,
        magnitude=5,
        strike=0,
        dip=0,
        rake=0,
        tectonic_region_type="Active Shallow Crust",
        is_from_fault_source=True,
        lons=self.mesh_lons,
        lats=self.mesh_lats,
        depths=self.mesh_depths,
        result_grp_ordinal=1,
        rupture_ordinal=1,
    )
    self.source_rupture = models.SESRupture.objects.create(
        ses=ses,
        magnitude=5,
        strike=0,
        dip=0,
        rake=0,
        tectonic_region_type="Active Shallow Crust",
        is_from_fault_source=False,
        lons=self.ps_lons,
        lats=self.ps_lats,
        depths=self.ps_depths,
        result_grp_ordinal=1,
        rupture_ordinal=2,
    )
def setUp(self):
    """Create a hazard job and attach a random number (1-10) of
    logic-tree realizations to it.
    """
    cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    self.job = helpers.get_hazard_job(cfg, username="******")

    realization_count = random.randint(1, 10)
    for idx in range(realization_count):
        realization = models.LtRealization(
            hazard_calculation=self.job.hazard_calculation,
            ordinal=idx, seed=None, weight=1 / (idx + 1),
            sm_lt_path=[idx], gsim_lt_path=[idx],
            total_items=0, completed_items=0)
        realization.save()
def test_get_site_collection_with_reference_parameters(self):
    """Without a site model, every site carries the reference
    parameters taken from the job configuration.
    """
    cfg = helpers.demo_file("simple_fault_demo_hazard/job.ini")
    job = helpers.get_hazard_job(cfg, username=getpass.getuser())
    site_coll = models.get_site_collection(job.hazard_calculation)

    # All sites share the same reference parameters.
    self.assertTrue((site_coll.vs30 == 760).all())
    self.assertTrue(site_coll.vs30measured.all())
    self.assertTrue((site_coll.z1pt0 == 5).all())
    self.assertTrue((site_coll.z2pt5 == 100).all())

    # Sanity check on the mesh: locations must match the points of
    # interest of the calculation.
    expected_mesh = job.hazard_calculation.points_to_compute()
    self.assertTrue((expected_mesh.lons == site_coll.mesh.lons).all())
    self.assertTrue((expected_mesh.lats == site_coll.mesh.lats).all())
def test_get_site_collection_with_reference_parameters(self):
    """After `initialize_site_model`, every site carries the reference
    parameters taken from the scenario job configuration.
    """
    cfg = helpers.get_data_path('scenario_hazard/job.ini')
    job = helpers.get_hazard_job(cfg, username=getpass.getuser())
    calc = scen_core.ScenarioHazardCalculator(job)
    calc.initialize_site_model()
    site_coll = job.hazard_calculation.site_collection

    # All sites share the same reference parameters.
    self.assertTrue((site_coll.vs30 == 760).all())
    self.assertTrue(site_coll.vs30measured.all())
    self.assertTrue((site_coll.z1pt0 == 100).all())
    self.assertTrue((site_coll.z2pt5 == 5).all())

    # Sanity check on the mesh: locations must match the points of
    # interest of the calculation.
    expected_mesh = job.hazard_calculation.points_to_compute()
    self.assertTrue((expected_mesh.lons == site_coll.mesh.lons).all())
    self.assertTrue((expected_mesh.lats == site_coll.mesh.lats).all())
def get_hazard_job(self):
    """Build an event-based hazard job backed by a fresh minimal
    calculation and load GMFs from the ``gmf.csv`` fixture.
    """
    job = helpers.get_hazard_job(
        helpers.get_data_path("event_based_hazard/job.ini"))
    # Replace the parsed calculation with a minimal event-based one.
    job.hazard_calculation = models.HazardCalculation.objects.create(
        truncation_level=job.hazard_calculation.truncation_level,
        maximum_distance=job.hazard_calculation.maximum_distance,
        intensity_measure_types_and_levels=(
            job.hazard_calculation.intensity_measure_types_and_levels),
        calculation_mode="event_based",
        investigation_time=50,
        ses_per_logic_tree_path=1)
    job.save()
    helpers.create_gmf_from_csv(job, self._test_path('gmf.csv'))
    return job
def setUpClass(cls):
    """Create two logic-tree realizations with SES collections and GMF
    records, plus a complete-logic-tree parent GMF output.
    """
    cfg = helpers.get_data_path('event_based_hazard/job.ini')
    job = helpers.get_hazard_job(cfg)
    rlz1 = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=1, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    rlz2 = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation,
        ordinal=2, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    ses_coll1 = models.SESCollection.objects.create(
        output=models.Output.objects.create_output(
            job, "Test SES Collection 1", "ses"),
        lt_realization=rlz1)
    ses_coll2 = models.SESCollection.objects.create(
        output=models.Output.objects.create_output(
            job, "Test SES Collection 2", "ses"),
        lt_realization=rlz2)
    gmf_data1 = helpers.create_gmf_data_records(job, rlz1, ses_coll1)[0]
    # The second realization gets GMFs at an explicit set of points.
    points = [(15.3, 38.22), (15.7, 37.22), (15.4, 38.09),
              (15.56, 38.1), (15.2, 38.2)]
    gmf_data2 = helpers.create_gmf_data_records(job, rlz2, ses_coll2,
                                                points)[0]
    cls.gmf_coll1 = gmf_data1.gmf
    cls.parent_coll = models.Gmf.objects.create(
        output=models.Output.objects.create_output(
            job, "Test Hazard output", "complete_lt_gmf"))
    cls.ruptures1 = tuple(get_tags(gmf_data1))
    cls.ruptures2 = tuple(get_tags(gmf_data2))
    cls.investigation_time = job.hazard_calculation.investigation_time
def test_create_risk_calculation(self):
    """`create_calculation` must persist a RiskCalculation whose fields
    round-trip cleanly through the database.
    """
    # we need an hazard output to create a risk calculation
    hazard_cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    hazard_job = helpers.get_hazard_job(hazard_cfg, 'openquake')
    hc = hazard_job.hazard_calculation
    rlz = models.LtRealization.objects.create(
        hazard_calculation=hazard_job.hazard_calculation,
        ordinal=1, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    hazard_output = models.HazardCurve.objects.create(
        lt_realization=rlz,
        output=models.Output.objects.create_output(
            hazard_job, "Test Hazard output", "hazard_curve"),
        investigation_time=hc.investigation_time,
        imt="PGA", imls=[0.1, 0.2, 0.3])

    params = {
        'hazard_output_id': hazard_output.output.id,
        'base_path': 'path/to/job.ini',
        'export_dir': '/tmp/xxx',
        'calculation_mode': 'classical',
        # just some sample params
        'lrem_steps_per_interval': 5,
        'conditional_loss_poes': '0.01, 0.02, 0.05',
        # NOTE(review): the last vertex reads '-0.5, -0.5' (comma
        # instead of space) -- presumably tolerated by the parser, as
        # the WKT assertion below expects a closed square; verify.
        'region_constraint': '-0.5 0.5, 0.5 0.5, 0.5 -0.5, -0.5, -0.5',
    }

    rc = engine.create_calculation(models.RiskCalculation, params)

    # Normalize/clean fields by fetching a fresh copy from the db.
    rc = models.RiskCalculation.objects.get(id=rc.id)

    self.assertEqual(rc.calculation_mode, 'classical')
    self.assertEqual(rc.lrem_steps_per_interval, 5)
    self.assertEqual(rc.conditional_loss_poes, [0.01, 0.02, 0.05])
    self.assertEqual(
        rc.region_constraint.wkt,
        ('POLYGON ((-0.5000000000000000 0.5000000000000000, '
         '0.5000000000000000 0.5000000000000000, '
         '0.5000000000000000 -0.5000000000000000, '
         '-0.5000000000000000 -0.5000000000000000, '
         '-0.5000000000000000 0.5000000000000000))'))
def test_get_site_collection_with_site_model(self):
    """With a site model, the site collection must have one entry per
    point of interest in the calculation.
    """
    cfg = helpers.demo_file(
        "simple_fault_demo_hazard/job_with_site_model.ini")
    job = helpers.get_hazard_job(cfg)
    calc = cls_core.ClassicalHazardCalculator(job)

    # Bootstrap the `site_data` table:
    calc.initialize_sources()
    calc.initialize_site_model()

    site_coll = models.get_site_collection(job.hazard_calculation)
    # The site model is fairly big, so instead of checking each and
    # every value we only verify that every site-collection attribute
    # has the expected length.
    expected_len = len(job.hazard_calculation.points_to_compute())
    for collection_attr in (site_coll, site_coll.vs30,
                            site_coll.vs30measured, site_coll.z1pt0,
                            site_coll.z2pt5):
        self.assertEqual(expected_len, len(collection_attr))
def hazard_id(self):
    """Create a scenario GMF output whose values are split across two
    result groups, and return its id.
    """
    job = helpers.get_hazard_job(
        helpers.demo_file("scenario_hazard/job.ini"))
    hc = job.hazard_calculation
    # Replace the parsed calculation with a minimal scenario one.
    job.hazard_calculation = models.HazardCalculation.objects.create(
        owner=hc.owner, truncation_level=hc.truncation_level,
        maximum_distance=hc.maximum_distance,
        intensity_measure_types=["PGA"],
        calculation_mode="scenario")
    job.status = "complete"
    job.save()

    output = models.Output.objects.create_output(
        job, "Test Hazard output", "gmf_scenario")
    fname = os.path.join(os.path.dirname(__file__), 'gmf_scenario.csv')
    with open(fname, 'rb') as csvfile:
        gmfreader = csv.reader(csvfile, delimiter=',')
        # First CSV row holds the site locations; the remaining rows
        # hold GMVs, one column per location (hence the transpose).
        locations = gmfreader.next()
        arr = numpy.array([[float(x) for x in row] for row in gmfreader])
        for i, gmvs in enumerate(arr.transpose()):
            # In order to test properly the hazard getter we split
            # the available ground motion values in two result
            # groups (that we expect to be both considered).
            models.GmfScenario.objects.create(
                output=output,
                imt="PGA",
                gmvs=gmvs[0:5],
                result_grp_ordinal=1,
                location="POINT(%s)" % locations[i])
            models.GmfScenario.objects.create(
                output=output,
                imt="PGA",
                gmvs=gmvs[5:],
                result_grp_ordinal=2,
                location="POINT(%s)" % locations[i])
    return output.id
def test_get_site_collection_with_site_model(self):
    """With a site model, the site collection must have one entry per
    point of interest in the calculation.
    """
    cfg = helpers.get_data_path(
        'simple_fault_demo_hazard/job_with_site_model.ini')
    job = helpers.get_hazard_job(cfg)
    calc = cls_core.ClassicalHazardCalculator(job)

    # Bootstrap the `hazard_site` table:
    calc.initialize_sources()
    calc.initialize_site_model()

    site_coll = job.hazard_calculation.site_collection
    # The site model is fairly big, so instead of checking each and
    # every value we only verify that every site-collection attribute
    # has the expected length.
    expected_len = len(job.hazard_calculation.points_to_compute())
    for collection_attr in (site_coll, site_coll.vs30,
                            site_coll.vs30measured, site_coll.z1pt0,
                            site_coll.z2pt5):
        self.assertEqual(expected_len, len(collection_attr))
def setUpClass(self):
    """Create one fault-sourced and one point-sourced ``SESRupture``
    fixture, stored in the legacy ``old_*`` geometry columns.
    """
    cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    job = helpers.get_hazard_job(cfg)
    lt_rlz = models.LtRealization.objects.create(
        hazard_calculation=job.hazard_calculation, ordinal=0,
        seed=0, sm_lt_path='foo', gsim_lt_path='bar', total_items=0)
    output = models.Output.objects.create(
        oq_job=job, owner=job.owner,
        display_name='test', output_type='ses')
    ses_coll = models.SESCollection.objects.create(
        output=output, lt_realization=lt_rlz)
    ses = models.SES.objects.create(
        ses_collection=ses_coll, investigation_time=50.0, ordinal=1)

    # 4x4 rupture mesh for the fault-source rupture.
    self.mesh_lons = numpy.array(
        [0.1 * x for x in range(16)]).reshape((4, 4))
    self.mesh_lats = numpy.array(
        [0.2 * x for x in range(16)]).reshape((4, 4))
    self.mesh_depths = numpy.array(
        [0.3 * x for x in range(16)]).reshape((4, 4))

    # planar surface coords
    self.ps_lons = [1, 3, 5, 7]
    self.ps_lats = [2, 4, 6, 8]
    self.ps_depths = [0.1, 0.2, 0.3, 0.4]

    self.fault_rupture = models.SESRupture.objects.create(
        ses=ses, old_magnitude=5, old_strike=0, old_dip=0, old_rake=0,
        old_tectonic_region_type='Active Shallow Crust',
        old_is_from_fault_source=True, old_lons=self.mesh_lons,
        old_is_multi_surface=False,
        old_lats=self.mesh_lats, old_depths=self.mesh_depths)
    # NOTE(review): this rupture uses `magnitude=` while the fault
    # rupture above uses `old_magnitude=` -- confirm the asymmetry is
    # intentional.
    self.source_rupture = models.SESRupture.objects.create(
        ses=ses, magnitude=5, old_strike=0, old_dip=0, old_rake=0,
        old_tectonic_region_type='Active Shallow Crust',
        old_is_from_fault_source=False, old_lons=self.ps_lons,
        old_is_multi_surface=False,
        old_lats=self.ps_lats, old_depths=self.ps_depths)
def get_hazard_job(self):
    """Build a scenario hazard job and populate its GMF data from the
    ``gmf_scenario.csv`` fixture next to this test module.
    """
    job_ini = helpers.get_data_path("scenario_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(job_ini)
    gmf_csv = os.path.join(os.path.dirname(__file__), 'gmf_scenario.csv')
    helpers.populate_gmf_data_from_csv(hazard_job, gmf_csv)
    return hazard_job
def _setup_a_new_calculator(self):
    """Return a fresh (job, calculator) pair for disaggregation tests."""
    job_cfg = helpers.get_data_path('disaggregation/job.ini')
    hazard_job = helpers.get_hazard_job(
        job_cfg, username=getpass.getuser())
    return hazard_job, disagg_core.DisaggHazardCalculator(hazard_job)
def _setup_a_new_calculator(self):
    """Return a fresh (job, calculator) pair for the classical tests."""
    job_cfg = helpers.demo_file('simple_fault_demo_hazard/job.ini')
    hazard_job = helpers.get_hazard_job(
        job_cfg, username=getpass.getuser())
    return hazard_job, core.ClassicalHazardCalculator(hazard_job)
def setUp(self):
    """Prepare an event-based hazard calculator with job statistics."""
    job_ini = helpers.get_data_path('event_based_hazard/job.ini')
    self.job = helpers.get_hazard_job(
        job_ini, username=getpass.getuser())
    self.calc = core.EventBasedHazardCalculator(self.job)
    models.JobStats.objects.create(oq_job=self.job)
def get_hazard_job(self):
    """Build a scenario hazard job and populate its GMF data from the
    module-level ``CSVFILE`` fixture.
    """
    job_ini = helpers.get_data_path("scenario_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(job_ini)
    helpers.populate_gmf_data_from_csv(hazard_job, CSVFILE)
    return hazard_job
def get_hazard_job(self):
    """Build an event-based hazard job and load GMFs from the test's
    ``gmf.csv`` fixture.
    """
    job_ini = helpers.get_data_path("event_based_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(job_ini)
    helpers.create_gmf_from_csv(hazard_job, self._test_path('gmf.csv'))
    return hazard_job
def get_hazard_job(self):
    """Build a scenario hazard job, reusing case_1's GMF fixture."""
    job_ini = helpers.get_data_path("scenario_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(job_ini)
    gmf_csv = self._test_path('../case_1/gmf_scenario.csv')
    helpers.populate_gmf_data_from_csv(hazard_job, gmf_csv)
    return hazard_job
def _setup_a_new_calculator(self):
    """Return a fresh (job, calculator) pair for the classical tests."""
    job_cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
    hazard_job = helpers.get_hazard_job(
        job_cfg, username=getpass.getuser())
    return hazard_job, core.ClassicalHazardCalculator(hazard_job)
def get_hazard_job(self):
    """Build a scenario hazard job and populate its GMF data from the
    test's ``gmf_scenario.csv`` fixture.
    """
    job_ini = helpers.get_data_path("scenario_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(job_ini)
    helpers.populate_gmf_data_from_csv(
        hazard_job, self._test_path('gmf_scenario.csv'))
    return hazard_job
def get_hazard_job(self):
    """Build a scenario hazard job whose GMF data comes from case_1's
    ``gmf_scenario.csv`` fixture.
    """
    job_ini = helpers.get_data_path("scenario_hazard/job.ini")
    hazard_job = helpers.get_hazard_job(job_ini)
    fixture = self._test_path('../case_1/gmf_scenario.csv')
    helpers.populate_gmf_data_from_csv(hazard_job, fixture)
    return hazard_job