def test_inputs4job_with_correct_input_type_and_path(self):
    """Only the 'source' input matching the given path is returned."""
    def _add_source_input(input_path):
        # Create a 'source'-type input and link it to the test job.
        new_inp = models.Input(owner=self.job.owner, path=input_path,
                               input_type="source",
                               size=self.sizes.next())
        new_inp.save()
        models.Input2job(oq_job=self.job, input=new_inp).save()
        return new_inp

    _add_source_input(self.paths.next())
    wanted_path = self.paths.next()
    matching = _add_source_input(wanted_path)
    _add_source_input(self.paths.next())
    self.assertEqual(
        [matching],
        models.inputs4job(self.job.id, input_type="source",
                          path=wanted_path))
def setUpClass(cls):
    """Create a job plus one output/record pair for each damage
    distribution flavor (per asset, per taxonomy, total), along with
    the exposure fixtures the per-asset records reference."""
    default_user = helpers.default_user()
    cls.job = models.OqJob(owner=default_user)
    cls.job.save()
    # dmg dist per asset
    cls.ddpa_output = models.Output(
        owner=default_user, oq_job=cls.job,
        display_name='Test dmg dist per asset',
        output_type='dmg_dist_per_asset',
        db_backed=True)
    cls.ddpa_output.save()
    cls.ddpa = models.DmgDistPerAsset(
        output=cls.ddpa_output, dmg_states=cls.DMG_STATES)
    cls.ddpa.save()
    # We also need some sample exposure data records (to satisfy the dmg
    # dist per asset FK).
    test_input = models.Input(
        owner=default_user, input_type='exposure', path='fake', size=0)
    test_input.save()
    i2j = models.Input2job(input=test_input, oq_job=cls.job)
    i2j.save()
    exp_model = models.ExposureModel(
        owner=default_user, input=test_input, name='test-exp-model',
        category='economic loss', stco_type='per_asset',
        stco_unit='CHF')
    exp_model.save()
    test_site = shapes.Site(3.14, 2.17)
    cls.exp_data = models.ExposureData(  # Asset
        exposure_model=exp_model, asset_ref=helpers.random_string(),
        taxonomy=helpers.random_string(), number_of_units=37,
        site=test_site.point.to_wkt(), stco=1234.56)
    cls.exp_data.save()
    # dmg dist per taxonomy
    cls.ddpt_output = models.Output(
        owner=default_user, oq_job=cls.job,
        display_name='Test dmg dist per taxonomy',
        output_type='dmg_dist_per_taxonomy',
        db_backed=True)
    cls.ddpt_output.save()
    cls.ddpt = models.DmgDistPerTaxonomy(
        output=cls.ddpt_output, dmg_states=cls.DMG_STATES)
    cls.ddpt.save()
    # total dmg dist
    cls.ddt_output = models.Output(
        owner=default_user, oq_job=cls.job,
        display_name='Test dmg dist total',
        output_type='dmg_dist_total',
        db_backed=True)
    cls.ddt_output.save()
    cls.ddt = models.DmgDistTotal(
        output=cls.ddt_output, dmg_states=cls.DMG_STATES)
    cls.ddt.save()
def setUp(self):
    """Create a fake exposure input and attach it to the test job."""
    exposure_input = models.Input(
        owner=self.job.owner, input_type="exposure",
        path="/tmp/fake-exposure-path", size=123)
    exposure_input.save()
    models.Input2job(input=exposure_input, oq_job=self.job).save()
    self.emdl_input = exposure_input
def setUp(self):
    """Create a site model input owned by the 'openquake' user."""
    inp_owner = engine.prepare_user('openquake')
    self.site_model_inp = models.Input(
        owner=inp_owner, digest='fake', path='fake',
        input_type='site_model', size=0)
    self.site_model_inp.save()
def test_inputs4job_with_wrong_path(self):
    """Querying with a path that matches nothing yields an empty list."""
    unrelated = models.Input(owner=self.job.owner,
                             path=self.paths.next(),
                             input_type="exposure",
                             size=self.sizes.next())
    unrelated.save()
    models.Input2job(oq_job=self.job, input=unrelated).save()
    # The linked input has a different path, so nothing comes back.
    self.assertEqual([], models.inputs4job(self.job.id, path="xyz"))
def setUpClass(cls):
    """Set up a classic job with a single fake 'source' input."""
    cls.job = cls.setup_classic_job()
    source_input = models.Input(
        owner=cls.job.owner, input_type="source",
        path="/tmp/fake-source-path", size=123)
    source_input.save()
    models.Input2job(input=source_input, oq_job=cls.job).save()
    cls.input = source_input
def test_inputs4job_with_single_input(self):
    """With exactly one linked input, inputs4job returns just that one."""
    only_input = models.Input(owner=self.job.owner,
                              path=self.paths.next(),
                              input_type="exposure",
                              size=self.sizes.next())
    only_input.save()
    models.Input2job(oq_job=self.job, input=only_input).save()
    self.assertEqual([only_input], models.inputs4job(self.job.id))
def setUp(self):
    """Attach an exposure input to the job and build an exposure model.

    The exposure model is deliberately left unsaved so individual tests
    can tweak it before persisting.
    """
    exposure_input = models.Input(
        input_type="exposure", size=123,
        path="/tmp/fake-exposure-path", owner=self.job.owner)
    exposure_input.save()
    models.Input2job(input=exposure_input, oq_job=self.job).save()
    self.mdl = models.ExposureModel(
        input=exposure_input, owner=self.job.owner,
        name="exposure-model-testing", category="economic loss")
def insert_inputs(job, inputs):
    """Insert the input records for the given data and job.

    :param job: the job to associate each new input with
    :param inputs: iterable of ``(input_type, path)`` pairs
    """
    for input_type, input_path in inputs:
        record = models.Input(
            owner=job.owner, path=input_path, input_type=input_type,
            size=random.randint(1024, 16 * 1024))
        record.save()
        models.Input2job(input=record, oq_job=job).save()
def setUp(self):
    """Link a fake fragility input to the job and build an (unsaved)
    continuous fragility model."""
    frag_input = models.Input(
        input_type="fragility", size=123,
        path="/tmp/fake-fragility-path", owner=self.job.owner)
    frag_input.save()
    models.Input2job(input=frag_input, oq_job=self.job).save()
    self.mdl = models.FragilityModel(
        input=frag_input, owner=self.job.owner, format="continuous")
def setUp(self):
    """Prepare one input with a bogus digest and one whose digest
    actually matches the file's md5 checksum."""
    # GMPE logic tree: deliberately wrong md5 digest.
    self.glt_i = models.Input(
        input_type="lt_gmpe", size=123, path=self.GLT,
        owner=self.old_job.owner, digest="0" * 32)
    self.glt_i.save()
    models.Input2job(input=self.glt_i, oq_job=self.old_job).save()

    # Source logic tree: compute the file's real md5 digest.
    if sys.platform == 'darwin':
        # OSX `md5` prints the digest as the last token.
        real_digest = subprocess.check_output(
            ["md5", self.SLT]).split()[-1]
    else:
        # GNU `md5sum` prints the digest as the first token.
        real_digest = subprocess.check_output(
            ["md5sum", self.SLT]).split()[0]
    self.slt_i = models.Input(
        input_type="lt_source", size=123, path=self.SLT,
        owner=self.old_job.owner, digest=real_digest)
    self.slt_i.save()
    models.Input2job(input=self.slt_i, oq_job=self.old_job).save()

    self.job = self.setup_classic_job()
def setUp(self):
    """Create and save a fully-populated exposure model for the job."""
    exposure_input = models.Input(
        input_type="exposure", size=123,
        path="/tmp/fake-exposure-path", owner=self.job.owner)
    exposure_input.save()
    models.Input2job(input=exposure_input, oq_job=self.job).save()
    self.mdl = models.ExposureModel(
        input=exposure_input, owner=self.job.owner, unit_type="count",
        name="exposure-data-testing", category="economic loss",
        coco_type="per_asset", coco_unit="USD")
    self.mdl.save()
def test_get_site_model_too_many_site_models(self):
    """Two site models linked to one job must raise a RuntimeError."""
    job = engine.prepare_job()
    first = models.Input(
        owner=job.owner, digest='fake', path='fake',
        input_type='site_model', size=0)
    first.save()
    second = models.Input(
        owner=job.owner, digest='fake', path='fake',
        input_type='site_model', size=0)
    second.save()
    # Associate both site models with the same job:
    for sm_input in (first, second):
        models.Input2job(input=sm_input, oq_job=job).save()
    with self.assertRaises(RuntimeError) as assert_raises:
        general.get_site_model(job.id)
    self.assertEqual('Only 1 site model per job is allowed, found 2.',
                     assert_raises.exception.message)
def test_inputs4job_with_correct_input_type(self):
    """Only inputs of the requested type come back, in insert order."""
    created = []
    # Sandwich a 'rupture' input between two 'exposure' inputs.
    for itype in ("exposure", "rupture", "exposure"):
        inp = models.Input(owner=self.job.owner,
                           path=self.paths.next(),
                           input_type=itype, size=self.sizes.next())
        inp.save()
        models.Input2job(oq_job=self.job, input=inp).save()
        created.append(inp)
    self.assertEqual(
        [created[0], created[2]],
        models.inputs4job(self.job.id, input_type="exposure"))
def test_get_site_model(self):
    """get_site_model finds the input only once input2job links it."""
    job = engine.prepare_job()
    sm_input = models.Input(
        owner=job.owner, digest='fake', path='fake',
        input_type='site_model', size=0)
    sm_input.save()
    # No input2job association yet, so nothing should be found.
    self.assertIsNone(general.get_site_model(job.id))
    # Now create the association and query again:
    models.Input2job(input=sm_input, oq_job=job).save()
    found = general.get_site_model(job.id)
    self.assertEqual(sm_input, found)
def _setup_input(self, input_type, size, path, digest, jobs):
    """Create a model input and associate it with the given jobs.

    Its owner will be the same as the owner of the first job.
    """
    # Wipe any other model inputs still in the database so they cannot
    # interfere with the tests in this class.
    models.Input2job.objects.all().delete()
    inp = models.Input(input_type=input_type, size=size, path=path,
                       owner=jobs[0].owner, digest=digest)
    inp.save()
    for a_job in jobs:
        models.Input2job(input=inp, oq_job=a_job).save()
    return inp
def setUp(self):
    """Create one discrete and one continuous fragility model, both
    saved and sharing the same fake input."""
    frag_input = models.Input(
        input_type="fragility", size=123,
        path="/tmp/fake-fragility-path", owner=self.job.owner)
    frag_input.save()
    models.Input2job(input=frag_input, oq_job=self.job).save()
    self.mdl = models.FragilityModel(
        input=frag_input, owner=self.job.owner, format="discrete",
        lss="a b c".split(), imls=[0.2, 0.3], imt="mmi")
    self.mdl.save()
    self.continuous_mdl = models.FragilityModel(
        input=frag_input, owner=self.job.owner, format="continuous",
        lss="d e f".split())
    self.continuous_mdl.save()
def test_store_site_model(self):
    """store_site_model parses the site model file, persists one
    SiteModel record per site, and returns the inserted records."""
    # Setup
    inp = models.Input(
        owner=models.OqUser.objects.get(id=1), path='fake_path',
        digest='fake_digest', input_type='site_model', size=0)
    inp.save()
    site_model = helpers.get_data_path('site_model.xml')
    # Expected values, mirroring the contents of site_model.xml.
    exp_site_model = [
        dict(lon=-122.5, lat=37.5, vs30=800.0, vs30_type="measured",
             z1pt0=100.0, z2pt5=5.0),
        dict(lon=-122.6, lat=37.6, vs30=801.0, vs30_type="measured",
             z1pt0=101.0, z2pt5=5.1),
        dict(lon=-122.7, lat=37.7, vs30=802.0, vs30_type="measured",
             z1pt0=102.0, z2pt5=5.2),
        dict(lon=-122.8, lat=37.8, vs30=803.0, vs30_type="measured",
             z1pt0=103.0, z2pt5=5.3),
        dict(lon=-122.9, lat=37.9, vs30=804.0, vs30_type="measured",
             z1pt0=104.0, z2pt5=5.4),
    ]
    ret_val = general.store_site_model(inp, site_model)
    # Fetch what was actually written for this input, in insert order.
    actual_site_model = models.SiteModel.objects.filter(
        input=inp.id).order_by('id')
    for i, exp in enumerate(exp_site_model):
        act = actual_site_model[i]
        self.assertAlmostEqual(exp['lon'], act.location.x)
        self.assertAlmostEqual(exp['lat'], act.location.y)
        self.assertAlmostEqual(exp['vs30'], act.vs30)
        self.assertEqual(exp['vs30_type'], act.vs30_type)
        self.assertAlmostEqual(exp['z1pt0'], act.z1pt0)
        self.assertAlmostEqual(exp['z2pt5'], act.z2pt5)
    # last, check that the `store_site_model` function returns all of the
    # newly-inserted records
    # an `equals` check just compares the ids
    for i, val in enumerate(ret_val):
        self.assertEqual(val, actual_site_model[i])
def test_import_job_profile(self):
    """Importing a demo config file creates the matching OqJobProfile
    record, the three input records, and returns the parsed config
    params and sections."""
    # Given a path to a demo config file, ensure that the appropriate
    # database record for OqJobProfile is created.
    # At the moment, the api function used to import the job profile also
    # returns a dict of the config params and a list of config file
    # sections.
    cfg_path = helpers.demo_file('HazardMapTest/config.gem')
    # Default 'openquake' user:
    owner = helpers.default_user()
    smlt_input = models.Input(
        owner=helpers.default_user(),
        path=os.path.abspath(helpers.demo_file(
            'HazardMapTest/source_model_logic_tree.xml')),
        input_type='lt_source', size=671,
        digest="4372d13cec89f2a1072a2c7c694656d0")
    gmpelt_input = models.Input(
        owner=helpers.default_user(),
        path=os.path.abspath(helpers.demo_file(
            'HazardMapTest/gmpe_logic_tree.xml')),
        input_type='lt_gmpe', size=709,
        digest="d9ece248a1e73ee25bd5964670282012")
    src_model_input = models.Input(
        owner=helpers.default_user(),
        path=os.path.abspath(helpers.demo_file(
            'HazardMapTest/source_model.xml')),
        input_type='source', size=1644,
        digest="3118538b30b69289e6ea47967e9f51aa")
    # Keyed by input_type, for lookup in the final comparison loop.
    expected_inputs_map = dict(
        lt_source=smlt_input, lt_gmpe=gmpelt_input,
        source=src_model_input)
    expected_jp = models.OqJobProfile(
        owner=owner,
        calc_mode='classical',
        job_type=['hazard'],
        region=GEOSGeometry(
            'POLYGON((-122.2 37.6, -122.2 38.2, '
            '-121.5 38.2, -121.5 37.6, -122.2 37.6))'),
        region_grid_spacing=0.01,
        min_magnitude=5.0,
        investigation_time=50.0,
        maximum_distance=200.0,
        component='gmroti50',
        imt='pga',
        period=None,
        damping=None,
        truncation_type='twosided',
        truncation_level=3.0,
        imls=[
            0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376,
            0.0527, 0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556,
            0.778, 1.09],
        poes=[0.1],
        realizations=1,
        depth_to_1pt_0km_per_sec=100.0,
        vs30_type='measured',
        source_model_lt_random_seed=23,
        gmpe_lt_random_seed=5,
        width_of_mfd_bin=0.1,
        standard_deviation_type='total',
        reference_vs30_value=760.0,
        reference_depth_to_2pt5km_per_sec_param=5.0,
        sadigh_site_type='rock',
        # area sources:
        include_area_sources=True,
        treat_area_source_as='pointsources',
        area_source_discretization=0.1,
        area_source_magnitude_scaling_relationship=(
            'W&C 1994 Mag-Length Rel.'),
        # point sources:
        include_grid_sources=False,
        treat_grid_source_as='pointsources',
        grid_source_magnitude_scaling_relationship=(
            'W&C 1994 Mag-Length Rel.'),
        # simple faults:
        include_fault_source=True,
        fault_rupture_offset=1.0,
        fault_surface_discretization=1.0,
        fault_magnitude_scaling_relationship='Wells & Coppersmith (1994)',
        fault_magnitude_scaling_sigma=0.0,
        rupture_aspect_ratio=2.0,
        rupture_floating_type='downdip',
        # complex faults:
        include_subduction_fault_source=False,
        subduction_fault_rupture_offset=10.0,
        subduction_fault_surface_discretization=10.0,
        subduction_fault_magnitude_scaling_relationship=(
            'W&C 1994 Mag-Length Rel.'),
        subduction_fault_magnitude_scaling_sigma=0.0,
        subduction_rupture_aspect_ratio=1.5,
        subduction_rupture_floating_type='downdip',
        quantile_levels=[],
        compute_mean_hazard_curve=True)
    expected_sections = ['HAZARD', 'general']
    # Raw config params as strings, exactly as parsed from config.gem.
    expected_params = {
        'AREA_SOURCE_DISCRETIZATION': '0.1',
        'AREA_SOURCE_MAGNITUDE_SCALING_RELATIONSHIP':
            'W&C 1994 Mag-Length Rel.',
        'BASE_PATH': os.path.abspath(helpers.demo_file('HazardMapTest')),
        'CALCULATION_MODE': 'Classical',
        'COMPONENT': 'Average Horizontal (GMRotI50)',
        'COMPUTE_MEAN_HAZARD_CURVE': 'true',
        'DAMPING': '5.0',
        'DEPTHTO1PT0KMPERSEC': '100.0',
        'FAULT_MAGNITUDE_SCALING_RELATIONSHIP':
            'Wells & Coppersmith (1994)',
        'FAULT_MAGNITUDE_SCALING_SIGMA': '0.0',
        'FAULT_RUPTURE_OFFSET': '1.0',
        'FAULT_SURFACE_DISCRETIZATION': '1.0',
        'GMPE_LOGIC_TREE_FILE': os.path.abspath(
            helpers.demo_file('HazardMapTest/gmpe_logic_tree.xml')),
        'GMPE_LT_RANDOM_SEED': '5',
        'GMPE_TRUNCATION_TYPE': '2 Sided',
        'GRID_SOURCE_MAGNITUDE_SCALING_RELATIONSHIP':
            'W&C 1994 Mag-Length Rel.',
        'INCLUDE_AREA_SOURCES': 'true',
        'INCLUDE_FAULT_SOURCE': 'true',
        'INCLUDE_GRID_SOURCES': 'false',
        'INCLUDE_SUBDUCTION_FAULT_SOURCE': 'false',
        'INTENSITY_MEASURE_LEVELS': (
            '0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376,'
            ' 0.0527,'
            ' 0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556, 0.778,'
            ' 1.09'),
        'INTENSITY_MEASURE_TYPE': 'PGA',
        'INVESTIGATION_TIME': '50.0',
        'MAXIMUM_DISTANCE': '200.0',
        'MINIMUM_MAGNITUDE': '5.0',
        'NUMBER_OF_LOGIC_TREE_SAMPLES': '1',
        'OUTPUT_DIR': 'computed_output',
        'PERIOD': '0.0',
        'POES': '0.1',
        'QUANTILE_LEVELS': '',
        'REFERENCE_DEPTH_TO_2PT5KM_PER_SEC_PARAM': '5.0',
        'REFERENCE_VS30_VALUE': '760.0',
        'REGION_GRID_SPACING': '0.01',
        'REGION_VERTEX':
            '37.6, -122.2, 38.2, -122.2, 38.2, -121.5, 37.6, -121.5',
        'RUPTURE_ASPECT_RATIO': '2.0',
        'RUPTURE_FLOATING_TYPE': 'Along strike and down dip',
        'SADIGH_SITE_TYPE': 'Rock',
        'SOURCE_MODEL_LOGIC_TREE_FILE': os.path.abspath(
            helpers.demo_file(
                'HazardMapTest/source_model_logic_tree.xml')),
        'SOURCE_MODEL_LT_RANDOM_SEED': '23',
        'STANDARD_DEVIATION_TYPE': 'Total',
        'SUBDUCTION_FAULT_MAGNITUDE_SCALING_RELATIONSHIP':
            'W&C 1994 Mag-Length Rel.',
        'SUBDUCTION_FAULT_MAGNITUDE_SCALING_SIGMA': '0.0',
        'SUBDUCTION_FAULT_RUPTURE_OFFSET': '10.0',
        'SUBDUCTION_FAULT_SURFACE_DISCRETIZATION': '10.0',
        'SUBDUCTION_RUPTURE_ASPECT_RATIO': '1.5',
        'SUBDUCTION_RUPTURE_FLOATING_TYPE': 'Along strike and down dip',
        'TREAT_AREA_SOURCE_AS': 'Point Sources',
        'TREAT_GRID_SOURCE_AS': 'Point Sources',
        'TRUNCATION_LEVEL': '3',
        'VS30_TYPE': 'measured',
        'WIDTH_OF_MFD_BIN': '0.1'}
    actual_jp, params, sections = engine.import_job_profile(
        cfg_path, self.job)
    self.assertEqual(expected_params, params)
    self.assertEqual(expected_sections, sections)
    # Test the OqJobProfile:
    self.assertTrue(
        models.model_equals(expected_jp, actual_jp, ignore=(
            'id', 'last_update', '_owner_cache')))
    # Test the Inputs:
    actual_inputs = models.inputs4job(self.job.id)
    self.assertEqual(3, len(actual_inputs))
    for act_inp in actual_inputs:
        exp_inp = expected_inputs_map[act_inp.input_type]
        self.assertTrue(
            models.model_equals(
                exp_inp, act_inp, ignore=(
                    "id", "last_update", "path", "model", "_owner_cache",
                    "owner_id", "model_content_id")))