def test_risk_mandatory_parameters(self):
    """A job with risk+hazard sections only validates once EXPOSURE,
    INPUT_REGION and REGION_GRID_SPACING are all present."""
    sections = [config.RISK_SECTION,
                config.HAZARD_SECTION, config.GENERAL_SECTION]

    # No parameters at all: invalid.
    params = {}
    engine = helpers.create_job(params, sections=sections)
    self.assertFalse(engine.is_valid()[0])

    # Exposure alone is not enough.
    params = {config.EXPOSURE: "/a/path/to/exposure"}
    engine = helpers.create_job(params, sections=sections)
    self.assertFalse(engine.is_valid()[0])

    # Grid spacing without an input region is still invalid.
    params = {config.EXPOSURE: "/a/path/to/exposure",
              config.REGION_GRID_SPACING: 0.5}
    engine = helpers.create_job(params, sections=sections)
    self.assertFalse(engine.is_valid()[0])

    # All three mandatory parameters present: valid.
    params = {config.EXPOSURE: "/a/path/to/exposure",
              config.INPUT_REGION: "a, polygon",
              config.REGION_GRID_SPACING: 0.5}
    engine = helpers.create_job(params, sections=sections)
    self.assertTrue(engine.is_valid()[0])
def test_with_risk_processing_the_exposure_must_be_specified(self):
    """With a risk section present, the job is invalid until the
    EXPOSURE parameter is supplied."""
    # Use the config constants instead of the bare strings "HAZARD" and
    # "general", for consistency with the sibling tests in this file.
    sections = [config.RISK_SECTION,
                config.HAZARD_SECTION, config.GENERAL_SECTION]

    # No exposure: invalid.
    params = {}
    engine = helpers.create_job(params, sections=sections)
    self.assertFalse(engine.is_valid()[0])

    # Exposure given: valid.
    params = {config.EXPOSURE: "/a/path/to/exposure"}
    engine = helpers.create_job(params, sections=sections)
    self.assertTrue(engine.is_valid()[0])
def setUpClass(cls):
    """Import the risk demo job profile, store its exposure model and
    add three assets: two at site (1.0, 2.0) and one at (2.0, 2.0)."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    calc_proxy = helpers.create_job(
        {}, job_id=cls.job.id, oq_job_profile=jp, oq_job=cls.job)

    # storing the basic exposure model
    ClassicalRiskCalculator(calc_proxy).store_exposure_assets()

    [em_input] = models.inputs4job(cls.job.id, input_type="exposure")
    [model] = em_input.exposuremodel_set.all()

    site = shapes.Site(1.0, 2.0)
    # more assets at same location
    models.ExposureData(
        exposure_model=model, taxonomy="NOT_USED",
        asset_ref="ASSET_1", stco=1,
        site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
    models.ExposureData(
        exposure_model=model, taxonomy="NOT_USED",
        asset_ref="ASSET_2", stco=1,
        site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

    site = shapes.Site(2.0, 2.0)
    # just one asset at location
    models.ExposureData(
        exposure_model=model, taxonomy="NOT_USED",
        asset_ref="ASSET_3", stco=1,
        site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
def test_prepares_blocks_using_the_exposure_and_filtering(self):
    """When reading the exposure file, the mixin also provides
    filtering on the region specified in the REGION_VERTEX and
    REGION_GRID_SPACING parameters."""
    region_vertex = \
        "46.0, 9.14, 46.0, 9.15, 45.0, 9.15, 45.0, 9.14"

    params = {config.EXPOSURE: os.path.join(
        helpers.SCHEMA_EXAMPLES_DIR, EXPOSURE_TEST_FILE),
        config.INPUT_REGION: region_vertex,
        config.REGION_GRID_SPACING: 0.1,
        # the calculation mode is filled to let the mixin run
        config.CALCULATION_MODE: "Event Based"}

    a_job = helpers.create_job(params)

    # Only the two exposure sites inside the region survive filtering.
    expected_block = general.Block(
        (shapes.Site(9.15, 45.16667), shapes.Site(9.14777, 45.17999)))

    with Mixin(a_job, general.RiskJobMixin):
        a_job.partition()

        self.assertEqual(1, len(a_job.blocks_keys))
        self.assertEqual(
            expected_block,
            general.Block.from_kvs(a_job.blocks_keys[0]))
def setUpClass(cls):
    """Import the risk demo profile, store its exposure and add three
    extra "af/ctc-D/LR" assets (only when none exist yet); always
    record the three sites in ``cls.sites``."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    cls.job_ctxt = helpers.create_job(
        {}, job_id=cls.job.id, oq_job_profile=jp, oq_job=cls.job)
    calc = ClassicalRiskCalculator(cls.job_ctxt)
    calc.store_exposure_assets()
    # Renamed from `input`, which shadowed the builtin.
    [exposure_input] = models.inputs4job(cls.job.id, input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
    # Add some more assets. The first coordinate of each pair is the
    # longitude (~10), the second the latitude (~46), matching the
    # Site(lon, lat) argument order used by the other tests here.
    coos = [(10.000155392289116, 46.546194318563),
            (10.222034128255, 46.0071299176413),
            (10.520376165581, 46.247463385278)]
    for lon, lat in coos:
        site = shapes.Site(lon, lat)
        cls.sites.append(site)
        if assets:
            # Assets already exist; only collect the sites.
            continue
        location = geos.GEOSGeometry(site.point.to_wkt())
        asset = models.ExposureData(
            exposure_model=model, taxonomy="af/ctc-D/LR",
            asset_ref=helpers.random_string(6), stco=lon * 2,
            site=location, reco=1.1 * lat)
        asset.save()
def test_read_gmfs(self):
    """Verify _get_db_gmfs."""
    params = {
        'REGION_VERTEX': '40,-117, 42,-117, 42,-116, 40,-116',
        'REGION_GRID_SPACING': '1.0'}
    the_job = helpers.create_job(params, job_id=self.job.id)
    calculator = EventBasedRiskCalculator(the_job)

    self.assertEqual(3, len(calculator._gmf_db_list(self.job.id)))

    # only the keys in gmfs are used
    gmfs = calculator._get_db_gmfs([], self.job.id)
    self.assertEqual({}, gmfs)

    # only the keys in gmfs are used
    sites = [Site(lon, lat)
             for lon in xrange(-117, -115)
             for lat in xrange(40, 43)]
    gmfs = calculator._get_db_gmfs(sites, self.job.id)
    # avoid rounding errors
    for k, v in gmfs.items():
        gmfs[k] = [round(i, 1) for i in v]

    # Keys are "<row>!<col>" grid coordinates.
    self.assertEqual({
        '0!0': [0.1, 0.5, 0.0],
        '0!1': [0.2, 0.6, 0.0],
        '1!0': [0.4, 0.8, 1.3],
        '1!1': [0.3, 0.7, 1.2],
        '2!0': [0.0, 0.0, 1.0],
        '2!1': [0.0, 0.0, 1.1],
        }, gmfs)
def setUp(self):
    """Build a classical hazard job context, flush the KVS and store
    one canned mean curve at site (2.0, 5.0)."""
    self.params = dict(
        CALCULATION_MODE='Hazard',
        REFERENCE_VS30_VALUE=500,
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH)

    # Intensity measure levels matching the stored curve below.
    self.imls = [
        5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02, 2.6900e-02,
        3.7600e-02, 5.2700e-02, 7.3800e-02, 9.8000e-02, 1.0300e-01,
        1.4500e-01, 2.0300e-01, 2.8400e-01, 3.9700e-01, 5.5600e-01,
        7.7800e-01, 1.0900e+00, 1.5200e+00, 2.1300e+00
    ]

    self.job_ctxt = helpers.create_job(self.params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
    self.job_id = self.job_ctxt.job_id

    self.empty_mean_curve = []

    # deleting server side cached data
    kvs.get_client().flushall()

    mean_curve = [
        9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01, 8.1956e-01,
        6.9192e-01, 5.2866e-01, 3.6143e-01, 2.4231e-01, 2.2452e-01,
        1.2831e-01, 7.0352e-02, 3.6060e-02, 1.6579e-02, 6.4213e-03,
        2.0244e-03, 4.8605e-04, 8.1752e-05, 7.3425e-06
    ]

    self.site = shapes.Site(2.0, 5.0)
    self._store_curve_at(self.site, mean_curve)
def test_read_gmfs(self):
    """Verify _get_db_gmfs."""
    params = {
        'REGION_VERTEX': '40,-117, 42,-117, 42,-116, 40,-116',
        'REGION_GRID_SPACING': '1.0'
    }
    the_job = helpers.create_job(params, job_id=self.job.id)
    calculator = EventBasedRiskCalculator(the_job)

    self.assertEqual(3, len(calculator._gmf_db_list(self.job.id)))

    # only the keys in gmfs are used
    gmfs = calculator._get_db_gmfs([], self.job.id)
    self.assertEqual({}, gmfs)

    # only the keys in gmfs are used
    sites = [
        Site(lon, lat)
        for lon in xrange(-117, -115)
        for lat in xrange(40, 43)
    ]
    gmfs = calculator._get_db_gmfs(sites, self.job.id)
    # avoid rounding errors
    for k, v in gmfs.items():
        gmfs[k] = [round(i, 1) for i in v]

    # Keys are "<row>!<col>" grid coordinates.
    self.assertEqual(
        {
            '0!0': [0.1, 0.5, 0.0],
            '0!1': [0.2, 0.6, 0.0],
            '1!0': [0.4, 0.8, 1.3],
            '1!1': [0.3, 0.7, 1.2],
            '2!0': [0.0, 0.0, 1.0],
            '2!1': [0.0, 0.0, 1.1],
        }, gmfs)
def setUpClass(cls):
    """Import the risk demo profile, store its exposure and add three
    extra "af/ctc-D/LR" assets (only when none exist yet); always
    record the three sites in ``cls.sites``."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    cls.job_ctxt = helpers.create_job(
        {}, job_id=cls.job.id, oq_job_profile=jp, oq_job=cls.job)
    calc = ClassicalRiskCalculator(cls.job_ctxt)
    calc.store_exposure_assets()
    # Renamed from `input`, which shadowed the builtin.
    [exposure_input] = models.inputs4job(cls.job.id, input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
    # Add some more assets. The first coordinate of each pair is the
    # longitude (~10), the second the latitude (~46), matching the
    # Site(lon, lat) argument order used by the other tests here.
    coos = [(10.000155392289116, 46.546194318563),
            (10.222034128255, 46.0071299176413),
            (10.520376165581, 46.247463385278)]
    for lon, lat in coos:
        site = shapes.Site(lon, lat)
        cls.sites.append(site)
        if assets:
            # Assets already exist; only collect the sites.
            continue
        location = geos.GEOSGeometry(site.point.to_wkt())
        asset = models.ExposureData(
            exposure_model=model, taxonomy="af/ctc-D/LR",
            asset_ref=helpers.random_string(6), stco=lon * 2,
            site=location, reco=1.1 * lat)
        asset.save()
def setUpClass(cls):
    """Import the risk demo profile, store its exposure and add three
    "aa/aatc-D/LR" assets if the taxonomy is not in the database yet."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    calc_proxy = helpers.create_job(
        {}, job_id=cls.job.id, oq_job_profile=jp, oq_job=cls.job)

    # storing the basic exposure model
    ClassicalRiskCalculator(calc_proxy).store_exposure_assets()

    # Renamed from `input`, which shadowed the builtin.
    [exposure_input] = models.inputs4job(cls.job.id, input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")

    if not assets:
        # This model did not exist in the database before.
        site = shapes.Site(1.0, 2.0)
        # more assets at same location
        models.ExposureData(
            exposure_model=model, taxonomy="aa/aatc-D/LR",
            asset_ref="ASSET_1", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
        models.ExposureData(
            exposure_model=model, taxonomy="aa/aatc-D/LR",
            asset_ref="ASSET_2", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

        site = shapes.Site(2.0, 2.0)
        # just one asset at location
        models.ExposureData(
            exposure_model=model, taxonomy="aa/aatc-D/LR",
            asset_ref="ASSET_3", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
def setUp(self):
    """Build a classical hazard job context, flush the KVS and store
    one canned mean curve at site (2.0, 5.0)."""
    self.params = dict(
        CALCULATION_MODE='Hazard',
        REFERENCE_VS30_VALUE=500,
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH)

    # Intensity measure levels matching the stored curve below.
    self.imls = [5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
                 2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
                 9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
                 2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
                 1.0900e+00, 1.5200e+00, 2.1300e+00]

    self.job_ctxt = helpers.create_job(self.params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
    self.job_id = self.job_ctxt.job_id

    self.empty_mean_curve = []

    # deleting server side cached data
    kvs.get_client().flushall()

    mean_curve = [9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
                  8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
                  2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
                  3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
                  4.8605e-04, 8.1752e-05, 7.3425e-06]

    self.site = shapes.Site(2.0, 5.0)
    self._store_curve_at(self.site, mean_curve)
def test_with_risk_jobs_we_can_trigger_hazard_only_on_exposure_sites(self):
    """When we have hazard and risk jobs, we can ask to trigger
    the hazard computation only on the sites specified
    in the exposure file."""
    sections = [config.HAZARD_SECTION,
                config.GENERAL_SECTION, config.RISK_SECTION]

    input_region = "46.0, 9.0, 46.0, 10.0, 45.0, 10.0, 45.0, 9.0"

    exposure = "exposure-portfolio.xml"
    exposure_path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, exposure)

    params = {
        config.INPUT_REGION: input_region,
        config.REGION_GRID_SPACING: 0.1,
        config.EXPOSURE: exposure_path,
        config.COMPUTE_HAZARD_AT_ASSETS: True,
    }

    engine = helpers.create_job(params, sections=sections, base_path=".")

    expected_sites = [
        shapes.Site(9.15000, 45.16667),
        shapes.Site(9.15333, 45.12200),
        shapes.Site(9.14777, 45.17999),
    ]

    # assertEqual: assertEquals is a deprecated alias; the sibling
    # tests in this file already use assertEqual.
    self.assertEqual(expected_sites, engine.sites_to_compute())
def test_prepares_blocks_using_the_exposure(self):
    """The base risk calculator is able to read the exposure file,
    split the sites into blocks and store them in KVS.
    """
    params = {config.EXPOSURE: os.path.join(
        helpers.SCHEMA_EXAMPLES_DIR, EXPOSURE_TEST_FILE),
        "BASE_PATH": "."}
    a_job = helpers.create_job(params)

    calculator = general.BaseRiskCalculator(a_job)
    calculator.partition()

    # The three sites from the example exposure file end up in a
    # single block (block index 0).
    sites = [shapes.Site(9.15000, 45.16667),
             shapes.Site(9.15333, 45.12200),
             shapes.Site(9.14777, 45.17999)]
    expected = general.Block(a_job.job_id, 0, sites)

    self.assertEqual(1, len(a_job.blocks_keys))
    self.assertEqual(
        expected,
        general.Block.from_kvs(a_job.job_id, a_job.blocks_keys[0]))
def setUp(self):
    """Create a hazard job context and a classical calculator for it."""
    config_params = {
        'CALCULATION_MODE': 'Hazard',
        'SOURCE_MODEL_LOGIC_TREE_FILE_PATH': SIMPLE_FAULT_SRC_MODEL_LT,
        'GMPE_LOGIC_TREE_FILE_PATH': SIMPLE_FAULT_GMPE_LT,
        'BASE_PATH': SIMPLE_FAULT_BASE_PATH,
    }
    self.job_ctxt = create_job(config_params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
def setUp(self):
    """Create a hazard job context (mean curve computation enabled)
    and a classical calculator for it."""
    config_params = {
        'CALCULATION_MODE': 'Hazard',
        'COMPUTE_MEAN_HAZARD_CURVE': 'true',
        'SOURCE_MODEL_LOGIC_TREE_FILE_PATH': SIMPLE_FAULT_SRC_MODEL_LT,
        'GMPE_LOGIC_TREE_FILE_PATH': SIMPLE_FAULT_GMPE_LT,
        'BASE_PATH': SIMPLE_FAULT_BASE_PATH,
    }
    self.calc_proxy = create_job(config_params)
    self.calculator = classical.ClassicalHazardCalculator(self.calc_proxy)
def test_site_keys(self):
    """Verify _sites_to_gmf_keys"""
    params = {
        'REGION_VERTEX': '40,-117, 42,-117, 42,-116, 40,-116',
        'REGION_GRID_SPACING': '1.0'}

    with Mixin(helpers.create_job(params, job_id=self.job.id),
               ProbabilisticEventMixin) as mixin:
        keys = mixin._sites_to_gmf_keys([Site(-117, 40), Site(-116, 42)])

        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(["0!0", "2!1"], keys)
def test_site_keys(self):
    """Verify _sites_to_gmf_keys"""
    job_params = {
        'REGION_VERTEX': '40,-117, 42,-117, 42,-116, 40,-116',
        'REGION_GRID_SPACING': '1.0',
    }
    job = helpers.create_job(job_params, job_id=self.job.id)
    calc = EventBasedRiskCalculator(job)

    sites = [Site(-117, 40), Site(-116, 42)]
    self.assertEqual(["0!0", "2!1"], calc._sites_to_gmf_keys(sites))
def test_computes_sites_in_region_with_risk_jobs(self):
    """When we have hazard and risk jobs, we always use the region."""
    sections = [config.HAZARD_SECTION,
                config.GENERAL_SECTION, config.RISK_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)

    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    # assertEqual: assertEquals is a deprecated alias; the identical
    # sibling test in this file already uses assertEqual.
    self.assertEqual(expected_sites, engine.sites_to_compute())
def test_site_keys(self):
    """Verify _sites_to_gmf_keys"""
    job = helpers.create_job(
        {'REGION_VERTEX': '40,-117, 42,-117, 42,-116, 40,-116',
         'REGION_GRID_SPACING': '1.0'},
        job_id=self.job.id)
    calculator = EventBasedRiskCalculator(job)

    result = calculator._sites_to_gmf_keys(
        [Site(-117, 40), Site(-116, 42)])
    self.assertEqual(["0!0", "2!1"], result)
def test_read_curve(self):
    """Verify _get_db_curve."""
    the_job = helpers.create_job({}, job_id=self.job.id)
    calculator = ClassicalRiskCalculator(the_job)

    # NOTE(review): compares the curve directly against a list of
    # (abscissa, ordinate) pairs -- relies on the curve type defining
    # equality with zipped pairs; confirm against the curve class.
    curve1 = calculator._get_db_curve(Site(-122.2, 37.5))
    self.assertEqual(curve1,
                     zip([0.005, 0.007, 0.0098, 0.0137],
                         [0.354, 0.114, 0.023, 0.002]))

    curve2 = calculator._get_db_curve(Site(-122.1, 37.5))
    self.assertEqual(curve2,
                     zip([0.005, 0.007, 0.0098, 0.0137],
                         [0.454, 0.214, 0.123, 0.102]))
def test_read_curve(self):
    """Verify _get_db_curve."""
    the_job = helpers.create_job({}, job_id=self.job.id)
    calculator = ClassicalRiskCalculator(the_job)

    # Curve stored at the first site: check x and y values separately.
    curve1 = calculator._get_db_curve(Site(-122.2, 37.5))
    self.assertEqual(list(curve1.abscissae),
                     [0.005, 0.007, 0.0098, 0.0137])
    self.assertEqual(list(curve1.ordinates),
                     [0.354, 0.114, 0.023, 0.002])

    # Curve stored at the second site.
    curve2 = calculator._get_db_curve(Site(-122.1, 37.5))
    self.assertEqual(list(curve2.abscissae),
                     [0.005, 0.007, 0.0098, 0.0137])
    self.assertEqual(list(curve2.ordinates),
                     [0.454, 0.214, 0.123, 0.102])
def test_hazard_computation_type(self):
    """Region (REGION_VERTEX) and specific sites (SITES) are not
    supported at the same time."""
    # Sites only: valid.
    params = {config.SITES: "some, sites"}
    job = helpers.create_job(
        params, validator=config.ComputationTypeValidator(params))
    self.assertTrue(job.is_valid()[0])

    # Region only: valid.
    params = {config.INPUT_REGION: "a, polygon"}
    job = helpers.create_job(
        params, validator=config.ComputationTypeValidator(params))
    self.assertTrue(job.is_valid()[0])

    # Both at the same time: invalid.
    params = {config.SITES: "some, sites",
              config.INPUT_REGION: "a, polygon"}
    job = helpers.create_job(
        params, validator=config.ComputationTypeValidator(params))
    self.assertFalse(job.is_valid()[0])
def test_computes_specific_sites_when_specified(self):
    """When we have hazard jobs only, and we specify a list of sites
    (SITES parameter in the configuration file) we trigger the
    computation only on those sites.
    """
    sections = [config.HAZARD_SECTION, config.GENERAL_SECTION]
    sites = "1.0, 1.5, 1.5, 2.5, 3.0, 3.0, 4.0, 4.5"

    params = {config.SITES: sites}

    engine = helpers.create_job(params, sections=sections)

    # Each expected Site swaps the order of the corresponding pair in
    # the SITES string.
    expected_sites = [shapes.Site(1.5, 1.0), shapes.Site(2.5, 1.5),
                      shapes.Site(3.0, 3.0), shapes.Site(4.5, 4.0)]

    # assertEqual: assertEquals is a deprecated alias; the identical
    # sibling test in this file already uses assertEqual.
    self.assertEqual(expected_sites, engine.sites_to_compute())
def test_read_curve(self):
    """Verify _get_db_curve."""
    # assertEqual throughout: assertEquals is a deprecated alias.
    with Mixin(helpers.create_job({}, job_id=self.job.id),
               ClassicalPSHABasedMixin) as mixin:
        curve1 = mixin._get_db_curve(Site(-122.2, 37.5))
        self.assertEqual(list(curve1.abscissae),
                         [0.005, 0.007, 0.0098, 0.0137])
        self.assertEqual(list(curve1.ordinates),
                         [0.354, 0.114, 0.023, 0.002])

        curve2 = mixin._get_db_curve(Site(-122.1, 37.5))
        self.assertEqual(list(curve2.abscissae),
                         [0.005, 0.007, 0.0098, 0.0137])
        self.assertEqual(list(curve2.ordinates),
                         [0.454, 0.214, 0.123, 0.102])
def setUp(self):
    """Build a hazard job context with site-condition parameters
    (Sadigh site type, reference depths, VS30 type) and a classical
    calculator for it."""
    self.params = dict(
        CALCULATION_MODE='Hazard',
        REFERENCE_VS30_VALUE=500,
        SADIGH_SITE_TYPE='Rock',
        REFERENCE_DEPTH_TO_2PT5KM_PER_SEC_PARAM='5.0',
        DEPTHTO1PT0KMPERSEC='33.33',
        VS30_TYPE='measured',
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH)
    self.job_ctxt = helpers.create_job(self.params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
    self.job_id = self.job_ctxt.job_id
def test_computes_sites_in_region_when_specified(self):
    """When we have hazard jobs only, and we specify a region, we use
    the standard algorithm to split the region in sites. In this
    example, the region has just four sites (the region boundaries).
    """
    sections = [config.HAZARD_SECTION, config.GENERAL_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)

    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    # assertEqual: assertEquals is a deprecated alias; the identical
    # sibling test in this file already uses assertEqual.
    self.assertEqual(expected_sites, engine.sites_to_compute())
def test_computes_sites_in_region_with_risk_jobs(self):
    """When we have hazard and risk jobs, we always use the region."""
    sections = [config.HAZARD_SECTION,
                config.GENERAL_SECTION, config.RISK_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)

    # The four corners of the region polygon.
    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    self.assertEqual(expected_sites, engine.sites_to_compute())
def setUp(self):
    """Create an empty-parameter job, record the expected mean curve
    and flush the KVS."""
    self.params = {}
    self.job = helpers.create_job(self.params)
    self.job_id = self.job.job_id

    # Canned mean hazard curve the tests compare against.
    self.expected_mean_curve = numpy.array([9.8542200e-01, 9.8196600e-01,
        9.5842000e-01, 9.2639600e-01, 8.6713000e-01, 7.7081800e-01,
        6.3448600e-01, 4.7256800e-01, 3.3523400e-01, 3.1255000e-01,
        1.7832000e-01, 9.0883400e-02, 4.2189200e-02, 1.7874200e-02,
        6.7449200e-03, 2.1658200e-03, 5.3878600e-04, 9.4369400e-05,
        8.9830380e-06])

    self.empty_curve = []

    # deleting server side cached data
    kvs.flush()
def test_computes_specific_sites_when_specified(self):
    """When we have hazard jobs only, and we specify a list of sites
    (SITES parameter in the configuration file) we trigger the
    computation only on those sites.
    """
    sections = [config.HAZARD_SECTION, config.GENERAL_SECTION]
    sites = "1.0, 1.5, 1.5, 2.5, 3.0, 3.0, 4.0, 4.5"

    params = {config.SITES: sites}

    engine = helpers.create_job(params, sections=sections)

    # Each expected Site swaps the order of the corresponding pair in
    # the SITES string.
    expected_sites = [shapes.Site(1.5, 1.0), shapes.Site(2.5, 1.5),
                      shapes.Site(3.0, 3.0), shapes.Site(4.5, 4.0)]

    self.assertEqual(expected_sites, engine.sites_to_compute())
def test_get_gmvs_at(self):
    """Ground motion values are returned per site; sites outside the
    stored data yield an empty list."""
    job = helpers.create_job(
        {"REGION_VERTEX": "40,-117.5, 42,-117.5, 42,-116, 40,-116",
         "REGION_GRID_SPACING": "1.0"},
        job_id=self.job.id)
    calc = core.EventBasedRiskCalculator(job)

    self.assertEqual([0.1, 0.5, 1.0],
                     calc._get_gmvs_at(Site(-117, 40)))
    self.assertEqual([0.2, 0.6, 1.1],
                     calc._get_gmvs_at(Site(-117, 41)))
    self.assertEqual([], calc._get_gmvs_at(Site(-117.5, 40)))
def test_get_gmvs_at(self):
    """Ground motion values are returned per site; a site with no
    stored data yields an empty list."""
    params = {
        "REGION_VERTEX": "40,-117.5, 42,-117.5, 42,-116, 40,-116",
        "REGION_GRID_SPACING": "1.0"}
    the_job = helpers.create_job(params, job_id=self.job.id)
    calculator = core.EventBasedRiskCalculator(the_job)

    self.assertEqual([0.1, 0.5, 1.0],
                     calculator._get_gmvs_at(Site(-117, 40)))
    self.assertEqual([0.2, 0.6, 1.1],
                     calculator._get_gmvs_at(Site(-117, 41)))
    self.assertEqual([], calculator._get_gmvs_at(Site(-117.5, 40)))
def test_computes_sites_in_region_when_specified(self):
    """When we have hazard jobs only, and we specify a region, we use
    the standard algorithm to split the region in sites. In this
    example, the region has just four sites (the region boundaries).
    """
    sections = [config.HAZARD_SECTION, config.GENERAL_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)

    # The four corners of the region polygon.
    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    self.assertEqual(expected_sites, engine.sites_to_compute())
def setUp(self):
    """Create an empty-parameter job, record the expected mean curve
    and flush the KVS."""
    self.params = {}
    self.job = helpers.create_job(self.params)
    self.job_id = self.job.job_id

    # Canned mean hazard curve the tests compare against.
    self.expected_mean_curve = numpy.array([
        9.8542200e-01, 9.8196600e-01, 9.5842000e-01, 9.2639600e-01,
        8.6713000e-01, 7.7081800e-01, 6.3448600e-01, 4.7256800e-01,
        3.3523400e-01, 3.1255000e-01, 1.7832000e-01, 9.0883400e-02,
        4.2189200e-02, 1.7874200e-02, 6.7449200e-03, 2.1658200e-03,
        5.3878600e-04, 9.4369400e-05, 8.9830380e-06
    ])

    self.empty_curve = []

    # deleting server side cached data
    kvs.get_client().flushall()
def test_deterministic_is_not_supported_alone(self):
    """When we specify a deterministic computation, we only
    support hazard + risk jobs."""
    sections = [config.RISK_SECTION,
                config.HAZARD_SECTION, config.GENERAL_SECTION]

    params = {config.CALCULATION_MODE: config.DETERMINISTIC_MODE}

    validator = config.DeterministicComputationValidator(sections, params)

    # NOTE(review): params go only to the validator; create_job gets
    # None -- presumably only the validator needs them here; confirm
    # against helpers.create_job.
    engine = helpers.create_job(
        None, sections=sections, validator=validator)

    self.assertTrue(engine.is_valid()[0])

    # The validator was built with this same list object, so removing
    # the risk section flips the validation result.
    sections.remove(config.RISK_SECTION)

    self.assertFalse(engine.is_valid()[0])
def setUp(self):
    """Create a job context, store it in the KVS and register a
    100-point lognormal vulnerability function (both plain and
    retrofitted) under the job's vulnerability key."""
    self.params = {}
    self.params["OUTPUT_DIR"] = helpers.OUTPUT_DIR
    self.params["BASE_PATH"] = "."
    self.params["INVESTIGATION_TIME"] = 50.0
    self.job_ctxt = helpers.create_job(
        self.params, base_path=".", job_id=self.job.id,
        oq_job=self.job, oq_job_profile=models.profile4job(self.job.id))
    self.job_id = self.job_ctxt.job_id
    self.job_ctxt.to_kvs()

    # Arguments: IMLs, loss ratios, coefficients of variation
    # (all zero), distribution ("LN" = lognormal).
    self.vulnerability_function2 = \
        vulnerability_function.VulnerabilityFunction([
        0.0, 0.04, 0.08, 0.12, 0.16, 0.2, 0.24, 0.28, 0.32, 0.36,
        0.4, 0.44, 0.48, 0.53, 0.57, 0.61, 0.65, 0.69, 0.73, 0.77,
        0.81, 0.85, 0.89, 0.93, 0.97, 1.01, 1.05, 1.09, 1.13, 1.17,
        1.21, 1.25, 1.29, 1.33, 1.37, 1.41, 1.45, 1.49, 1.54, 1.58,
        1.62, 1.66, 1.7, 1.74, 1.78, 1.82, 1.86, 1.9, 1.94, 1.98,
        2.02, 2.06, 2.1, 2.14, 2.18, 2.22, 2.26, 2.3, 2.34, 2.38,
        2.42, 2.46, 2.51, 2.55, 2.59, 2.63, 2.67, 2.71, 2.75, 2.79,
        2.83, 2.87, 2.91, 2.95, 2.99, 3.03, 3.07, 3.11, 3.15, 3.19,
        3.23, 3.27, 3.31, 3.35, 3.39, 3.43, 3.47, 3.52, 3.56, 3.6,
        3.64, 3.68, 3.72, 3.76, 3.8, 3.84, 3.88, 3.92, 3.96, 4.0],
        [0.0, 0.0, 0.0, 0.01, 0.04, 0.07, 0.11, 0.15, 0.2, 0.25,
        0.3, 0.35, 0.39, 0.43, 0.47, 0.51, 0.55, 0.58, 0.61, 0.64,
        0.67, 0.69, 0.71, 0.73, 0.75, 0.77, 0.79, 0.8, 0.81, 0.83,
        0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.89, 0.9, 0.91, 0.91,
        0.92, 0.92, 0.93, 0.93, 0.94, 0.94, 0.94, 0.95, 0.95, 0.95,
        0.95, 0.96, 0.96, 0.96, 0.96, 0.97, 0.97, 0.97, 0.97, 0.97,
        0.97, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98,
        0.98, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99,
        0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99,
        0.99, 0.99, 0.99, 0.99, 0.99, 1.0, 1.0, 1.0, 1.0, 1.0],
        [0.0] * 100, "LN")

    # deleting keys in kvs
    kvs.get_client().flushall()

    kvs.set_value_json_encoded(
        kvs.tokens.vuln_key(self.job_id),
        {"ID": self.vulnerability_function2.to_json()})
    kvs.set_value_json_encoded(
        kvs.tokens.vuln_key(self.job_id, retrofitted=True),
        {"ID": self.vulnerability_function2.to_json()})
def setUp(self):
    """Build a classical hazard calculator, record the expected curve
    and flush the KVS."""
    self.params = dict(
        CALCULATION_MODE='Hazard',
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH)

    self.job_ctxt = helpers.create_job(self.params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
    self.job_id = self.job_ctxt.job_id

    # Canned hazard curve the tests compare against.
    self.expected_curve = numpy.array([9.9178000e-01, 9.8892000e-01,
        9.6903000e-01, 9.4030000e-01, 8.8405000e-01, 7.8782000e-01,
        6.4897250e-01, 4.8284250e-01, 3.4531500e-01, 3.2337000e-01,
        1.8880500e-01, 9.5574000e-02, 4.3707250e-02, 1.9643000e-02,
        8.1923000e-03, 2.9157000e-03, 7.9955000e-04, 1.5233000e-04,
        1.5582000e-05])

    # deleting server side cached data
    kvs.get_client().flushall()
def test_computes_sites_in_region_when_specified_workaround_1027041(self):
    """Region splitting produces the same four sites when the profile
    enables the workaround for bug 1027041."""

    class FakeJobProfile(object):
        # Minimal profile stub exposing only the workaround flag.

        @property
        def workaround_1027041(self):
            return True

    sections = [config.HAZARD_SECTION, config.GENERAL_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)
    engine.oq_job_profile = FakeJobProfile()

    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    self.assertEqual(expected_sites, engine.sites_to_compute())
def setUp(self):
    """Prepare a classical hazard calculator (fixed job_id=99) with
    canned per-realization results stored in the KVS under keys of the
    form "<job_id>/<realization>"."""
    params = dict(
        CALCULATION_MODE='Hazard',
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH,
        OUTPUT_DIR="output",
        NUMBER_OF_LOGIC_TREE_SAMPLES=2,
        WIDTH_OF_MFD_BIN=1)
    self.job_ctxt = create_job(params, job_id=99)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)

    # Store the canned result data in the KVS.
    key = self.job_ctxt.job_id
    for realization in xrange(2):
        key = "%s/%s" % (self.job_ctxt.job_id, realization + 1)
        TestStore.put(key, self.mock_results[realization])
        self.keys.append(key)
    LOG.debug("keys = '%s'" % self.keys)
    self.calculator.calc = self.FakeLogicTreeProcessor()
    self.calculator.cache = dict()
def setUp(self):
    """Prepare a classical hazard calculator with canned
    per-realization results stored in the KVS under keys of the form
    "<job_id>/<realization>"."""
    params = dict(
        CALCULATION_MODE='Hazard',
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH,
        OUTPUT_DIR="output",
        NUMBER_OF_LOGIC_TREE_SAMPLES=2,
        WIDTH_OF_MFD_BIN=1)
    self.job_ctxt = create_job(params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)

    # Store the canned result data in the KVS.
    key = self.job_ctxt.job_id
    for realization in xrange(2):
        key = "%s/%s" % (self.job_ctxt.job_id, realization + 1)
        TestStore.put(key, self.mock_results[realization])
        self.keys.append(key)
    LOG.debug("keys = '%s'" % self.keys)
    self.calculator.calc = self.FakeLogicTreeProcessor()
    self.calculator.cache = dict()
def setUp(self):
    """Build a classical hazard calculator, record the expected curve
    and flush the KVS."""
    self.params = dict(
        CALCULATION_MODE='Hazard',
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH)

    self.job_ctxt = helpers.create_job(self.params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
    self.job_id = self.job_ctxt.job_id

    # Canned hazard curve the tests compare against.
    self.expected_curve = numpy.array([
        9.9178000e-01, 9.8892000e-01, 9.6903000e-01, 9.4030000e-01,
        8.8405000e-01, 7.8782000e-01, 6.4897250e-01, 4.8284250e-01,
        3.4531500e-01, 3.2337000e-01, 1.8880500e-01, 9.5574000e-02,
        4.3707250e-02, 1.9643000e-02, 8.1923000e-03, 2.9157000e-03,
        7.9955000e-04, 1.5233000e-04, 1.5582000e-05
    ])

    # deleting server side cached data
    kvs.get_client().flushall()
def setUpClass(cls):
    """Import the risk demo profile, store its exposure and add three
    "aa/aatc-D/LR" assets if the taxonomy is not in the database yet."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    calc_proxy = helpers.create_job(
        {}, job_id=cls.job.id, oq_job_profile=jp, oq_job=cls.job)

    # storing the basic exposure model
    ClassicalRiskCalculator(calc_proxy).store_exposure_assets()

    # Renamed from `input`, which shadowed the builtin.
    [exposure_input] = models.inputs4job(cls.job.id, input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")

    if not assets:
        # This model did not exist in the database before.
        site = shapes.Site(1.0, 2.0)
        # more assets at same location
        models.ExposureData(
            exposure_model=model, taxonomy="aa/aatc-D/LR",
            asset_ref="ASSET_1", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
        models.ExposureData(
            exposure_model=model, taxonomy="aa/aatc-D/LR",
            asset_ref="ASSET_2", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

        site = shapes.Site(2.0, 2.0)
        # just one asset at location
        models.ExposureData(
            exposure_model=model, taxonomy="aa/aatc-D/LR",
            asset_ref="ASSET_3", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
def test_computes_sites_in_region_when_specified(self):
    """When we have hazard jobs only, and we specify a region, we use
    the standard algorithm to split the region in sites. In this
    example, the region has just four sites (the region boundaries).
    """

    class FakeJobProfile(object):
        # Minimal profile stub: the workaround flag is off.

        @property
        def workaround_1027041(self):
            return False

    sections = [config.HAZARD_SECTION, config.GENERAL_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)
    engine.oq_job_profile = FakeJobProfile()

    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    self.assertEqual(expected_sites, engine.sites_to_compute())
def setUp(self):
    """Set up a classical hazard calculator over eight explicit sites,
    with the do_curves/do_means/do_quantiles phases mocked out and the
    hazard block size patched to 3."""
    params = dict(
        CALCULATION_MODE='Hazard',
        NUMBER_OF_LOGIC_TREE_SAMPLES=2,
        WIDTH_OF_MFD_BIN=1,
        SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
        GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
        BASE_PATH=SIMPLE_FAULT_BASE_PATH,
        # Eight coordinate pairs; a comma was missing between the
        # fourth and fifth pair ("-122.8 38.0").
        SITES=('38.0, -121.9, 38.0, -121.8, 38.0, -122.9, 38.0, -122.8, '
               '38.0, -123.9, 38.0, -123.8, 38.0, -124.9, 38.0, -124.8'))
    self.job_ctxt = create_job(params)
    self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
    self.calculator.calc = self.FakeLogicTreeProcessor()
    self.calculator.cache = dict()

    # Replace the heavy computation phases with signature-checking
    # mocks, keeping the originals around in self.methods.
    for method in ["do_curves", "do_means", "do_quantiles"]:
        self.methods[method] = getattr(self.calculator, method)
        setattr(self.calculator, method,
                mock.mocksignature(self.methods[method]))

    # Force a small block size so the sites are split across blocks.
    patcher = patch("openquake.utils.config.hazard_block_size")
    patcher.start().return_value = 3
    self.patchers.append(patcher)
def test_computes_sites_in_region_with_risk_jobs(self):
    """When we have hazard and risk jobs, we always use the region."""

    class FakeJobProfile(object):
        # Minimal profile stub: the workaround flag is off.

        @property
        def workaround_1027041(self):
            return False

    sections = [config.HAZARD_SECTION,
                config.GENERAL_SECTION, config.RISK_SECTION]
    input_region = "2.0, 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, 1.0"

    params = {config.INPUT_REGION: input_region,
              config.REGION_GRID_SPACING: 1.0}

    engine = helpers.create_job(params, sections=sections)
    engine.oq_job_profile = FakeJobProfile()

    expected_sites = [shapes.Site(1.0, 1.0), shapes.Site(2.0, 1.0),
                      shapes.Site(1.0, 2.0), shapes.Site(2.0, 2.0)]

    self.assertEqual(expected_sites, engine.sites_to_compute())