def test_read_sites_from_exposure(self):
    """
    Test reading site data from an exposure file using
    :py:function:`openquake.risk.read_sites_from_exposure`.

    The comparison is done on *sets* of sites: the exposure parser's
    iteration order is an implementation detail, so asserting list
    equality would make the test brittle.  This also matches the
    sibling version of this test that compares sets.
    """
    job_config_file = helpers.testdata_path("simplecase/config.gem")
    test_job = helpers.job_from_file(job_config_file)

    expected_sites = set([
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013),
    ])

    # Order-independent comparison: only membership matters.
    actual_sites = set(engine.read_sites_from_exposure(test_job))
    self.assertEqual(expected_sites, actual_sites)
def test_read_sites_from_exposure(self):
    """Test reading site data from an exposure file using
    :py:function:`openquake.risk.read_sites_from_exposure`.
    """
    cfg_path = helpers.testdata_path('simplecase/config.gem')
    job = helpers.job_from_file(cfg_path)

    # Store the exposure assets first so the reader has data to pull.
    calculator = core.EventBasedRiskCalculator(job)
    calculator.store_exposure_assets()

    # Compare as sets: the parser's iteration order is not guaranteed.
    wanted = {
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013),
    }
    got = set(engine.read_sites_from_exposure(job))
    self.assertEqual(wanted, got)
def partition(self):
    """Split the sites to compute in blocks and store them in the
    underlying KVS system."""
    # Imported here to avoid a circular import at module load time.
    # pylint: disable=W0404
    from openquake import engine

    # pylint: disable=W0201
    self.job_ctxt.blocks_keys = []

    site_list = engine.read_sites_from_exposure(self.job_ctxt)
    for site_block in split_into_blocks(self.job_ctxt.job_id, site_list):
        self.job_ctxt.blocks_keys.append(site_block.block_id)
        site_block.to_kvs()

    # One key was appended per block, so the key count is the block count.
    LOG.info("Job has partitioned %s sites into %s blocks",
             len(site_list), len(self.job_ctxt.blocks_keys))