def test__insert_input_files(self):
    """A fresh input row is created for the GMPE logic tree while the
    existing row is reused for the source model logic tree."""
    engine._insert_input_files(self.PARAMS, self.job, False)
    # The GMPE logic tree input must be a brand new record.
    [new_glt] = models.inputs4job(self.job.id, input_type="lt_gmpe")
    self.assertNotEqual(self.glt_i.id, new_glt.id)
    # The source model logic tree row must be the pre-existing one.
    [new_slt] = models.inputs4job(self.job.id, input_type="lt_source")
    self.assertEqual(self.slt_i.id, new_slt.id)
def test_model_content_many_files(self):
    """The raw contents of both logic tree files end up in the
    uiapi.model_content table with an 'xml' content type."""
    # Read the expected file contents; use context managers so the file
    # handles are closed deterministically (the original leaked them).
    with open(self.SLT, 'r') as fh:
        slt_content = fh.read()
    with open(self.GLT, 'r') as fh:
        glt_content = fh.read()
    engine._insert_input_files(self.PARAMS, self.job, True)
    [slt] = models.inputs4job(self.job.id, input_type="lt_source")
    [glt] = models.inputs4job(self.job.id, input_type="lt_gmpe")
    self.assertEqual('xml', slt.model_content.content_type)
    self.assertEqual(slt_content, slt.model_content.raw_content)
    self.assertEqual('xml', glt.model_content.content_type)
    self.assertEqual(glt_content, glt.model_content.raw_content)
def test__insert_input_files(self):
    """A fresh GMPE logic tree input is created, the source model logic
    tree row is reused, and the logic tree is linked to the hazard
    source via a `Src2ltsrc` record."""
    engine._insert_input_files(self.PARAMS, self.job, False)
    [new_glt] = models.inputs4job(self.job.id, input_type="lt_gmpe")
    self.assertNotEqual(self.glt_i.id, new_glt.id)
    [new_slt] = models.inputs4job(self.job.id, input_type="lt_source")
    self.assertEqual(self.slt_i.id, new_slt.id)
    # Make sure the LT and the hazard source have been associated.
    [link] = models.Src2ltsrc.objects.filter(lt_src=new_slt)
    self.assertEqual("dissFaultModel.xml", link.filename)
    self.assertEqual(new_slt, link.lt_src)
    [hzrd] = models.inputs4job(self.job.id, input_type="source")
    self.assertEqual(hzrd, link.hzrd_src)
def make_assets(self):
    """Create an exposure model with three assets, save everything and
    return the assets as a 3-tuple."""
    [ism] = models.inputs4job(self.job.id, input_type="exposure")
    em = models.ExposureModel(
        owner=ism.owner, input=ism, name="AAA", category="single_asset",
        reco_type="aggregated", reco_unit="USD", stco_type="aggregated",
        stco_unit="USD")
    em.save()
    site_1 = shapes.Site(-116.0, 41.0)
    site_2 = shapes.Site(-117.0, 42.0)
    # (taxonomy, asset_ref, number_of_units, site) for each asset.
    specs = [("RC", "asset_1", 100, site_1),
             ("RM", "asset_2", 40, site_2),
             ("RM", "asset_3", 40, site_2)]
    assets = []
    for taxonomy, ref, units, site in specs:
        asset = models.ExposureData(
            exposure_model=em, taxonomy=taxonomy, asset_ref=ref,
            number_of_units=units, stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1)
        asset.save()
        assets.append(asset)
    return tuple(assets)
def store_exposure_assets(job_id, base_path):
    """
    Load exposure assets from input file and store them into database.

    If the given job already has an input of type `exposure`, this
    function simply returns without doing anything.

    :param job_id: the id of the job where the assets belong to.
    :type job_id: integer
    :param base_path: the path where the application has been triggered.
        It is used to properly locate the input files, that are stored
        with a relative path.
    :type base_path: string
    """
    [exposure_input] = models.inputs4job(job_id, "exposure")
    # An already-serialized exposure model means there is nothing to do.
    if exposure_input.exposuremodel_set.all().count() > 0:
        return
    path = os.path.join(base_path, exposure_input.path)
    parser = exposure.ExposureModelFile(path)
    ExposureDBWriter(exposure_input).serialize(parser)
def ln_input2job(path, input_type):
    """Link identical or newly created input to the given job."""
    digest = _file_digest(path)
    # An input with the same digest/type already linked to the job means
    # there is nothing left to do.
    already_linked = any(
        li.digest == digest and li.input_type == input_type
        for li in inputs4job(job.id))
    if already_linked:
        return
    in_model = (None if force_inputs
                else _identical_input(input_type, digest, job.owner.id))
    if in_model is None:
        # Save the raw input file contents to the DB, guessing the
        # content type from the file itself.
        content = ModelContent()
        with open(path, 'rb') as fh:
            content.raw_content = fh.read()
        content.content_type = _get_content_type(path)
        content.save()
        in_model = Input(path=path, input_type=input_type,
                         owner=job.owner, size=os.path.getsize(path),
                         digest=digest, model_content=content)
        in_model.save()
    # Make sure we don't link to the same input more than once.
    if in_model.id not in inputs_seen:
        inputs_seen.add(in_model.id)
        Input2job(input=in_model, oq_job=job).save()
    return in_model
def test_inputs4job_with_single_input(self):
    """The single linked input is returned."""
    linked = models.Input(owner=self.job.owner, path=self.paths.next(),
                          input_type="exposure", size=self.sizes.next())
    linked.save()
    models.Input2job(oq_job=self.job, input=linked).save()
    self.assertEqual([linked], models.inputs4job(self.job.id))
def ln_input2job(path, input_type):
    """Link identical or newly created input to the given job."""
    digest = _file_digest(path)
    # An input with the same digest/type already linked to the job means
    # there is nothing left to do.
    already_linked = any(
        li.digest == digest and li.input_type == input_type
        for li in inputs4job(job.id))
    if already_linked:
        return
    in_model = (None if force_inputs
                else _identical_input(input_type, digest, job.owner.id))
    if in_model is None:
        # Save the raw input file contents to the DB, guessing the
        # content type from the file itself.
        content = ModelContent()
        with open(path, 'rb') as fh:
            content.raw_content = fh.read()
        content.content_type = _get_content_type(path)
        content.save()
        in_model = Input(path=path, input_type=input_type,
                         owner=job.owner, size=os.path.getsize(path),
                         digest=digest, model_content=content)
        in_model.save()
    # Make sure we don't link to the same input more than once.
    # NOTE(review): `inputs_seen` is treated as a list here (`append`)
    # but as a set (`add`) in a sibling version of this function --
    # confirm the intended container type.
    if in_model.id not in inputs_seen:
        inputs_seen.append(in_model.id)
        Input2job(input=in_model, oq_job=job).save()
    return in_model
def test_inputs4job_with_correct_input_type_and_path(self):
    """Only the source input with the matching path is returned."""
    def link_source_input(path):
        # Create a "source" input with the given path and link it.
        inp = models.Input(owner=self.job.owner, path=path,
                           input_type="source", size=self.sizes.next())
        inp.save()
        models.Input2job(oq_job=self.job, input=inp).save()
        return inp

    link_source_input(self.paths.next())
    path = self.paths.next()
    wanted = link_source_input(path)
    link_source_input(self.paths.next())
    self.assertEqual(
        [wanted],
        models.inputs4job(self.job.id, input_type="source", path=path))
def setUpClass(cls):
    """Prepare a job, store the basic exposure model and add three
    extra assets (two sharing a location)."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    calc_proxy = helpers.create_job({}, job_id=cls.job.id,
                                    oq_job_profile=jp, oq_job=cls.job)
    # storing the basic exposure model
    ClassicalRiskCalculator(calc_proxy).store_exposure_assets()
    [em_input] = models.inputs4job(cls.job.id, input_type="exposure")
    [model] = em_input.exposuremodel_set.all()

    def add_asset(site, ref):
        # Save one asset at the given site.
        models.ExposureData(
            exposure_model=model, taxonomy="NOT_USED", asset_ref=ref,
            stco=1, site=geos.GEOSGeometry(site.point.to_wkt()),
            reco=1).save()

    shared = shapes.Site(1.0, 2.0)
    # more assets at same location
    add_asset(shared, "ASSET_1")
    add_asset(shared, "ASSET_2")
    # just one asset at location
    add_asset(shapes.Site(2.0, 2.0), "ASSET_3")
def _store_dsc_fmodel(self):
    """Store a discrete fragility model with two limit states and four
    fragility functions; return the model."""
    [ism] = models.inputs4job(self.job.id, input_type="fragility")
    fmodel = models.FragilityModel(
        owner=ism.owner, input=ism, imls=[0.1, 0.3, 0.5, 0.7], imt="mmi",
        lss=["LS1", "LS2"], format="discrete", no_damage_limit=0.05)
    fmodel.save()
    # One fragility function per (taxonomy, limit state) pair.
    ffd_specs = [
        ("RC", "LS2", [0.00, 0.05, 0.20, 0.50], 2),
        ("RC", "LS1", [0.05, 0.20, 0.50, 1.00], 1),
        ("RM", "LS2", [0.02, 0.07, 0.25, 0.60], 2),
        ("RM", "LS1", [0.03, 0.12, 0.42, 0.90], 1),
    ]
    for taxonomy, ls, poes, lsi in ffd_specs:
        models.Ffd(fragility_model=fmodel, taxonomy=taxonomy, ls=ls,
                   poes=poes, lsi=lsi).save()
    return fmodel
def setUpClass(cls):
    """Prepare a job with an exposure model and, on first run, add a
    few extra assets at the collected sites."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    cls.job_ctxt = helpers.create_job({}, job_id=cls.job.id,
                                      oq_job_profile=jp, oq_job=cls.job)
    calc = ClassicalRiskCalculator(cls.job_ctxt)
    calc.store_exposure_assets()
    [exposure_input] = models.inputs4job(cls.job.id,
                                         input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
    # Add some more assets.
    # NOTE(review): the pairs below look like (lon, lat) but the original
    # unpacked them as (lat, lon); values are passed through unchanged.
    coos = [(10.000155392289116, 46.546194318563),
            (10.222034128255, 46.0071299176413),
            (10.520376165581, 46.247463385278)]
    for coo_x, coo_y in coos:
        site = shapes.Site(coo_x, coo_y)
        cls.sites.append(site)
        if assets:
            # Assets exist already; only the sites need collecting.
            continue
        models.ExposureData(
            exposure_model=model, taxonomy="af/ctc-D/LR",
            asset_ref=helpers.random_string(6), stco=coo_x * 2,
            site=geos.GEOSGeometry(site.point.to_wkt()),
            reco=1.1 * coo_y).save()
def test_dda_iml_above_range(self):
    # corner case: the ground motion value (which plays the role of the
    # intensity measure level in the fragility function) is higher than
    # the highest intensity measure level defined in the model (0.7
    # here). In that case the computation of the building fractions
    # falls back to the highest IML defined in the model.
    [ism] = models.inputs4job(self.job.id, input_type="fragility")
    fmodel = models.FragilityModel(
        owner=ism.owner, input=ism, imls=[0.1, 0.3, 0.5, 0.7],
        imt="mmi", lss=["LS1"], format="discrete")
    fmodel.save()
    func = models.Ffd(fragility_model=fmodel, taxonomy="RC", ls="LS1",
                      poes=[0.05, 0.20, 0.50, 1.00], lsi=1)
    func.save()
    # A GMV of 0.8 must yield the same fractions as the clamped 0.7.
    self._close_to(compute_gmv_fractions([func], 0.7),
                   compute_gmv_fractions([func], 0.8))
def test_inputs4job_with_wrong_path(self):
    """No input is returned when the path matches nothing."""
    linked = models.Input(owner=self.job.owner, path=self.paths.next(),
                          input_type="exposure", size=self.sizes.next())
    linked.save()
    models.Input2job(oq_job=self.job, input=linked).save()
    self.assertEqual([], models.inputs4job(self.job.id, path="xyz"))
def setUpClass(cls):
    """Prepare a job with an exposure model and, on first run, add a
    few extra assets at the collected sites."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    cls.job_ctxt = helpers.create_job({}, job_id=cls.job.id,
                                      oq_job_profile=jp, oq_job=cls.job)
    calc = ClassicalRiskCalculator(cls.job_ctxt)
    calc.store_exposure_assets()
    [exposure_input] = models.inputs4job(cls.job.id,
                                         input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
    # Add some more assets.
    # NOTE(review): the pairs below look like (lon, lat) but the original
    # unpacked them as (lat, lon); values are passed through unchanged.
    coos = [(10.000155392289116, 46.546194318563),
            (10.222034128255, 46.0071299176413),
            (10.520376165581, 46.247463385278)]
    for coo_x, coo_y in coos:
        site = shapes.Site(coo_x, coo_y)
        cls.sites.append(site)
        if assets:
            # Assets exist already; only the sites need collecting.
            continue
        models.ExposureData(
            exposure_model=model, taxonomy="af/ctc-D/LR",
            asset_ref=helpers.random_string(6), stco=coo_x * 2,
            site=geos.GEOSGeometry(site.point.to_wkt()),
            reco=1.1 * coo_y).save()
def _store_con_fmodel(self):
    """Store a continuous fragility model with two limit states and
    four fragility functions; return the model."""
    [ism] = models.inputs4job(self.job.id, input_type="fragility")
    fmodel = models.FragilityModel(
        owner=ism.owner, input=ism, lss=["LS1", "LS2"],
        format="continuous")
    fmodel.save()
    # One fragility function per (taxonomy, limit state) pair.
    ffc_specs = [
        ("RC", "LS2", "0.35", "0.10", 2),
        ("RC", "LS1", "0.20", "0.05", 1),
        ("RM", "LS2", "0.40", "0.12", 2),
        ("RM", "LS1", "0.25", "0.08", 1),
    ]
    for taxonomy, ls, mean, stddev, lsi in ffc_specs:
        models.Ffc(fragility_model=fmodel, taxonomy=taxonomy, ls=ls,
                   mean=mean, stddev=stddev, lsi=lsi).save()
    return fmodel
def test_dda_iml_below_range_damage_limit_defined(self):
    # corner case: the ground motion value (which plays the role of the
    # intensity measure level in the fragility function) is lower than
    # both the lowest IML defined in the model (0.1 here) and the
    # model's no_damage_limit attribute. In that case the fractions of
    # buildings are 100% no_damage and 0% for the remaining limit
    # states defined in the model.
    [ism] = models.inputs4job(self.job.id, input_type="fragility")
    fmodel = models.FragilityModel(
        owner=ism.owner, input=ism, imls=[0.1, 0.3, 0.5, 0.7],
        imt="mmi", lss=["LS1"], format="discrete", no_damage_limit=0.05)
    fmodel.save()
    func = models.Ffd(fragility_model=fmodel, taxonomy="RC", ls="LS1",
                      poes=[0.05, 0.20, 0.50, 1.00], lsi=1)
    func.save()
    self._close_to([1.0, 0.0], compute_gmv_fractions([func], 0.02))
def setUpClass(cls):
    """Prepare a job, store the basic exposure model and — if the model
    is new — populate it with three assets."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    calc_proxy = helpers.create_job({}, job_id=cls.job.id,
                                    oq_job_profile=jp, oq_job=cls.job)
    # storing the basic exposure model
    ClassicalRiskCalculator(calc_proxy).store_exposure_assets()
    [exposure_input] = models.inputs4job(cls.job.id,
                                         input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")
    if not assets:
        # This model did not exist in the database before.
        def add_asset(site, ref):
            models.ExposureData(
                exposure_model=model, taxonomy="aa/aatc-D/LR",
                asset_ref=ref, stco=1,
                site=geos.GEOSGeometry(site.point.to_wkt()),
                reco=1).save()

        shared = shapes.Site(1.0, 2.0)
        # more assets at same location
        add_asset(shared, "ASSET_1")
        add_asset(shared, "ASSET_2")
        # just one asset at location
        add_asset(shapes.Site(2.0, 2.0), "ASSET_3")
def _store_em(self):
    """Store an exposure model with two assets at ``self.site`` and
    return it."""
    [ism] = models.inputs4job(self.job.id, input_type="exposure")
    em = models.ExposureModel(
        owner=ism.owner, input=ism, name="AAA", category="single_asset",
        reco_type="aggregated", reco_unit="USD",
        stco_type="aggregated", stco_unit="USD")
    em.save()
    # (taxonomy, asset_ref, number_of_units) for each asset.
    for taxonomy, ref, units in [("RC", "A", 100), ("RM", "B", 40)]:
        models.ExposureData(
            exposure_model=em, taxonomy=taxonomy, asset_ref=ref,
            number_of_units=units, stco=1,
            site=geos.GEOSGeometry(self.site.point.to_wkt()),
            reco=1).save()
    return em
def test_model_content_unknown_content_type(self):
    """A file without a recognisable extension is stored with the
    'unknown' content type."""
    test_file = helpers.touch()
    engine._insert_input_files(
        dict(GMPE_LOGIC_TREE_FILE=test_file), self.job, True)
    [glt] = models.inputs4job(self.job.id, input_type="lt_gmpe")
    self.assertEqual('unknown', glt.model_content.content_type)
def store_exposure_assets(self):
    """Load exposure assets and write them to database."""
    [exposure_input] = models.inputs4job(self.job_ctxt.job_id,
                                         "exposure")
    path = os.path.join(self.job_ctxt.base_path, exposure_input.path)
    parser = exposure.ExposureModelFile(path)
    ExposureDBWriter(exposure_input).serialize(parser)
def _load_exposure_model(cls, job_id):
    """Load and cache the exposure model inputs for *job_id*.

    The query is made only once in the course of a risk calculation;
    subsequent calls for the same job hit the class-level cache.
    """
    cache_is_stale = cls._em_inputs is None or cls._em_job_id != job_id
    if cache_is_stale:
        cls._em_inputs = models.inputs4job(job_id, "exposure")
        cls._em_job_id = job_id
def _fm(oq_job):
    """
    Return the fragility model related to the current computation.
    """
    [fragility_input] = inputs4job(oq_job.id, input_type="fragility")
    [fmodel] = FragilityModel.objects.filter(
        input=fragility_input, owner=oq_job.owner)
    return fmodel
def test_model_content_single_file(self):
    # The contents of input files (such as logic trees, exposure models,
    # etc.) should be saved to the uiapi.model_content table.
    slt_path = os.path.join(self.PARAMS['BASE_PATH'], self.SLT)
    # Read the expected content, closing the handle deterministically
    # (the original leaked it).
    with open(slt_path, 'r') as fh:
        expected_content = fh.read()
    engine._insert_input_files(self.PARAMS, self.job, True)
    [slt] = models.inputs4job(self.job.id, input_type="lt_source")
    self.assertEqual('xml', slt.model_content.content_type)
    self.assertEqual(expected_content, slt.model_content.raw_content)
def setUp(self):
    """Build the LCB exposure model and assets, wire them into the
    expected test data, and create the loss curve writer/reader."""
    path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, "LCB-exposure.yaml")
    self.job = self.setup_classic_job(inputs=[("exposure", path)])
    [exposure_input] = models.inputs4job(self.job.id,
                                         input_type="exposure",
                                         path=path)
    owner = models.OqUser.objects.get(user_name="openquake")
    emdl = exposure_input.model()
    if not emdl:
        emdl = models.ExposureModel(
            owner=owner, input=exposure_input,
            description="LCB exposure model", category="LCB cars",
            stco_unit="peanuts", stco_type="aggregated")
        emdl.save()
    asset_data = [
        (Site(-118.077721, 33.852034),
         {u'stco': 5.07, u'asset_ref': u'a5625',
          u'taxonomy': u'HAZUS_RM1L_LC'}),
        (Site(-118.077721, 33.852034),
         {u'stco': 5.63, u'asset_ref': u'a5629',
          u'taxonomy': u'HAZUS_URML_LC'}),
        (Site(-118.077721, 33.852034),
         {u'stco': 11.26, u'asset_ref': u'a5630',
          u'taxonomy': u'HAZUS_URML_LS'}),
        (Site(-118.077721, 33.852034),
         {u'stco': 5.5, u'asset_ref': u'a5636',
          u'taxonomy': u'HAZUS_C3L_MC'}),
    ]
    for idx, (site, adata) in enumerate(asset_data):
        asset = models.ExposureData(
            exposure_model=emdl,
            site=GEOSGeometry(site.point.to_wkt()), **adata)
        asset.save()
        # Wire the freshly saved asset into the expected test data.
        RISK_LOSS_CURVE_DATA[idx][1][1] = asset
    output_path = self.generate_output_path(self.job)
    self.display_name = os.path.basename(output_path)
    self.writer = LossCurveDBWriter(output_path, self.job.id)
    self.reader = LossCurveDBReader()
def store_exposure_assets(self):
    """Load exposure assets and write them to database.

    Returns the stored model, or ``None`` when the exposure model had
    already been serialized (in which case nothing is done).
    """
    [emi] = models.inputs4job(self.job_ctxt.job_id, "exposure")
    # Already serialized? Nothing to do then.
    if emi.exposuremodel_set.all().count() > 0:
        return
    path = os.path.join(self.job_ctxt.base_path, emi.path)
    parser = exposure.ExposureModelFile(path)
    ExposureDBWriter(emi).serialize(parser)
    return emi.model()
def setUp(self):
    """Serialize the test exposure model and create an epsilon provider
    with default settings."""
    path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, TEST_FILE)
    self.job = self.setup_classic_job(inputs=[("exposure", path)])
    [exposure_input] = models.inputs4job(self.job.id,
                                         input_type="exposure",
                                         path=path)
    writer = ExposureDBWriter(exposure_input)
    writer.serialize(exposure.ExposureModelFile(path))
    self.model = writer.model
    self.epsilon_provider = general.EpsilonProvider(dict())
def setUp(self):
    """Build the LMB exposure model and assets, wire them into the
    expected test data, and create the loss map writer/reader."""
    path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, "LMB-exposure.yaml")
    self.job = self.setup_classic_job(inputs=[("exposure", path)])
    [exposure_input] = models.inputs4job(self.job.id,
                                         input_type="exposure",
                                         path=path)
    owner = models.OqUser.objects.get(user_name="openquake")
    emdl = exposure_input.model()
    if not emdl:
        emdl = models.ExposureModel(
            owner=owner, input=exposure_input,
            description="LMB exposure model", category="LMB yachts",
            stco_unit="oranges", stco_type="aggregated")
        emdl.save()
    asset_data = [
        ("asset_a_1", SITE_A,
         {u'stco': 5.07, u'asset_ref': u'a1711',
          u'taxonomy': u'HAZUS_RM1L_LC'}),
        ("asset_a_2", SITE_A,
         {u'stco': 5.63, u'asset_ref': u'a1712',
          u'taxonomy': u'HAZUS_URML_LC'}),
        ("asset_b_1", SITE_B,
         {u'stco': 5.5, u'asset_ref': u'a1713',
          u'taxonomy': u'HAZUS_C3L_MC'}),
    ]
    for name, site, adata in asset_data:
        asset = models.ExposureData(
            exposure_model=emdl,
            site=GEOSGeometry(site.point.to_wkt()), **adata)
        asset.save()
        # Make each asset reachable as e.g. self.asset_a_1.
        setattr(self, name, asset)
    SAMPLE_NONSCENARIO_LOSS_MAP_DATA[1][1][0][1] = self.asset_a_1
    SAMPLE_NONSCENARIO_LOSS_MAP_DATA[1][1][1][1] = self.asset_a_2
    SAMPLE_NONSCENARIO_LOSS_MAP_DATA[2][1][0][1] = self.asset_b_1
    output_path = self.generate_output_path(self.job)
    self.display_name = os.path.basename(output_path)
    self.writer = LossMapDBWriter(output_path, self.job.id)
    self.reader = LossMapDBReader()
def test_model_content_single_file(self):
    # The contents of input files (such as logic trees, exposure models,
    # etc.) should be saved to the uiapi.model_content table.
    # Read the expected content, closing the handle deterministically
    # (the original leaked it).
    with open(self.SLT, 'r') as fh:
        expected_content = fh.read()
    params = dict(SOURCE_MODEL_LOGIC_TREE_FILE=self.SLT)
    engine._insert_input_files(params, self.job, True)
    [slt] = models.inputs4job(self.job.id, input_type="lt_source")
    self.assertEqual('xml', slt.model_content.content_type)
    self.assertEqual(expected_content, slt.model_content.raw_content)
def test_inputs4job_with_correct_path(self):
    """Only the input with the matching path is returned."""
    def link_input(path, input_type):
        # Create an input with the given path/type and link it.
        inp = models.Input(owner=self.job.owner, path=path,
                           input_type=input_type, size=self.sizes.next())
        inp.save()
        models.Input2job(oq_job=self.job, input=inp).save()
        return inp

    link_input(self.paths.next(), "exposure")
    path = self.paths.next()
    wanted = link_input(path, "rupture")
    self.assertEqual([wanted],
                     models.inputs4job(self.job.id, path=path))
def setUp(self):
    """Set up a 2x2 grid of exposure assets used by the grid iteration
    and per-site loss tests."""
    self.job_ctxt = helpers.job_from_file(
        os.path.join(helpers.DATA_DIR, 'config.gem'))
    [exposure_input] = models.inputs4job(self.job_ctxt.job_id,
                                         input_type="exposure")
    owner = models.OqUser.objects.get(user_name="openquake")
    emdl = exposure_input.model()
    if not emdl:
        emdl = models.ExposureModel(
            owner=owner, input=exposure_input,
            description="RCT exposure model", category="RCT villas",
            stco_unit="roofs", stco_type="aggregated")
        emdl.save()
    asset_data = [
        ((0, 0), shapes.Site(10.0, 10.0),
         {u'stco': 5.07, u'asset_ref': u'a5625',
          u'taxonomy': u'rctc-ad-83'}),
        ((0, 1), shapes.Site(10.1, 10.0),
         {u'stco': 5.63, u'asset_ref': u'a5629',
          u'taxonomy': u'rctc-ad-83'}),
        ((1, 0), shapes.Site(10.0, 10.1),
         {u'stco': 11.26, u'asset_ref': u'a5630',
          u'taxonomy': u'rctc-ad-83'}),
        ((1, 1), shapes.Site(10.1, 10.1),
         {u'stco': 5.5, u'asset_ref': u'a5636',
          u'taxonomy': u'rctc-ad-83'}),
    ]
    assets = emdl.exposuredata_set.filter(
        taxonomy="rctc-ad-83").order_by("id")
    for idx, (gcoo, site, adata) in enumerate(asset_data):
        if not assets:
            # First run: create the asset rows.
            asset = models.ExposureData(
                exposure_model=emdl,
                site=geos.GEOSGeometry(site.point.to_wkt()), **adata)
            asset.save()
        else:
            # Reuse the rows created by a previous run.
            asset = assets[idx]
        GRID_ASSETS[gcoo] = asset
    self.grid = shapes.Grid(shapes.Region.from_coordinates(
        [(10.0, 10.0), (10.0, 10.1), (10.1, 10.1), (10.1, 10.0)]), 0.1)
    # this is the expected output of grid_assets_iterator and an input of
    # asset_losses_per_site
    self.grid_assets = [
        (shapes.GridPoint(self.grid, 0, 0), GRID_ASSETS[(0, 0)]),
        (shapes.GridPoint(self.grid, 1, 0), GRID_ASSETS[(0, 1)]),
        (shapes.GridPoint(self.grid, 0, 1), GRID_ASSETS[(1, 0)]),
        (shapes.GridPoint(self.grid, 1, 1), GRID_ASSETS[(1, 1)])]
def setUp(self):
    """Serialize the test exposure model and create an epsilon provider
    with a fixed random seed."""
    path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, TEST_FILE)
    self.job = self.setup_classic_job(inputs=[("exposure", path)])
    [exposure_input] = models.inputs4job(self.job.id,
                                         input_type="exposure",
                                         path=path)
    writer = ExposureDBWriter(exposure_input)
    writer.serialize(exposure.ExposureModelFile(path))
    self.model = writer.model
    self.epsilon_provider = general.EpsilonProvider(
        dict(EPSILON_RANDOM_SEED=37))
def store_fragility_model(self):
    """Load fragility model and write it to database.

    Returns the list of newly stored models, or ``None`` when every
    fragility input had been serialized already.
    """
    new_models = []
    for fmi in models.inputs4job(self.job_ctxt.job_id, "fragility"):
        # Skip inputs whose fragility model is already in the db.
        if fmi.fragilitymodel_set.all().count() > 0:
            continue
        path = os.path.join(self.job_ctxt.base_path, fmi.path)
        parser = fragility.FragilityModelParser(path)
        writer = FragilityDBWriter(fmi, parser)
        writer.serialize()
        new_models.append(writer.model)
    return new_models or None
def test_model_content_detect_content_type(self):
    # Test detection of the content type (using the file extension).
    # We use the gmpe logic tree as our test target because there is no
    # parsing required in the function under test. Thus, we can put
    # whatever test garbage we want in the file, or just use an empty
    # file (which is the case here).
    test_file = helpers.touch(suffix=".html")
    engine._insert_input_files(
        dict(GMPE_LOGIC_TREE_FILE=test_file), self.job, True)
    [glt] = models.inputs4job(self.job.id, input_type="lt_gmpe")
    self.assertEqual('html', glt.model_content.content_type)
def test_compute_bcr_in_the_classical_psha_calculator(self):
    # End-to-end check of the benefit/cost ratio (BCR) computation
    # performed by the classical PSHA risk calculator.
    self._compute_risk_classical_psha_setup()
    helpers.delete_profile(self.job)
    bcr_config = helpers.demo_file('benefit_cost_ratio/config.gem')
    job_profile, params, sections = engine.import_job_profile(
        bcr_config, self.job)

    # We need to adjust a few of the parameters for this test:
    job_profile.imls = [
        0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376, 0.0527,
        0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556, 0.778]
    params['ASSET_LIFE_EXPECTANCY'] = '50'
    job_profile.asset_life_expectancy = 50
    params['REGION_VERTEX'] = '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'
    job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
        params['REGION_VERTEX']))
    job_profile.save()

    job_ctxt = engine.JobContext(
        params, self.job_id, sections=sections,
        oq_job_profile=job_profile)
    calculator = classical_core.ClassicalRiskCalculator(job_ctxt)

    # Make sure an exposure model with the "rubcr" asset exists; create
    # both on first run, reuse them otherwise.
    [input] = models.inputs4job(self.job.id, input_type="exposure")
    emdl = input.model()
    if not emdl:
        emdl = models.ExposureModel(
            owner=self.job.owner, input=input,
            description="c-psha test exposure model",
            category="c-psha power plants", stco_unit="watt",
            stco_type="aggregated", reco_unit="joule",
            reco_type="aggregated")
        emdl.save()
    assets = emdl.exposuredata_set.filter(asset_ref="rubcr")
    if not assets:
        asset = models.ExposureData(
            exposure_model=emdl, taxonomy="ID", asset_ref="rubcr",
            stco=1, reco=123.45, site=GEOSGeometry("POINT(1.0 1.0)"))
        asset.save()

    Block.from_kvs(self.job_id, self.block_id)
    calculator.compute_risk(self.block_id)

    # The BCR results are written to the KVS; fetch and compare them
    # against the expected values.
    result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
    res = kvs.get_value_json_decoded(result_key)
    expected_result = {'bcr': 0.0, 'eal_original': 0.003032,
                       'eal_retrofitted': 0.003032}
    helpers.assertDeepAlmostEqual(
        self, res, [[[1, 1], [[expected_result, "rubcr"]]]])
def setUpClass(cls):
    """Create a job with an exposure input and make sure an exposure
    model exists for it."""
    path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, "SEB-exposure.yaml")
    cls.job = cls.setup_classic_job(inputs=[("exposure", path)])
    [exposure_input] = models.inputs4job(cls.job.id,
                                         input_type="exposure",
                                         path=path)
    owner = models.OqUser.objects.get(user_name="openquake")
    cls.emdl = exposure_input.model()
    if not cls.emdl:
        cls.emdl = models.ExposureModel(
            owner=owner, input=exposure_input,
            description="SEB exposure model",
            category="SEB factory buildings", stco_unit="screws",
            stco_type="aggregated")
        cls.emdl.save()
def setUpClass(cls):
    """Create a job with an exposure input and make sure an exposure
    model exists for it."""
    path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, "SEB-exposure.yaml")
    cls.job = cls.setup_classic_job(inputs=[("exposure", path)])
    [exposure_input] = models.inputs4job(cls.job.id,
                                         input_type="exposure",
                                         path=path)
    owner = models.OqUser.objects.get(user_name="openquake")
    cls.emdl = exposure_input.model()
    if not cls.emdl:
        cls.emdl = models.ExposureModel(
            owner=owner, input=exposure_input,
            description="SEB exposure model",
            category="SEB factory buildings", stco_unit="screws",
            stco_type="aggregated")
        cls.emdl.save()
def test_inputs4job_with_correct_input_type(self):
    """Only the two exposure inputs are returned."""
    def link_input(input_type):
        # Create an input of the given type and link it to the job.
        inp = models.Input(owner=self.job.owner, path=self.paths.next(),
                           input_type=input_type, size=self.sizes.next())
        inp.save()
        models.Input2job(oq_job=self.job, input=inp).save()
        return inp

    inp1 = link_input("exposure")
    link_input("rupture")
    inp3 = link_input("exposure")
    # Sort by id so the comparison does not depend on query ordering.
    actual = sorted(models.inputs4job(self.job.id,
                                      input_type="exposure"),
                    key=lambda inp: inp.id)
    self.assertEqual([inp1, inp3], actual)
def read_sites_from_exposure(job_ctxt):
    """
    Given a :class:`JobContext` object, get all of the sites in the
    exposure model which are contained by the region of interest
    (defined in the `JobContext`).

    It is assumed that exposure model is already loaded into the
    database.

    :param job_ctxt:
        :class:`JobContext` instance.
    :returns:
        `list` of :class:`openquake.shapes.Site` objects, with no
        duplicates
    """
    exposure_inputs = models.inputs4job(job_ctxt.job_id,
                                        input_type="exposure")
    input_ids = [inp.id for inp in exposure_inputs]
    # Distinct sites within the region, drawn from all exposure inputs.
    exp_points = models.ExposureData.objects.filter(
        exposure_model__input__id__in=input_ids,
        site__contained=job_ctxt.oq_job_profile.region).values(
        'site').distinct()
    return [shapes.Site(p['site'].x, p['site'].y) for p in exp_points]
def setUpClass(cls):
    """Prepare a job, store the basic exposure model and — if the model
    is new — populate it with three assets."""
    cls.job = engine.prepare_job()
    jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
    calc_proxy = helpers.create_job({}, job_id=cls.job.id,
                                    oq_job_profile=jp, oq_job=cls.job)
    # storing the basic exposure model
    ClassicalRiskCalculator(calc_proxy).store_exposure_assets()
    [exposure_input] = models.inputs4job(cls.job.id,
                                         input_type="exposure")
    model = exposure_input.model()
    assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")
    if not assets:
        # This model did not exist in the database before.
        def add_asset(site, ref):
            models.ExposureData(
                exposure_model=model, taxonomy="aa/aatc-D/LR",
                asset_ref=ref, stco=1,
                site=geos.GEOSGeometry(site.point.to_wkt()),
                reco=1).save()

        shared = shapes.Site(1.0, 2.0)
        # more assets at same location
        add_asset(shared, "ASSET_1")
        add_asset(shared, "ASSET_2")
        # just one asset at location
        add_asset(shapes.Site(2.0, 2.0), "ASSET_3")
def test_inputs4job_with_correct_input_type(self):
    """Only the two exposure inputs are returned."""
    inp1 = models.Input(owner=self.job.owner, path=self.paths.next(),
                        input_type="exposure", size=self.sizes.next())
    inp1.save()
    models.Input2job(oq_job=self.job, input=inp1).save()
    inp2 = models.Input(owner=self.job.owner, path=self.paths.next(),
                        input_type="rupture", size=self.sizes.next())
    inp2.save()
    models.Input2job(oq_job=self.job, input=inp2).save()
    inp3 = models.Input(owner=self.job.owner, path=self.paths.next(),
                        input_type="exposure", size=self.sizes.next())
    inp3.save()
    models.Input2job(oq_job=self.job, input=inp3).save()
    # Sort the result by id: inputs4job() makes no ordering guarantee,
    # so comparing the raw list to [inp1, inp3] would be flaky (the
    # sibling variant of this test already sorts).
    actual = sorted(models.inputs4job(self.job.id,
                                      input_type="exposure"),
                    key=lambda inp: inp.id)
    self.assertEqual([inp1, inp3], actual)
def get_site_model(job_id):
    """Get the site model :class:`~openquake.db.models.Input` record
    for the given job id.

    :param int job_id:
        ID of a job.
    :returns:
        The site model :class:`~openquake.db.models.Input` record for
        this job, or ``None`` if the job has no site model.
    :raises:
        :exc:`RuntimeError` if the job has more than 1 site model.
    """
    site_model = models.inputs4job(job_id, input_type='site_model')
    if not site_model:
        # No site model was specified for this job.
        return None
    if len(site_model) > 1:
        # Multiple site models for 1 job are not allowed.
        raise RuntimeError("Only 1 site model per job is allowed, found %s."
                           % len(site_model))
    # There's only one site model.
    return site_model[0]
def teardown_job(cls, job, filesystem_only=True):
    """
    Tear down the file system (and potentially db) artefacts for the
    given job.

    :param job: a :py:class:`db.models.OqJob` instance
    :param bool filesystem_only: if set the oq_job/oq_param/upload/
        input database records will be left intact. This saves time and
        the test db will be dropped/recreated prior to the next db test
        suite run anyway.
    """
    DbTestCase.teardown_inputs(models.inputs4job(job.id),
                               filesystem_only=filesystem_only)
    if filesystem_only:
        return
    job.delete()
    try:
        models.profile4job(job.id).delete()
    except ValueError:
        # no job profile for this job
        pass
def test_initialize_stores_site_model(self):
    """``initialize()`` populates the site_model table."""
    job_ctxt = helpers.prepare_job_context(
        helpers.demo_file(
            'simple_fault_demo_hazard/config_with_site_model.gem'))
    calc = general.BaseHazardCalculator(job_ctxt)
    [site_model_input] = models.inputs4job(
        job_ctxt.oq_job.id, input_type='site_model')

    def stored_nodes():
        # Site model records tied to this job's site model input.
        return models.SiteModel.objects.filter(input=site_model_input)

    # Test precondition: The site_model table shouldn't be populated
    # yet.
    self.assertEqual(0, len(stored_nodes()))
    calc.initialize()
    # Now it should be populated. It would be overkill to test the
    # contents; just check that the number of records is correct.
    self.assertEqual(2601, len(stored_nodes()))