Example #1
    @classmethod
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)

        cls.job_ctxt = helpers.create_job({},
                                          job_id=cls.job.id,
                                          oq_job_profile=jp,
                                          oq_job=cls.job)
        calc = ClassicalRiskCalculator(cls.job_ctxt)

        calc.store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
        # Add some more assets.
        coos = [(10.000155392289116, 46.546194318563),
                (10.222034128255, 46.0071299176413),
                (10.520376165581, 46.247463385278)]
        cls.sites = []
        for lat, lon in coos:
            site = shapes.Site(lat, lon)
            cls.sites.append(site)
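            # The site is always recorded; new assets are only created
            # when the model does not already contain this taxonomy.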
            if assets:
                continue
            location = geos.GEOSGeometry(site.point.to_wkt())
            asset = models.ExposureData(exposure_model=model,
                                        taxonomy="af/ctc-D/LR",
                                        asset_ref=helpers.random_string(6),
                                        stco=lat * 2,
                                        site=location,
                                        reco=1.1 * lon)
            asset.save()
Example #2
    @classmethod
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)

        cls.job_ctxt = helpers.create_job({}, job_id=cls.job.id,
                                          oq_job_profile=jp, oq_job=cls.job)
        calc = ClassicalRiskCalculator(cls.job_ctxt)

        calc.store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
        # Add some more assets.
        coos = [(10.000155392289116, 46.546194318563),
                (10.222034128255, 46.0071299176413),
                (10.520376165581, 46.247463385278)]
        cls.sites = []
        for lat, lon in coos:
            site = shapes.Site(lat, lon)
            cls.sites.append(site)
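            # The site is always recorded; new assets are only created
            # when the model does not already contain this taxonomy.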
            if assets:
                continue
            location = geos.GEOSGeometry(site.point.to_wkt())
            asset = models.ExposureData(
                exposure_model=model, taxonomy="af/ctc-D/LR",
                asset_ref=helpers.random_string(6), stco=lat * 2,
                site=location, reco=1.1 * lon)
            asset.save()
Example #3
    def test__serialize_xml_filenames(self):
        # Test that the file names of the loss XML artifacts are correct.
        # See https://bugs.launchpad.net/openquake/+bug/894706.
        expected_lrc_file_name = (
            'losscurves-block-#%(job_id)s-block#%(block)s.xml')
        expected_lr_file_name = (
            'losscurves-loss-block-#%(job_id)s-block#%(block)s.xml')

        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'], oq_job_profile=job_profile,
            oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        with helpers.patch('openquake.writer.FileWriter.serialize'):
            # The 'curves' key in the kwargs just needs to be present;
            # because of the serialize mock in place above, it doesn't need
            # to have a real value.

            # First, we test loss ratio curve output,
            # then we'll do the same test for loss curve output.

            # We expect to get a single file path back.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss_ratio', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lrc_file_name % dict(job_id=job.id,
                                              block=0),
                file_name)

            # The same test again, except for loss curves this time.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lr_file_name % dict(job_id=job.id,
                                             block=0),
                file_name)
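For reference, the expected file names are plain Python `%`-style templates; a quick standalone illustration of how one expands (hypothetical job id, not part of the test):

# Illustrative only: expanding the expected file name template.
template = 'losscurves-block-#%(job_id)s-block#%(block)s.xml'
print(template % dict(job_id=7, block=0))
# -> losscurves-block-#7-block#0.xml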
Example #4
    def test_read_curve(self):
        """Verify _get_db_curve."""
        the_job = helpers.create_job({}, job_id=self.job.id)
        calculator = ClassicalRiskCalculator(the_job)

        curve1 = calculator._get_db_curve(Site(-122.2, 37.5))
        self.assertEqual(list(curve1.abscissae),
                         [0.005, 0.007, 0.0098, 0.0137])
        self.assertEqual(list(curve1.ordinates), [0.354, 0.114, 0.023, 0.002])

        curve2 = calculator._get_db_curve(Site(-122.1, 37.5))
        self.assertEqual(list(curve2.abscissae),
                         [0.005, 0.007, 0.0098, 0.0137])
        self.assertEqual(list(curve2.ordinates), [0.454, 0.214, 0.123, 0.102])
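The assertions above assume `_get_db_curve` returns a curve object exposing `abscissae` and `ordinates` sequences. A minimal stand-in for reading the test (a hypothetical sketch, not OpenQuake's actual curve class):

class CurveSketch(object):
    # Hypothetical: mirrors only the attributes the test touches.
    def __init__(self, pairs):
        self.abscissae, self.ordinates = zip(*pairs)

curve = CurveSketch([(0.005, 0.354), (0.007, 0.114)])
assert list(curve.abscissae) == [0.005, 0.007]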
Example #5
    def test__serialize_xml_filenames(self):
        # Test that the file names of the loss XML artifacts are correct.
        # See https://bugs.launchpad.net/openquake/+bug/894706.
        expected_lrc_file_name = (
            'losscurves-block-#%(job_id)s-block#%(block)s.xml')
        expected_lr_file_name = (
            'losscurves-loss-block-#%(job_id)s-block#%(block)s.xml')

        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     sections=sections,
                                     serialize_results_to=['xml', 'db'],
                                     oq_job_profile=job_profile,
                                     oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        with helpers.patch('openquake.writer.FileWriter.serialize'):
            # The 'curves' key in the kwargs just needs to be present;
            # because of the serialize mock in place above, it doesn't need
            # to have a real value.

            # First, we test loss ratio curve output,
            # then we'll do the same test for loss curve output.

            # We expect to get a single file path back.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss_ratio', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lrc_file_name % dict(job_id=job.id, block=0),
                file_name)

            # The same test again, except for loss curves this time.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lr_file_name % dict(job_id=job.id, block=0),
                file_name)
Example #6
    def test_read_curve(self):
        """Verify _get_db_curve."""
        the_job = helpers.create_job({}, job_id=self.job.id)
        calculator = ClassicalRiskCalculator(the_job)

        curve1 = calculator._get_db_curve(Site(-122.2, 37.5))
        self.assertEqual(curve1,
                         zip([0.005, 0.007, 0.0098, 0.0137],
                             [0.354, 0.114, 0.023, 0.002]))

        curve2 = calculator._get_db_curve(Site(-122.1, 37.5))
        self.assertEqual(curve2,
                         zip([0.005, 0.007, 0.0098, 0.0137],
                             [0.454, 0.214, 0.123, 0.102]))
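Note that this variant compares the curve directly against `zip(...)`, which only behaves as intended under Python 2, where `zip` returns a list of pairs:

# Python 2: zip() yields a list of (abscissa, ordinate) tuples, so an
# equality check against a pair sequence can succeed.
pairs = zip([0.005, 0.007], [0.354, 0.114])
# -> [(0.005, 0.354), (0.007, 0.114)]
# Under Python 3, zip() returns an iterator and the comparison would
# need list(zip(...)) instead.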
Example #7
    def test_write_output(self):
        # Test that the loss map writers are properly called when
        # write_output is invoked.
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        # Set conditional loss poe so that loss maps are created.
        # If this parameter is not specified, no loss maps will be serialized
        # at the end of the job.
        params['CONDITIONAL_LOSS_POE'] = '0.01'
        job_profile.conditional_loss_poe = [0.01]
        job_profile.save()

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     sections=sections,
                                     serialize_results_to=['xml', 'db'],
                                     oq_job_profile=job_profile,
                                     oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        # Mock the composed loss map serializer:
        with helpers.patch('openquake.writer.CompositeWriter'
                           '.serialize') as writer_mock:
            calculator.write_output()

            self.assertEqual(1, writer_mock.call_count)

            # Now test that the composite writer got the correct
            # 'serialize to' instructions. The composite writer should have
            # 1 DB and 1 XML loss map serializer:
            composite_writer = writer_mock.call_args[0][0]
            writers = composite_writer.writers

            self.assertEqual(2, len(writers))
            # We don't assume anything about the order of the writers,
            # and we don't care anyway in this test:
            self.assertTrue(
                any(isinstance(w, LossMapDBWriter) for w in writers))
            self.assertTrue(
                any(
                    isinstance(w, LossMapNonScenarioXMLWriter)
                    for w in writers))
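The test pulls `composite_writer.writers` off the mock's call args, i.e. it assumes `CompositeWriter` keeps its child writers in a `writers` attribute and fans `serialize` out to each of them. A minimal sketch of that pattern (an assumption about the API's shape, not OpenQuake's actual implementation):

class CompositeWriterSketch(object):
    # Assumed shape: hold the child writers, delegate serialize() to each.
    def __init__(self, *writers):
        self.writers = list(writers)

    def serialize(self, data):
        for writer in self.writers:
            writer.serialize(data)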
Example #8
    def test__serialize_xml_filenames(self):
        # Test that the file names of the loss XML artifacts are correct.
        # See https://bugs.launchpad.net/openquake/+bug/894706.
        expected_lrc_file_name = "losscurves-block-#%(calculation_id)s-block#%(block)s.xml"
        expected_lr_file_name = "losscurves-loss-block-#%(calculation_id)s-block#%(block)s.xml"

        cfg_file = demo_file("classical_psha_based_risk/config.gem")

        job_profile, params, sections = import_job_profile(cfg_file)

        calculation = OqCalculation(owner=job_profile.owner, oq_job_profile=job_profile)
        calculation.save()

        calc_proxy = CalculationProxy(
            params,
            calculation.id,
            sections=sections,
            serialize_results_to=["xml", "db"],
            oq_job_profile=job_profile,
            oq_calculation=calculation,
        )

        calculator = ClassicalRiskCalculator(calc_proxy)

        with patch("openquake.writer.FileWriter.serialize"):
            # The 'curves' key in the kwargs just needs to be present;
            # because of the serialize mock in place above, it doesn't need
            # to have a real value.

            # First, we test loss ratio curve output,
            # then we'll do the same test for loss curve output.

            # We expect to get a single file path back.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode="loss_ratio", curves=[])
            )

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lrc_file_name % dict(calculation_id=calculation.id, block=0),
                file_name,
            )

            # The same test again, except for loss curves this time.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode="loss", curves=[])
            )

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lr_file_name % dict(calculation_id=calculation.id, block=0),
                file_name,
            )
Example #9
    def test_write_output(self):
        # Test that the loss map writers are properly called when
        # write_output is invoked.
        cfg_file = demo_file("classical_psha_based_risk/config.gem")

        job_profile, params, sections = import_job_profile(cfg_file)

        # Set conditional loss poe so that loss maps are created.
        # If this parameter is not specified, no loss maps will be serialized
        # at the end of the calculation.
        params["CONDITIONAL_LOSS_POE"] = "0.01"
        job_profile.conditional_loss_poe = [0.01]
        job_profile.save()

        calculation = OqCalculation(owner=job_profile.owner, oq_job_profile=job_profile)
        calculation.save()

        calc_proxy = CalculationProxy(
            params,
            calculation.id,
            sections=sections,
            serialize_results_to=["xml", "db"],
            oq_job_profile=job_profile,
            oq_calculation=calculation,
        )

        calculator = ClassicalRiskCalculator(calc_proxy)

        # Mock the composed loss map serializer:
        with patch("openquake.writer.CompositeWriter" ".serialize") as writer_mock:
            calculator.write_output()

            self.assertEqual(1, writer_mock.call_count)

            # Now test that the composite writer got the correct
            # 'serialize to' instructions. The composite writer should have
            # 1 DB and 1 XML loss map serializer:
            composite_writer = writer_mock.call_args[0][0]
            writers = composite_writer.writers

            self.assertEqual(2, len(writers))
            # We don't assume anything about the order of the writers,
            # and we don't care anyway in this test:
            self.assertTrue(any(isinstance(w, LossMapDBWriter) for w in writers))
            self.assertTrue(any(isinstance(w, LossMapNonScenarioXMLWriter) for w in writers))
Example #10
    def test_write_output(self):
        # Test that the loss map writers are properly called when
        # write_output is invoked.
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        # Set conditional loss poe so that loss maps are created.
        # If this parameter is not specified, no loss maps will be serialized
        # at the end of the job.
        params['CONDITIONAL_LOSS_POE'] = '0.01'
        job_profile.conditional_loss_poe = [0.01]
        job_profile.save()

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'], oq_job_profile=job_profile,
            oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        # Mock the composed loss map serializer:
        with helpers.patch('openquake.writer.CompositeWriter'
                           '.serialize') as writer_mock:
            calculator.write_output()

            self.assertEqual(1, writer_mock.call_count)

            # Now test that the composite writer got the correct
            # 'serialize to' instructions. The composite writer should have
            # 1 DB and 1 XML loss map serializer:
            composite_writer = writer_mock.call_args[0][0]
            writers = composite_writer.writers

            self.assertEqual(2, len(writers))
            # We don't assume anything about the order of the writers,
            # and we don't care anyway in this test:
            self.assertTrue(any(
                isinstance(w, LossMapDBWriter) for w in writers))
            self.assertTrue(any(
                isinstance(w, LossMapNonScenarioXMLWriter) for w in writers))
Example #11
    @classmethod
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
        calc_proxy = helpers.create_job({},
                                        job_id=cls.job.id,
                                        oq_job_profile=jp,
                                        oq_job=cls.job)

        # storing the basic exposure model
        ClassicalRiskCalculator(calc_proxy).store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")

        if not assets:
            # This model did not exist in the database before.
            site = shapes.Site(1.0, 2.0)
            # more assets at same location
            models.ExposureData(exposure_model=model,
                                taxonomy="aa/aatc-D/LR",
                                asset_ref="ASSET_1",
                                stco=1,
                                site=geos.GEOSGeometry(site.point.to_wkt()),
                                reco=1).save()

            models.ExposureData(exposure_model=model,
                                taxonomy="aa/aatc-D/LR",
                                asset_ref="ASSET_2",
                                stco=1,
                                site=geos.GEOSGeometry(site.point.to_wkt()),
                                reco=1).save()

            site = shapes.Site(2.0, 2.0)
            # just one asset at location
            models.ExposureData(exposure_model=model,
                                taxonomy="aa/aatc-D/LR",
                                asset_ref="ASSET_3",
                                stco=1,
                                site=geos.GEOSGeometry(site.point.to_wkt()),
                                reco=1).save()
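Given this fixture (and assuming the model started without assets of this taxonomy), the exposure model ends up with two assets at Site(1.0, 2.0) and one at Site(2.0, 2.0). A hypothetical follow-up test could simply check the totals (`test_asset_counts` is a sketch, not part of the original suite):

    def test_asset_counts(self):
        [input] = models.inputs4job(self.job.id, input_type="exposure")
        assets = input.model().exposuredata_set.filter(
            taxonomy="aa/aatc-D/LR")
        # Two assets at (1.0, 2.0) plus one at (2.0, 2.0).
        self.assertEqual(3, assets.count())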