Example #1
    def test_export_for_scenario(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.demo_file('scenario_hazard/job.ini')

            # run the calculation to create something to export
            retcode = helpers.run_hazard_job_sp(cfg, silence=True)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)

            self.assertEqual(1, len(outputs))  # 1 GMF

            gmf_outputs = outputs.filter(output_type='gmf_scenario')
            self.assertEqual(1, len(gmf_outputs))

            exported_files = hazard.export(gmf_outputs[0].id, target_dir)

            self.assertEqual(1, len(exported_files))
            # Check the file path exists, is absolute, and the file isn't
            # empty.
            f = exported_files[0]
            self._test_exported_file(f)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(f)
            self.assertEqual(10, number_of('nrml:gmf', tree))
        finally:
            shutil.rmtree(target_dir)
Example #2
    def export(self, *args, **kwargs):
        """
        If requested by the user, automatically export all result artifacts to
        the specified format. (NOTE: The only export format supported at the
        moment is NRML XML.)

        :returns:
            A list of the export filenames, including the absolute path to each
            file.
        """
        exported_files = []

        logs.LOG.debug('> starting exports')
        if 'exports' in kwargs and 'xml' in kwargs['exports']:
            outputs = export_core.get_outputs(self.job.id)

            for output in outputs:
                exported_files.extend(hazard_export.export(
                    output.id, self.job.hazard_calculation.export_dir))

            for exp_file in exported_files:
                logs.LOG.debug('exported %s' % exp_file)
        logs.LOG.debug('< done with exports')

        return exported_files
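
The `kwargs['exports']` gate in this method means it is a no-op unless the caller explicitly asks for XML output. A minimal usage sketch, assuming `calc` is a calculator instance exposing the `export` method above with its job and export directory already configured (the name `calc` is illustrative, not part of the engine code):

    # Hypothetical driver code for the export() method shown above.
    exported = calc.export(exports=['xml'])  # takes the XML export branch
    for path in exported:
        print(path)                          # each path is absolute

    # Without an 'xml' entry the method only logs the start/end debug
    # markers and returns an empty list:
    assert calc.export(exports=[]) == []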
Example #3
    def test_export_for_scenario(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('scenario_hazard/job.ini')

            # run the calculation in process to create something to export
            os.environ['OQ_NO_DISTRIBUTE'] = '1'
            try:
                helpers.run_hazard_job(cfg)
            finally:
                del os.environ['OQ_NO_DISTRIBUTE']
            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = export_core.get_outputs(job.id)

            self.assertEqual(1, len(outputs))  # 1 GMF

            gmf_outputs = outputs.filter(output_type='gmf_scenario')
            self.assertEqual(1, len(gmf_outputs))

            exported_files = check_export(gmf_outputs[0].id, target_dir)

            self.assertEqual(1, len(exported_files))
            # Check the file path exists, is absolute, and the file isn't
            # empty.
            f = exported_files[0]
            self._test_exported_file(f)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(f)
            self.assertEqual(20, number_of('nrml:gmf', tree))
        finally:
            shutil.rmtree(target_dir)
Example #4
    def test_export_for_scenario(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('scenario_hazard/job.ini')
            # run the calculation in process to create something to export
            with mock.patch.dict(os.environ, {'OQ_NO_DISTRIBUTE': '1'}):
                helpers.run_job(cfg)
            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = core.get_outputs(job.id)

            self.assertEqual(2, len(outputs))  # 1 GMF, 1 SES

            gmf_outputs = outputs.filter(output_type='gmf_scenario')
            self.assertEqual(1, len(gmf_outputs))

            exported_file = check_export(gmf_outputs[0].id, target_dir)

            # Check the file path exists, is absolute, and the file isn't
            # empty.
            self._test_exported_file(exported_file)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(exported_file)
            self.assertEqual(20, number_of('nrml:gmf', tree))
        finally:
            shutil.rmtree(target_dir)
Example #5
    def test_bcr_risk_export(self):
        # Tests that outputs of a risk classical calculation are
        # exported

        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.demo_file('classical_bcr/job.ini')

            # run the calculation to create something to export
            retcode = helpers.run_risk_job_sp(cfg, self.hazard_id,
                                              silence=True)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)
            expected_outputs = 1  # 1 bcr distribution
            self.assertEqual(expected_outputs, len(outputs))

            # Export the BCR distribution:
            distribution = outputs.filter(output_type='bcr_distribution')[0]
            rc_files = risk.export(distribution.id, target_dir)

            self.assertEqual(1, len(rc_files))

            for f in rc_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #6
    def test_export_for_scenario(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('scenario_hazard/job.ini')
            # run the calculation in process to create something to export
            with mock.patch.dict(os.environ, {'OQ_NO_DISTRIBUTE': '1'}):
                helpers.run_job(cfg)
            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = core.get_outputs(job.id)

            gmf_outputs = outputs.filter(ds_key='gmfs')
            self.assertEqual(1, len(gmf_outputs))

            exported_file = check_export(gmf_outputs[0].id, target_dir)

            # Check the file path exists, is absolute, and the file isn't
            # empty.
            self._test_exported_file(exported_file)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(exported_file)
            self.assertEqual(20, number_of('nrml:gmf', tree))
        finally:
            shutil.rmtree(target_dir)
Example #7
    def export(self, *args, **kwargs):
        """
        If requested by the user, automatically export all result artifacts to
        the specified format. (NOTE: The only export format supported at the
        moment is NRML XML.)

        :returns:
            A list of the export filenames, including the absolute path to each
            file.
        """
        exported_files = []

        logs.LOG.debug('> starting exports')
        if 'exports' in kwargs and 'xml' in kwargs['exports']:
            outputs = export_core.get_outputs(self.job.id)

            if not self.hc.export_multi_curves:
                outputs = outputs.exclude(output_type='hazard_curve_multi')

            for output in outputs:
                with self.monitor('exporting %s' % output.output_type):
                    fname = hazard_export.export(
                        output.id, self.job.hazard_calculation.export_dir)
                    exported_files.extend(fname)
                    logs.LOG.debug('exported %s' % fname)

        logs.LOG.debug('< done with exports')

        return exported_files
Example #8
    def _get_outputs_for_export(self):
        """
        Utility function for getting :class:`openquake.engine.db.models.Output`
        objects to be exported.

        Gathers all outputs for the job, but filters out `hazard_curve_multi`
        outputs if this option was turned off in the calculation profile.
        """
        outputs = export_core.get_outputs(self.job.id)
        if not self.hc.export_multi_curves:
            outputs = outputs.exclude(output_type='hazard_curve_multi')
        return outputs
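
This helper factors the output query out of the export loop shown in Example #7 above. A sketch of how the two pieces might fit together, assuming the surrounding calculator class also has `self.job` and that `hazard_export.export` behaves as in Example #7 (this caller is illustrative, not the engine's actual code):

    def export(self, *args, **kwargs):
        # Hypothetical caller: reuse the helper instead of filtering inline.
        exported_files = []
        if 'exports' in kwargs and 'xml' in kwargs['exports']:
            for output in self._get_outputs_for_export():
                exported_files.extend(hazard_export.export(
                    output.id, self.job.hazard_calculation.export_dir))
        return exported_files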
Example #9
    def _get_outputs_for_export(self):
        """
        Utility function for getting :class:`openquake.engine.db.models.Output`
        objects to be exported.

        Gathers all outputs for the job, but filters out `hazard_curve_multi`
        outputs if this option was turned off in the calculation profile.
        """
        outputs = export_core.get_outputs(self.job.id)
        if not self.hc.export_multi_curves:
            outputs = outputs.exclude(output_type='hazard_curve_multi')
        return outputs
Example #10
    def test_classical_hazard_export(self):
        # Run a hazard calculation to compute some curves and maps
        # Call the exporter and verify that files were created
        # Since the hazard curve XML writer is concerned with correctly
        # generating XML, we won't test that here.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.demo_file('simple_fault_demo_hazard/job.ini')

            # run the calculation to create something to export
            retcode = helpers.run_hazard_job_sp(cfg, silence=True)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)
            expected_outputs = 18  # 6 hazard curves + 12 hazard maps
            self.assertEqual(expected_outputs, len(outputs))

            # Export the hazard curves:
            curves = outputs.filter(output_type='hazard_curve')
            hc_files = []
            for curve in curves:
                hc_files.extend(hazard.export(curve.id, target_dir))

            self.assertEqual(6, len(hc_files))

            for f in hc_files:
                self._test_exported_file(f)

            # Test hazard map export as well.
            maps = outputs.filter(output_type='hazard_map')
            hm_files = []
            for haz_map in maps:
                hm_files.extend(hazard.export(haz_map.id, target_dir))

            self.assertEqual(12, len(hm_files))

            for f in hm_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #11
    def test_classical_risk_export(self):
        # Tests that outputs of a risk classical calculation are exported
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.demo_file('classical_psha_based_risk/job.ini')

            # run the calculation to create something to export
            retcode = helpers.run_risk_job_sp(cfg, self.hazard_id,
                                              silence=True)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)
            expected_outputs = 4  # 1 loss curve set + 3 loss map sets
            self.assertEqual(expected_outputs, len(outputs))

            # Export the loss curves:
            curves = outputs.filter(output_type='loss_curve')
            rc_files = []
            for curve in curves:
                rc_files.extend(risk.export(curve.id, target_dir))

            self.assertEqual(1, len(rc_files))

            for f in rc_files:
                self._test_exported_file(f)

            # Test loss map export as well.
            maps = outputs.filter(output_type='loss_map')
            lm_files = sum(
                [risk.export(loss_map.id, target_dir)
                 for loss_map in maps], [])

            self.assertEqual(3, len(lm_files))

            for f in lm_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #12
    def test_disagg_hazard_export(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('disaggregation/job.ini')

            # run the calculation in process to create something to export
            os.environ['OQ_NO_DISTRIBUTE'] = '1'
            try:
                helpers.run_job(cfg)
            finally:
                del os.environ['OQ_NO_DISTRIBUTE']

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = export_core.get_outputs(job.id)

            # Test curve export:
            curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(4, len(curves))
            curve_files = []
            for curve in curves:
                curve_files.append(check_export(curve.id, target_dir))

            self.assertEqual(4, len(curve_files))
            for f in curve_files:
                self._test_exported_file(f)

            # Test disagg matrix export:
            matrices = outputs.filter(output_type='disagg_matrix')
            self.assertEqual(8, len(matrices))
            disagg_files = []
            for matrix in matrices:
                disagg_files.append(check_export(matrix.id, target_dir))

            self.assertEqual(8, len(disagg_files))
            for f in disagg_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #13
    def test_disagg_hazard_export(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('disaggregation/job.ini')

            # run the calculation in process to create something to export
            os.environ['OQ_NO_DISTRIBUTE'] = '1'
            try:
                helpers.run_job(cfg)
            finally:
                del os.environ['OQ_NO_DISTRIBUTE']

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = core.get_outputs(job.id)

            # Test curve export:
            curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(4, len(curves))
            curve_files = []
            for curve in curves:
                curve_files.append(check_export(curve.id, target_dir))

            self.assertEqual(4, len(curve_files))
            for f in curve_files:
                self._test_exported_file(f)

            # Test disagg matrix export:
            matrices = outputs.filter(output_type='disagg_matrix')
            self.assertEqual(8, len(matrices))
            disagg_files = []
            for matrix in matrices:
                disagg_files.append(check_export(matrix.id, target_dir))

            self.assertEqual(8, len(disagg_files))
            for f in disagg_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #14
    def test_disagg_hazard_export(self):
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.demo_file('disaggregation/job.ini')

            retcode = helpers.run_hazard_job_sp(cfg, silence=True)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)

            # Test curve export:
            curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(4, len(curves))
            curve_files = []
            for curve in curves:
                curve_files.extend(hazard.export(curve.id, target_dir))

            self.assertEqual(4, len(curve_files))
            for f in curve_files:
                self._test_exported_file(f)

            # Test disagg matrix export:
            matrices = outputs.filter(output_type='disagg_matrix')
            self.assertEqual(8, len(matrices))
            disagg_files = []
            for matrix in matrices:
                disagg_files.extend(hazard.export(matrix.id, target_dir))

            self.assertEqual(8, len(disagg_files))
            for f in disagg_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #15
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.nrmllib.writers`) are concerned
        # with correctly generating the XML, we don't test that here...
        # but we should still have an end-to-end QA test.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('event_based_hazard/job.ini')

            # run the calculation in process to create something to export
            os.environ['OQ_NO_DISTRIBUTE'] = '1'
            try:
                helpers.run_hazard_job(cfg)
            finally:
                del os.environ['OQ_NO_DISTRIBUTE']

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = export_core.get_outputs(job.id)
            # 2 GMFs, 2 SESs, 1 complete logic tree SES, 1 complete LT GMF,
            # (2 imts * 2 realizations)
            # + ((2 imts + 1 multi) * (1 mean + 3 quantiles))
            # hazard curves,
            # (2 poes * 2 imts * 2 realizations)
            # + (2 poes * 2 imts * (1 mean + 3 quantiles)) hazard maps
            # Total: 46
            self.assertEqual(46, len(outputs))

            #######
            # SESs:
            ses_outputs = outputs.filter(output_type='ses')
            self.assertEqual(2, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                out_file = check_export(ses_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(2, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            ##################
            # Complete LT SES:
            [complete_lt_ses] = outputs.filter(output_type='complete_lt_ses')

            exported_file = check_export(complete_lt_ses.id, target_dir)

            self._test_exported_file(exported_file)

            #######
            # GMFs:
            gmf_outputs = outputs.filter(output_type='gmf')
            self.assertEqual(2, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                out_file = check_export(gmf_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(2, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            ##################
            # Complete LT GMF:
            [complete_lt_gmf] = outputs.filter(output_type='complete_lt_gmf')

            exported_file = check_export(complete_lt_gmf.id, target_dir)

            self._test_exported_file(exported_file)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(exported_file)
            # NB: the number of generated gmfs depends on the number
            # of ruptures, which is a stochastic number; even with a
            # fixed seed, it changes if the order in which the stochastic
            # functions are called changes; a test relying on the precise
            # number would be fragile, so here we just check that there
            # are gmfs (MS)
            self.assertGreater(number_of('nrml:gmf', tree), 0)

            ################
            # Hazard curves:
            haz_curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(12, haz_curves.count())
            for curve in haz_curves:
                exported_file = check_export(curve.id, target_dir)
                self._test_exported_file(exported_file)

            ##############
            # Hazard maps:
            haz_maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(24, haz_maps.count())
            for hmap in haz_maps:
                exported_file = check_export(hmap.id, target_dir)
                self._test_exported_file(exported_file)
        finally:
            shutil.rmtree(target_dir)
Example #16
    def test_classical_hazard_export(self):
        # Run a hazard calculation to compute some curves and maps
        # Call the exporter and verify that files were created
        # Since the hazard curve XML writer is concerned with correctly
        # generating XML, we won't test that here.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')

            # run the calculation to create something to export
            helpers.run_job(cfg)

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = core.get_outputs(job.id)

            # 10 hazard curves, 20 maps, 10 uhs, 5 multi curves
            expected_outputs = 45
            self.assertEqual(expected_outputs, outputs.count())

            # Number of curves:
            # (2 imts * 2 realizations)
            # + (2 imts * (1 mean + 2 quantiles))
            # = 10
            curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(10, curves.count())

            # Number of multi-curves
            # (2 realizations + 1 mean + 2 quantiles)
            multi_curves = outputs.filter(output_type="hazard_curve_multi")
            self.assertEqual(5, multi_curves.count())

            # Number of maps:
            # (2 poes * 2 imts * 2 realizations)
            # + (2 poes * 2 imts * (1 mean + 2 quantiles))
            # = 20
            maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(20, maps.count())

            # Number of UHS:
            # (20 maps_PGA_SA / 2 poes)
            # = 10
            uhs = outputs.filter(output_type='uh_spectra')
            self.assertEqual(10, uhs.count())

            # Test hazard curve export:
            hc_files = []
            for curve in curves:
                hc_files.append(check_export(curve.id, target_dir))

            self.assertEqual(10, len(hc_files))

            # Test multi hazard curve export:
            hc_files = []
            for curve in multi_curves:
                hc_files.append(check_export(curve.id, target_dir))

            self.assertEqual(5, len(hc_files))

            for f in hc_files:
                self._test_exported_file(f)

            # Test hazard map export:
            hm_files = []
            for haz_map in maps:
                hm_files.append(check_export(haz_map.id, target_dir))

            self.assertEqual(20, len(hm_files))

            for f in hm_files:
                self._test_exported_file(f)

            # Test UHS export:
            uhs_files = []
            for u in uhs:
                uhs_files.append(check_export(u.id, target_dir))
            for f in uhs_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #17
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.commonlib`) are concerned
        # with correctly generating the XML, we don't test that here
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('event_based_hazard/job.ini')

            # run the calculation in process to create something to export
            with mock.patch.dict(os.environ, {'OQ_NO_DISTRIBUTE': '1'}):
                job = helpers.run_job(cfg, maximum_distance=1,
                                      ses_per_logic_tree_path=1,
                                      investigation_time=12,
                                      number_of_logic_tree_samples=1).job
            self.assertEqual(job.status, 'complete')

            dstore = datastore.DataStore(job.id)

            # 1 SES + 1 GMF + 1 hazard_curve_multi + 2 hazard_curve +
            # 4 hazard maps (with poes 0.1, 0.2 and IMT PGA, SA(0.1))
            outputs = core.get_outputs(job.id)

            # SESs
            ses_outputs = outputs.filter(ds_key='sescollection')
            self.assertEqual(1, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                out_file = check_export(ses_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(1, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            # GMFs
            gmf_outputs = outputs.filter(ds_key='gmfs')
            self.assertEqual(1, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                out_file = check_export(gmf_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(1, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            # check the exact values of the GMFs
            gmfs = writers.write_csv(
                io.StringIO(), dstore['gmfs']['col00'].value).encode('utf8')
            self.check_file_content('expected_gmfset_1.txt', gmfs)

            # Hazard curves
            haz_curves = outputs.filter(ds_key='hcurves')
            self.assertEqual(1, haz_curves.count())
            for curve in haz_curves:
                exported_file = check_export(curve.id, target_dir)
                self._test_exported_file(exported_file)

            # Hazard maps
            haz_maps = outputs.filter(ds_key='hmaps')
            self.assertEqual(1, haz_maps.count())
            for hmap in haz_maps:
                exported_file = check_export(hmap.id, target_dir)
                self._test_exported_file(exported_file)
        finally:
            shutil.rmtree(target_dir)
Example #18
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.nrmllib.writers`) are concerned
        # with correctly generating the XML, we don't test that here...
        # but we should still have an end-to-end QA test.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('event_based_hazard/job.ini')

            # run the calculation in process to create something to export
            with mock.patch.dict(os.environ, {'OQ_NO_DISTRIBUTE': '1'}):
                job = helpers.run_job(cfg, maximum_distance=1,
                                      ses_per_logic_tree_path=1)
            self.assertEqual(job.status, 'complete')

            outputs = export_core.get_outputs(job.id)
            # 2 GMFs, 1 SES,
            # (2 imts * 2 realizations)
            self.assertEqual(45, len(outputs))

            #######
            # SESs:
            ses_outputs = outputs.filter(output_type='ses')
            self.assertEqual(1, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                out_file = check_export(ses_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(1, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            #######
            # GMFs:
            gmf_outputs = outputs.filter(output_type='gmf')
            self.assertEqual(2, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                out_file = check_export(gmf_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(2, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            # check the exact values of the GMFs
            [gmfset1] = gmf_outputs[0].gmf
            [gmfset2] = gmf_outputs[1].gmf
            self.check_file_content('expected_gmfset_1.txt', str(gmfset1))
            self.check_file_content('expected_gmfset_2.txt', str(gmfset2))

            ################
            # Hazard curves:
            haz_curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(12, haz_curves.count())
            for curve in haz_curves:
                exported_file = check_export(curve.id, target_dir)
                self._test_exported_file(exported_file)

            ##############
            # Hazard maps:
            haz_maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(24, haz_maps.count())
            for hmap in haz_maps:
                exported_file = check_export(hmap.id, target_dir)
                self._test_exported_file(exported_file)
        finally:
            shutil.rmtree(target_dir)
Example #19
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.commonlib`) are concerned
        # with correctly generating the XML, we don't test that here
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('event_based_hazard/job.ini')

            # run the calculation in process to create something to export
            with mock.patch.dict(os.environ, {'OQ_NO_DISTRIBUTE': '1'}):
                job = helpers.run_job(cfg,
                                      maximum_distance=1,
                                      ses_per_logic_tree_path=1,
                                      number_of_logic_tree_samples=1).job
            self.assertEqual(job.status, 'complete')

            # 1 SES + 1 GMF + 1 hazard_curve_multi + 2 hazard_curve +
            # 4 hazard maps (with poes 0.1, 0.2 and IMT PGA, SA(0.1))
            outputs = core.get_outputs(job.id)
            self.assertEqual(9, len(outputs))

            # SESs
            ses_outputs = outputs.filter(output_type='ses')
            self.assertEqual(1, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                out_file = check_export(ses_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(1, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            # GMFs
            gmf_outputs = outputs.filter(output_type='gmf')
            self.assertEqual(1, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                out_file = check_export(gmf_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(1, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            # check the exact values of the GMFs
            [gmfset1] = gmf_outputs[0].gmf
            self.check_file_content('expected_gmfset_1.txt', str(gmfset1))

            # Hazard curves
            haz_curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(2, haz_curves.count())
            for curve in haz_curves:
                exported_file = check_export(curve.id, target_dir)
                self._test_exported_file(exported_file)

            # Hazard maps
            haz_maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(4, haz_maps.count())
            for hmap in haz_maps:
                exported_file = check_export(hmap.id, target_dir)
                self._test_exported_file(exported_file)
        finally:
            shutil.rmtree(target_dir)
Example #20
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.nrmllib.writers`) are concerned
        # with correctly generating the XML, we don't test that here...
        # but we should still have an end-to-end QA test.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('event_based_hazard/job.ini')

            # run the calculation in process to create something to export
            os.environ['OQ_NO_DISTRIBUTE'] = '1'
            try:
                helpers.run_hazard_job(cfg)
            finally:
                del os.environ['OQ_NO_DISTRIBUTE']

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = export_core.get_outputs(job.id)
            # 2 GMFs, 2 SESs, 1 complete logic tree SES, 1 complete LT GMF,
            # (2 imts * 2 realizations)
            # + ((2 imts + 1 multi) * (1 mean + 3 quantiles))
            # hazard curves,
            # (2 poes * 2 imts * 2 realizations)
            # + (2 poes * 2 imts * (1 mean + 3 quantiles)) hazard maps
            # Total: 46
            self.assertEqual(46, len(outputs))

            #######
            # SESs:
            ses_outputs = outputs.filter(output_type='ses')
            self.assertEqual(2, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                files = check_export(ses_output.id, target_dir)
                exported_files.extend(files)

            self.assertEqual(2, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            ##################
            # Complete LT SES:
            [complete_lt_ses] = outputs.filter(output_type='complete_lt_ses')

            [exported_file] = check_export(complete_lt_ses.id, target_dir)

            self._test_exported_file(exported_file)

            #######
            # GMFs:
            gmf_outputs = outputs.filter(output_type='gmf')
            self.assertEqual(2, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                files = check_export(gmf_output.id, target_dir)
                exported_files.extend(files)

            self.assertEqual(2, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            ##################
            # Complete LT GMF:
            [complete_lt_gmf] = outputs.filter(output_type='complete_lt_gmf')

            [exported_file] = check_export(complete_lt_gmf.id, target_dir)

            self._test_exported_file(exported_file)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(exported_file)
            # NB: the number of generated gmfs depends on the number
            # of ruptures, which is a stochastic number; even with a
            # fixed seed, it changes if the order in which the stochastic
            # functions are called changes; a test relying on the precise
            # number would be fragile, so here we just check that there
            # are gmfs (MS)
            self.assertGreater(number_of('nrml:gmf', tree), 0)

            ################
            # Hazard curves:
            haz_curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(12, haz_curves.count())
            for curve in haz_curves:
                [exported_file] = check_export(curve.id, target_dir)
                self._test_exported_file(exported_file)

            ##############
            # Hazard maps:
            haz_maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(24, haz_maps.count())
            for hmap in haz_maps:
                [exported_file] = check_export(hmap.id, target_dir)
                self._test_exported_file(exported_file)
        finally:
            shutil.rmtree(target_dir)
Example #21
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.nrmllib.writers`) are concerned
        # with correctly generating the XML, we don't test that here...
        # but we should still have an end-to-end QA test.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.demo_file('event_based_hazard/job.ini')

            # run the calculation to create something to export
            retcode = helpers.run_hazard_job_sp(cfg, silence=True)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)
            # 2 GMFs, 2 SESs, 1 complete logic tree SES, 1 complete LT GMF,
            # and 4 hazard curve collections
            self.assertEqual(18, len(outputs))

            #######
            # SESs:
            ses_outputs = outputs.filter(output_type='ses')
            self.assertEqual(2, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                files = hazard.export(ses_output.id, target_dir)
                exported_files.extend(files)

            self.assertEqual(2, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            ##################
            # Complete LT SES:
            [complete_lt_ses] = outputs.filter(output_type='complete_lt_ses')

            [exported_file] = hazard.export(complete_lt_ses.id, target_dir)

            self._test_exported_file(exported_file)

            #######
            # GMFs:
            gmf_outputs = outputs.filter(output_type='gmf')
            self.assertEqual(2, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                files = hazard.export(gmf_output.id, target_dir)
                exported_files.extend(files)

            self.assertEqual(2, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            ##################
            # Complete LT GMF:
            [complete_lt_gmf] = outputs.filter(output_type='complete_lt_gmf')

            [exported_file] = hazard.export(complete_lt_gmf.id, target_dir)

            self._test_exported_file(exported_file)

            # Check for the correct number of GMFs in the file:
            tree = etree.parse(exported_file)
            self.assertEqual(442, number_of('nrml:gmf', tree))

            ################
            # Hazard curves:
            haz_curves = outputs.filter(output_type='hazard_curve')
            for curve in haz_curves:
                [exported_file] = hazard.export(curve.id, target_dir)
                self._test_exported_file(exported_file)

        finally:
            shutil.rmtree(target_dir)
Example #22
    def test_event_based_risk_export(self):
        # Tests that outputs of a risk classical calculation are exported
        target_dir = tempfile.mkdtemp()

        try:
            # use get_risk_job to create a fake GmfCollection
            job, _ = helpers.get_risk_job('event_based_risk/job.ini',
                                          'event_based_hazard/job.ini',
                                          'gmf')

            cfg = helpers.demo_file('event_based_risk/job.ini')

            # run the calculation to create something to export

            # at the moment, only gmf for a specific realization are
            # supported as hazard input
            retcode = helpers.run_risk_job_sp(
                cfg, silence=True,
                hazard_id=job.risk_calculation.hazard_output.id)
            self.assertEqual(0, retcode)

            job = models.OqJob.objects.latest('id')

            outputs = export_core.get_outputs(job.id)
            # 1 loss curve set + 3 loss map sets + 1 insured + 1 aggregate
            expected_outputs = 6
            self.assertEqual(expected_outputs, len(outputs))

            # Export the loss curves...
            curves = outputs.filter(output_type='loss_curve')
            rc_files = []
            for curve in curves:
                rc_files.extend(risk.export(curve.id, target_dir))

            self.assertEqual(1, len(rc_files))

            for f in rc_files:
                self._test_exported_file(f)

            # ... loss map ...
            maps = outputs.filter(output_type='loss_map')
            lm_files = sum(
                [risk.export(loss_map.id, target_dir)
                 for loss_map in maps], [])

            self.assertEqual(3, len(lm_files))

            for f in lm_files:
                self._test_exported_file(f)

            # ... aggregate losses...
            maps = outputs.filter(output_type='agg_loss_curve')
            lm_files = sum(
                [risk.export(loss_map.id, target_dir)
                 for loss_map in maps], [])

            self.assertEqual(1, len(lm_files))

            for f in lm_files:
                self._test_exported_file(f)

            # and insured losses.
            maps = outputs.filter(output_type='ins_loss_curve')
            lm_files = sum(
                [risk.export(loss_map.id, target_dir)
                 for loss_map in maps], [])

            self.assertEqual(1, len(lm_files))

            for f in lm_files:
                self._test_exported_file(f)

        finally:
            shutil.rmtree(target_dir)
Example #23
    def test_classical_hazard_export(self):
        # Run a hazard calculation to compute some curves and maps
        # Call the exporter and verify that files were created
        # Since the hazard curve XML writer is concerned with correctly
        # generating XML, we won't test that here.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')

            # run the calculation to create something to export
            helpers.run_job(cfg)

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = core.get_outputs(job.id)

            # 10 hazard curves, 20 maps, 10 uhs, 5 multi curves
            expected_outputs = 45
            self.assertEqual(expected_outputs, outputs.count())

            # Number of curves:
            # (2 imts * 2 realizations)
            # + (2 imts * (1 mean + 2 quantiles))
            # = 10
            curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(10, curves.count())

            # Number of multi-curves
            # (2 realizations + 1 mean + 2 quantiles)
            multi_curves = outputs.filter(output_type="hazard_curve_multi")
            self.assertEqual(5, multi_curves.count())

            # Number of maps:
            # (2 poes * 2 imts * 2 realizations)
            # + (2 poes * 2 imts * (1 mean + 2 quantiles))
            # = 20
            maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(20, maps.count())

            # Number of UHS:
            # (20 maps_PGA_SA / 2 poes)
            # = 10
            uhs = outputs.filter(output_type='uh_spectra')
            self.assertEqual(10, uhs.count())

            # Test hazard curve export:
            hc_files = []
            for curve in curves:
                hc_files.append(check_export(curve.id, target_dir))

            self.assertEqual(10, len(hc_files))

            # Test multi hazard curve export:
            hc_files = []
            for curve in multi_curves:
                hc_files.append(check_export(curve.id, target_dir))

            self.assertEqual(5, len(hc_files))

            for f in hc_files:
                self._test_exported_file(f)

            # Test hazard map export:
            hm_files = []
            for haz_map in maps:
                hm_files.append(check_export(haz_map.id, target_dir))

            self.assertEqual(20, len(hm_files))

            for f in hm_files:
                self._test_exported_file(f)

            # Test UHS export:
            uhs_files = []
            for u in uhs:
                uhs_files.append(check_export(u.id, target_dir))
            for f in uhs_files:
                self._test_exported_file(f)
        finally:
            shutil.rmtree(target_dir)
Example #24
    def test_export_for_event_based(self):
        # Run an event-based hazard calculation to compute SESs and GMFs
        # Call the exporters for both SES and GMF results and verify that
        # files were created
        # Since the XML writers (in `openquake.nrmllib.writers`) are concerned
        # with correctly generating the XML, we don't test that here...
        # but we should still have an end-to-end QA test.
        target_dir = tempfile.mkdtemp()

        try:
            cfg = helpers.get_data_path('event_based_hazard/job.ini')

            # run the calculation in process to create something to export
            os.environ['OQ_NO_DISTRIBUTE'] = '1'
            try:
                helpers.run_hazard_job(cfg)
            finally:
                del os.environ['OQ_NO_DISTRIBUTE']

            job = models.OqJob.objects.latest('id')
            self.assertEqual(job.status, 'complete')

            outputs = export_core.get_outputs(job.id)
            # 2 GMFs, 2 SESs,
            # (2 imts * 2 realizations)
            # + ((2 imts + 1 multi) * (1 mean + 3 quantiles))
            # hazard curves,
            # (2 poes * 2 imts * 2 realizations)
            # + (2 poes * 2 imts * (1 mean + 3 quantiles)) hazard maps
            # Total: 44
            self.assertEqual(44, len(outputs))

            #######
            # SESs:
            ses_outputs = outputs.filter(output_type='ses')
            self.assertEqual(2, len(ses_outputs))

            exported_files = []
            for ses_output in ses_outputs:
                out_file = check_export(ses_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(2, len(exported_files))

            for f in exported_files:
                self._test_exported_file(f)

            #######
            # GMFs:
            gmf_outputs = outputs.filter(output_type='gmf')
            self.assertEqual(2, len(gmf_outputs))

            exported_files = []
            for gmf_output in gmf_outputs:
                out_file = check_export(gmf_output.id, target_dir)
                exported_files.append(out_file)

            self.assertEqual(2, len(exported_files))
            # Check the file paths exist, are absolute, and the files aren't
            # empty.
            for f in exported_files:
                self._test_exported_file(f)

            ################
            # Hazard curves:
            haz_curves = outputs.filter(output_type='hazard_curve')
            self.assertEqual(12, haz_curves.count())
            for curve in haz_curves:
                exported_file = check_export(curve.id, target_dir)
                self._test_exported_file(exported_file)

            ##############
            # Hazard maps:
            haz_maps = outputs.filter(output_type='hazard_map')
            self.assertEqual(24, haz_maps.count())
            for hmap in haz_maps:
                exported_file = check_export(hmap.id, target_dir)
                self._test_exported_file(exported_file)
        finally:
            shutil.rmtree(target_dir)