def _save_uhs(job, uhs_results, poe, rlz=None, statistics=None, quantile=None):
    """
    Save computed UHS data to the DB.

    UHS results can be either for an end branch or for mean or quantile
    statistics.

    :param job:
        :class:`openquake.engine.db.models.OqJob` instance to be associated
        with the results.
    :param uhs_results:
        UHS computation results structured like the output of
        :func:`make_uhs`.
    :param float poe:
        Probability of exceedance of the hazard maps from which these UH
        Spectra were produced.
    :param rlz:
        :class:`openquake.engine.db.models.LtRealization`. Specify only if
        these results are for an end branch.
    :param statistics:
        'mean' or 'quantile'. Specify only if these are statistical results.
    :param float quantile:
        Specify only if ``statistics`` == 'quantile'.
    """
    out = models.Output(
        oq_job=job, owner=job.owner, output_type='uh_spectra')
    spectra = models.UHS(
        poe=poe,
        investigation_time=job.hazard_calculation.investigation_time,
        periods=uhs_results['periods'],
    )

    if rlz is not None:
        # End-branch result: tag the record with its logic tree realization.
        spectra.lt_realization = rlz
        out.display_name = _UHS_DISP_NAME_FMT % dict(poe=poe, rlz=rlz.id)
    elif statistics is not None:
        spectra.statistics = statistics
        if statistics == 'quantile':
            spectra.quantile = quantile
            out.display_name = (
                _UHS_DISP_NAME_QUANTILE_FMT
                % dict(poe=poe, quantile=quantile))
        else:
            # mean
            out.display_name = _UHS_DISP_NAME_MEAN_FMT % dict(poe=poe)

    # The Output row must exist before the UHS row can reference it.
    out.save()
    spectra.output = out
    # This should fail if neither `lt_realization` nor `statistics` is defined:
    spectra.save()

    with transaction.commit_on_success(using='reslt_writer'):
        cache = CacheInserter(models.UHSData, CURVE_CACHE_SIZE)
        for lon, lat, imls in uhs_results['uh_spectra']:
            row = models.UHSData(
                uhs_id=spectra.id,
                imls='{%s}' % ','.join(str(x) for x in imls),
                location='POINT(%s %s)' % (lon, lat))
            cache.add(row)
        cache.flush()
def _save_uhs(job, uhs_results, poe, rlz=None, statistics=None, quantile=None):
    """
    Save computed UHS data to the DB.

    UHS results can be either for an end branch or for mean or quantile
    statistics.

    :param job:
        :class:`openquake.engine.db.models.OqJob` instance to be associated
        with the results.
    :param uhs_results:
        UHS computation results structured like the output of
        :func:`make_uhs`.
    :param float poe:
        Probability of exceedance of the hazard maps from which these UH
        Spectra were produced.
    :param rlz:
        :class:`openquake.engine.db.models.LtRealization`. Specify only if
        these results are for an end branch.
    :param statistics:
        'mean' or 'quantile'. Specify only if these are statistical results.
    :param float quantile:
        Specify only if ``statistics`` == 'quantile'.
    """
    hz_output = models.Output(oq_job=job, output_type='uh_spectra')
    uhs_record = models.UHS(
        poe=poe,
        investigation_time=job.hazard_calculation.investigation_time,
        periods=uhs_results['periods'],
    )

    if rlz is not None:
        # Result belongs to a single logic-tree end branch.
        uhs_record.lt_realization = rlz
        hz_output.display_name = _UHS_DISP_NAME_FMT % dict(
            poe=poe, rlz=rlz.id)
    elif statistics is not None:
        uhs_record.statistics = statistics
        if statistics == 'quantile':
            uhs_record.quantile = quantile
            hz_output.display_name = _UHS_DISP_NAME_QUANTILE_FMT % dict(
                poe=poe, quantile=quantile)
        else:
            # mean
            hz_output.display_name = _UHS_DISP_NAME_MEAN_FMT % dict(poe=poe)

    # Persist the parent Output first; the UHS row points at it.
    hz_output.save()
    uhs_record.output = hz_output
    # This should fail if neither `lt_realization` nor `statistics` is defined:
    uhs_record.save()

    with transaction.commit_on_success(using='reslt_writer'):
        bulk = CacheInserter(models.UHSData, CURVE_CACHE_SIZE)
        for lon, lat, imls in uhs_results['uh_spectra']:
            bulk.add(models.UHSData(
                uhs_id=uhs_record.id,
                imls='{%s}' % ','.join(str(x) for x in imls),
                location='POINT(%s %s)' % (lon, lat)))
        bulk.flush()
def test_insert_gmf(self):
    """Two buffered GmfData rows are bulk-copied with the expected
    tab-separated payload, target table, and column list on flush."""
    inserter = CacheInserter(GmfData, 10)
    records = [
        GmfData(gmf_id=1, imt='PGA', gmvs=[], rupture_ids=[], site_id=1),
        GmfData(gmf_id=1, imt='PGA', gmvs=[], rupture_ids=[], site_id=2),
    ]
    for record in records:
        inserter.add(record)
    inserter.flush()

    conn = writer.connections['reslt_writer']
    self.assertEqual(
        conn.data,
        '1\t\\N\tPGA\t\\N\t\\N\t{}\t{}\t1\n1\t\\N\tPGA\t\\N\t\\N\t{}\t{}\t2\n'
    )
    self.assertEqual(conn.table, '"hzrdr"."gmf_data"')
    self.assertEqual(
        conn.columns,
        ['gmf_id', 'ses_id', 'imt', 'sa_period', 'sa_damping',
         'gmvs', 'rupture_ids', 'site_id'])
def test_insert_gmf(self):
    """Flushing the cache after two adds copies both GmfData rows in one
    bulk insert; check the raw data, table name, and column order."""
    cache = CacheInserter(GmfData, 10)
    first = GmfData(
        gmf_id=1, imt='PGA', gmvs=[], rupture_ids=[], site_id=1)
    second = GmfData(
        gmf_id=1, imt='PGA', gmvs=[], rupture_ids=[], site_id=2)
    cache.add(first)
    cache.add(second)
    cache.flush()

    conn = writer.connections['job_init']
    expected_data = (
        '1\t\\N\tPGA\t\\N\t\\N\t{}\t{}\t1\n1\t\\N\tPGA\t\\N\t\\N\t{}\t{}\t2\n')
    self.assertEqual(conn.data, expected_data)
    self.assertEqual(conn.table, '"hzrdr"."gmf_data"')
    expected_columns = [
        'gmf_id', 'task_no', 'imt', 'sa_period', 'sa_damping',
        'gmvs', 'rupture_ids', 'site_id']
    self.assertEqual(conn.columns, expected_columns)