Ejemplo n.º 1
0
    def test_get_list_json_decoded(self):
        """Items pushed as JSON strings come back decoded, in order."""
        items = [{u'1': u'one'}, {u'2': u'two'}, {u'3': u'three'}]

        client = kvs.get_client()
        for entry in items:
            client.rpush(TEST_KEY, json.dumps(entry))

        self.assertEqual(items, kvs.get_list_json_decoded(TEST_KEY))
Ejemplo n.º 2
0
    def test_load_gmvs_at(self):
        """
        Exercise the function
        :func:`openquake.calculators.risk.general.load_gmvs_at`.
        """
        ground_motion_values = [
            {'site_lon': 0.1, 'site_lat': 0.2, 'mag': 0.117},
            {'site_lon': 0.1, 'site_lat': 0.2, 'mag': 0.167},
            {'site_lon': 0.1, 'site_lat': 0.2, 'mag': 0.542},
        ]
        expected_gmvs = [0.117, 0.167, 0.542]

        point = self.region.grid.point_at(shapes.Site(0.1, 0.2))

        # we expect this point to be at row 1, column 0
        self.assertEqual(1, point.row)
        self.assertEqual(0, point.column)

        # place the test values in kvs under the GMV key for this point
        key = kvs.tokens.ground_motion_values_key(self.job_id, point)
        client = kvs.get_client()
        for gmv in ground_motion_values:
            client.rpush(key, json.dumps(gmv))

        self.assertEqual(expected_gmvs, load_gmvs_at(self.job_id, point))
Ejemplo n.º 3
0
def compute_conditional_loss(job_id, col, row, loss_curve, asset, loss_poe):
    """Compute the conditional loss for a loss curve and Probability of
    Exceedance (PoE) and store it in the KVS."""
    key = kvs.tokens.loss_key(job_id, row, col, asset.asset_ref, loss_poe)
    conditional_loss = _compute_conditional_loss(loss_curve, loss_poe)
    kvs.get_client().set(key, conditional_loss)
Ejemplo n.º 4
0
    def compute_loss_ratio_curve(
            self, col, row, asset, gmf_slice, loss_ratios):
        """Compute, store and return the loss ratio curve for a single asset.

        :param col: grid column of the asset's site
        :param row: grid row of the asset's site
        :param asset: asset record; must provide "taxonomy" and "assetID"
        :param gmf_slice: ground motion field values for the asset's site
        :param loss_ratios: pre-computed loss ratios passed through to
            :func:`general.compute_loss_ratio_curve`
        :returns: the loss ratio curve, or None when the asset's taxonomy
            has no known vulnerability function or the curve is all zeros
        """
        calc_proxy = self.calc_proxy

        vuln_function = self.vuln_curves.get(asset["taxonomy"], None)

        if not vuln_function:
            LOGGER.error(
                "Unknown vulnerability function %s for asset %s"
                % (asset["taxonomy"], asset["assetID"]))

            return None

        epsilon_provider = general.EpsilonProvider(calc_proxy.params)

        loss_histogram_bins = calc_proxy.oq_job_profile.loss_histogram_bins
        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function, gmf_slice, epsilon_provider, asset,
            loss_histogram_bins, loss_ratios=loss_ratios)

        # NOTE (jmc): Early exit if the loss ratio is all zeros.
        # `(... == 0.0).all()` replaces the obscure `not False in (...)`
        # membership test; both are true exactly when every ordinate is 0.
        if (loss_ratio_curve.ordinates == 0.0).all():
            return None

        key = kvs.tokens.loss_ratio_key(
            self.calc_proxy.job_id, row, col, asset["assetID"])

        kvs.get_client().set(key, loss_ratio_curve.to_json())

        LOGGER.debug("Loss ratio curve is %s, write to key %s" %
                     (loss_ratio_curve, key))

        return loss_ratio_curve
Ejemplo n.º 5
0
def compute_conditional_loss(job_id, col, row, loss_curve, asset, loss_poe):
    """Compute the conditional loss for a loss curve and Probability of
    Exceedance (PoE)."""
    loss_conditional = _compute_conditional_loss(loss_curve, loss_poe)
    kvs.get_client().set(
        kvs.tokens.loss_key(job_id, row, col, asset.asset_ref, loss_poe),
        loss_conditional)
    def test_load_gmvs_at(self):
        """
        Exercise the function
        :func:`openquake.calculators.risk.general.load_gmvs_at`.
        """
        fixtures = [{'site_lon': 0.1, 'site_lat': 0.2, 'mag': mag}
                    for mag in (0.117, 0.167, 0.542)]
        expected_gmvs = [0.117, 0.167, 0.542]

        point = self.region.grid.point_at(shapes.Site(0.1, 0.2))

        # we expect this point to be at row 1, column 0
        self.assertEqual(1, point.row)
        self.assertEqual(0, point.column)

        key = kvs.tokens.ground_motion_values_key(self.job_id, point)

        # place the test values in kvs
        for gmv in fixtures:
            kvs.get_client().rpush(key, json.dumps(gmv))

        actual_gmvs = load_gmvs_at(self.job_id, point)
        self.assertEqual(expected_gmvs, actual_gmvs)
Ejemplo n.º 7
0
    def _write_output_for_block(self, job_id, block_id):
        """ Given a job and a block, write out a plotted curve

        Collects the loss and loss ratio curves stored in the KVS for
        every asset in the block, then hands them to ``self._serialize``.

        :param job_id: id of the job the curves belong to
        :param block_id: id of the block of sites to process
        :returns: list of serialization results
        """
        loss_ratio_curves = []
        loss_curves = []
        block = Block.from_kvs(job_id, block_id)
        for point, asset in self.grid_assets_iterator(
                block.grid(self.calc_proxy.region)):
            site = shapes.Site(asset['lon'], asset['lat'])

            # Curves were stored as JSON under per-asset keys; a missing
            # key yields a falsy value and the asset is skipped below.
            loss_curve = kvs.get_client().get(
                kvs.tokens.loss_curve_key(
                    job_id, point.row, point.column, asset["assetID"]))
            loss_ratio_curve = kvs.get_client().get(
                kvs.tokens.loss_ratio_key(
                    job_id, point.row, point.column, asset["assetID"]))

            if loss_curve:
                loss_curve = shapes.Curve.from_json(loss_curve)
                loss_curves.append((site, (loss_curve, asset)))

            if loss_ratio_curve:
                loss_ratio_curve = shapes.Curve.from_json(loss_ratio_curve)
                loss_ratio_curves.append((site, (loss_ratio_curve, asset)))

        # Loss ratio curves are always serialized; loss curves only when
        # at least one was actually found in the KVS.
        results = self._serialize(block_id,
                                           curves=loss_ratio_curves,
                                           curve_mode='loss_ratio')
        if loss_curves:
            results.extend(
                self._serialize(
                    block_id, curves=loss_curves, curve_mode='loss',
                    curve_mode_prefix='loss_curve', render_multi=True))
        return results
Ejemplo n.º 8
0
    def setUp(self):
        """Create a classical hazard job and seed the KVS with a mean
        hazard curve at site (2.0, 5.0)."""
        self.params = dict(
            CALCULATION_MODE='Hazard',
            REFERENCE_VS30_VALUE=500,
            SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
            GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
            BASE_PATH=SIMPLE_FAULT_BASE_PATH)

        self.imls = [
            5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02, 2.6900e-02,
            3.7600e-02, 5.2700e-02, 7.3800e-02, 9.8000e-02, 1.0300e-01,
            1.4500e-01, 2.0300e-01, 2.8400e-01, 3.9700e-01, 5.5600e-01,
            7.7800e-01, 1.0900e+00, 1.5200e+00, 2.1300e+00]

        self.job_ctxt = helpers.create_job(self.params)
        self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
        self.job_id = self.job_ctxt.job_id

        self.empty_mean_curve = []

        # deleting server side cached data
        kvs.get_client().flushall()

        poes = [
            9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01, 8.1956e-01,
            6.9192e-01, 5.2866e-01, 3.6143e-01, 2.4231e-01, 2.2452e-01,
            1.2831e-01, 7.0352e-02, 3.6060e-02, 1.6579e-02, 6.4213e-03,
            2.0244e-03, 4.8605e-04, 8.1752e-05, 7.3425e-06]

        self.site = shapes.Site(2.0, 5.0)
        self._store_curve_at(self.site, poes)
Ejemplo n.º 9
0
    def setUp(self):
        """Prepare a classical hazard job and store a mean curve in KVS."""
        self.params = dict(
            CALCULATION_MODE='Hazard',
            REFERENCE_VS30_VALUE=500,
            SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
            GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
            BASE_PATH=SIMPLE_FAULT_BASE_PATH)

        self.imls = [
            5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02, 2.6900e-02,
            3.7600e-02, 5.2700e-02, 7.3800e-02, 9.8000e-02, 1.0300e-01,
            1.4500e-01, 2.0300e-01, 2.8400e-01, 3.9700e-01, 5.5600e-01,
            7.7800e-01, 1.0900e+00, 1.5200e+00, 2.1300e+00]

        self.job_ctxt = helpers.create_job(self.params)
        self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
        self.job_id = self.job_ctxt.job_id

        self.empty_mean_curve = []

        # wipe any server side cached data before seeding the KVS
        kvs.get_client().flushall()

        curve_values = [
            9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01, 8.1956e-01,
            6.9192e-01, 5.2866e-01, 3.6143e-01, 2.4231e-01, 2.2452e-01,
            1.2831e-01, 7.0352e-02, 3.6060e-02, 1.6579e-02, 6.4213e-03,
            2.0244e-03, 4.8605e-04, 8.1752e-05, 7.3425e-06]

        self.site = shapes.Site(2.0, 5.0)
        self._store_curve_at(self.site, curve_values)
Ejemplo n.º 10
0
    def setUp(self):
        """Build a scenario JobContext backed by a clean KVS."""
        kvs.get_client().flushall()

        base_path = helpers.testdata_path("scenario")
        job = engine.prepare_job()
        self.job_profile, self.params, self.sections = (
            engine.import_job_profile(SCENARIO_SMOKE_TEST, job))
        self.job_ctxt = JobContext(
            self.params, job.id, sections=self.sections,
            base_path=base_path, oq_job_profile=self.job_profile,
            oq_job=job)

        self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "1"

        self.job_ctxt.params['SERIALIZE_RESULTS_TO'] = 'xml'
        self.job_ctxt.serialize_results_to = ["xml"]

        # keep a reference to the default java implementation
        self.default = (
            scenario.ScenarioHazardCalculator.compute_ground_motion_field)

        self.grid = self.job_ctxt.region.grid

        self.job_ctxt.to_kvs()
Ejemplo n.º 11
0
    def test_get_list_json_decoded(self):
        """Each JSON-encoded item pushed to the list is decoded on read."""
        expected = [{u'1': u'one'}, {u'2': u'two'}, {u'3': u'three'}]

        for record in expected:
            kvs.get_client().rpush(TEST_KEY, json.dumps(record))

        self.assertEqual(expected, kvs.get_list_json_decoded(TEST_KEY))
Ejemplo n.º 12
0
    def setUp(self):
        """Build a scenario CalculationProxy backed by a clean KVS."""
        kvs.get_client().flushall()

        base_path = helpers.testdata_path("scenario")
        self.job_profile, self.params, self.sections = (
            engine.import_job_profile(SCENARIO_SMOKE_TEST))
        calculation = OqCalculation(owner=self.job_profile.owner,
                                    oq_job_profile=self.job_profile)
        calculation.save()
        self.calc_proxy = CalculationProxy(
            self.params, calculation.id, sections=self.sections,
            base_path=base_path, oq_job_profile=self.job_profile,
            oq_calculation=calculation)

        self.calc_proxy.params[NUMBER_OF_CALC_KEY] = "1"
        self.calc_proxy.params['SERIALIZE_RESULTS_TO'] = 'xml'

        # keep a reference to the default java implementation
        self.default = (
            scenario.ScenarioHazardCalculator.compute_ground_motion_field)

        self.grid = self.calc_proxy.region.grid

        self.calc_proxy.to_kvs()
Ejemplo n.º 13
0
 def test_get_client_same_conn(self):
     """
     get_client() must hand out client instances that share a single
     connection pool.
     """
     first = kvs.get_client()
     second = kvs.get_client()
     self.assertIs(first.connection_pool, second.connection_pool)
Ejemplo n.º 14
0
 def test_get_client_same_conn(self):
     """
     Two clients from get_client() share one connection pool.
     """
     clients = [kvs.get_client() for _ in range(2)]
     self.assertIs(clients[0].connection_pool, clients[1].connection_pool)
Ejemplo n.º 15
0
    def _insured_loss_curve_on_kvs(self, column, row, insured_loss_curve,
                                   asset):
        """Store the given insured loss curve in the KVS as JSON."""
        key_ic = kvs.tokens.insured_loss_curve_key(
            self.job_ctxt.job_id, row, column, asset.asset_ref)
        kvs.get_client().set(key_ic, insured_loss_curve.to_json())
Ejemplo n.º 16
0
    def _insured_loss_ratio_curve_on_kvs(self, column, row,
                                            insured_loss_ratio_curve, asset):
        """Serialize the insured loss ratio curve to the KVS."""
        key = kvs.tokens.insured_loss_ratio_curve_key(
            self.job_ctxt.job_id, row, column, asset.asset_ref)
        kvs.get_client().set(key, insured_loss_ratio_curve.to_json())
Ejemplo n.º 17
0
    def _loss_ratio_curve_on_kvs(self, column, row, loss_ratio_curve, asset):
        """Store the loss ratio curve for `asset` in the KVS as JSON."""
        key = kvs.tokens.loss_ratio_key(
            self.job_ctxt.job_id, row, column, asset.asset_ref)
        kvs.get_client().set(key, loss_ratio_curve.to_json())

        LOGGER.debug("Loss ratio curve is %s, write to key %s" %
                     (loss_ratio_curve, key))
Ejemplo n.º 18
0
    def _loss_ratio_curve_on_kvs(self, column, row, loss_ratio_curve, asset):
        """Write the asset's loss ratio curve to the KVS and log the key."""
        kvs_key = kvs.tokens.loss_ratio_key(self.job_ctxt.job_id, row, column,
                                            asset.asset_ref)
        kvs.get_client().set(kvs_key, loss_ratio_curve.to_json())

        LOGGER.debug("Loss ratio curve is %s, write to key %s" %
                     (loss_ratio_curve, kvs_key))
    def test_load_assets_for_point(self):
        """
        Exercises the function
        :py:func:`openquake.risk.job.deterministic.load_assets_for_point`.
        """
        kvs.flush()

        # Fabricate some test data.
        fabricated_assets = [
            {'assetValue': 2129.5, 'assetID': '104',
             'listDescription': 'fake_description',
             'structureCategory': 'S4L_MC', 'lon': 0.11,
             'assetDescription': 'LA building',
             'vulnerabilityFunctionReference': 'HAZUS_S4L_MC',
             'listID': 'LA01', 'assetValueUnit': 'EUR', 'lat': 0.11},
            {'assetValue': 2229.5, 'assetID': '105',
             'listDescription': 'fake_description',
             'structureCategory': 'S4L_MC', 'lon': 0.11,
             'assetDescription': 'LA building',
             'vulnerabilityFunctionReference': 'HAZUS_S4L_MC',
             'listID': 'LA02', 'assetValueUnit': 'EUR', 'lat': 0.12}]

        test_point = TEST_REGION.grid.point_at(shapes.Site(0.1, 0.1))

        assets_key = kvs.tokens.asset_key(
            TEST_JOB_ID, test_point.row, test_point.column)

        # Throw the test data into the KVS.
        for asset in fabricated_assets:
            kvs.get_client().rpush(assets_key, json.dumps(asset))

        # The data should now be in the KVS; verify that the
        # load_assets_for_point function returns the appropriate data.
        actual_assets = \
            risk_job_det.load_assets_for_point(TEST_JOB_ID, test_point)

        kvs.flush()

        # They should come out exactly the way they went in.
        self.assertEqual(fabricated_assets, actual_assets)
Ejemplo n.º 20
0
    def store_exposure_assets(self):
        """Load exposure assets and append them to per-grid-point KVS
        lists."""
        path = "%s/%s" % (self.base_path, self.params[job.EXPOSURE])
        exposure_parser = exposure.ExposurePortfolioFile(path)

        client = kvs.get_client()
        for site, asset in exposure_parser.filter(self.region):
            # TODO(JMC): This is kludgey
            asset['lat'] = site.latitude
            asset['lon'] = site.longitude
            gridpoint = self.region.grid.point_at(site)
            asset_key = kvs.tokens.asset_key(
                self.id, gridpoint.row, gridpoint.column)
            client.rpush(asset_key, json.dumps(asset))
Ejemplo n.º 21
0
def compute_conditional_loss(job_id, col, row, loss_curve, asset, loss_poe):
    """Compute the conditional loss for a loss curve and Probability of
    Exceedance (PoE), log it and store it in the KVS."""
    loss_conditional = _compute_conditional_loss(loss_curve, loss_poe)
    key = kvs.tokens.loss_key(job_id, row, col, asset["assetID"], loss_poe)

    LOG.debug("Conditional loss is %s, write to key %s" %
            (loss_conditional, key))

    kvs.get_client().set(key, loss_conditional)
Ejemplo n.º 22
0
def get_pattern(regexp):
    """Get all the values whose keys satisfy the given regexp.

    Return an empty list if there are no keys satisfying the given regexp.
    """
    client = kvs.get_client()
    keys = client.keys(regexp)
    return client.mget(keys) if keys else []
Ejemplo n.º 23
0
def get_pattern(regexp):
    """Return the values stored under keys matching `regexp`.

    An empty list is returned when no key matches.
    """
    matching_keys = kvs.get_client().keys(regexp)
    if not matching_keys:
        return []
    return kvs.get_client().mget(matching_keys)
Ejemplo n.º 24
0
    def compute_loss_curve(self, column, row, loss_ratio_curve, asset):
        """Compute, store and return the loss curve for a single asset;
        None when no asset is given."""
        if asset is None:
            return None

        loss_curve = loss_ratio_curve.rescale_abscissae(asset.value)
        key = kvs.tokens.loss_curve_key(
            self.job_ctxt.job_id, row, column, asset.asset_ref)

        LOGGER.debug("Loss curve is %s, write to key %s" % (loss_curve, key))
        kvs.get_client().set(key, loss_curve.to_json())

        return loss_curve
Ejemplo n.º 25
0
    def compute_loss_curve(self, column, row, loss_ratio_curve, asset):
        """Compute the loss curve for a single asset."""
        # Guard clause: nothing to do without an asset.
        if asset is None:
            return None

        rescaled = loss_ratio_curve.rescale_abscissae(asset.value)

        kvs_key = kvs.tokens.loss_curve_key(self.job_ctxt.job_id, row,
                                            column, asset.asset_ref)

        LOGGER.debug("Loss curve is %s, write to key %s" % (rescaled, kvs_key))
        kvs.get_client().set(kvs_key, rescaled.to_json())

        return rescaled
Ejemplo n.º 26
0
    def _compute_risk_classical_psha_setup(self):
        """Seed the KVS and DB with everything a classical PSHA risk
        computation needs: hazard curve, vulnerability functions, asset,
        region and a block of sites."""
        SITE = shapes.Site(1.0, 1.0)
        # deletes all keys from kvs
        kvs.get_client().flushall()

        self.job = self.setup_classic_job()

        # at the moment the hazard part doesn't do exp on the 'x'
        # so it's done on the risk part. To adapt the calculation
        # we do the reverse of the exp, i.e. log(x)
        self.hazard_curve = [
            (SITE,
             {'IMLValues': [0.001, 0.080, 0.170, 0.260, 0.360,
                            0.550, 0.700],
              'PoEValues': [0.99, 0.96, 0.89, 0.82, 0.70, 0.40, 0.01],
              'statistics': 'mean'})]

        # Vitor provided this Vulnerability Function
        imls_1 = [0.03, 0.04, 0.07, 0.1, 0.12, 0.22, 0.37, 0.52]
        loss_ratios_1 = [0.001, 0.022, 0.051, 0.08, 0.1, 0.2, 0.405, 0.700]
        covs_1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        self.vuln_function = shapes.VulnerabilityFunction(imls_1,
            loss_ratios_1, covs_1)

        # a second function with non-zero CoVs
        imls_2 = [0.1, 0.2, 0.4, 0.6]
        loss_ratios_2 = [0.05, 0.08, 0.2, 0.4]
        covs_2 = [0.5, 0.3, 0.2, 0.1]
        self.vuln_function_2 = shapes.VulnerabilityFunction(imls_2,
            loss_ratios_2, covs_2)

        self.job_id = self.job.id

        self.asset_1 = {"taxonomy": "ID",
                "assetValue": 124.27}

        self.region = shapes.RegionConstraint.from_simple(
                (0.0, 0.0), (2.0, 2.0))

        # a block containing the site twice, stored under block number 7
        self.block_id = kvs.tokens.risk_block_key(self.job_id, 7)
        block = Block((SITE, SITE), self.block_id)
        block.to_kvs()

        # persist the hazard curve to the DB ...
        writer = hazard.HazardCurveDBWriter('test_path.xml', self.job_id)
        writer.serialize(self.hazard_curve)

        # ... and the vulnerability model (keyed by taxonomy) to the KVS
        kvs.set_value_json_encoded(
                kvs.tokens.vuln_key(self.job_id),
                {"ID": self.vuln_function.to_json()})
Ejemplo n.º 27
0
 def write_loss_map(self, loss_poe):
     """Map losses at `loss_poe` for every asset and write them to a
     normalized GeoTIFF; return the list with the output path."""
     # Make a special grid at a higher resolution
     risk_grid = shapes.Grid(self.region, float(self['RISK_CELL_SIZE']))
     filename = "losses_at-%s.tiff" % (loss_poe)
     path = os.path.join(self.base_path, self['OUTPUT_DIR'], filename)
     output_generator = geotiff.LossMapGeoTiffFile(
         path, risk_grid, init_value=0.0, normalize=True)
     for point in self.region.grid:
         asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
         asset_list = kvs.get_client().lrange(asset_key, 0, -1)
         for asset in [json.loads(raw) for raw in asset_list]:
             key = kvs.tokens.loss_key(self.id, point.row, point.column,
                                       asset["assetID"], loss_poe)
             loss = kvs.get(key)
             LOG.debug("Loss for asset %s at %s %s is %s" %
                 (asset["assetID"], asset['lon'], asset['lat'], loss))
             if loss:
                 loss_ratio = float(loss) / float(asset["assetValue"])
                 risk_site = shapes.Site(asset['lon'], asset['lat'])
                 risk_point = risk_grid.point_at(risk_site)
                 output_generator.write(
                     (risk_point.row, risk_point.column), loss_ratio)
     output_generator.close()
     return [path]
Ejemplo n.º 28
0
 def write_loss_map(self, loss_poe):
     """ Iterates through all the assets and maps losses at loss_poe """
     # Make a special grid at a higher resolution
     risk_grid = shapes.Grid(self.region, float(self['RISK_CELL_SIZE']))
     filename = "losses_at-%s.tiff" % loss_poe
     path = os.path.join(self.base_path, self['OUTPUT_DIR'], filename)
     output_generator = geotiff.LossMapGeoTiffFile(
         path, risk_grid, init_value=0.0, normalize=True)
     for point in self.region.grid:
         asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
         raw_assets = kvs.get_client().lrange(asset_key, 0, -1)
         for asset in [json.loads(entry) for entry in raw_assets]:
             loss_kvs_key = kvs.tokens.loss_key(
                 self.id, point.row, point.column, asset["assetID"],
                 loss_poe)
             loss = kvs.get(loss_kvs_key)
             LOG.debug("Loss for asset %s at %s %s is %s" %
                 (asset["assetID"], asset['lon'], asset['lat'], loss))
             if loss:
                 loss_ratio = float(loss) / float(asset["assetValue"])
                 risk_point = risk_grid.point_at(
                     shapes.Site(asset['lon'], asset['lat']))
                 output_generator.write(
                     (risk_point.row, risk_point.column), loss_ratio)
     output_generator.close()
     return [path]
Ejemplo n.º 29
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        """

        block = job.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        for point in block.grid(self.region):
            curve_token = kvs.tokens.mean_hazard_curve_key(
                self.job_id, point.site)

            decoded_curve = kvs.get_value_json_decoded(curve_token)

            # The stored curve carries log(IML) on the 'x' axis; exp()
            # restores plain IML values for the risk computation.
            hazard_curve = Curve([(exp(float(el['x'])), el['y'])
                                  for el in decoded_curve['curve']])

            # NOTE(review): asset_key is built with self.id while the curve
            # key above uses self.job_id -- confirm both refer to the same
            # job identifier.
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            assets = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in assets]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)

                self.compute_loss_curve(point, loss_ratio_curve, asset)

        return True
Ejemplo n.º 30
0
def generate_erf(job_id):
    """
    Stubbed ERF generator

    Takes a job_id, returns a job_id.

    Connects to the Java HazardEngine using hazardwrapper, waits for an ERF to
    be generated, and then writes it to KVS.
    """
    # TODO(JM): implement real ERF computation
    erf_key = kvs.generate_product_key(job_id, kvs.tokens.ERF_KEY_TOKEN)
    kvs.get_client().set(erf_key, json.dumps([job_id]))
    return job_id
Ejemplo n.º 31
0
    def execute(self):
        """Entry point to trigger the computation."""
        seed = int(self.params["GMF_RANDOM_SEED"])
        random_generator = java.jclass("Random")(seed)

        encoder = json.JSONEncoder()
        kvs_client = kvs.get_client(binary=False)

        grid = self.region.grid

        for _ in xrange(self._number_of_calculations()):
            gmf = self.compute_ground_motion_field(random_generator)

            for gmv in gmf_to_dict(
                gmf, self.params["INTENSITY_MEASURE_TYPE"]):

                # store each ground motion value under its grid point's key
                point = grid.point_at(
                    shapes.Site(gmv["site_lon"], gmv["site_lat"]))
                key = kvs.tokens.ground_motion_values_key(
                    self.job_id, point)
                kvs_client.rpush(key, encoder.encode(gmv))

        return [True]
Ejemplo n.º 32
0
    def execute(self):
        """Entry point to trigger the computation."""

        random_generator = java.jclass("Random")(int(
            self.job_ctxt.params["GMF_RANDOM_SEED"]))

        encoder = json.JSONEncoder()
        kvs_client = kvs.get_client()

        num_calculations = self._number_of_calculations()
        self.initialize_pr_data(num_calculations=num_calculations)

        for cnum in xrange(num_calculations):
            try:
                gmf = self.compute_ground_motion_field(random_generator)
                stats.pk_inc(self.job_ctxt.job_id, "nhzrd_done", 1)
            # NOTE(review): bare except is deliberate -- the failure counter
            # must be bumped for *any* error before re-raising.
            except:
                # Count failure
                stats.pk_inc(self.job_ctxt.job_id, "nhzrd_failed", 1)
                raise
            logs.log_percent_complete(self.job_ctxt.job_id, "hazard")
            imt = self.job_ctxt.params["INTENSITY_MEASURE_TYPE"]
            self._serialize_gmf(gmf, imt, cnum)

            # Also push each ground motion value into the KVS, keyed by site.
            for gmv in gmf_to_dict(gmf, imt):
                site = shapes.Site(gmv["site_lon"], gmv["site_lat"])

                key = kvs.tokens.ground_motion_values_key(
                    self.job_ctxt.job_id, site)
                kvs_client.rpush(key, encoder.encode(gmv))
Ejemplo n.º 33
0
    def setUp(self):
        """Create a job with empty params and flush cached KVS data."""
        self.params = {}
        self.job = helpers.create_job(self.params)
        self.job_id = self.job.job_id

        self.expected_mean_curve = numpy.array([
            9.8542200e-01, 9.8196600e-01, 9.5842000e-01, 9.2639600e-01,
            8.6713000e-01, 7.7081800e-01, 6.3448600e-01, 4.7256800e-01,
            3.3523400e-01, 3.1255000e-01, 1.7832000e-01, 9.0883400e-02,
            4.2189200e-02, 1.7874200e-02, 6.7449200e-03, 2.1658200e-03,
            5.3878600e-04, 9.4369400e-05, 8.9830380e-06])

        self.empty_curve = []

        # deleting server side cached data
        kvs.get_client().flushall()
Ejemplo n.º 34
0
    def execute(self):
        """Entry point to trigger the computation."""

        random_generator = java.jclass(
            "Random")(int(self.job_ctxt.params["GMF_RANDOM_SEED"]))

        encoder = json.JSONEncoder()
        kvs_client = kvs.get_client()

        num_calculations = self._number_of_calculations()
        self.initialize_pr_data(num_calculations=num_calculations)

        for cnum in xrange(num_calculations):
            try:
                gmf = self.compute_ground_motion_field(random_generator)
                stats.pk_inc(self.job_ctxt.job_id, "nhzrd_done", 1)
            # NOTE(review): bare except is deliberate here so the failure
            # counter is updated for any error before the re-raise.
            except:
                # Count failure
                stats.pk_inc(self.job_ctxt.job_id, "nhzrd_failed", 1)
                raise
            logs.log_percent_complete(self.job_ctxt.job_id, "hazard")
            imt = self.job_ctxt.params["INTENSITY_MEASURE_TYPE"]
            self._serialize_gmf(gmf, imt, cnum)

            # Each ground motion value also goes to the KVS, keyed by site.
            for gmv in gmf_to_dict(gmf, imt):
                site = shapes.Site(gmv["site_lon"], gmv["site_lat"])

                key = kvs.tokens.ground_motion_values_key(
                    self.job_ctxt.job_id, site)
                kvs_client.rpush(key, encoder.encode(gmv))
Ejemplo n.º 35
0
    def compute_hazard_curve(self, site_list, realization):
        """Compute hazard curves, write them to KVS as JSON,
        and return a list of the KVS keys for each curve.

        :param site_list: sites the curves are computed for; must be at
            least as long as the returned list of curves
        :param realization: logic tree realization number (becomes part of
            each KVS key)
        :returns: list of KVS keys, one per curve
        """
        jsite_list = self.parameterize_sites(site_list)
        hazard_curves = java.jclass("HazardCalculator").getHazardCurvesAsJson(
            jsite_list,
            self.generate_erf(),
            self.generate_gmpe_map(),
            self.get_iml_list(),
            float(self.params['MAXIMUM_DISTANCE']))

        # write the curves to the KVS and return a list of the keys
        kvs_client = kvs.get_client()
        curve_keys = []
        # enumerate() replaces the C-style xrange(len(...)) index loop;
        # site_list is still indexed so a length mismatch fails loudly.
        for i, curve in enumerate(hazard_curves):
            site = site_list[i]
            curve_key = kvs.tokens.hazard_curve_key(self.id,
                                                    realization,
                                                    site.longitude,
                                                    site.latitude)
            kvs_client.set(curve_key, curve)
            curve_keys.append(curve_key)
        return curve_keys
Ejemplo n.º 36
0
    def store_exposure_assets(self):
        """Load exposure assets and write them to KVS."""

        exposure_parser = exposure.ExposurePortfolioFile(
            os.path.join(self.base_path, self.params[job_config.EXPOSURE]))

        for site, asset in exposure_parser.filter(self.region):
            # TODO(ac): This is kludgey (?)
            # Attach the site coordinates so the asset record is
            # self-contained once stored in the KVS.
            asset["lat"] = site.latitude
            asset["lon"] = site.longitude
            gridpoint = self.region.grid.point_at(site)

            # Assets are appended to a per-grid-point list.
            asset_key = kvs.tokens.asset_key(
                self.job_id, gridpoint.row, gridpoint.column)

            kvs.get_client().rpush(asset_key, json.JSONEncoder().encode(asset))
Ejemplo n.º 37
0
    def _write_output_for_block(self, job_id, block_id):
        """Given a job and a block, serialize the block's loss ratio curves
        to XML and plot them to SVG.

        :param job_id: id of the job the curves belong to
        :param block_id: id of the block of sites to process
        :returns: list of paths: the XML output plus the plot file names
        """
        decoder = json.JSONDecoder()
        loss_ratio_curves = []
        block = job.Block.from_kvs(block_id)
        for point in block.grid(self.region):
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [decoder.decode(x) for x in asset_list]:
                site = shapes.Site(asset["lon"], asset["lat"])
                # FIX: the key was built with asset["AssetID"] (capital A)
                # while assets carry an "assetID" attribute everywhere else
                # in this codebase -- that lookup could never find a curve.
                key = kvs.tokens.loss_ratio_key(
                    job_id, point.row, point.column, asset["assetID"])
                loss_ratio_curve = kvs.get(key)
                if loss_ratio_curve:
                    loss_ratio_curve = shapes.Curve.from_json(loss_ratio_curve)
                    loss_ratio_curves.append((site, (loss_ratio_curve, asset)))

        LOG.debug("Serializing loss_ratio_curves")
        filename = "%s-block-%s.xml" % (
            self["LOSS_CURVES_OUTPUT_PREFIX"], block_id)
        path = os.path.join(self.base_path, self["OUTPUT_DIR"], filename)
        output_generator = risk_output.LossRatioCurveXMLWriter(path)
        # TODO(JMC): Take mean or max for each site
        output_generator.serialize(loss_ratio_curves)

        filename = "%s-block-%s.svg" % (
            self["LOSS_CURVES_OUTPUT_PREFIX"], block_id)
        curve_path = os.path.join(self.base_path, self["OUTPUT_DIR"], filename)

        plotter = curve.RiskCurvePlotter(curve_path, path, mode="loss_ratio")
        plotter.plot(autoscale_y=False)

        results = [path]
        results.extend(list(plotter.filenames()))
        return results
Ejemplo n.º 38
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        """

        block = job.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        for point in block.grid(self.region):
            curve_token = kvs.tokens.mean_hazard_curve_key(self.job_id,
                                point.site)

            decoded_curve = kvs.get_value_json_decoded(curve_token)

            # The stored curve carries log(IML) on 'x'; exp() restores
            # plain IML values for the risk computation.
            hazard_curve = Curve([(exp(float(el['x'])), el['y'])
                            for el in decoded_curve['curve']])

            # NOTE(review): built with self.id whereas the curve key above
            # uses self.job_id -- verify these are the same identifier.
            asset_key = kvs.tokens.asset_key(self.id,
                            point.row, point.column)
            assets = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in assets]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)

                self.compute_loss_curve(point, loss_ratio_curve, asset)

        return True
Ejemplo n.º 39
0
def generate_erf(job_id):
    """Stub ERF generator: store a placeholder ERF in the KVS.

    Takes a job_id, returns the same job_id.

    Eventually this will connect to the Java HazardEngine using
    hazardwrapper, wait for an ERF to be generated, and then write it
    to the KVS.
    """

    # TODO(JM): implement real ERF computation

    placeholder = json.JSONEncoder().encode([job_id])
    erf_key = kvs.generate_product_key(job_id, kvs.tokens.ERF_KEY_TOKEN)
    kvs.get_client().set(erf_key, placeholder)

    return job_id
Ejemplo n.º 40
0
    def setUp(self):
        # Files created during the test; presumably removed by
        # tearDown -- TODO confirm.
        self.generated_files = []
        # Shared kvs client used by the assertions in this test case.
        self.kvs_client = kvs.get_client()

        # We will run a full test using amqp logging, as configured in
        # openquake.cfg
        helpers.declare_signalling_exchange()
def _assets_keys_for_gmfs(job_id, gmfs_key):
    """Return the asset related to the GMFs given."""

    # The GMF set key embeds the grid position of the site.
    column, row = kvs.tokens.column_row_from_gmf_set_key(gmfs_key)
    asset_list_key = kvs.tokens.asset_key(job_id, row, column)

    client = kvs.get_client()
    return client.lrange(asset_list_key, 0, -1)
Ejemplo n.º 42
0
    def setUp(self):
        # Minimal job fixture; no calculation parameters needed here.
        self.params = {}
        self.job = helpers.create_job(self.params)
        self.job_id = self.job.job_id

        # PoE values the mean hazard curve computation under test is
        # expected to produce.
        self.expected_mean_curve = numpy.array([
            9.8542200e-01, 9.8196600e-01, 9.5842000e-01, 9.2639600e-01,
            8.6713000e-01, 7.7081800e-01, 6.3448600e-01, 4.7256800e-01,
            3.3523400e-01, 3.1255000e-01, 1.7832000e-01, 9.0883400e-02,
            4.2189200e-02, 1.7874200e-02, 6.7449200e-03, 2.1658200e-03,
            5.3878600e-04, 9.4369400e-05, 8.9830380e-06
        ])

        self.empty_curve = []

        # deleting server side cached data
        kvs.get_client().flushall()
Ejemplo n.º 43
0
    def _store_gmvs(self, gmvs):
        """Push each ground motion value onto the kvs list for this site,
        JSON-encoded as ``{"mag": <value>}``."""
        encode = json.JSONEncoder().encode
        kvs_key = ground_motion_values_key(self.job.id, self.site)
        client = kvs.get_client()

        for value in gmvs:
            client.rpush(kvs_key, encode({"mag": value}))
 def _keys_found(self, job_id, keys):
     """Return the keys found in kvs."""
     conn = kvs.get_client()
     present = []
     for template in keys:
         # Each entry is a template keyed by job id.
         resolved = template % job_id
         if conn.get(resolved) is not None:
             present.append(resolved)
     return present
Ejemplo n.º 45
0
 def _keys_found(self, job_id, keys):
     """Return the keys found in kvs."""
     conn = kvs.get_client()
     # Substitute the job id into each key template, keep the hits.
     resolved = (key % job_id for key in keys)
     return [key for key in resolved if conn.get(key) is not None]
Ejemplo n.º 46
0
def _assets_keys_for_gmfs(job_id, gmfs_key):
    """Return the asset related to the GMFs given."""

    # Grid position is embedded in the key: field 2 is the column,
    # field 3 the row.
    parts = gmfs_key.split(kvs.KVS_KEY_SEPARATOR)
    column, row = parts[2], parts[3]

    key = kvs.tokens.asset_key(job_id, row, column)

    return kvs.get_client().lrange(key, 0, -1)
Ejemplo n.º 47
0
    def _write_output_for_block(self, job_id, block_id):
        """
        Write loss / loss ratio curves to xml for a single block.
        """

        loss_curves = []
        loss_ratio_curves = []
        block = Block.from_kvs(job_id, block_id)

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)
            assets = BaseRiskCalculator.assets_at(self.job_ctxt.job_id, site)

            for asset in assets:
                # Curves are stored in kvs as JSON, keyed by grid cell
                # and asset reference; a miss yields a falsy value.
                loss_json = kvs.get_client().get(
                    kvs.tokens.loss_curve_key(job_id, point.row, point.column,
                                              asset.asset_ref))

                ratio_json = kvs.get_client().get(
                    kvs.tokens.loss_ratio_key(job_id, point.row, point.column,
                                              asset.asset_ref))

                if loss_json:
                    loss_curves.append(
                        (site, (shapes.Curve.from_json(loss_json), asset)))

                if ratio_json:
                    loss_ratio_curves.append(
                        (site, (shapes.Curve.from_json(ratio_json), asset)))

        # Loss ratio curves are always serialized, loss curves only
        # when at least one was found.
        results = self._serialize(block_id,
                                  curves=loss_ratio_curves,
                                  curve_mode="loss_ratio")

        if loss_curves:
            results.extend(
                self._serialize(block_id,
                                curves=loss_curves,
                                curve_mode="loss",
                                curve_mode_prefix="loss_curve",
                                render_multi=True))

        return results
Ejemplo n.º 48
0
    def setUpClass(cls):
        # One shared kvs client for every test in this case.
        cls.client = kvs.get_client()

        # Start from a clean slate: drop any leftover job bookkeeping.
        cls.client.delete(tokens.CURRENT_JOBS)
        cls.client.delete(tokens.NEXT_JOB_ID)

        # create 3 jobs
        # this will add job keys to CURRENT_JOBS
        for i in range(1, 4):
            tokens.alloc_job_key()
Ejemplo n.º 49
0
def simple_task_return_name_to_memcache(name, **kwargs):
    """Wait a short time, then store ``name`` under the key ``name`` in
    memcache, logging both steps."""
    logger = simple_task_return_name_to_memcache.get_logger(**kwargs)

    client = kvs.get_client(binary=False)

    elapsed = _wait_a_bit()
    logger.info("processing %s, waited %s milliseconds" % (name, elapsed))

    client.set(name, name)
    logger.info("wrote to memcache key %s" % (name))
Ejemplo n.º 50
0
    def compute_loss_curve(self, point, loss_ratio_curve, asset):
        """
        Compute the loss curve for an asset and cache it in kvs, where
        the @output decorator picks it up for serialization.

        :param point: the point of the grid we want to compute
        :type point: :py:class:`openquake.shapes.GridPoint`
        :param loss_ratio_curve: the loss ratio curve
        :type loss_ratio_curve: :py:class `openquake.shapes.Curve`
        :param asset: the asset for which to compute the loss curve
        :type asset: :py:class:`dict` as provided by
               :py:class:`openquake.parser.exposure.ExposureModelFile`
        """

        # Delegates to the module-level compute_loss_curve helper.
        result = compute_loss_curve(loss_ratio_curve, asset.value)

        key = kvs.tokens.loss_curve_key(self.job_ctxt.job_id, point.row,
                                        point.column, asset.asset_ref)
        kvs.get_client().set(key, result.to_json())

        return result
Ejemplo n.º 51
0
    def setUp(self):
        # Classical PSHA job fixture built from the simple-fault demo
        # inputs.
        self.params = dict(
            CALCULATION_MODE='Hazard',
            SOURCE_MODEL_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_SRC_MODEL_LT,
            GMPE_LOGIC_TREE_FILE_PATH=SIMPLE_FAULT_GMPE_LT,
            BASE_PATH=SIMPLE_FAULT_BASE_PATH)

        self.job_ctxt = helpers.create_job(self.params)
        self.calculator = classical.ClassicalHazardCalculator(self.job_ctxt)
        self.job_id = self.job_ctxt.job_id

        # PoE values the hazard curve computation under test is
        # expected to produce.
        self.expected_curve = numpy.array([
            9.9178000e-01, 9.8892000e-01, 9.6903000e-01, 9.4030000e-01,
            8.8405000e-01, 7.8782000e-01, 6.4897250e-01, 4.8284250e-01,
            3.4531500e-01, 3.2337000e-01, 1.8880500e-01, 9.5574000e-02,
            4.3707250e-02, 1.9643000e-02, 8.1923000e-03, 2.9157000e-03,
            7.9955000e-04, 1.5233000e-04, 1.5582000e-05
        ])

        # deleting server side cached data
        kvs.get_client().flushall()
Ejemplo n.º 52
0
def compute_mgm_intensity(job_id, block_id, site_id):
    """Compute mean ground intensity for a specific site."""

    # Called only for its side effect: it checks that the calculation
    # is still in progress (the returned JobContext is unused for now).
    utils_tasks.get_running_job(job_id)

    mgm_key = kvs.tokens.mgm_key(job_id, block_id, site_id)
    raw_value = kvs.get_client().get(mgm_key)

    return json.JSONDecoder().decode(raw_value)
Ejemplo n.º 53
0
def store_gmpe_map(job_id, seed, calc):
    """Generate a hash map of GMPEs (keyed by Tectonic Region Type) and store
    it in the KVS.

    :param int job_id: numeric ID of the job
    :param int seed: seed for random logic tree sampling
    :param calc: logic tree processor
    :type calc: :class:`openquake.input.logictree.LogicTreeProcessor` instance
    """
    LOG.info("Storing GMPE map from job config")
    gmpe_key = kvs.tokens.gmpe_key(job_id)
    store = kvs.get_client()
    calc.sample_and_save_gmpe_logictree(store, gmpe_key, seed)
Ejemplo n.º 54
0
def mget_decoded(keys):
    """
    Retrieve multiple JSON values from the KVS

    :param keys: keys to retrieve (the corresponding value must be a
        JSON string)
    :type keys: list
    :returns: one value for each key in the list
    """
    raw_values = kvs.get_client().mget(keys)

    return [json.loads(value) for value in raw_values]
Ejemplo n.º 55
0
    def setUp(self):
        # starting the jvm...
        print "About to start the jvm..."
        jpype = java.jvm()
        # Java-side redis cache client, mirrored by the Python client
        # below so tests can compare both views of the store.
        java_class = jpype.JClass("org.gem.engine.hazard.redis.Cache")
        print ("Not dead yet, and found the class...")
        self.java_client = java_class(settings.KVS_HOST, settings.KVS_PORT)

        self.python_client = kvs.get_client(binary=False)

        self.reader = reader.Reader(self.python_client)
        self._delete_test_file()
Ejemplo n.º 56
0
    def compute_loss_ratio_curve(self, col, row, asset, gmf_slice,
                                 loss_ratios):
        """Compute the loss ratio curve for a single asset.

        :param asset: the asset used to compute loss
        :type asset: an :py:class:`openquake.db.model.ExposureData` instance
        """
        job_ctxt = self.job_ctxt

        vuln_function = self.vuln_curves.get(asset.taxonomy, None)

        if not vuln_function:
            LOGGER.error("Unknown vulnerability function %s for asset %s" %
                         (asset.taxonomy, asset.asset_ref))
            return None

        epsilon_provider = general.EpsilonProvider(job_ctxt.params)
        bins = job_ctxt.oq_job_profile.loss_histogram_bins

        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function,
            gmf_slice,
            epsilon_provider,
            asset,
            bins,
            loss_ratios=loss_ratios)

        # NOTE (jmc): Early exit if the loss ratio is all zeros
        if False not in (loss_ratio_curve.ordinates == 0.0):
            return None

        key = kvs.tokens.loss_ratio_key(job_ctxt.job_id, row, col,
                                        asset.asset_ref)

        kvs.get_client().set(key, loss_ratio_curve.to_json())

        LOGGER.debug("Loss ratio curve is %s, write to key %s" %
                     (loss_ratio_curve, key))

        return loss_ratio_curve
Ejemplo n.º 57
0
    def setUp(self):
        # starting the jvm...
        print "About to start the jvm..."
        jpype = java.jvm()
        # Java-side redis cache client; host/port come from the [kvs]
        # section of the configuration.
        java_class = jpype.JClass("org.gem.engine.hazard.redis.Cache")
        print "Not dead yet, and found the class..."
        self.java_client = java_class(config.get("kvs", "host"),
                                      int(config.get("kvs", "port")))

        # Python client on the same store; flush so every test starts
        # from an empty db.
        self.python_client = kvs.get_client()
        self.python_client.flushdb()

        self._delete_test_file()