Example #1
0
    def test_generate_product_key_with_all_test_data(self):
        """All four components appear in order, joined by '!'."""
        expected = "!".join(str(part) for part in
                            (self.job_id, self.product, self.block_id,
                             self.site))

        generated = kvs.generate_product_key(
            self.job_id, self.product, self.block_id, self.site)

        self.assertEqual(generated, expected)
Example #2
0
    def test_generate_product_key_with_all_test_data(self):
        """Key built from job, product, block and site follows the
        job_id!product!block_id!site pattern."""
        expected = "%s!%s!%s!%s" % (self.job_id, self.product,
                                    self.block_id, self.site)

        actual = kvs.generate_product_key(
            self.job_id, self.product, self.block_id, self.site)

        self.assertEqual(actual, expected)
Example #3
0
 def generate_gmpe_map(self):
     """Build and return the GMPE map stored in the KVS for this job.

     The map is deserialized from the cache via the Java JsonSerializer,
     then its parameters are applied before it is returned.
     """
     cache_key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
     serializer = java.jclass("JsonSerializer")
     gmpe_map = serializer.getGmpeMapFromCache(self.cache, cache_key)
     self.set_gmpe_params(gmpe_map)
     return gmpe_map
Example #4
0
    def test_generate_erf_returns_erf_via_kvs(self):
        # Spawns one generate_erf task per job id and verifies that each
        # task writes the JSON-encoded [job_id] under its ERF product key.
        results = []
        result_keys = []
        expected_values = {}

        print kvs.tokens.ERF_KEY_TOKEN

        for job_id in TASK_JOBID_SIMPLE:
            erf_key = kvs.generate_product_key(job_id,
                                               kvs.tokens.ERF_KEY_TOKEN)

            # Build the expected values
            expected_values[erf_key] = json.JSONEncoder().encode([job_id])

            # Get our result keys
            result_keys.append(erf_key)

            # Spawn our tasks.
            results.append(tasks.generate_erf.apply_async(args=[job_id]))

        helpers.wait_for_celery_tasks(results)

        # Fetch everything the workers stored in a single round trip.
        result_values = self.kvs_client.get_multi(result_keys)

        self.assertEqual(result_values, expected_values)
Example #5
0
    def test_generate_erf_returns_erf_via_kvs(self):
        # Spawns one generate_erf task per job id and verifies that each
        # task writes the JSON-encoded [job_id] under its ERF product key.
        results = []
        result_keys = []
        expected_values = {}

        print kvs.tokens.ERF_KEY_TOKEN

        for job_id in TASK_JOBID_SIMPLE:
            erf_key = kvs.generate_product_key(job_id,
                                               kvs.tokens.ERF_KEY_TOKEN)

            # Build the expected values
            expected_values[erf_key] = json.JSONEncoder().encode([job_id])

            # Get our result keys
            result_keys.append(erf_key)

            # Spawn our tasks.
            results.append(tasks.generate_erf.apply_async(args=[job_id]))

        helpers.wait_for_celery_tasks(results)

        # Fetch everything the workers stored in a single round trip.
        result_values = self.kvs_client.get_multi(result_keys)

        self.assertEqual(result_values, expected_values)
Example #6
0
 def generate_gmpe_map(self):
     """Deserialize the GMPE map stored for this job from the KVS,
     apply its parameters, and return it."""
     cache_key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
     serializer = java.jclass("JsonSerializer")
     gmpe_map = serializer.getGmpeMapFromCache(self.cache, cache_key)
     self.set_gmpe_params(gmpe_map)
     return gmpe_map
Example #7
0
    def test_kvs_doesnt_support_spaces_in_keys(self):
        """Whitespace inside key components is stripped out."""
        self.product = "A TestProduct"
        self.site = "Testville, TestLand"

        generated = kvs.generate_product_key(self.job_id, self.product,
                                             site=self.site)

        expected = "%s!ATestProduct!!Testville,TestLand" % self.job_id
        self.assertEqual(generated, expected)
Example #8
0
    def test_kvs_doesnt_support_spaces_in_keys(self):
        """Spaces in the product and site never reach the generated key."""
        self.product = "A TestProduct"
        self.site = "Testville, TestLand"

        key = kvs.generate_product_key(self.job_id, self.product,
                                       site=self.site)

        self.assertEqual(
            key, "%s!ATestProduct!!Testville,TestLand" % self.job_id)
Example #9
0
 def generate_erf(self):
     """Build an Earthquake Rupture Forecast from the source model
     currently stored in the KVS and return it."""
     cache_key = kvs.generate_product_key(
         self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
     serializer = java.jclass("JsonSerializer")
     sources = serializer.getSourceListFromCache(self.cache, cache_key)
     forecast = java.jclass("GEM1ERF")(sources)
     self.calc.setGEM1ERFParams(forecast)
     return forecast
Example #10
0
 def generate_erf(self):
     """Deserialize the stored source model into an Earthquake Rupture
     Forecast, configure it, and return it."""
     cache_key = kvs.generate_product_key(
         self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
     sources = java.jclass("JsonSerializer").getSourceListFromCache(
         self.cache, cache_key)
     forecast = java.jclass("GEM1ERF")(sources)
     self.calc.setGEM1ERFParams(forecast)
     return forecast
Example #11
0
    def test_hazard_engine_jobber_runs(self):
        """Construction of LogicTreeProcessor in Java should not throw
        errors, and should have params loaded from KVS."""

        hazengine = job.Job.from_file(TEST_JOB_FILE)
        # Register the generated super-config so tearDown can remove it.
        self.generated_files.append(hazengine.super_config_path)
        with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin):
            hazengine.execute()

            # Executing the job should have stored both the sampled source
            # model and the GMPE map under their product keys; the bare
            # get() calls surface a failure if either key is unreadable.
            source_model_key = kvs.generate_product_key(hazengine.id,
                                kvs.tokens.SOURCE_MODEL_TOKEN)
            self.kvs_client.get(source_model_key)
            # We have the random seed in the config, so this is guaranteed
            # TODO(JMC): Add this back in
            # self.assertEqual(source_model, TEST_SOURCE_MODEL)

            gmpe_key = kvs.generate_product_key(hazengine.id,
                                kvs.tokens.GMPE_TOKEN)
            self.kvs_client.get(gmpe_key)
Example #12
0
    def store_source_model(self, seed):
        """Generates an Earthquake Rupture Forecast, using the source zones and
        logic trees specified in the job config file. Note that this has to be
        done currently using the file itself, since it has nested references to
        other files.

        :param seed: random seed used when sampling the source model
            logic tree.
        """

        LOG.info("Storing source model from job config")
        key = kvs.generate_product_key(self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
        print "source model key is", key
        # Sampling and serialization happen on the Java side; the result
        # is written to the KVS under `key`.
        self.calc.sampleAndSaveERFTree(self.cache, key, seed)
Example #13
0
    def store_source_model(self, seed):
        """Generates an Earthquake Rupture Forecast, using the source zones and
        logic trees specified in the job config file. Note that this has to be
        done currently using the file itself, since it has nested references to
        other files.

        :param seed: random seed used when sampling the source model
            logic tree.
        """

        LOG.info("Storing source model from job config")
        key = kvs.generate_product_key(self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
        print "source model key is", key
        # Sampling and serialization happen on the Java side; the result
        # is written to the KVS under `key`.
        self.calc.sampleAndSaveERFTree(self.cache, key, seed)
Example #14
0
    def compute_loss_ratio_curve(self, gridpoint):
        """Return the loss ratio curve for a single gridpoint.

        Reads the hazard curve and the asset (exposure) for the gridpoint
        from the KVS; returns None when either one is missing.
        """

        # check in kvs if hazard and exposure for gridpoint are there
        kvs_key_hazard = kvs.generate_product_key(self.job_id,
            kvs.tokens.HAZARD_CURVE_KEY_TOKEN, self.block_id, gridpoint)

        hazard_curve_json = kvs.get_client(binary=False).get(kvs_key_hazard)
        LOGGER.debug("hazard curve as JSON: %s" % hazard_curve_json)

        # BUG FIX: the original tested `hazard_curve is None` *after*
        # assigning shapes.EMPTY_CURVE to it, so the missing-data branch
        # could never fire. Check the raw KVS payload before parsing.
        if hazard_curve_json is None:
            LOGGER.debug("no hazard curve found")
            return None

        hazard_curve = shapes.EMPTY_CURVE
        hazard_curve.from_json(hazard_curve_json)

        LOGGER.debug("hazard curve at key %s is %s" % (kvs_key_hazard,
            hazard_curve))

        kvs_key_exposure = kvs.generate_product_key(self.job_id,
            kvs.tokens.EXPOSURE_KEY_TOKEN, self.block_id, gridpoint)

        asset = kvs.get_value_json_decoded(kvs_key_exposure)

        LOGGER.debug("asset at key %s is %s" % (kvs_key_exposure, asset))

        if asset is None:
            LOGGER.debug("no asset found")
            return None

        LOGGER.debug("compute method: vuln curves are")
        for k, v in self.vulnerability_curves.items():  # pylint: disable=E1101
            LOGGER.debug("%s: %s" % (k, v.values))

        # Look up the vulnerability curve declared by the asset.
        # pylint: disable=E1101
        vulnerability_curve = \
            self.vulnerability_curves[asset['VulnerabilityFunction']]

        # selected vuln function is Curve
        return classical_psha_based.compute_loss_ratio_curve(
            vulnerability_curve, hazard_curve)
Example #15
0
 def store_gmpe_map(self, seed):
     """Generates a hash of tectonic regions and GMPEs, using the logic tree
     specified in the job config file.

     :param seed: random seed used when sampling the GMPE logic tree.
     """
     key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
     print "GMPE map key is", key
     jpype = java.jvm()
     try:
         self.calc.sampleAndSaveGMPETree(self.cache, key, seed)
     except jpype.JException(jpype.java.lang.RuntimeException), ex:
         # Translate the Java-side validation failure into a friendlier,
         # file-specific error for the user.
         unwrap_validation_error(
             jpype, ex, self.params.get("GMPE_LOGIC_TREE_FILE_PATH"))
Example #16
0
    def test_hazard_engine_jobber_runs(self):
        """Construction of LogicTreeProcessor in Java should not throw
        errors, and should have params loaded from KVS."""

        hazengine = job.Job.from_file(TEST_JOB_FILE)
        # Register the generated super-config so tearDown can remove it.
        self.generated_files.append(hazengine.super_config_path)
        with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin,
            key="hazard"):
            hazengine.execute()

            # Executing the job should have stored both the sampled source
            # model and the GMPE map under their product keys; the bare
            # get() calls surface a failure if either key is unreadable.
            source_model_key = kvs.generate_product_key(hazengine.id,
                                kvs.tokens.SOURCE_MODEL_TOKEN)
            self.kvs_client.get(source_model_key)
            # We have the random seed in the config, so this is guaranteed
            # TODO(JMC): Add this back in
            # self.assertEqual(source_model, TEST_SOURCE_MODEL)

            gmpe_key = kvs.generate_product_key(hazengine.id,
                                kvs.tokens.GMPE_TOKEN)
            self.kvs_client.get(gmpe_key)
Example #17
0
    def _prepopulate_sites_for_block(self, job_id, block_id):
        """Seed the KVS with a site list and one hazard curve per site
        for the given job/block pair."""
        sites = ["Testville,TestLand", "Provaville,TestdiTerra",
                 "Teststadt,Landtesten", "villed'essai,paystest"]

        self.kvs_client.set(kvs.generate_sites_key(job_id, block_id),
                            json.JSONEncoder().encode(sites))

        for site in sites:
            curve_key = kvs.generate_product_key(
                job_id, kvs.tokens.HAZARD_CURVE_KEY_TOKEN, block_id, site)
            self.kvs_client.set(curve_key, ONE_CURVE_MODEL)
Example #18
0
    def _prepopulate_sites_for_block(self, job_id, block_id):
        """Store a fixed site list for the block, plus a canned hazard
        curve for every site in it."""
        sites = ["Testville,TestLand", "Provaville,TestdiTerra",
                 "Teststadt,Landtesten", "villed'essai,paystest"]
        encoded_sites = json.JSONEncoder().encode(sites)

        self.kvs_client.set(kvs.generate_sites_key(job_id, block_id),
                            encoded_sites)

        for site in sites:
            key = kvs.generate_product_key(
                job_id, kvs.tokens.HAZARD_CURVE_KEY_TOKEN, block_id, site)
            self.kvs_client.set(key, ONE_CURVE_MODEL)
Example #19
0
    def execute(self):
        """Main hazard processing block.
        
        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        results = []
        
        # Independent RNG streams for the source model, GMPE and GMF
        # sampling, each seeded from the job configuration.
        source_model_generator = random.Random()
        source_model_generator.seed(
                self.params.get('SOURCE_MODEL_LT_RANDOM_SEED', None))
        
        gmpe_generator = random.Random()
        gmpe_generator.seed(self.params.get('GMPE_LT_RANDOM_SEED', None))
        
        gmf_generator = random.Random()
        gmf_generator.seed(self.params.get('GMF_RANDOM_SEED', None))
        
        histories = int(self.params['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        LOG.info("Going to run hazard for %s histories of %s realizations each."
                % (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                # Re-sample both logic trees for every realization before
                # farming out the GMF computation to the workers.
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                for site_list in self.site_list_generator():
                    stochastic_set_id = "%s!%s" % (i, j)
                    # pylint: disable=E1101
                    pending_tasks.append(
                        tasks.compute_ground_motion_fields.delay(
                            self.id,
                            site_list,
                            stochastic_set_id, gmf_generator.getrandbits(32)))
        
            # Block until every spawned task finishes; fail fast on error.
            for task in pending_tasks:
                task.wait()
                if task.status != 'SUCCESS': 
                    raise Exception(task.result)
                    
            # if self.params['OUTPUT_GMF_FILES']
            for j in range(0, realizations):
                stochastic_set_id = "%s!%s" % (i, j)
                stochastic_set_key = kvs.generate_product_key(
                    self.id, kvs.tokens.STOCHASTIC_SET_TOKEN, 
                    stochastic_set_id)
                print "Writing output for ses %s" % stochastic_set_key
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    results.extend(self.write_gmf_files(ses))
        return results
Example #20
0
    def slice_gmfs(self, block_id):
        """Load and collate GMF values for all sites in this block. """
        # TODO(JMC): Confirm this works regardless of the method of haz calc.
        histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        num_ses = histories * realizations
        
        block = job.Block.from_kvs(block_id)
        sites_list = block.sites
        # One accumulator per grid cell, keyed "row!column".
        gmfs = {}
        for site in sites_list:
            risk_point = self.region.grid.point_at(site)
            key = "%s!%s" % (risk_point.row, risk_point.column)
            gmfs[key] = []
            
        for i in range(0, histories):
            for j in range(0, realizations):
                key = kvs.generate_product_key(
                        self.id, kvs.tokens.STOCHASTIC_SET_TOKEN, "%s!%s" % 
                            (i, j))
                fieldset = shapes.FieldSet.from_json(kvs.get(key), 
                    self.region.grid)

                # Append each field's value at every cell to that cell's
                # slice.
                for field in fieldset:
                    for key in gmfs.keys():
                        (row, col) = key.split("!")
                        gmfs[key].append(field.get(int(row), int(col)))
                                        
        # Store each cell's collated slice back under its GMF product key.
        for key, gmf_slice in gmfs.items():
            (row, col) = key.split("!")
            key_gmf = kvs.generate_product_key(self.id,
                kvs.tokens.GMF_KEY_TOKEN, col, row)
            LOGGER.debug( "GMF_SLICE for %s X %s : \n\t%s" % (
                    col, row, gmf_slice ))
            timespan = float(self['INVESTIGATION_TIME'])
            gmf = {"IMLs": gmf_slice, "TSES": num_ses * timespan, 
                    "TimeSpan": timespan}
            kvs.set_value_json_encoded(key_gmf, gmf)
Example #21
0
    def compute_ground_motion_fields(self, site_list, stochastic_set_id, seed):
        """Ground motion field calculation, runs on the workers."""
        jpype = java.jvm()

        jsite_list = self.parameterize_sites(site_list)
        set_key = kvs.generate_product_key(
            self.id, kvs.tokens.STOCHASTIC_SET_TOKEN, stochastic_set_id)
        # Correlation is a "true"/"false" string in the job params.
        correlate = self.params['GROUND_MOTION_CORRELATION'] == "true"
        java.jclass("HazardCalculator").generateAndSaveGMFs(
            self.cache, set_key, stochastic_set_id, jsite_list,
            self.generate_erf(), self.generate_gmpe_map(),
            java.jclass("Random")(seed), jpype.JBoolean(correlate))
Example #22
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) gmfs
         3) exposure portfolio (=assets)
         4) vulnerability

        TODO(fab): make conditional_loss_poe (set of probabilities of
        exceedance for which the loss computation is done)
        a list of floats, and read it from the job configuration.
        """

        conditional_loss_poes = [
            float(x)
            for x in self.params.get('CONDITIONAL_LOSS_POE', "0.01").split()
        ]
        # Collate the GMF values for this block before computing losses.
        self.slice_gmfs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        # TODO(jmc): DONT assumes that hazard and risk grid are the same
        block = job.Block.from_kvs(block_id)

        for point in block.grid(self.region):
            key = kvs.generate_product_key(self.job_id,
                                           kvs.tokens.GMF_KEY_TOKEN,
                                           point.column, point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            # All assets stored for this grid cell (a KVS list value).
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in asset_list]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf_slice)
                if loss_ratio_curve is not None:

                    # compute loss curve
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, loss_ratio_curve, asset)

                    for loss_poe in conditional_loss_poes:
                        self.compute_conditional_loss(point.column, point.row,
                                                      loss_curve, asset,
                                                      loss_poe)
        return True
Example #23
0
    def execute(self):
        """Main hazard processing block.

        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        results = []

        # Independent RNG streams for the source model, GMPE and GMF
        # sampling, each seeded from the job configuration.
        source_model_generator = random.Random()
        source_model_generator.seed(
            self.params.get('SOURCE_MODEL_LT_RANDOM_SEED', None))

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.params.get('GMPE_LT_RANDOM_SEED', None))

        gmf_generator = random.Random()
        gmf_generator.seed(self.params.get('GMF_RANDOM_SEED', None))

        histories = int(self.params['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        LOG.info(
            "Going to run hazard for %s histories of %s realizations each." %
            (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                # Re-sample both logic trees for every realization before
                # farming out the GMF computation to the workers.
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                stochastic_set_id = "%s!%s" % (i, j)
                pending_tasks.append(
                    tasks.compute_ground_motion_fields.delay(
                        self.id, self.sites_for_region(), stochastic_set_id,
                        gmf_generator.getrandbits(32)))

            # Block until every spawned task finishes; fail fast on error.
            for task in pending_tasks:
                task.wait()
                if task.status != 'SUCCESS':
                    raise Exception(task.result)

            for j in range(0, realizations):
                stochastic_set_id = "%s!%s" % (i, j)
                stochastic_set_key = kvs.generate_product_key(
                    self.id, kvs.tokens.STOCHASTIC_SET_TOKEN,
                    stochastic_set_id)
                print "Writing output for ses %s" % stochastic_set_key
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    results.extend(self.write_gmf_files(ses))
        return results
Example #24
0
    def compute_ground_motion_fields(self, site_list, stochastic_set_id, seed):
        """Ground motion field calculation, runs on the workers."""
        jpype = java.jvm()

        jsite_list = self.parameterize_sites(site_list)
        set_key = kvs.generate_product_key(
            self.id, kvs.tokens.STOCHASTIC_SET_TOKEN, stochastic_set_id)
        # Correlation is a "true"/"false" string in the job params.
        correlate = self.params['GROUND_MOTION_CORRELATION'] == "true"
        calculator = java.jclass("HazardCalculator")
        calculator.generateAndSaveGMFs(
            self.cache, set_key, stochastic_set_id, jsite_list,
            self.generate_erf(), self.generate_gmpe_map(),
            java.jclass("Random")(seed), jpype.JBoolean(correlate))
Example #25
0
    def compute_loss_curve(self, gridpoint, loss_ratio_curve):
        """Return the loss curve for one gridpoint, derived from the loss
        ratio curve and the asset's value; None when either the loss ratio
        curve or the asset is missing."""
        if loss_ratio_curve is None:
            return None

        exposure_key = kvs.generate_product_key(
            self.job_id, kvs.tokens.EXPOSURE_KEY_TOKEN, self.block_id,
            gridpoint)
        asset = kvs.get_value_json_decoded(exposure_key)
        if asset is None:
            return None

        return classical_psha_based.compute_loss_curve(
            loss_ratio_curve, asset['AssetValue'])
Example #26
0
def generate_erf(job_id):
    """Stub ERF generator: stores a JSON-encoded ``[job_id]`` under the
    job's ERF product key and echoes the job_id back.

    Eventually this will drive the Java HazardEngine via hazardwrapper,
    wait for the real ERF, and write that to the KVS instead.
    """
    # TODO(JM): implement real ERF computation
    key = kvs.generate_product_key(job_id, kvs.tokens.ERF_KEY_TOKEN)
    payload = json.JSONEncoder().encode([job_id])
    kvs.get_client().set(key, payload)

    return job_id
Example #27
0
def generate_erf(job_id):
    """Placeholder ERF generator.

    Writes the JSON encoding of ``[job_id]`` to the KVS under the job's
    ERF product key, then returns the job_id. Real ERF computation via
    the Java HazardEngine / hazardwrapper is still to be implemented.
    """
    # TODO(JM): implement real ERF computation
    client = kvs.get_client()
    erf_key = kvs.generate_product_key(job_id, kvs.tokens.ERF_KEY_TOKEN)
    client.set(erf_key, json.JSONEncoder().encode([job_id]))

    return job_id
Example #28
0
    def store_source_model(self, seed):
        """Generates an Earthquake Rupture Forecast, using the source zones and
        logic trees specified in the job config file. Note that this has to be
        done currently using the file itself, since it has nested references to
        other files.

        :param seed: random seed used when sampling the source model
            logic tree.
        """

        LOG.info("Storing source model from job config")
        key = kvs.generate_product_key(self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
        print "source model key is", key
        jpype = java.jvm()
        try:
            self.calc.sampleAndSaveERFTree(self.cache, key, seed)
        except jpype.JException(jpype.java.lang.RuntimeException), ex:
            # Translate the Java-side validation failure into a friendlier,
            # file-specific error for the user.
            unwrap_validation_error(
                jpype, ex,
                self.params.get("SOURCE_MODEL_LOGIC_TREE_FILE_PATH"))
Example #29
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) gmfs
         3) exposure portfolio (=assets)
         4) vulnerability

        TODO(fab): make conditional_loss_poe (set of probabilities of
        exceedance for which the loss computation is done)
        a list of floats, and read it from the job configuration.
        """

        conditional_loss_poes = [float(x) for x in self.params.get(
                    'CONDITIONAL_LOSS_POE', "0.01").split()]
        # Collate the GMF values for this block before computing losses.
        self.slice_gmfs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        # TODO(jmc): DONT assumes that hazard and risk grid are the same
        block = job.Block.from_kvs(block_id)

        for point in block.grid(self.region):
            key = kvs.generate_product_key(self.job_id,
                kvs.tokens.GMF_KEY_TOKEN, point.column, point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            # All assets stored for this grid cell (a KVS list value).
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in asset_list]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                        point.column, point.row, asset, gmf_slice)
                if loss_ratio_curve is not None:

                    # compute loss curve
                    loss_curve = self.compute_loss_curve(
                            point.column, point.row,
                            loss_ratio_curve, asset)

                    for loss_poe in conditional_loss_poes:
                        self.compute_conditional_loss(point.column, point.row,
                                loss_curve, asset, loss_poe)
        return True
Example #30
0
    def test_compute_mgm_intensity(self):
        """compute_mgm_intensity tasks return the JSON-decoded intensity
        stored for their site."""
        block_id = 8801
        site = "Testville,TestLand"
        expected = json.JSONDecoder().decode(MEAN_GROUND_INTENSITY)

        results = []
        for job_id in TASK_JOBID_SIMPLE:
            key = kvs.generate_product_key(
                job_id, kvs.tokens.MGM_KEY_TOKEN, block_id, site)
            self.kvs_client.set(key, MEAN_GROUND_INTENSITY)
            results.append(tasks.compute_mgm_intensity.apply_async(
                args=[job_id, block_id, site]))

        helpers.wait_for_celery_tasks(results)

        for result in results:
            self.assertEqual(expected, result.get())
Example #31
0
    def test_compute_mgm_intensity(self):
        """Each spawned task echoes back the stored mean ground
        intensity, decoded from JSON."""
        block_id = 8801
        site = "Testville,TestLand"
        mgm_intensity = json.JSONDecoder().decode(MEAN_GROUND_INTENSITY)

        pending = []
        for job_id in TASK_JOBID_SIMPLE:
            mgm_key = kvs.generate_product_key(
                job_id, kvs.tokens.MGM_KEY_TOKEN, block_id, site)
            self.kvs_client.set(mgm_key, MEAN_GROUND_INTENSITY)
            pending.append(tasks.compute_mgm_intensity.apply_async(
                args=[job_id, block_id, site]))

        helpers.wait_for_celery_tasks(pending)

        for task_result in pending:
            self.assertEqual(mgm_intensity, task_result.get())
Example #32
0
    def test_generate_product_key_with_tokens_from_kvs(self):
        """Every known product token yields job!token!block!site."""
        products = [
            kvs.tokens.ERF_KEY_TOKEN,
            kvs.tokens.MGM_KEY_TOKEN,
            kvs.tokens.HAZARD_CURVE_KEY_TOKEN,
            kvs.tokens.EXPOSURE_KEY_TOKEN,
            kvs.tokens.GMF_KEY_TOKEN,
            kvs.tokens.LOSS_RATIO_CURVE_KEY_TOKEN,
            kvs.tokens.LOSS_CURVE_KEY_TOKEN,
            kvs.tokens.loss_token(0.01),
            kvs.tokens.VULNERABILITY_CURVE_KEY_TOKEN,
        ]

        for product in products:
            expected = "%s!%s!%s!%s" % (
                self.job_id, product, self.block_id, self.site)
            generated = kvs.generate_product_key(
                self.job_id, product, self.block_id, self.site)
            self.assertEqual(generated, expected)
Example #33
0
    def test_generate_product_key_with_tokens_from_kvs(self):
        """All product tokens produce the job!token!block!site layout."""
        products = [
            kvs.tokens.ERF_KEY_TOKEN,
            kvs.tokens.MGM_KEY_TOKEN,
            kvs.tokens.HAZARD_CURVE_KEY_TOKEN,
            kvs.tokens.EXPOSURE_KEY_TOKEN,
            kvs.tokens.GMF_KEY_TOKEN,
            kvs.tokens.LOSS_RATIO_CURVE_KEY_TOKEN,
            kvs.tokens.LOSS_CURVE_KEY_TOKEN,
            kvs.tokens.loss_token(0.01),
            kvs.tokens.VULNERABILITY_CURVE_KEY_TOKEN,
        ]

        for product in products:
            self.assertEqual(
                kvs.generate_product_key(
                    self.job_id, product, self.block_id, self.site),
                "%s!%s!%s!%s" % (
                    self.job_id, product, self.block_id, self.site))
Example #34
0
def compute_mgm_intensity(job_id, block_id, site_id):
    """
    Compute mean ground intensity for a specific site.
    """

    # Non-binary client: values are stored as JSON strings.
    kvs_client = kvs.get_client(binary=False)

    mgm_key = kvs.generate_product_key(job_id, kvs.tokens.MGM_KEY_TOKEN,
        block_id, site_id)
    mgm = kvs_client.get(mgm_key)

    if not mgm:
        # TODO(jm): implement hazardwrapper and make this work.
        # TODO(chris): uncomment below when hazardwapper is done

        # Synchronous execution.
        #result = hazardwrapper.apply(args=[job_id, block_id, site_id])
        #mgm = kvs_client.get(mgm_key)
        # NOTE(review): until then a cache miss falls through and the
        # decode below runs on the empty value — confirm callers always
        # pre-populate this key.
        pass

    return json.JSONDecoder().decode(mgm)
Example #35
0
def compute_mgm_intensity(job_id, block_id, site_id):
    """
    Compute mean ground intensity for a specific site.
    """

    # Non-binary client: values are stored as JSON strings.
    kvs_client = kvs.get_client(binary=False)

    mgm_key = kvs.generate_product_key(job_id, kvs.tokens.MGM_KEY_TOKEN,
                                       block_id, site_id)
    mgm = kvs_client.get(mgm_key)

    if not mgm:
        # TODO(jm): implement hazardwrapper and make this work.
        # TODO(chris): uncomment below when hazardwapper is done

        # Synchronous execution.
        #result = hazardwrapper.apply(args=[job_id, block_id, site_id])
        #mgm = kvs_client.get(mgm_key)
        # NOTE(review): until then a cache miss falls through and the
        # decode below runs on the empty value — confirm callers always
        # pre-populate this key.
        pass

    return json.JSONDecoder().decode(mgm)
Example #36
0
    def slice_gmfs(self, block_id):
        """Load and collate GMF values for all sites in this block. """
        # TODO(JMC): Confirm this works regardless of the method of haz calc.
        histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        num_ses = histories * realizations

        block = job.Block.from_kvs(block_id)
        sites_list = block.sites
        # One accumulator per grid cell, keyed "row!column".
        gmfs = {}
        for site in sites_list:
            risk_point = self.region.grid.point_at(site)
            key = "%s!%s" % (risk_point.row, risk_point.column)
            gmfs[key] = []

        for i in range(0, histories):
            for j in range(0, realizations):
                key = kvs.generate_product_key(self.id,
                                               kvs.tokens.STOCHASTIC_SET_TOKEN,
                                               "%s!%s" % (i, j))
                fieldset = shapes.FieldSet.from_json(kvs.get(key),
                                                     self.region.grid)

                # Append each field's value at every cell to that cell's
                # slice.
                for field in fieldset:
                    for key in gmfs.keys():
                        (row, col) = key.split("!")
                        gmfs[key].append(field.get(int(row), int(col)))

        # Store each cell's collated slice back under its GMF key.
        for key, gmf_slice in gmfs.items():
            (row, col) = key.split("!")
            key_gmf = kvs.tokens.gmfs_key(self.id, col, row)
            LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" %
                         (col, row, gmf_slice))
            timespan = float(self['INVESTIGATION_TIME'])
            gmf = {
                "IMLs": gmf_slice,
                "TSES": num_ses * timespan,
                "TimeSpan": timespan
            }
            kvs.set_value_json_encoded(key_gmf, gmf)
Example #37
0
    def test_generate_product_key_with_job_id_product_and_site(self):
        """Omitting block_id leaves an empty field: job!product!!site."""
        expected = "%s!%s!!%s" % (self.job_id, self.product, self.site)
        self.assertEqual(
            kvs.generate_product_key(self.job_id, self.product,
                                     site=self.site),
            expected)
Example #38
0
    def test_generate_product_key_with_job_id_product_and_block_id(self):
        """Omitting the site leaves a trailing empty field."""
        expected = "%s!%s!%s!" % (self.job_id, self.product, self.block_id)
        self.assertEqual(
            kvs.generate_product_key(self.job_id, self.product,
                                     self.block_id),
            expected)
Example #39
0
    def test_generate_product_key_with_only_job_id_and_product(self):
        """Only job and product: both trailing fields stay empty."""
        expected = "%s!%s!!" % (self.job_id, self.product)
        self.assertEqual(
            kvs.generate_product_key(self.job_id, self.product), expected)
Example #40
0
 def store_gmpe_map(self, seed):
     """Generates a hash of tectonic regions and GMPEs, using the logic tree
     specified in the job config file.

     :param seed: random seed used when sampling the GMPE logic tree.
     """
     key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
     print "GMPE map key is", key
     # Sampling happens on the Java side; the result is written to the
     # KVS under `key`.
     self.calc.sampleAndSaveGMPETree(self.cache, key, seed)
Example #41
0
    def test_generate_product_key_with_only_job_id_and_product(self):
        """With just job and product, the key ends with two empty fields."""
        generated = kvs.generate_product_key(self.job_id, self.product)
        self.assertEqual(generated,
                         "%s!%s!!" % (self.job_id, self.product))
Example #42
0
    def test_generate_product_key_with_job_id_product_and_block_id(self):
        """Without a site, the key carries a trailing empty field."""
        generated = kvs.generate_product_key(
            self.job_id, self.product, self.block_id)
        self.assertEqual(
            generated,
            "%s!%s!%s!" % (self.job_id, self.product, self.block_id))
Example #43
0
    def test_generate_product_key_with_job_id_product_and_site(self):
        key = kvs.generate_product_key(self.job_id, self.product,
            site=self.site)

        ev =  "%s!%s!!%s" % (self.job_id, self.product, self.site)
        self.assertEqual(key, ev)
Example #44
0
 def store_gmpe_map(self, seed):
     """Generates a hash of tectonic regions and GMPEs, using the logic tree
     specified in the job config file.

     :param seed: random seed used when sampling the GMPE logic tree.
     """
     key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
     print "GMPE map key is", key
     # Sampling happens on the Java side; the result is written to the
     # KVS under `key`.
     self.calc.sampleAndSaveGMPETree(self.cache, key, seed)