def test_get_client_same_conn(self):
    """
    get_client() returns redis client instances with the same
    connection pool.
    """
    obj1 = kvs.get_client()
    obj2 = kvs.get_client()
    self.assertIs(obj1.connection_pool, obj2.connection_pool)
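# The test above relies on kvs.get_client() memoizing a single connection
# pool. A minimal sketch of such a factory follows, assuming a module-level
# _pool cache and default host/port settings; this is an illustration of
# the pooling behavior under test, not the engine's actual implementation.
import redis

_pool = None


def get_client_sketch(host='localhost', port=6379):
    """Return a Redis client; all clients share one connection pool."""
    global _pool
    if _pool is None:
        _pool = redis.ConnectionPool(host=host, port=port)
    return redis.Redis(connection_pool=_pool)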
def get_pattern(regexp):
    """Get all the values whose keys satisfy the given regexp.

    Return an empty list if there are no keys satisfying the given
    regexp.
    """
    values = []
    keys = kvs.get_client().keys(regexp)
    if keys:
        values = kvs.get_client().mget(keys)
    return values
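# A hedged usage sketch for get_pattern(); the 'job:1:*' key names below are
# made up for illustration. Note that the underlying redis KEYS command
# matches glob-style patterns rather than true regular expressions, so
# 'job:1:*' matches any key with that prefix.
def _get_pattern_example():
    client = kvs.get_client()
    client.set('job:1:a', 'first')
    client.set('job:1:b', 'second')
    values = get_pattern('job:1:*')   # both values; order not guaranteed
    missing = get_pattern('job:2:*')  # [] -- no keys match
    return values, missing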
def store_gmpe_map(job_id, seed, calc):
    """Generate a hash map of GMPEs (keyed by Tectonic Region Type) and
    store it in the KVS.

    :param int job_id: numeric ID of the job
    :param int seed: seed for random logic tree sampling
    :param calc: logic tree processor
    :type calc: :class:`openquake.engine.input.logictree.LogicTreeProcessor`
        instance
    """
    logs.LOG.info("Storing GMPE map from job config")
    key = kvs.tokens.gmpe_key(job_id)
    calc.sample_and_save_gmpe_logictree(kvs.get_client(), key, seed)
def store_source_model(job_id, seed, params, calc):
    """Generate a source model from the source model logic tree and store
    it in the KVS.

    :param int job_id: numeric ID of the job
    :param int seed: seed for random logic tree sampling
    :param dict params: the config parameters, as a dict
    :param calc: logic tree processor
    :type calc: :class:`openquake.engine.input.logictree.LogicTreeProcessor`
        instance
    """
    logs.LOG.info("Storing source model from job config")
    key = kvs.tokens.source_model_key(job_id)
    mfd_bin_width = float(params.get("WIDTH_OF_MFD_BIN"))
    calc.sample_and_save_source_model_logictree(
        kvs.get_client(), key, seed, mfd_bin_width)
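# A hedged sketch of how the two store_* helpers above might be driven from
# job setup code. The job_id, seed, and params values are illustrative only;
# construction of the LogicTreeProcessor is omitted because it depends on
# the job's input files.
def _store_logic_tree_data_example(calc):
    job_id, seed = 42, 12345
    params = {"WIDTH_OF_MFD_BIN": "0.1"}
    store_source_model(job_id, seed, params, calc)
    store_gmpe_map(job_id, seed, calc)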
def _slurp_files(self):
    """Read referenced files and write them into kvs, keyed on their
    sha1s."""
    kvs_client = kvs.get_client()
    if self.base_path is None:
        logs.LOG.debug("Can't slurp files without a base path, homie...")
        return
    for key, val in self.params.items():
        if key.endswith('_FILE'):
            path = os.path.join(self.base_path, val)
            with open(path) as data_file:
                logs.LOG.debug("Slurping %s" % path)
                blob = data_file.read()
                file_key = kvs.tokens.generate_blob_key(self.job_id, blob)
                kvs_client.set(file_key, blob)
                self.params[key] = file_key
                self.params[key + "_PATH"] = path
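# A hedged sketch of the content-addressed keying that _slurp_files relies
# on: per its docstring, generate_blob_key derives the key from the blob's
# sha1, so identical file contents map to the same KVS key and are stored
# only once. The hashlib-based helper below is an assumed equivalent for
# illustration, not the engine's actual token format.
import hashlib


def _blob_key_sketch(job_id, blob):
    # blob is assumed to be a bytes object here
    return "%s:blob:%s" % (job_id, hashlib.sha1(blob).hexdigest())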
def setUp(self):
    self.client = kvs.get_client()
    self.client.flushall()

    self.test_job = 1
    kvs.mark_job_as_current(self.test_job)

    # create some keys to hold fake data for test_job
    self.gmf1_key = kvs.tokens.gmf_set_key(self.test_job, 0, 0)
    self.gmf2_key = kvs.tokens.gmf_set_key(self.test_job, 0, 1)
    self.vuln_key = kvs.tokens.vuln_key(self.test_job)

    # now create the fake data for test_job
    self.client.set(self.gmf1_key, 'fake gmf data 1')
    self.client.set(self.gmf2_key, 'fake gmf data 2')
    self.client.set(self.vuln_key, 'fake vuln curve data')

    # this job will have no data
    self.dataless_job = 2
    kvs.mark_job_as_current(self.dataless_job)
@classmethod
def setUpClass(cls):
    cls.client = kvs.get_client()
    cls.client.delete(kvs.tokens.CURRENT_JOBS)
def setUp(self):
    self.generated_files = []
    self.kvs_client = kvs.get_client()
@classmethod
def setUpClass(cls):
    cls.client = kvs.get_client()