def compute_uhs_task(job_id, realization, site):
    """Compute Uniform Hazard Spectra for a given site of interest and 1 or
    more Probability of Exceedance values.

    The bulk of the computation will be done by utilizing the
    `UHSCalculator` class in the Java code. UHS results will be written
    directly to the database.

    :param int job_id: ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config.
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    """
    job_ctxt = utils_tasks.get_running_job(job_id)

    LOG.info(
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to the database."
        % (job_ctxt.job_id, site, realization))

    write_uhs_spectrum_data(
        job_ctxt, realization, site, compute_uhs(job_ctxt, site))
def compute_uhs_task(job_id, realization, site):
    """Compute Uniform Hazard Spectra for a given site of interest and 1 or
    more Probability of Exceedance values.

    The bulk of the computation will be done by utilizing the
    `UHSCalculator` class in the Java code. UHS results will be written
    directly to the database.

    :param int job_id: ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config.
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    """
    calc_proxy = utils_tasks.get_running_calculation(job_id)

    LOG.info(
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to the database."
        % (calc_proxy.job_id, site, realization))

    spectrum = compute_uhs(calc_proxy, site)
    write_uhs_spectrum_data(calc_proxy, realization, site, spectrum)
def guarantee_file(path, url):
    """Based on flag, download test data file or raise error."""
    # Nothing to do when the file is already present locally.
    if os.path.isfile(path):
        return

    if not FLAGS.download_test_data:
        raise Exception("Test data does not exist")

    LOG.info("Downloading test data for %s", path)
    # A non-zero exit status from curl means the download failed.
    if subprocess.call(["curl", url, "-o", path]):
        raise Exception("Test data could not be downloaded from %s" % (url))
def store_gmpe_map(job_id, seed, calc):
    """Generate a hash map of GMPEs (keyed by Tectonic Region Type) and
    store it in the KVS.

    :param int job_id: numeric ID of the job
    :param int seed: seed for random logic tree sampling
    :param calc: logic tree processor
    :type calc: :class:`openquake.input.logictree.LogicTreeProcessor`
        instance
    """
    LOG.info("Storing GMPE map from job config")
    gmpe_map_key = kvs.tokens.gmpe_key(job_id)
    calc.sample_and_save_gmpe_logictree(kvs.get_client(), gmpe_map_key, seed)
def store_source_model(job_id, seed, params, calc):
    """Generate source model from the source model logic tree and store it
    in the KVS.

    :param int job_id: numeric ID of the job
    :param int seed: seed for random logic tree sampling
    :param dict params: the config parameters as (dict)
    :param calc: logic tree processor
    :type calc: :class:`openquake.input.logictree.LogicTreeProcessor`
        instance
    """
    LOG.info("Storing source model from job config")
    source_model_key = kvs.tokens.source_model_key(job_id)
    bin_width = float(params.get("WIDTH_OF_MFD_BIN"))
    calc.sample_and_save_source_model_logictree(
        kvs.get_client(), source_model_key, seed, bin_width)
def store_source_model(job_id, seed, params, calc):
    """Generate source model from the source model logic tree and store it
    in the KVS.

    :param int job_id: numeric ID of the job
    :param int seed: seed for random logic tree sampling
    :param dict params: the config parameters as (dict)
    :param calc: logic tree processor
    :type calc: :class:`openquake.input.logictree.LogicTreeProcessor`
        instance
    """
    LOG.info("Storing source model from job config")

    kvs_client = kvs.get_client()
    key = kvs.tokens.source_model_key(job_id)
    mfd_bin_width = float(params.get('WIDTH_OF_MFD_BIN'))

    calc.sample_and_save_source_model_logictree(
        kvs_client, key, seed, mfd_bin_width)
def compute_uhs_task(job_id, realization, site, result_dir):
    """Compute Uniform Hazard Spectra for a given site of interest and 1 or
    more Probability of Exceedance values.

    The bulk of the computation will be done by utilizing the
    `UHSCalculator` class in the Java code. UHS results (for each poe) will
    be written as a 1D array into temporary HDF5 files. (The files will
    later be collected and 'reduced' into final result files.)

    :param int job_id: ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config.
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    :param result_dir:
        NFS result directory path. For each poe, a subfolder will be
        created to contain intermediate calculation results. (Each call to
        this task will generate 1 result file per poe.)
    :returns: A list of the resulting file names (1 per poe).
    """
    utils_tasks.check_job_status(job_id)
    job = Job.from_kvs(job_id)

    LOG.info(
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to `%s`."
        % (job.job_id, site, realization, result_dir))

    return write_uhs_results(
        result_dir, realization, site, compute_uhs(job, site))
def convert(input_path, input_module, output_path, output_module):
    """Main conversion method. Currently tooled to run GEM1 parsers via
    jpype, which involves setting static properties on the classes for the
    input directories. The parsing itself is done in the class constructor,
    and output is derived from a writeSources method.

    :param input_path: directory containing the GEM1 input model data
    :param input_module: module describing the input format; must expose
        ``init_paths``, ``BOUNDING_BOX`` and ``JAVA_MODELS``
    :param output_path: directory where the NRML output files are written
    :param output_module: module describing the output format
        (currently unused; kept for interface compatibility)
    """
    LOG.info("Starting conversion run...")

    jarpath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                           "../lib")
    LOG.debug("Jarpath is %s", jarpath)
    max_mem = 4000  # JVM heap ceiling, in MB
    jpype.startJVM(jpype.getDefaultJVMPath(),
                   "-Djava.ext.dirs=%s" % jarpath,
                   "-Xmx%sM" % max_mem)

    input_module.init_paths(input_path, jpype)
    root_node = etree.Element(NRML + "SeismicSourceList", nsmap=NSMAP)

    # All the GEM1 parsers take a bounding box for the ctor
    (latmin, latmax, lonmin, lonmax) = input_module.BOUNDING_BOX

    # TODO(JMC): Make this support non-Java input parsers, too
    for model, _subdir in input_module.JAVA_MODELS:
        outfile = os.path.join(output_path, model + "-foo.xml")
        if os.path.exists(outfile):
            # Bug fix: the `continue` here was commented out, so existing
            # output was regenerated despite the "skipping" log message.
            LOG.info("Output exists, skipping generation of %s", outfile)
            continue

        java_class = jpype.JClass(model)
        input_parser = java_class(latmin, latmax, lonmin, lonmax)
        LOG.debug("Loaded a %s parser with %s sources",
                  model, input_parser.getNumSources())

        for source in input_parser.srcDataList:
            serialize_source(source, root_node)

        # NOTE(review): root_node is shared across models, so each outfile
        # contains the sources of all models parsed so far — confirm this
        # accumulation is intended.
        LOG.debug("Writing output to %s", outfile)
        et = etree.ElementTree(root_node)
        et.write(outfile, pretty_print=True)

    LOG.info("Finished conversion run.")