def setUp(self):
    self.generated_files = []
    self.job = Job.from_file(test.do_test_file(CONFIG_FILE))
    self.job_with_includes = Job.from_file(
        test.do_test_file(CONFIG_WITH_INCLUDES))

    self.generated_files.append(self.job.super_config_path)
    self.generated_files.append(self.job_with_includes.super_config_path)

def setUp(self):
    self.generated_files = []
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE))
    self.job_with_includes = \
        Job.from_file(helpers.get_data_path(CONFIG_WITH_INCLUDES))

    self.generated_files.append(self.job.super_config_path)
    self.generated_files.append(self.job_with_includes.super_config_path)

def test_is_job_completed(self):
    job_id = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db').job_id
    row = OqJob.objects.get(id=job_id)
    pairs = [('pending', False), ('running', False),
             ('succeeded', True), ('failed', True)]
    for status, is_completed in pairs:
        row.status = status
        row.save()
        self.assertEqual(Job.is_job_completed(job_id), is_completed)

def test_is_job_completed(self):
    job_id = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db').job_id
    session = get_db_session("reslt", "writer")
    pairs = [('pending', False), ('running', False),
             ('succeeded', True), ('failed', True)]
    for status, is_completed in pairs:
        # Filter on the job id so only this job's row is updated, not
        # every OqJob row in the test database.
        session.query(OqJob).filter(OqJob.id == job_id).update(
            {'status': status})
        session.commit()
        self.assertEqual(Job.is_job_completed(job_id), is_completed)

def test_get_status_from_db(self):
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db')
    session = get_db_session("reslt", "writer")
    # Filter on the job id so only this job's row is updated.
    query = session.query(OqJob).filter(OqJob.id == self.job.job_id)

    query.update({'status': 'failed'})
    session.commit()
    self.assertEqual(Job.get_status_from_db(self.job.job_id), 'failed')

    query.update({'status': 'running'})
    session.commit()
    self.assertEqual(Job.get_status_from_db(self.job.job_id), 'running')

def test_get_status_from_db(self):
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db')
    row = OqJob.objects.get(id=self.job.job_id)

    row.status = "failed"
    row.save()
    self.assertEqual("failed", Job.get_status_from_db(self.job.job_id))

    row.status = "running"
    row.save()
    self.assertEqual("running", Job.get_status_from_db(self.job.job_id))

def test_prepares_blocks_using_the_exposure(self):
    a_job = Job({EXPOSURE: os.path.join(helpers.SCHEMA_EXAMPLES_DIR,
                                        EXPOSURE_TEST_FILE)})
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block((shapes.Site(9.15000, 45.16667),
                                shapes.Site(9.15333, 45.12200),
                                shapes.Site(9.14777, 45.17999)))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def check_job_status(job_id):
    """
    Helper function which is intended to be run by celery task functions.

    :raises JobCompletedError:
        If :meth:`~openquake.job.Job.is_job_completed` returns ``True``
        for ``job_id``.
    """
    job = Job.from_kvs(job_id)
    level = job.params.get('debug') if job and job.params else 'warn'
    logs.init_logs_amqp_send(level=level, job_id=job_id)
    if Job.is_job_completed(job_id):
        raise JobCompletedError(job_id)

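# A minimal usage sketch, not part of the original module: a celery task
# calls check_job_status() first so it stops early for jobs that have
# already succeeded or failed. ``do_block_work`` is a hypothetical task
# name, assuming celery's ``task`` decorator is available.
from celery.task import task


@task
def do_block_work(job_id, block_id):
    # Raises JobCompletedError for completed jobs, so no work is wasted.
    check_job_status(job_id)
    return block_id
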
def test_with_no_partition_we_just_process_a_single_block(self):
    job.SITES_PER_BLOCK = 1

    # test exposure has 6 assets
    a_job = Job({EXPOSURE: os.path.join(test.DATA_DIR,
                                        EXPOSURE_TEST_FILE)})
    self.generated_files.append(a_job.super_config_path)
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    # but we have 1 block instead of 6
    self.assertEqual(1, len(blocks_keys))

def test_prepares_blocks_using_the_exposure(self):
    a_job = Job({
        EXPOSURE: os.path.join(test.SCHEMA_EXAMPLES_DIR,
                               EXPOSURE_TEST_FILE)
    })
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block(
        (shapes.Site(9.15000, 45.16667),
         shapes.Site(9.15333, 45.12200),
         shapes.Site(9.14777, 45.17999)))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def test_prepares_blocks_using_the_exposure_and_filtering(self):
    a_job = Job({EXPOSURE: os.path.join(helpers.SCHEMA_EXAMPLES_DIR,
                                        EXPOSURE_TEST_FILE),
                 INPUT_REGION: helpers.get_data_path(
                     REGION_EXPOSURE_TEST_FILE)})
    self.generated_files.append(a_job.super_config_path)
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block((shapes.Site(9.15, 45.16667),
                                shapes.Site(9.15333, 45.122),
                                shapes.Site(9.14777, 45.17999)))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def test_prepares_blocks_using_the_input_region(self):
    """
    This test might currently be broken. If it is, blame Lars.
    """
    block_path = test.do_test_file(BLOCK_SPLIT_TEST_FILE)
    a_job = Job.from_file(block_path)
    self.generated_files.append(a_job.super_config_path)
    verts = [float(x) for x in a_job.params['REGION_VERTEX'].split(",")]

    # Flips lon and lat, and builds a list of coord tuples
    coords = zip(verts[1::2], verts[::2])
    expected = shapes.RegionConstraint.from_coordinates(coords)
    expected.cell_size = float(a_job.params['REGION_GRID_SPACING'])
    expected_sites = list(expected)

    a_job._partition()
    blocks_keys = a_job.blocks_keys

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(job.Block(expected_sites),
                     job.Block.from_kvs(blocks_keys[0]))

def test_prepares_blocks_using_the_input_region(self):
    """
    This test might currently be broken. If it is, blame Lars.
    """
    block_path = helpers.get_data_path(BLOCK_SPLIT_TEST_FILE)
    a_job = Job.from_file(block_path)
    self.generated_files.append(a_job.super_config_path)
    verts = [float(x) for x in a_job.params['REGION_VERTEX'].split(",")]

    # Flips lon and lat, and builds a list of coord tuples
    coords = zip(verts[1::2], verts[::2])
    expected = shapes.RegionConstraint.from_coordinates(coords)
    expected.cell_size = float(a_job.params['REGION_GRID_SPACING'])
    expected_sites = list(expected)

    a_job._partition()
    blocks_keys = a_job.blocks_keys

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(job.Block(expected_sites),
                     job.Block.from_kvs(blocks_keys[0]))

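# A worked illustration of the lon/lat flip above (values are made up):
# REGION_VERTEX lists "lat1, lon1, lat2, lon2, ..." while the region
# constraint wants (lon, lat) tuples, hence zipping the odd-indexed
# values against the even-indexed ones.
#
# >>> verts = [45.0, 9.0, 46.0, 10.0]
# >>> zip(verts[1::2], verts[::2])
# [(9.0, 45.0), (10.0, 46.0)]
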
def test_with_no_partition_we_just_process_a_single_block(self):
    job.SITES_PER_BLOCK = 1

    # test exposure has 6 assets
    a_job = Job({
        EXPOSURE: os.path.join(test.SCHEMA_EXAMPLES_DIR,
                               EXPOSURE_TEST_FILE)
    })
    self.generated_files.append(a_job.super_config_path)
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    # but we have 1 block instead of 6
    self.assertEqual(1, len(blocks_keys))

def test_prepares_blocks_using_the_exposure_and_filtering(self):
    a_job = Job({
        EXPOSURE: os.path.join(test.SCHEMA_EXAMPLES_DIR,
                               EXPOSURE_TEST_FILE),
        INPUT_REGION: test.do_test_file(REGION_EXPOSURE_TEST_FILE)
    })
    self.generated_files.append(a_job.super_config_path)
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block(
        (shapes.Site(9.15, 45.16667),
         shapes.Site(9.15333, 45.122),
         shapes.Site(9.14777, 45.17999)))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def test_prepares_blocks_using_the_exposure(self):
    a_job = Job({EXPOSURE: os.path.join(test.DATA_DIR,
                                        EXPOSURE_TEST_FILE)})
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block((
        shapes.Site(9.15000, 45.16667),
        shapes.Site(9.15333, 45.12200),
        shapes.Site(9.14777, 45.17999),
        shapes.Site(9.15765, 45.13005),
        shapes.Site(9.15934, 45.13300),
        shapes.Site(9.15876, 45.13805),
    ))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def check_job_status(job_id):
    """
    Helper function which is intended to be run by celery task functions.

    :raises JobCompletedError:
        If :meth:`~openquake.job.Job.is_job_completed` returns ``True``
        for ``job_id``.
    """
    if Job.is_job_completed(job_id):
        raise JobCompletedError(job_id)

def test_logs_a_warning_if_none_of_the_default_configs_exist(self):

    class call_logger(object):

        def __init__(self, method):
            self.called = False
            self.method = method

        def __call__(self, *args, **kwargs):
            try:
                return self.method(*args, **kwargs)
            finally:
                self.called = True

    good_defaults = Job._Job__defaults
    Job._Job__defaults = ["/tmp/sbfalds"]
    LOG.warning = call_logger(LOG.warning)
    self.assertFalse(LOG.warning.called)
    Job.default_configs()
    self.assertTrue(LOG.warning.called)
    # Restore the mangled class attribute; assigning to Job.__defaults
    # here would set a differently-mangled attribute and leave the fake
    # defaults in place for subsequent tests.
    Job._Job__defaults = good_defaults

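# ``call_logger`` above is a minimal method "spy": it delegates to the
# wrapped callable and records that it was invoked. A standalone
# illustration (call_logger is local to the test above, so this is
# illustrative only):
#
# >>> spy = call_logger(lambda: "hi")
# >>> spy.called
# False
# >>> spy()
# 'hi'
# >>> spy.called
# True
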
def test_set_status(self):
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db')
    session = get_db_session("reslt", "writer")

    status = 'running'
    self.job.set_status(status)
    job = session.query(OqJob).filter(OqJob.id == self.job.job_id).one()
    self.assertEqual(status, job.status)

def test_prepares_blocks_using_the_exposure_and_filtering(self):
    a_job = Job(
        {EXPOSURE: test.test_file(EXPOSURE_TEST_FILE),
         INPUT_REGION: test.test_file(REGION_EXPOSURE_TEST_FILE)})
    self.generated_files.append(a_job.super_config_path)
    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block((
        shapes.Site(9.15, 45.16667),
        shapes.Site(9.15333, 45.122),
        shapes.Site(9.14777, 45.17999),
        shapes.Site(9.15765, 45.13005),
        shapes.Site(9.15934, 45.133),
        shapes.Site(9.15876, 45.13805),
    ))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def job_from_file(config_file_path):
    """
    Create a Job instance from the given configuration file. The results
    are configured to go to XML files. *No* database record will be
    stored for the job. This allows running tests on jobs without
    requiring a database.
    """
    job = Job.from_file(config_file_path, 'xml')
    cleanup_loggers()
    return job

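# A minimal usage sketch, not part of the original module: build a
# database-less job from a test config and launch it, reusing the
# CONFIG_FILE constant and helpers module from these tests.
def run_test_job():
    a_job = job_from_file(helpers.get_data_path(CONFIG_FILE))
    # No DB record exists for the job, so results go to XML files only.
    return a_job.launch()
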
def compute_uhs_task(job_id, realization, site, result_dir):
    """Compute Uniform Hazard Spectra for a given site of interest and
    1 or more Probability of Exceedance values.

    The bulk of the computation will be done by utilizing the
    `UHSCalculator` class in the Java code.

    UHS results (for each poe) will be written as a 1D array into
    temporary HDF5 files. (The files will later be collected and
    'reduced' into final result files.)

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config).
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    :param result_dir:
        NFS result directory path. For each poe, a subfolder will be
        created to contain intermediate calculation results. (Each call
        to this task will generate 1 result file per poe.)
    :returns:
        A list of the resulting file names (1 per poe).
    """
    utils_tasks.check_job_status(job_id)
    the_job = Job.from_kvs(job_id)

    log_msg = (
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to `%s`.")
    log_msg %= (the_job.job_id, site, realization, result_dir)
    LOG.info(log_msg)

    uhs_results = compute_uhs(the_job, site)

    return write_uhs_results(result_dir, realization, site, uhs_results)

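# A minimal fan-out sketch, not the module's actual distribution code:
# one compute_uhs_task call per (realization, site) pair, collecting the
# per-poe result file names. ``n_samples`` and ``sites_of_interest`` are
# hypothetical stand-ins for values read from the job config.
def compute_uhs_for_all_sites(job_id, n_samples, sites_of_interest,
                              result_dir):
    result_files = []
    for realization in xrange(1, n_samples + 1):
        for site in sites_of_interest:
            # In production this would be dispatched via celery (e.g.
            # compute_uhs_task.delay(...)); called directly here to keep
            # the sketch self-contained.
            result_files.extend(
                compute_uhs_task(job_id, realization, site, result_dir))
    return result_files
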
def test_job_db_record_for_output_type_xml(self):
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'xml')
    OqJob.objects.get(id=self.job.job_id)

def test_classical_psha_based_job(self):
    job = Job.from_file(TEST_JOB_FILE_CLASSICAL)
    self.assertTrue(job.launch())

def test_job_with_only_hazard_config_only_has_hazard_section(self):
    FLAGS.include_defaults = False
    job_with_only_hazard = Job.from_file(
        helpers.get_data_path(HAZARD_ONLY))
    self.assertEqual(["HAZARD"], job_with_only_hazard.sections)
    FLAGS.include_defaults = True

def test_a_job_has_an_identifier(self):
    self.assertEqual(1, Job({}, 1).id)

def test_job_runs_with_a_good_config(self):
    job = Job.from_file(TEST_JOB_FILE)
    self.assertTrue(job.launch())

def test_job_with_only_hazard_config_only_has_hazard_section(self):
    FLAGS.include_defaults = False
    job_with_only_hazard = Job.from_file(test.do_test_file(HAZARD_ONLY))
    self.assertEqual(["HAZARD"], job_with_only_hazard.sections)
    FLAGS.include_defaults = True

def test_can_store_and_read_jobs_from_kvs(self):
    self.job = Job.from_file(os.path.join(test.DATA_DIR, CONFIG_FILE))
    self.generated_files.append(self.job.super_config_path)
    self.assertEqual(self.job, Job.from_kvs(self.job.id))

def test_can_store_and_read_jobs_from_kvs(self):
    self.job = Job.from_file(os.path.join(helpers.DATA_DIR, CONFIG_FILE))
    self.generated_files.append(self.job.super_config_path)
    self.assertEqual(self.job, Job.from_kvs(self.job.id))

def test_job_db_record_for_output_type_xml(self):
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'xml')
    session = get_db_session("uiapi", "writer")
    session.query(OqJob).filter(OqJob.id == self.job.job_id).one()

def test_set_status(self):
    self.job = Job.from_file(helpers.get_data_path(CONFIG_FILE), 'db')

    status = 'running'
    self.job.set_status(status)
    self.assertEqual(status, OqJob.objects.get(id=self.job.job_id).status)