def setUp(self):
    self.generated_files = []
    self.job = Job.from_file(test.do_test_file(CONFIG_FILE))
    self.job_with_includes = Job.from_file(
        test.do_test_file(CONFIG_WITH_INCLUDES))

    self.generated_files.append(self.job.super_config_path)
    self.generated_files.append(self.job_with_includes.super_config_path)

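# setUp above only records the generated super-config paths; the matching
# tearDown is not part of this section. A minimal sketch of the cleanup it
# would typically perform (an assumption, not the actual test code):
def tearDown(self):
    for path in self.generated_files:
        try:
            os.unlink(path)
        except OSError:
            pass
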
def test_http_handler_writes_a_file(self):

    class StubbedHTTPConnection(StubbedGetter):

        def __enter__(self):
            return self

        def __exit__(self, *args):
            pass

        def request(self, req_type, path):
            self.remote_path = path
            return self

        def getresponse(self):
            return self

        def read(self):
            with open(self.remote_path, "r") as reader:
                return reader.read()

    expected_path = "/tmp/fake_file"
    remote_path = "http://localhost/%s" % test.do_test_file("config.gem")
    url = urlparse.urlparse(remote_path)

    http_handler = handlers.HTTPHandler(url, expected_path)
    guaranteed_file = http_handler.handle(getter=StubbedHTTPConnection)

    self.assertTrue(os.path.isfile(guaranteed_file))
    os.unlink(guaranteed_file)

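# StubbedHTTPConnection above (and StubbedSFTPClient below) extend a
# StubbedGetter helper that is not defined in this section. A minimal
# sketch of such a base class, assuming it only needs to swallow whatever
# connection arguments the handler passes when instantiating the getter:
class StubbedGetter(object):

    def __init__(self, *args, **kwargs):
        pass
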
def test_prepares_blocks_using_the_input_region(self):
    """Partition a job whose sites come from the input region and check
    that a single block holding all of the region's sites is stored in
    the KVS."""
    block_path = test.do_test_file(BLOCK_SPLIT_TEST_FILE)

    print "In open job"
    a_job = Job.from_file(block_path)
    self.generated_files.append(a_job.super_config_path)

    verts = [float(x) for x in a_job.params['REGION_VERTEX'].split(",")]

    # REGION_VERTEX lists lat, lon pairs; flip them into a list of
    # (lon, lat) coordinate tuples.
    coords = zip(verts[1::2], verts[::2])
    expected = shapes.RegionConstraint.from_coordinates(coords)
    expected.cell_size = float(a_job.params['REGION_GRID_SPACING'])

    expected_sites = []
    for site in expected:
        print site
        expected_sites.append(site)

    a_job._partition()
    blocks_keys = a_job.blocks_keys
    print blocks_keys

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(job.Block(expected_sites),
                     job.Block.from_kvs(blocks_keys[0]))

def setUp(self):
    self.gmf_string = open(test.do_test_file("gmfs.json")).readline()
    region = shapes.Region.from_coordinates(
        [(-118.30, 34.12), (-118.18, 34.12),
         (-118.18, 34.00), (-118.30, 34.00)])
    region.cell_size = 0.02
    self.grid = region.grid

def test_file_handler_writes_a_file(self):
    expected_path = "/tmp/fake_file"
    remote_path = test.do_test_file("config.gem")
    url = urlparse.urlparse(remote_path)

    file_handler = handlers.FileHandler(url, expected_path)
    guaranteed_file = file_handler.handle()

    self.assertTrue(os.path.isfile(guaranteed_file))
    os.unlink(guaranteed_file)

def test_prepares_blocks_using_the_exposure_and_filtering(self):
    a_job = Job({
        EXPOSURE: os.path.join(
            test.SCHEMA_EXAMPLES_DIR, EXPOSURE_TEST_FILE),
        INPUT_REGION: test.do_test_file(REGION_EXPOSURE_TEST_FILE)})
    self.generated_files.append(a_job.super_config_path)

    a_job._partition()
    blocks_keys = a_job.blocks_keys

    expected_block = job.Block((shapes.Site(9.15, 45.16667),
                                shapes.Site(9.15333, 45.122),
                                shapes.Site(9.14777, 45.17999)))

    self.assertEqual(1, len(blocks_keys))
    self.assertEqual(expected_block, job.Block.from_kvs(blocks_keys[0]))

def test_ssh_handler_writes_a_file(self):

    class StubbedSFTPClient(StubbedGetter):

        def get(self, remote_path, local_path):
            with open(local_path, "w") as writer:
                with open(remote_path, "r") as reader:
                    writer.write(reader.read())

    expected_path = "/tmp/fake_file"
    remote_path = "sftp://localhost/%s" % test.do_test_file("config.gem")
    url = urlparse.urlparse(remote_path)

    sftp_handler = handlers.SFTPHandler(url, expected_path)
    guaranteed_file = sftp_handler.handle(getter=StubbedSFTPClient)

    self.assertTrue(os.path.isfile(guaranteed_file))
    os.unlink(guaranteed_file)

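# The three handler tests above exercise one shared contract: each handler
# is built from a parsed URL plus a destination path, and handle() returns
# the path of a local copy of the remote file. The helper below is a
# hedged usage sketch only; the scheme-based dispatch and the name
# fetch_config are assumptions, and it relies on each handler providing a
# real default getter (the tests substitute stubs via the getter keyword).
def fetch_config(remote_path, local_path="/tmp/fake_file"):
    url = urlparse.urlparse(remote_path)
    handler_for = {
        "": handlers.FileHandler,
        "file": handlers.FileHandler,
        "http": handlers.HTTPHandler,
        "sftp": handlers.SFTPHandler,
    }
    return handler_for[url.scheme](url, local_path).handle()
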
def test_geotiff_output(self):
    """Write a GeoTIFF containing a simple horizontal gradient and
    compare it against the reference file."""
    path = test.do_test_file("test.1.tiff")
    switzerland = shapes.Region.from_coordinates(
        [(10.0, 100.0), (100.0, 100.0), (100.0, 10.0), (10.0, 10.0)])
    image_grid = switzerland.grid
    gwriter = geotiff.GeoTiffFile(path, image_grid)
    for xpoint in range(0, 320):
        for ypoint in range(0, 320):
            gwriter.write((xpoint, ypoint), int(xpoint * 254 / 320))
    gwriter.close()

    comp_path = os.path.join(test.DATA_DIR, "test.tiff")
    retval = subprocess.call(["tiffcmp", "-t", path, comp_path],
                             stderr=subprocess.STDOUT,
                             stdout=subprocess.PIPE)
    self.assertTrue(retval == 0)

def test_loss_curve_plot_generation_multiple_sites_render_multi(self):
    """Create SVG plots for loss curves read from an NRML file.
    The file contains data for several sites.
    For each site, a separate SVG file is created."""
    path = test.do_test_output_file(LOSS_CURVE_PLOT_FILENAME)
    loss_curve_path = test.do_test_file(LOSS_CURVE_PLOT_INPUTFILE)

    plotter = curve.RiskCurvePlotter(
        path, loss_curve_path, mode='loss',
        curve_title="This is a test loss curve", render_multi=True)

    # delete expected output files, if existing
    for svg_file in plotter.filenames():
        if os.path.isfile(svg_file):
            os.remove(svg_file)

    plotter.plot(autoscale_y=True)

    for svg_file in plotter.filenames():
        self.assertTrue(os.path.getsize(svg_file) > 0)

def test_loss_ratio_curve_plot_generation_multiple_sites(self):
    """Create SVG plots for loss ratio curves read from an NRML file.
    The file contains data for several sites.
    For each site, a separate SVG file is created."""
    path = test.do_test_output_file(LOSS_RATIO_CURVE_PLOT_FILENAME)
    loss_ratio_curve_path = test.do_test_file(
        LOSS_RATIO_CURVE_PLOT_INPUTFILE)

    plotter = curve.RiskCurvePlotter(path, loss_ratio_curve_path,
                                     mode='loss_ratio')

    # delete expected output files, if existing
    for svg_file in plotter.filenames():
        if os.path.isfile(svg_file):
            os.remove(svg_file)

    plotter.plot(autoscale_y=False)

    # assert that for each site in the NRML file an SVG has been created
    for svg_file in plotter.filenames():
        self.assertTrue(os.path.getsize(svg_file) > 0)

def test_hazardcurve_plot_generation_multiple_sites_multiple_curves(self):
    """Create SVG plots for hazard curves read from an NRML file.
    The file contains data for several sites, and several end branches
    of the logic tree. For each site, a separate SVG file is created."""
    path = test.do_test_output_file(HAZARDCURVE_PLOT_FILENAME)
    hazardcurve_path = test.do_test_file(HAZARDCURVE_PLOT_INPUTFILE)

    plotter = curve.HazardCurvePlotter(
        path, hazardcurve_path, curve_title='Example Hazard Curves')

    # delete expected output files, if existing
    for svg_file in plotter.filenames():
        if os.path.isfile(svg_file):
            os.remove(svg_file)

    plotter.plot()

    # assert that for each site in the NRML file an SVG has been created
    # and is not empty
    for svg_file in plotter.filenames():
        self.assertTrue(os.path.getsize(svg_file) > 0)
        os.remove(svg_file)

def test_job_with_only_hazard_config_only_has_hazard_section(self):
    FLAGS.include_defaults = False
    job_with_only_hazard = Job.from_file(test.do_test_file(HAZARD_ONLY))
    self.assertEqual(["HAZARD"], job_with_only_hazard.sections)
    FLAGS.include_defaults = True