def test_wrong_sites_csv(self):
    """A sites csv whose site_id column starts at 1 (instead of the
    required 0) must make get_mesh raise InvalidFile."""
    csv_path = general.writetmp(
        'site_id,lon,lat\n1,1.0,2.1\n2,3.0,4.1\n3,5.0,6.1')
    csv_name = os.path.basename(csv_path)
    # build a minimal job.ini pointing at the invalid sites csv
    job_ini = general.writetmp("""
[general]
calculation_mode = scenario
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
""" % (csv_name, TMP))
    oq = readinput.get_oqparam(job_ini)
    with self.assertRaises(InvalidFile) as raised:
        readinput.get_mesh(oq)
    self.assertIn('expected site_id=0, got 1', str(raised.exception))
    os.unlink(csv_path)
def test_wrong_sites_csv(self):
    """A sites csv whose site_id column starts at 1 (instead of the
    required 0) must make get_mesh raise InvalidFile."""
    csv_path = general.gettemp(
        'site_id,lon,lat\n1,1.0,2.1\n2,3.0,4.1\n3,5.0,6.1')
    csv_name = os.path.basename(csv_path)
    # build a minimal job.ini pointing at the invalid sites csv
    job_ini = general.gettemp("""
[general]
calculation_mode = scenario
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
""" % (csv_name, TMP))
    oq = readinput.get_oqparam(job_ini)
    with self.assertRaises(InvalidFile) as raised:
        readinput.get_mesh(oq)
    self.assertIn('expected site_id=0, got 1', str(raised.exception))
    os.unlink(csv_path)
def run_tiles(num_tiles, job_ini, poolsize=0):
    """
    Run a hazard calculation by splitting the sites into tiles.
    WARNING: this is experimental and meant only for internal users

    :param num_tiles: number of tiles to split the sites into
    :param job_ini: path to the job.ini file to run
    :param poolsize: if 0, run the tiles sequentially; otherwise the size
        of the process/thread pool used to run them in parallel
    """
    t0 = time.time()
    oq = readinput.get_oqparam(job_ini)
    num_sites = len(readinput.get_mesh(oq))
    task_args = [(job_ini, slc)
                 for slc in general.split_in_slices(num_sites, num_tiles)]
    if poolsize == 0:  # no pool
        Starmap = parallel.Sequential
    elif os.environ.get('OQ_DISTRIBUTE') == 'celery':
        Starmap = parallel.Processmap  # celery plays only with processes
    else:  # multiprocessing plays only with threads
        Starmap = parallel.Threadmap
    # parent_child[0] is the id of the first calculation, parent_child[1]
    # the id of the latest one; the pair is stored in the engine db
    parent_child = [None, None]

    def agg(calc_ids, calc_id):
        # accumulate the calculation ids while keeping the db updated
        if not calc_ids:  # first calculation
            parent_child[0] = calc_id
        parent_child[1] = calc_id
        logs.dbcmd('update_parent_child', parent_child)
        # NOTE: logging.warn is a deprecated alias, use logging.warning
        logging.warning('Finished calculation %d of %d',
                        len(calc_ids) + 1, num_tiles)
        return calc_ids + [calc_id]

    calc_ids = Starmap(engine.run_tile, task_args, poolsize).reduce(agg, [])
    datadir = datastore.get_datadir()
    for calc_id in calc_ids:
        print(os.path.join(datadir, 'calc_%d.hdf5' % calc_id))
    print('Total calculation time: %.1f h' % ((time.time() - t0) / 3600.))
def read_exposure_sitecol(self):
    """
    Read the exposure (if any) and then the site collection,
    possibly extracted from the exposure.

    Sets ``self.sitecol`` and, when an exposure is present, also
    ``self.exposure``, ``self.assets_by_site``, ``self.cost_types``,
    ``self.taxonomies`` and possibly ``self.assetcol``.
    """
    if 'exposure' in self.oqparam.inputs:
        logging.info('Reading the exposure')
        with self.monitor('reading exposure', autoflush=True):
            self.exposure = readinput.get_exposure(self.oqparam)
            self.sitecol, self.assets_by_site = (
                readinput.get_sitecol_assets(self.oqparam, self.exposure))
            self.cost_types = self.exposure.cost_types
            # fixed-width byte strings so the array can be stored in HDF5
            self.taxonomies = numpy.array(
                sorted(self.exposure.taxonomies), '|S100')
        num_assets = self.count_assets()
        mesh = readinput.get_mesh(self.oqparam)
        if self.datastore.parent:
            # reuse the mesh of the parent (hazard) calculation if the
            # current job does not define its own sites
            parent_mesh = self.datastore.parent['sitemesh'].value
            if mesh is None:
                mesh = Mesh(parent_mesh['lon'], parent_mesh['lat'])
        if mesh is not None:
            # explicit sites given: re-associate the exposure assets to
            # them, discarding the assets too far from any site
            sites = readinput.get_site_collection(self.oqparam, mesh)
            with self.monitor('assoc_assets_sites'):
                self.sitecol, self.assets_by_site = \
                    self.assoc_assets_sites(sites)
            ok_assets = self.count_assets()
            num_sites = len(self.sitecol)
            logging.warn('Associated %d assets to %d sites, %d discarded',
                         ok_assets, num_sites, num_assets - ok_assets)
        if (self.is_stochastic and self.datastore.parent and
                self.datastore.parent['sitecol'] != self.sitecol):
            # a mismatch here may invalidate the hazard/risk association
            logging.warn(
                'The hazard sites are different from the risk sites %s!=%s' %
                (self.datastore.parent['sitecol'], self.sitecol))
    else:  # no exposure: read the sites directly from the job configuration
        logging.info('Reading the site collection')
        with self.monitor('reading site collection', autoflush=True):
            self.sitecol = readinput.get_site_collection(self.oqparam)
    # save mesh and asset collection
    self.save_mesh()
    if hasattr(self, 'assets_by_site'):
        # flatten the per-site asset lists into a single asset collection
        self.assetcol = riskinput.build_asset_collection(
            self.assets_by_site, self.oqparam.time_event)