def test_stats_models():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2015.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    # Check slopefile trimming
    conf['zhu_2015']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2015']['divfactor'] = '1.'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    # maplayers1 = lm.calculate()

    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2017_coastal.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf['zhu_2017_coastal']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2017_coastal']['divfactor'] = '1.'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    maplayers2 = lm.calculate()

    # Change shakemap name so that it doesn't stomp on the other
    maplayers2['model']['description']['shakemap'] = '19891018000415_ver2'

    # model_list = [maplayers1, maplayers2]
    # test_dict1 = assess_models.concatenateModels(model_list)
    # test_dict2 = assess_models.concatenateModels(model_list,
    #                                              astitle='model')
    # This test appears to freeze on travis:
    # tmp = assess_models.modelSummary(test_dict2, showplots=False,
    #                                  summary_figure=False,
    #                                  individual_plots=False)
    # np.testing.assert_allclose(tmp[0][0], 0.025677016713957716)
    # np.testing.assert_allclose(tmp[1][0], 0.00098462898029272805)

    hagg = stats.computeHagg(maplayers2['model']['grid'])
    np.testing.assert_allclose(hagg['hagg_0.00g'], 65.85571, atol=0.001)
    parea = stats.computeParea(maplayers2['model']['grid'], probthresh=0.2)
    np.testing.assert_allclose(parea, 77.03092, atol=0.001)

    stats2 = stats.computeStats(maplayers2['model']['grid'],
                                probthresh=0.2,
                                shakefile=shakefile,
                                shakethreshtype='pga',
                                shakethresh=20.,
                                statprobthresh=0.0)
    np.testing.assert_allclose(stats2['Max'], 0.41278, atol=0.001)
    np.testing.assert_allclose(stats2['Median'], 0.00032317817, rtol=0.001)
    np.testing.assert_allclose(stats2['Std'], 0.04855, atol=0.0001)
    np.testing.assert_allclose(stats2['hagg_0.20g'], 55.47086, atol=0.001)

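# A minimal sketch (not the library implementation) of the statistics
# asserted above, assuming Hagg is probability-weighted area in km^2 and
# Parea is the area of cells where probability exceeds a threshold. The
# real stats.computeHagg/computeParea also handle projection, NaN masking,
# and shaking thresholds (e.g., the 'hagg_0.20g' key); this shows only the
# core arithmetic.
def _hagg_parea_sketch(prob, cell_area_km2, probthresh=0.0):
    import numpy as np
    prob = np.asarray(prob, dtype=float)
    prob = prob[np.isfinite(prob)]  # drop masked/NaN cells
    hagg = float(np.sum(prob) * cell_area_km2)              # km^2
    parea = float(np.sum(prob > probthresh) * cell_area_km2)  # km^2
    return hagg, parea
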
def test_maps():
    lq = LM.LogisticModel(shakefile, modelLQ, saveinputs=True)
    maplayers = lq.calculate()
    ls = LM.LogisticModel(shakefile, modelLS, saveinputs=False)
    maplayers2 = ls.calculate()

    # Test create_kmz
    tempdir = tempfile.TemporaryDirectory()
    create_kmz(maplayers['model'],
               outfile=os.path.join(tempdir.name, 'test.kmz'))
    create_kmz(maplayers2['model'],
               outfile=os.path.join(tempdir.name, 'test.kmz'),
               mask=0.003)

def test_maps():
    lq = LM.LogisticModel(shakefile, modelLQ, saveinputs=True)
    maplayers = lq.calculate()
    ls = LM.LogisticModel(shakefile, modelLS, saveinputs=False)
    maplayers2 = ls.calculate()

    # suptitle is None
    makemaps.modelMap(maplayers, shakefile, suptitle=None,
                      savepdf=False, savepng=False)  # outputdir=tempdir)
    # shakefile is None
    makemaps.modelMap(maplayers, suptitle=None,
                      savepdf=False, savepng=False)
    # scaletype == 'binned'
    makemaps.modelMap(maplayers, scaletype='binned',
                      savepdf=False, savepng=False)
    # scaletype == 'binned' and logscale != False
    makemaps.modelMap(maplayers, scaletype='binned',
                      logscale=[False, False, True, True],
                      savepdf=False, savepng=False)
    # logscale != False
    makemaps.modelMap(maplayers, logscale=[False, False, True, True],
                      savepdf=False, savepng=False)

    # Make a copy of current defaults
    default_file = os.path.join(os.path.expanduser("~"), ".gfail_defaults")
    if os.path.exists(default_file):
        shutil.copy(default_file, default_file + '_bak')
    try:
        # Clear paths to avoid problems with stats.py trying to find pop_file
        rc, so, se = get_command_output('gfail -reset')
        # Then run GFSummary
        makemaps.GFSummary([maplayers, maplayers2], [modelLQ, modelLS],
                           os.path.join(upone, 'pelican', 'theme'),
                           shakefile, pop_file=None)
    except Exception as e:
        print(e)

    # Put defaults back
    if os.path.exists(default_file + '_bak'):
        shutil.copy(default_file + '_bak', default_file)

def test_zhu2015():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2015.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    # Check slopefile trimming
    conf['zhu_2015']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2015']['divfactor'] = '1.'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    maplayers = lm.calculate()
    pgrid = maplayers['model']['grid']
    test_data = pgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                               'zhu2015.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'zhu2015.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)

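# Note on the pattern above: `changetarget` is assumed to be a module-level
# flag for these regression tests. When set truthy, a run overwrites the
# .grd target grids instead of checking against them, so it should normally
# stay False so the assertions compare rather than regenerate.
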
def test_zoom():
    # boundaries == 'zoom'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2015.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf = correct_config_filepaths(data_path, conf)
    lq = LM.LogisticModel(shakefile, conf, saveinputs=True)
    maplayers = lq.calculate()
    makemaps.modelMap(maplayers, boundaries='zoom', zthresh=0.3,
                      savepdf=False, savepng=False)

    # boundaries dictionary
    bounds = {'xmin': -122.54, 'xmax': -120.36,
              'ymin': 36.1, 'ymax': 37.0}
    makemaps.modelMap(maplayers, boundaries=bounds,
                      savepdf=False, savepng=False)

def test_stats_models():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2015.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    # Check slopefile trimming
    conf['zhu_2015']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2015']['divfactor'] = '1.'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    # maplayers1 = lm.calculate()

    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2017_coastal.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf['zhu_2017_coastal']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2017_coastal']['divfactor'] = '1.'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    maplayers2 = lm.calculate()

    # Change shakemap name so that it doesn't stomp on the other
    maplayers2['model']['description']['shakemap'] = '19891018000415_ver2'

    hagg = stats.computeHagg(maplayers2['model']['grid'])
    np.testing.assert_allclose(hagg['hagg_0.00g'], 5.12723506006, atol=0.001)

    stats2 = stats.computeStats(maplayers2['model']['grid'],
                                shakefile=shakefile,
                                shakethreshtype='pga',
                                shakethresh=20.,
                                probthresh=0.0)
    np.testing.assert_allclose(stats2['Max'], 0.4105819792026343, atol=0.001)
    np.testing.assert_allclose(stats2['Median'], 0.34855563636356035,
                               rtol=0.01)
    np.testing.assert_allclose(stats2['Std'], 0.04494649549605273,
                               atol=0.001)
    np.testing.assert_allclose(stats2['hagg_0.20g'], 2.845917180389227,
                               atol=0.001)

def test_maps():
    lq = LM.LogisticModel(shakefile, modelLQ, saveinputs=True)
    maplayers = lq.calculate()
    ls = LM.LogisticModel(shakefile, modelLS, saveinputs=False)
    maplayers2 = ls.calculate()

    # suptitle is None
    makemaps.modelMap(maplayers, shakefile, suptitle=None,
                      savepdf=False, savepng=False)  # outputdir=tempdir)
    # shakefile is None
    makemaps.modelMap(maplayers, suptitle=None,
                      savepdf=False, savepng=False)
    # scaletype == 'binned'
    makemaps.modelMap(maplayers, scaletype='binned',
                      savepdf=False, savepng=False)
    # scaletype == 'binned' and logscale != False
    makemaps.modelMap(maplayers, scaletype='binned',
                      logscale=[False, False, True, True],
                      savepdf=False, savepng=False)
    # logscale != False
    makemaps.modelMap(maplayers, logscale=[False, False, True, True],
                      savepdf=False, savepng=False)

    # Test create_kmz
    tempdir = tempfile.TemporaryDirectory()
    makemaps.create_kmz(maplayers['model'],
                        outfile=os.path.join(tempdir.name, 'test.kmz'))
    makemaps.create_kmz(maplayers2['model'],
                        outfile=os.path.join(tempdir.name, 'test.kmz'),
                        mask=0.003)

def test_parseConfigLayers():
    lq = LM.LogisticModel(shakefile, modelLQ, saveinputs=True)
    maplayers = lq.calculate()
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no lims
    del config['test_model']['display_options']['lims']
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no colors
    del config['test_model']['display_options']['colors']
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no logscale
    del config['test_model']['display_options']['logscale']
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no maskthresholds
    del config['test_model']['display_options']['maskthresholds']
    utilities.parseConfigLayers(maplayers, config, keys=None)

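# For reference, a hypothetical display_options section of the kind the
# deletions above strip out one key at a time. The four sub-section names
# (lims, colors, logscale, maskthresholds) come directly from this test;
# the sub-keys and values below are illustrative only and real model
# configs may differ.
example_display_options = {
    'test_model': {
        'display_options': {
            'lims': {'model': 'None'},
            'colors': {'default': 'cm.jet', 'model': 'cm.CMRmap_r'},
            'logscale': {'model': 'False'},
            'maskthresholds': {'model': 'None'},
        }
    }
}
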
def test_jessee_2018():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'jessee_2018.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf = correct_config_filepaths(data_path, conf)
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    uncertainty_file = os.path.join(datadir, 'loma_prieta',
                                    'uncertainty.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True,
                          uncertfile=uncertainty_file)
    maplayers = lm.calculate()
    pgrid = maplayers['model']['grid']
    stdgrid = maplayers['std']['grid']
    test_data = pgrid.getData()
    test_data_std = stdgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                               'jessee_2018.grd'))
        stdgrd = GMTGrid(stdgrid.getData(), stdgrid.getGeoDict())
        stdgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                                 'jessee_2018_std.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'jessee_2018.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()
    std_file = os.path.join(datadir, 'loma_prieta', 'targets',
                            'jessee_2018_std.grd')
    target_grid_std = GMTGrid.load(std_file)
    target_data_std = target_grid_std.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
    np.testing.assert_allclose(target_data_std, test_data_std, rtol=1e-3)

def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.
    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)
    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')
    if args.list_default_paths:
        list_default_paths()
        return
    if args.reset_default_paths:
        reset_default_paths()
        return
    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
        kmz = True
    else:
        gis = args.gis
        hdf5 = args.hdf5
        kmz = args.kmz

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if hdf5 or gis or kmz:
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid '
                                'url' % shakefile)

        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so it won't break if running gfail with a
        # relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
        if not os.path.exists(outfolder):
            os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in
        # production the shake grids are not archived, so if there's ever a
        # question about how the calculation was done, the safest thing is
        # to store a copy of the exact grid used here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        if args.uncertfile is not None:
            uncertfile = os.path.abspath(args.uncertfile)
            unc_copy = os.path.join(outfolder, "uncertainty.xml")
            shutil.copyfile(uncertfile, unc_copy)
        else:
            uncertfile = None

        # Write shakefile to a file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        shake_file = open(shakename, "wt")
        shake_file.write(shake_copy)
        shake_file.close()
        filenames.append(shakename)

        # Check that shakemap bounds do not cross 180/-180 line
        if args.set_bounds is None:
            sd = ShakeGrid.getFileGeoDict(shakefile)
            if sd.xmin > sd.xmax:
                print('\nShakeMap crosses 180/-180 line, setting bounds so '
                      'only side with more land area is run')
                if sd.xmax + 180. > 180 - sd.xmin:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, -180., sd.xmax)
                else:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, sd.xmin, 180.)
                print('Bounds applied: %s' % set_bounds)
            else:
                set_bounds = args.set_bounds
        else:
            set_bounds = args.set_bounds

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            f = open(config, 'r')
            configlist = f.readlines()
            f.close()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')
        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if set_bounds is not None:
            if 'zoom' in set_bounds:
                temp = set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                temp = eval(set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage:
            results = []

        # pre-read in ocean trimming file polygons so this is done only once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if it exists
        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                            ffault = text_to_json(f.name)
                            os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if '
                          'finite fault or point source')
                    ffault = None
                    point = False
            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            notcov, newbnds = check_input_extents(
                conf, shakefile=shakefile,
                bounds=bounds
            )
            if len(notcov) > 0:
                print('\nThe following input layers do not cover'
                      ' the area of interest:\n\t%s' % '\n\t'.join(notcov))
                if newbnds is None:
                    print('\nCannot make bounds that work. '
                          'Skipping to next model\n')
                    continue
                else:
                    pnt = '%s, %s, %s, %s' % (
                        newbnds['xmin'], newbnds['xmax'],
                        newbnds['ymin'], newbnds['ymax'])
                    print('Running model for new bounds that are fully '
                          'covered by input layer: %s' % pnt)
                    bounds2 = newbnds
            else:
                bounds2 = bounds

            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds2,
                                      trimfile=trimfile)
                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds2,
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))
            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname,
                                          args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))

            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if gis or kmz:
                for key in maplayers:
                    # Rename 'std' key to 'beta_sigma'
                    if key == 'std':
                        key_label = 'beta_sigma'
                    else:
                        key_label = key
                    if gis:
                        filen = os.path.join(outfolder, '%s_%s.bil'
                                             % (filename, key_label))
                        fileh = os.path.join(outfolder, '%s_%s.hdr'
                                             % (filename, key_label))
                        fileg = os.path.join(outfolder, '%s_%s.tif'
                                             % (filename, key_label))

                        GDALGrid.copyFromGrid(
                            maplayers[key]['grid']).save(filen)
                        cflags = '-co COMPRESS=DEFLATE -co predictor=2'
                        srs = '-a_srs EPSG:4326'
                        cmd = 'gdal_translate %s %s -of GTiff %s %s' % (
                            srs, cflags, filen, fileg)
                        rc, so, se = get_command_output(cmd)
                        # Delete bil file and its header
                        os.remove(filen)
                        os.remove(fileh)
                        filenames.append(fileg)
                    if kmz and (not key.startswith('quantile')
                                and not key.startswith('std')):
                        plotorder, logscale, lims, colormaps, maskthresh = \
                            parseConfigLayers(maplayers, conf,
                                              keys=['model'])
                        maxprob = np.nanmax(
                            maplayers[key]['grid'].getData())
                        if key == 'model':
                            qdict = {
                                k: maplayers[k] for k in maplayers.keys()
                                if k.startswith('quantile')
                            }
                        else:
                            qdict = None
                        if maskthresh is None:
                            maskthresh = [0.]
                        if maxprob >= maskthresh[0]:
                            filen = os.path.join(outfolder, '%s_%s.kmz'
                                                 % (filename, key_label))
                            filek = create_kmz(maplayers[key], filen,
                                               mask=maskthresh[0],
                                               levels=lims[0],
                                               qdict=qdict)
                            filenames.append(filek)
                        else:
                            print('No unmasked pixels present, skipping '
                                  'kmz file creation')

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                # # Make binary output for ShakeCast
                # filef = os.path.join(outfolder, '%s_model.flt'
                #                      % filename)
                # # And get name of header
                # filefh = os.path.join(outfolder, '%s_model.hdr'
                #                       % filename)
                # # Make file
                # write_floats(filef, maplayers['model']['grid'])
                # filenames.append(filef)
                # filenames.append(filefh)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            if len(results) == 0:
                raise Exception('No models were run. Cannot make webpages.')
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode,
                point=point, gf_version=args.gf_version,
                pdlcall=args.pdlcall)
            filenames = filenames + outputs

        # # create transparent png file
        # outputs = create_png(outdir)
        # filenames = filenames + outputs
        #
        # # create info file
        # infofile = create_info(outdir)
        # filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)
        return filenames

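# Usage sketch for the dict input mentioned in the docstring: run_gfail
# converts a dict to a Namespace via Namespace(**args), so keys become
# attributes. A real call needs the full set of flags the function reads
# (shakefile, gis, hdf5, kmz, set_bounds, etc.); the values below are
# hypothetical and only demonstrate the conversion itself.
from argparse import Namespace

demo = Namespace(**{'shakefile': 'grid.xml', 'gis': False, 'hdf5': True})
print(demo.shakefile, demo.hdf5)  # -> grid.xml True
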
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.
    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)
    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')
    if args.list_default_paths:
        list_default_paths()
        return
    if args.reset_default_paths:
        reset_default_paths()
        return
    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
    else:
        gis = args.gis
        hdf5 = args.hdf5

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if (hdf5 or args.make_static_pngs or
                args.make_static_pdfs or
                args.make_interactive_plots or
                gis):
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid '
                                'url' % shakefile)

        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so it won't break if running gfail with a
        # relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
        if not os.path.exists(outfolder):
            os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in
        # production the shake grids are not archived, so if there's ever a
        # question about how the calculation was done, the safest thing is
        # to store a copy of the exact grid used here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        # Write shakefile to a file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        shake_file = open(shakename, "wt")
        shake_file.write(shake_copy)
        shake_file.close()
        filenames.append(shakename)

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            f = open(config, 'r')
            configlist = f.readlines()
            f.close()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')
        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if args.set_bounds is not None:
            if 'zoom' in args.set_bounds:
                temp = args.set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                temp = eval(args.set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage or args.make_summary:
            results = []

        # pre-read in ocean trimming file polygons so this is done only once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if it exists
        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                            ffault = text_to_json(f.name)
                            os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if '
                          'finite fault or point source')
                    ffault = None
                    point = False
            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=args.uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds,
                                      numstd=float(args.std),
                                      trimfile=trimfile)
                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=args.uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds,
                                     numstd=float(args.std),
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))
            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname,
                                          args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))

            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if args.make_static_pdfs or args.make_static_pngs:
                plotorder, logscale, lims, colormaps, maskthreshes = \
                    parseConfigLayers(maplayers, conf)
                mapconfig = ConfigObj(args.mapconfig)
                kwargs = parseMapConfig(
                    mapconfig, fileext=args.mapdata_filepath)
                junk, filenames1 = modelMap(
                    maplayers, shakefile,
                    suptitle=conf[modelname]['shortref'],
                    boundaries=None,
                    zthresh=0.,
                    lims=lims,
                    plotorder=plotorder,
                    maskthreshes=maskthreshes,
                    maproads=False,
                    mapcities=True,
                    colormaps=colormaps,
                    savepdf=args.make_static_pdfs,
                    savepng=args.make_static_pngs,
                    printparam=True,
                    inventory_shapefile=None,
                    outputdir=outfolder,
                    outfilename=filename,
                    scaletype='continuous',
                    logscale=logscale,
                    **kwargs)
                for filen in filenames1:
                    filenames.append(filen)

                # make model only plots too
                if len(maplayers) > 1:
                    plotorder, logscale, lims, colormaps, maskthreshes = \
                        parseConfigLayers(maplayers, conf, keys=['model'])
                    junk, filenames1 = modelMap(
                        maplayers, shakefile,
                        suptitle=conf[modelname]['shortref'],
                        boundaries=None,
                        zthresh=0.,
                        lims=lims,
                        plotorder=plotorder,
                        maskthreshes=maskthreshes,
                        maproads=False,
                        mapcities=True,
                        savepdf=args.make_static_pdfs,
                        savepng=args.make_static_pngs,
                        printparam=True,
                        inventory_shapefile=None,
                        outputdir=outfolder,
                        outfilename=filename + '-just_model',
                        colormaps=colormaps,
                        scaletype='continuous',
                        logscale=logscale,
                        **kwargs)
                    for filen in filenames1:
                        filenames.append(filen)

            if args.make_interactive_plots:
                plotorder, logscale, lims, colormaps, maskthreshes = \
                    parseConfigLayers(maplayers, conf)
                junk, filenames1 = interactiveMap(
                    maplayers, plotorder=plotorder,
                    shakefile=shakefile,
                    inventory_shapefile=None,
                    maskthreshes=maskthreshes,
                    colormaps=colormaps,
                    isScenario=False,
                    scaletype='continuous',
                    lims=lims,
                    logscale=logscale,
                    ALPHA=0.7,
                    outputdir=outfolder,
                    outfilename=filename,
                    tiletype='Stamen Terrain',
                    separate=True,
                    faultfile=ffault)
                for filen in filenames1:
                    filenames.append(filen)

            if gis:
                for key in maplayers:
                    # Get simplified name of key for file naming
                    RIDOF = (r'[+-]?(?=\d*[.eE])(?=\.?\d)'
                             r'\d*\.?\d*(?:[eE][+-]?\d+)?')
                    OPERATORPAT = r'[\+\-\*\/]*'
                    keyS = re.sub(OPERATORPAT, '', key)
                    # remove floating point numbers
                    keyS = re.sub(RIDOF, '', keyS)
                    # remove parentheses
                    keyS = re.sub('[()]*', '', keyS)
                    # remove any blank spaces
                    keyS = keyS.replace(' ', '')
                    filen = os.path.join(outfolder, '%s_%s.bil'
                                         % (filename, keyS))
                    fileh = os.path.join(outfolder, '%s_%s.hdr'
                                         % (filename, keyS))
                    fileg = os.path.join(outfolder, '%s_%s.tif'
                                         % (filename, keyS))
                    GDALGrid.copyFromGrid(
                        maplayers[key]['grid']).save(filen)
                    cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff '
                           '%s %s' % (filen, fileg))
                    rc, so, se = get_command_output(cmd)
                    # Delete bil file and its header
                    os.remove(filen)
                    os.remove(fileh)
                    filenames.append(fileg)

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                # Make binary output for ShakeCast
                filef = os.path.join(outfolder, '%s_model.flt' % filename)
                # And get name of header
                filefh = os.path.join(outfolder, '%s_model.hdr' % filename)
                # Make file
                write_floats(filef, maplayers['model']['grid'])
                filenames.append(filef)
                filenames.append(filefh)

            if args.make_summary and not args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode)
            filenames = filenames + outputs

        if args.make_summary:
            outputs = GFSummary(
                results, configs, args.web_template,
                shakefile, outfolder=outfolder, cleanup=True,
                faultfile=ffault, point=point,
                pop_file=args.popfile)
            filenames = filenames + outputs

        # # create transparent png file
        # outputs = create_png(outdir)
        # filenames = filenames + outputs
        #
        # # create info file
        # infofile = create_info(outdir)
        # filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)
        return filenames

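# A quick demonstration of the key-name sanitization used in the gis block
# above (mirrors its regexes): operators, floating-point numbers,
# parentheses, and spaces are stripped so layer keys become safe filename
# components. The sample key is hypothetical.
import re

RIDOF = r'[+-]?(?=\d*[.eE])(?=\.?\d)\d*\.?\d*(?:[eE][+-]?\d+)?'
OPERATORPAT = r'[\+\-\*\/]*'
key = 'slope * 0.02 (degrees)'
keyS = re.sub(OPERATORPAT, '', key)   # drop + - * /
keyS = re.sub(RIDOF, '', keyS)        # drop floating point numbers
keyS = re.sub('[()]*', '', keyS)      # drop parentheses
keyS = keyS.replace(' ', '')          # drop blank spaces
print(keyS)  # -> slopedegrees
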
def test_logisticmodel():
    modelLQ = {
        'TestModelLQ': {
            'description': 'This is a test liquefaction model',
            'gfetype': 'liquefaction',
            'baselayer': 'vs30',
            'slopemin': 0.,
            'slopemax': 5.,
            'layers': {
                'vs30': {
                    'file': vs30file,
                    'units': 'm/s',
                    'longref': 'more words',
                    'shortref': 'words'
                },
                'cti1': {
                    'file': ctifile,
                    'units': 'unitless',
                    'longref': 'more words',
                    'shortref': 'words'
                }
            },
            'interpolations': {
                'vs30': 'nearest',
                'cti1': 'linear'
            },
            'terms': {
                'b1': 'log((pga/100.0)*(power(MW,2.)))',
                'b2': 'cti1',
                'b3': 'log(vs30)'
            },
            'coefficients': {
                'b0': 15.,
                'b1': 2.,
                'b2': 0.3,
                'b3': -4.
            }
        }
    }
    modelLS = {
        'TestModelLS': {
            'description': 'This is a test landslide model',
            'gfetype': 'landslide',
            'shortref': 'Jessee',
            'baselayer': 'slope',
            'slopemin': 5.,
            'slopemax': 90.,
            'layers': {
                'friction': {
                    'file': cofile,
                    'units': 'kPa',
                    'longref': 'more words',
                    'shortref': 'words'
                },
                'slope': {
                    'file': slopefile,
                    'units': 'degrees',
                    'longref': 'more words',
                    'shortref': 'words'
                },
                'precip': {
                    'file': precipfolder,
                    'units': 'mm',
                    'longref': 'more words',
                    'shortref': 'words'
                }
            },
            'interpolations': {
                'friction': 'linear',
                'slope': 'linear',
                'precip': 'nearest'
            },
            'terms': {
                'b1': 'pga',
                'b2': 'slope',
                'b3': 'precipMONTH',
                'b4': 'pga*slope*MW'
            },
            'coefficients': {
                'b0': -7.,
                'b1': 0.06,
                'b2': 0.0008,
                'b3': 0.02,
                'b4': 1.e-05,
                'b6': 0.1
            }
        }
    }
    ls = LM.LogisticModel(shakefile, modelLS, uncertfile=None,
                          slopefile=slopefile)
    LS = ls.calculate()
    # lsu = LM.LogisticModel(shakefile, modelLS,
    #                        uncertfile=uncertfile,
    #                        slopefile=slopefile)
    # try:
    #     lsu.getEquations()
    # except Exception:
    #     raise Exception('LogisticModel.getEquations did not work')
    # LSU = lsu.calculate()
    lq = LM.LogisticModel(shakefile, modelLQ, uncertfile=None,
                          saveinputs=True)
    LQ = lq.calculate()

    # See if getGeoDict works
    assert ls.getGeoDict() == fakegeodict

    targetLS = np.array([[0.61358336819225268, 0.99999969213372109],
                         [0.50746944427265206, 0.010791994705496567]])
    # targetLSU = np.array([[0.48852712099785173, 0.99999827441447309],
    #                       [0.28923565862849882, 0.0097842502221282737]])
    targetLQ = np.array([[0.5803309852347005, 0.27771418649141888],
                         [0.053465704369553384, 0.013015247124965424]])

    # Check if results are as expected by manual calculation
    np.testing.assert_allclose(LS['model']['grid'].getData(),
                               targetLS, rtol=1e-05)
    # Need to check one of the uncertainties at least
    # np.testing.assert_allclose(LSU['std']['grid'].getData(),
    #                            targetLSU, rtol=1e-05)
    np.testing.assert_allclose(LQ['model']['grid'].getData(),
                               targetLQ, rtol=1e-05)

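# A worked sketch of the "manual calculation" the assertions above refer
# to, assuming the standard logistic form implied by the terms and
# coefficients: P = 1 / (1 + exp(-(b0 + b1*x1 + ... + bn*xn))). The input
# values below are made up for illustration; the test's target arrays come
# from evaluating the actual input grids.
import numpy as np

def _logistic_sketch(b, x):
    """b: coefficients [b0, b1, ...]; x: term values aligned with b1..bn."""
    eta = b[0] + np.dot(b[1:], x)
    return 1.0 / (1.0 + np.exp(-eta))

# e.g., the LQ model above with b = [15., 2., 0.3, -4.] and hypothetical
# term values for pga=50 %g, MW=7, cti1=4, vs30=400 m/s:
print(_logistic_sketch(np.array([15., 2., 0.3, -4.]),
                       np.array([np.log((50. / 100.) * 7.0 ** 2),
                                 4.0, np.log(400.)])))
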