def test_parseConfigLayers():
    """Exercise parseConfigLayers with progressively sparser display options.

    Runs a liquefaction model once to get real map layers, then calls
    parseConfigLayers repeatedly, deleting one display option from the
    config before each call to confirm it tolerates each missing key.
    Relies on module-level test fixtures: LM, shakefile, modelLQ, config,
    utilities. NOTE(review): mutates the shared `config` fixture in place —
    presumably later tests do not depend on the deleted keys; verify.
    """
    lq = LM.LogisticModel(shakefile, modelLQ, saveinputs=True)
    maplayers = lq.calculate()
    # Baseline call with all display options present
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no lims
    del config['test_model']['display_options']['lims']
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no colors
    del config['test_model']['display_options']['colors']
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no logscale
    del config['test_model']['display_options']['logscale']
    utilities.parseConfigLayers(maplayers, config, keys=None)
    # no maskthresholds
    del config['test_model']['display_options']['maskthresholds']
    utilities.parseConfigLayers(maplayers, config, keys=None)
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.
    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)
    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')
    if args.list_default_paths:
        list_default_paths()
        return
    if args.reset_default_paths:
        reset_default_paths()
        return
    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
        kmz = True
    else:
        gis = args.gis
        hdf5 = args.hdf5
        kmz = args.kmz

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if hdf5 or gis or kmz:
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % shakefile)
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)

        if not os.path.exists(outfolder):
            os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in
        #     production the shake grids are not archived, so if we need/want
        #     the exact grid used for the calculation later (if there's ever a
        #     question about how the calculation was done), the safest thing
        #     is to store a copy of it here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        if args.uncertfile is not None:
            uncertfile = os.path.abspath(args.uncertfile)
            unc_copy = os.path.join(outfolder, "uncertainty.xml")
            shutil.copyfile(uncertfile, unc_copy)
        else:
            uncertfile = None

        # Write shakefile to a file for use later
        # (with-block guarantees the handle is closed even if write fails)
        shakename = os.path.join(outfolder, "shakefile.txt")
        with open(shakename, "wt") as shake_file:
            shake_file.write(shake_copy)
        filenames.append(shakename)

        # Check that shakemap bounds do not cross 180/-180 line
        if args.set_bounds is None:
            sd = ShakeGrid.getFileGeoDict(shakefile)
            if sd.xmin > sd.xmax:
                print('\nShakeMap crosses 180/-180 line, setting bounds so '
                      'only side with more land area is run')
                if sd.xmax + 180. > 180 - sd.xmin:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, -180., sd.xmax)
                else:
                    set_bounds = '%s, %s, %s, %s' % (sd.ymin, sd.ymax,
                                                     sd.xmin, 180.)
                print('Bounds applied: %s' % set_bounds)
            else:
                set_bounds = args.set_bounds
        else:
            set_bounds = args.set_bounds

        config = args.config
        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files; with-block closes the handle
            with open(config, 'r') as f:
                configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')
        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if set_bounds is not None:
            if 'zoom' in set_bounds:
                temp = set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                # SECURITY NOTE(review): eval() of a user-supplied string;
                # consider ast.literal_eval, which accepts the same
                # "lat, lat, lon, lon" tuple syntax without executing code.
                temp = eval(set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists
        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                            os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False
            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            # Shrink bounds if any input layer doesn't cover the full extent
            notcov, newbnds = check_input_extents(
                conf, shakefile=shakefile,
                bounds=bounds
            )
            if len(notcov) > 0:
                print('\nThe following input layers do not cover'
                      ' the area of interest:\n\t%s' % '\n\t'.join(notcov))
                if newbnds is None:
                    print('\nCannot make bounds that work. '
                          'Skipping to next model\n')
                    continue
                else:
                    pnt = '%s, %s, %s, %s' % (
                        newbnds['xmin'], newbnds['xmax'],
                        newbnds['ymin'], newbnds['ymax'])
                    print('Running model for new bounds that are fully covered'
                          ' by input layer: %s' % pnt)
                    bounds2 = newbnds
            else:
                bounds2 = bounds

            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds2,
                                      trimfile=trimfile)
                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds2,
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname,
                                          args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if gis or kmz:
                for key in maplayers:
                    # Rename 'std' key to 'beta_sigma'
                    if key == 'std':
                        key_label = 'beta_sigma'
                    else:
                        key_label = key
                    if gis:
                        filen = os.path.join(outfolder, '%s_%s.bil'
                                             % (filename, key_label))
                        fileh = os.path.join(outfolder, '%s_%s.hdr'
                                             % (filename, key_label))
                        fileg = os.path.join(outfolder, '%s_%s.tif'
                                             % (filename, key_label))
                        GDALGrid.copyFromGrid(
                            maplayers[key]['grid']).save(filen)
                        # Convert .bil to compressed GeoTIFF via GDAL CLI
                        cflags = '-co COMPRESS=DEFLATE -co predictor=2'
                        srs = '-a_srs EPSG:4326'
                        cmd = 'gdal_translate %s %s -of GTiff %s %s' % (
                            srs, cflags, filen, fileg)
                        rc, so, se = get_command_output(cmd)
                        # Delete bil file and its header
                        os.remove(filen)
                        os.remove(fileh)
                        filenames.append(fileg)
                    if kmz and (not key.startswith('quantile') and
                                not key.startswith('std')):
                        plotorder, logscale, lims, colormaps, maskthresh = \
                            parseConfigLayers(maplayers, conf, keys=['model'])
                        maxprob = np.nanmax(maplayers[key]['grid'].getData())
                        if key == 'model':
                            # Bundle any quantile layers with the model kmz
                            qdict = {
                                k: maplayers[k] for k in maplayers.keys()
                                if k.startswith('quantile')
                            }
                        else:
                            qdict = None
                        if maskthresh is None:
                            maskthresh = [0.]
                        if maxprob >= maskthresh[0]:
                            filen = os.path.join(outfolder, '%s_%s.kmz'
                                                 % (filename, key_label))
                            filek = create_kmz(maplayers[key], filen,
                                               mask=maskthresh[0],
                                               levels=lims[0],
                                               qdict=qdict)
                            filenames.append(filek)
                        else:
                            print('No unmasked pixels present, skipping kmz '
                                  'file creation')

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

            #     # Make binary output for ShakeCast
            #     filef = os.path.join(outfolder, '%s_model.flt'
            #                          % filename)
            #     # And get name of header
            #     filefh = os.path.join(outfolder, '%s_model.hdr'
            #                           % filename)
            #     # Make file
            #     write_floats(filef, maplayers['model']['grid'])
            #     filenames.append(filef)
            #     filenames.append(filefh)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            if len(results) == 0:
                raise Exception('No models were run. Cannot make webpages.')
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode,
                point=point, gf_version=args.gf_version,
                pdlcall=args.pdlcall)
            filenames = filenames + outputs

    #        # create transparent png file
    #        outputs = create_png(outdir)
    #        filenames = filenames + outputs
    #
    #        # create info file
    #        infofile = create_info(outdir)
    #        filenames = filenames + infofile

    print('\nFiles created:\n')
    for filen in filenames:
        print('%s' % filen)
    return filenames
def hazdev(maplayerlist, configs, shakemap, outfolder=None, alpha=0.7,
           shakethreshtype='pga', probthresh=None, shakethresh=10.,
           prefLS='Nowicki Jessee and others (2017)',
           prefLQ='Zhu and others (2017)',
           pop_file=None, defaultcolors=True, point=True,
           pager_alert='', eventsource='', eventsourcecode=''):
    """Create all files needed for product page creation
    Assumes gfail has been run already with -w flag

    Args:
        maplayerlist (list): List of model outputs from gfail.
        configs (list): List of dictionaries of config files corresponding to
            each model in maplayerlist and in the same order.
        shakemap (str): path to shakemap .xml file.
        outfolder (str): Location in which to save outputs. If None, will use
            current directory.
        alpha (float): Transparency to use for overlay pngs, value from 0 to 1.
        shakethreshtype (str): Type of ground motion to use for shakethresh,
            'pga', 'pgv', or 'mmi'.
        probthresh: Optional. Float or list of probability thresholds to apply
            before computing stats.
        shakethresh: Float or list of shaking thresholds in %g for pga, cm/s
            for pgv, float for mmi. Used for Hagg and Exposure computation.
        prefLS (str): shortref of "preferred" landslide model.
        prefLQ (str): shortref of "preferred" liquefaction model.
        pop_file (str): file path to population file used to compute
            population-based alert levels.
        defaultcolors (bool): If True, will use DFCOLORS for all layers
            instead of determining new ones. This will crash if any of the
            layers have a different number of bins than the number of
            DFCOLORS
        point (bool): if True, event is a point source and warning should be
            displayed
        pager_alert (str): PAGER alert level, e.g., 'green'. 'pending', ...

    Returns:
        Files that need to be sent to comcat for hazdev to create the product
        webpage including:
            - info.json
            - transparent png overlays of all models
    """
    event_id = maplayerlist[0]['model']['description']['event_id']

    if pop_file is None:
        # Read in default paths to get location of the population grid
        default_file = os.path.join(os.path.expanduser('~'),
                                    '.gfail_defaults')
        defaults = ConfigObj(default_file)
        pop_file = defaults['popfile']

    if outfolder is None:
        outfolder = os.path.join(os.getcwd(), event_id)

    filenames = []

    # Separate the LS and LQ models
    concLS = []          # titles of landslide models, in input order
    concLQ = []          # titles of liquefaction models, in input order
    lsmodels = []        # per-landslide-model summary dictionaries
    lqmodels = []        # per-liquefaction-model summary dictionaries
    logLS = []           # logscale flags per landslide model
    limLS = []           # bin edge lists per landslide model
    colLS = []           # colormaps per landslide model
    logLQ = []
    limLQ = []
    colLQ = []

    for conf, maplayer in zip(configs, maplayerlist):
        mdict = maplayer['model']['description']
        # config = ConfigObj(conf)

        if 'landslide' in mdict['parameters']['modeltype'].lower():
            title = maplayer['model']['description']['name']
            plotorder, logscale, lims, colormaps, maskthreshes = \
                parseConfigLayers(maplayer, conf, keys=['model'])
            logLS.append(logscale[0])
            limLS.append(lims[0])
            colLS.append(colormaps[0])
            concLS.append(title)

            # Pick model id and stats parameters based on model name.
            # maxP is the theoretical max probability for the model.
            if 'godt' in maplayer['model']['description']['name'].lower():
                statprobthresh = None
                id1 = 'godt_2008'
                maxP = 1.
            else:
                # Since logistic models can't equal one, need to eliminate
                # placeholder zeros before computing stats
                if 'jessee' in maplayer['model']['description']['name'].lower(
                ):
                    id1 = 'jessee_2017'
                    statprobthresh = 0.002
                    maxP = 0.26
                else:
                    id1 = 'nowicki_2014_global'
                    statprobthresh = 0.0
                    maxP = 1.

            if 'std' in list(maplayer.keys()):
                stdgrid2D = maplayer['std']['grid']
            else:
                stdgrid2D = None

            stats = computeStats(maplayer['model']['grid'],
                                 stdgrid2D=stdgrid2D,
                                 probthresh=probthresh,
                                 shakefile=shakemap,
                                 shakethresh=shakethresh,
                                 statprobthresh=statprobthresh,
                                 pop_file=pop_file,
                                 shakethreshtype=shakethreshtype,
                                 stdtype='mean', maxP=maxP)

            metadata = maplayer['model']['description']
            # Collect descriptions of any saved input layers
            if len(maplayer) > 1:
                inputs = {}
                inkeys = list(maplayer.keys())
                for key in inkeys:
                    if key != 'model':
                        newkey = maplayer[key]['label']
                        inputs[newkey] = maplayer[key]['description']
                metadata['inputs'] = inputs

            # Alert levels and zoom extent only computed for the preferred
            # landslide model
            if title == prefLS:
                on = True
                ls_haz_alert, ls_pop_alert, _, _, ls_alert, _ = get_alert(
                    stats['hagg_0.10g'], 0.,
                    stats['exp_pop_0.10g'], 0.)
                lsext = get_zoomextent(maplayer['model']['grid'])
                ls_haz_value = set_num_precision(
                    stats['hagg_0.10g'], 2, 'float')
                ls_pop_value = set_num_precision(
                    stats['exp_pop_0.10g'], 2, 'int')
            else:
                on = False
                ls_haz_alert = None
                ls_pop_alert = None
                ls_haz_value = None
                ls_pop_value = None
                ls_alert = None
                lsext = None

            ls_haz_std = None
            ls_pop_std = None
            ls_hlim = None
            ls_elim = None
            ls_hp = None
            ls_hq = None
            ls_ep = None
            ls_eq = None

            # Uncertainty stats only when an std grid exists for the
            # preferred model
            if stdgrid2D is not None and title == prefLS:
                ls_haz_std = float("%.4f" % stats['hagg_std_0.10g'])
                ls_pop_std = float("%.4f" % stats['exp_std_0.10g'])
                ls_hlim = float("%.4f" % stats['hlim_0.10g'])
                ls_elim = float("%.4f" % stats['elim_0.10g'])
                ls_hp = float("%.4f" % stats['p_hagg_0.10g'])
                ls_hq = float("%.4f" % stats['q_hagg_0.10g'])
                ls_ep = float("%.4f" % stats['p_exp_0.10g'])
                ls_eq = float("%.4f" % stats['q_exp_0.10g'])

            edict = {
                'id': id1,
                'title': metadata['name'],
                'overlay': '%s.png' % id1,
                'extent': '%s_extent.json' % id1,
                'units': metadata['units'],
                'preferred': on,
                'alert': ls_alert,
                'hazard_alert': {
                    'color': ls_haz_alert,
                    'value': ls_haz_value,
                    'std': ls_haz_std,
                    'parameter': 'Aggregate Hazard',
                    'units': 'km^2'
                },
                'population_alert': {
                    'color': ls_pop_alert,
                    'value': ls_pop_value,
                    'std': ls_pop_std,
                    'parameter': 'Population exposure',
                    'units': 'people'
                },
                'bin_edges': list(lims[0]),
                'probability': {
                    'max': float("%.2f" % stats['Max']),
                    'std': float("%.2f" % stats['Std']),
                    'hagg0.1g': float("%.2f" % stats['hagg_0.10g']),
                    'popexp0.1g': float("%.2f" % stats['exp_pop_0.10g'],),
                    'hagg0.1g_std': ls_haz_std,
                    'popexp0.1g_std': ls_pop_std,
                    'hlim0.1g': ls_hlim,
                    'elim0.1g': ls_elim,
                    'p_hagg': ls_hp,
                    'q_hagg': ls_hq,
                    'p_exp': ls_ep,
                    'q_exp': ls_eq
                },
                'longref': metadata['longref'],
                'parameters': metadata['parameters'],
                'zoomext': lsext
            }

            lsmodels.append(edict)

        elif 'liquefaction' in mdict['parameters']['modeltype'].lower():
            title = maplayer['model']['description']['name']
            plotorder, logscale, lims, colormaps, maskthreshes = \
                parseConfigLayers(maplayer, conf, keys=['model'])
            logLQ.append(logscale[0])
            limLQ.append(lims[0])
            colLQ.append(colormaps[0])
            concLQ.append(title)

            # NOTE(review): a liquefaction model whose name contains
            # neither '2015' nor '2017' leaves id1/statprobthresh/maxP
            # unset and will raise NameError below — confirm intended.
            if '2015' in maplayer['model']['description']['name'].lower():
                id1 = 'zhu_2015'
                statprobthresh = 0.0
                maxP = 1.
            elif '2017' in maplayer['model']['description']['name'].lower():
                id1 = 'zhu_2017_general'
                statprobthresh = 0.005
                maxP = 0.487

            if 'std' in list(maplayer.keys()):
                stdgrid2D = maplayer['std']['grid']
            else:
                stdgrid2D = None

            stats = computeStats(maplayer['model']['grid'],
                                 stdgrid2D=stdgrid2D,
                                 probthresh=probthresh,
                                 shakefile=shakemap,
                                 shakethresh=shakethresh,
                                 pop_file=pop_file,
                                 shakethreshtype=shakethreshtype,
                                 statprobthresh=statprobthresh,
                                 stdtype='mean', maxP=maxP)

            metadata = maplayer['model']['description']
            # Collect descriptions of any saved input layers
            if len(maplayer) > 1:
                inputs = {}
                inkeys = list(maplayer.keys())
                for key in inkeys:
                    if key != 'model':
                        newkey = maplayer[key]['label']
                        inputs[newkey] = maplayer[key]['description']
                metadata['inputs'] = inputs

            # Alert levels and zoom extent only computed for the preferred
            # liquefaction model
            if title == prefLQ:
                on = True
                _, _, lq_haz_alert, lq_pop_alert, _, lq_alert = get_alert(
                    0., stats['hagg_0.10g'],
                    0., stats['exp_pop_0.10g'])
                lqext = get_zoomextent(maplayer['model']['grid'])
                lq_haz_value = set_num_precision(
                    stats['hagg_0.10g'], 2, 'float')
                lq_pop_value = set_num_precision(
                    stats['exp_pop_0.10g'], 2, 'int')
            else:
                on = False
                lq_haz_alert = None
                lq_pop_alert = None
                lq_haz_value = None
                lq_pop_value = None
                lq_alert = None
                lqext = None

            lq_haz_std = None
            lq_pop_std = None
            lq_hlim = None
            lq_elim = None
            lq_hp = None
            lq_hq = None
            lq_ep = None
            lq_eq = None

            # Uncertainty stats only when an std grid exists for the
            # preferred model
            if stdgrid2D is not None and title == prefLQ:
                lq_haz_std = float("%.2f" % stats['hagg_std_0.10g'])
                lq_pop_std = float("%.2f" % stats['exp_std_0.10g'])
                lq_hlim = float("%.4f" % stats['hlim_0.10g'])
                lq_elim = float("%.4f" % stats['elim_0.10g'])
                lq_hp = float("%.4f" % stats['p_hagg_0.10g'])
                lq_hq = float("%.4f" % stats['q_hagg_0.10g'])
                lq_ep = float("%.4f" % stats['p_exp_0.10g'])
                lq_eq = float("%.4f" % stats['q_exp_0.10g'])

            edict = {
                'id': id1,
                'title': metadata['name'],
                'overlay': '%s.png' % id1,
                'extent': '%s_extent.json' % id1,
                'units': metadata['units'],
                'preferred': on,
                'alert': lq_alert,
                'hazard_alert': {
                    'color': lq_haz_alert,
                    'value': lq_haz_value,
                    'std': lq_haz_std,
                    'parameter': 'Aggregate Hazard',
                    'units': 'km^2'
                },
                'population_alert': {
                    'color': lq_pop_alert,
                    'value': lq_pop_value,
                    'std': lq_pop_std,
                    'parameter': 'Population exposure',
                    'units': 'people'
                },
                'bin_edges': list(lims[0]),
                'probability': {
                    'max': float("%.2f" % stats['Max']),
                    'std': float("%.2f" % stats['Std']),
                    'hagg0.1g': float("%.2f" % stats['hagg_0.10g']),
                    'popexp0.1g': float("%.2f" % stats['exp_pop_0.10g']),
                    'hagg0.1g_std': lq_haz_std,
                    'popexp0.1g_std': lq_pop_std,
                    'hlim0.1g': lq_hlim,
                    'elim0.1g': lq_elim,
                    'p_hagg': lq_hp,
                    'q_hagg': lq_hq,
                    'p_exp': lq_ep,
                    'q_exp': lq_eq
                },
                'longref': metadata['longref'],
                'parameters': metadata['parameters'],
                'zoomext': lqext
            }

            lqmodels.append(edict)

        else:
            raise Exception("model type is undefined, check "
                            "maplayer['model']['parameters']"
                            "['modeltype'] to ensure it is defined")

    if defaultcolors:
        for ls in lsmodels:
            ls['bin_colors'] = DFCOLORS
        for lq in lqmodels:
            lq['bin_colors'] = DFCOLORS
    else:
        defaultcolormap = cm.CMRmap_r

        # Get colors and stuff into dictionaries
        sync, colorlistLS, reflims = setupsync(
            prefLS, concLS, limLS, colLS,
            defaultcolormap, logscale=logLS,
            alpha=alpha)

        if reflims is None:
            raise Exception('Check input config files, they must all have '
                            'the same number of bin edges')
        else:
            # Stuff colors into dictionary
            for ls in lsmodels:
                ls['bin_colors'] = list(colorlistLS)

        sync, colorlistLQ, reflims = setupsync(
            prefLQ, concLQ, limLQ, colLQ,
            defaultcolormap, logscale=logLQ,
            alpha=alpha)

        if reflims is None:
            raise Exception('Check input config files, they must all have '
                            'the same number of bin edges')
        else:
            # Stuff colors into dictionary
            for lq in lqmodels:
                lq['bin_colors'] = list(colorlistLQ)

    # Create pngs
    pngfiles = create_png(outfolder, lsmodels, lqmodels)
    filenames.append(pngfiles)

    # If PAGER alert is pending, overwrite our alerts
    if pager_alert == 'pending':
        for ls in lsmodels:
            ls['alert'] = 'pending'
            ls['hazard_alert']['color'] = 'pending'
            ls['population_alert']['color'] = 'pending'
        for lq in lqmodels:
            lq['alert'] = 'pending'
            lq['hazard_alert']['color'] = 'pending'
            lq['population_alert']['color'] = 'pending'

    # Create info.json
    infojson = create_info(outfolder, lsmodels, lqmodels,
                           eventsource, eventsourcecode, point)
    filenames.append(infojson)

    return filenames
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.
    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)
    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')
    if args.list_default_paths:
        list_default_paths()
        return
    if args.reset_default_paths:
        reset_default_paths()
        return
    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
    else:
        gis = args.gis
        hdf5 = args.hdf5

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if (hdf5 or args.make_static_pngs or
                args.make_static_pdfs or
                args.make_interactive_plots or
                gis):
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % (shakefile))
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)

        if not os.path.exists(outfolder):
            os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in
        #     production the shake grids are not archived, so if we need/want
        #     the exact grid used for the calculation later (if there's ever a
        #     question about how the calculation was done), the safest thing
        #     is to store a copy of it here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        # Write shakefile to a file for use later
        # (with-block guarantees the handle is closed even if write fails)
        shakename = os.path.join(outfolder, "shakefile.txt")
        with open(shakename, "wt") as shake_file:
            shake_file.write(shake_copy)
        filenames.append(shakename)

        config = args.config
        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files; with-block closes the handle
            with open(config, 'r') as f:
                configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                # explicit BaseException matches the sibling run_gfail and
                # avoids a bare except (same semantics, lint-clean)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')
        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if args.set_bounds is not None:
            if 'zoom' in args.set_bounds:
                temp = args.set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                # SECURITY NOTE(review): eval() of a user-supplied string;
                # consider ast.literal_eval, which accepts the same
                # "lat, lat, lon, lon" tuple syntax without executing code.
                temp = eval(args.set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage or args.make_summary:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists
        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            # explicit BaseException instead of a bare except (identical
            # semantics, lint-clean)
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    if 'faultfiles' in testjd['input']['event_information']:
                        ffilename = testjd['input'][
                            'event_information']['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                            os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False
            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=args.uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds,
                                      numstd=float(args.std),
                                      trimfile=trimfile)
                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=args.uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds,
                                     numstd=float(args.std),
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname,
                                          args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if args.make_static_pdfs or args.make_static_pngs:
                plotorder, logscale, lims, colormaps, maskthreshes = \
                    parseConfigLayers(maplayers, conf)
                mapconfig = ConfigObj(args.mapconfig)

                kwargs = parseMapConfig(
                    mapconfig, fileext=args.mapdata_filepath)
                junk, filenames1 = modelMap(
                    maplayers, shakefile,
                    suptitle=conf[modelname]['shortref'],
                    boundaries=None,
                    zthresh=0.,
                    lims=lims,
                    plotorder=plotorder,
                    maskthreshes=maskthreshes,
                    maproads=False,
                    mapcities=True,
                    colormaps=colormaps,
                    savepdf=args.make_static_pdfs,
                    savepng=args.make_static_pngs,
                    printparam=True,
                    inventory_shapefile=None,
                    outputdir=outfolder,
                    outfilename=filename,
                    scaletype='continuous',
                    logscale=logscale,
                    **kwargs)
                for filen in filenames1:
                    filenames.append(filen)

                # make model only plots too
                if len(maplayers) > 1:
                    plotorder, logscale, lims, colormaps, maskthreshes = \
                        parseConfigLayers(maplayers, conf, keys=['model'])
                    junk, filenames1 = modelMap(
                        maplayers, shakefile,
                        suptitle=conf[modelname]['shortref'],
                        boundaries=None,
                        zthresh=0.,
                        lims=lims,
                        plotorder=plotorder,
                        maskthreshes=maskthreshes,
                        maproads=False,
                        mapcities=True,
                        savepdf=args.make_static_pdfs,
                        savepng=args.make_static_pngs,
                        printparam=True,
                        inventory_shapefile=None,
                        outputdir=outfolder,
                        outfilename=filename + '-just_model',
                        colormaps=colormaps,
                        scaletype='continuous',
                        logscale=logscale,
                        **kwargs)
                    for filen in filenames1:
                        filenames.append(filen)

            if args.make_interactive_plots:
                plotorder, logscale, lims, colormaps, maskthreshes = \
                    parseConfigLayers(maplayers, conf)
                junk, filenames1 = interactiveMap(
                    maplayers, plotorder=plotorder,
                    shakefile=shakefile,
                    inventory_shapefile=None,
                    maskthreshes=maskthreshes,
                    colormaps=colormaps,
                    isScenario=False,
                    scaletype='continuous',
                    lims=lims,
                    logscale=logscale,
                    ALPHA=0.7,
                    outputdir=outfolder,
                    outfilename=filename,
                    tiletype='Stamen Terrain',
                    separate=True,
                    faultfile=ffault)
                for filen in filenames1:
                    filenames.append(filen)

            if gis:
                for key in maplayers:
                    # Get simplified name of key for file naming
                    # (raw strings so the regex backslashes are not treated
                    # as invalid string escape sequences)
                    RIDOF = r'[+-]?(?=\d*[.eE])(?=\.?\d)'\
                            r'\d*\.?\d*(?:[eE][+-]?\d+)?'
                    OPERATORPAT = r'[\+\-\*\/]*'
                    keyS = re.sub(OPERATORPAT, '', key)
                    # remove floating point numbers
                    keyS = re.sub(RIDOF, '', keyS)
                    # remove parentheses
                    keyS = re.sub('[()]*', '', keyS)
                    # remove any blank spaces
                    keyS = keyS.replace(' ', '')
                    filen = os.path.join(outfolder, '%s_%s.bil'
                                         % (filename, keyS))
                    fileh = os.path.join(outfolder, '%s_%s.hdr'
                                         % (filename, keyS))
                    fileg = os.path.join(outfolder, '%s_%s.tif'
                                         % (filename, keyS))

                    GDALGrid.copyFromGrid(maplayers[key]['grid']).save(filen)
                    cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff %s %s' % (
                        filen, fileg)
                    rc, so, se = get_command_output(cmd)
                    # Delete bil file and its header
                    os.remove(filen)
                    os.remove(fileh)
                    filenames.append(fileg)

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                # Make binary output for ShakeCast
                filef = os.path.join(outfolder, '%s_model.flt'
                                     % filename)
                # And get name of header
                filefh = os.path.join(outfolder, '%s_model.hdr'
                                      % filename)
                # Make file
                write_floats(filef, maplayers['model']['grid'])
                filenames.append(filef)
                filenames.append(filefh)

            if args.make_summary and not args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode)
            filenames = filenames + outputs

        if args.make_summary:
            outputs = GFSummary(
                results, configs, args.web_template,
                shakefile, outfolder=outfolder, cleanup=True,
                faultfile=ffault, point=point,
                pop_file=args.popfile)
            filenames = filenames + outputs

    #        # create transparent png file
    #        outputs = create_png(outdir)
    #        filenames = filenames + outputs
    #
    #        # create info file
    #        infofile = create_info(outdir)
    #        filenames = filenames + infofile

    print('\nFiles created:\n')
    for filen in filenames:
        print('%s' % filen)
    return filenames