Example #1
def hazus_liq(shakefile,
              config,
              uncertfile=None,
              saveinputs=False,
              modeltype=None,
              displmodel=None,
              probtype=None,
              bounds=None):
    """
    Compute the probability of liquefaction using the Hazus method, with
    the Wills et al. (2015) Vs30 map of California defining the
    susceptibility classes and the Fan et al. global water table model
    supplying groundwater depth.
    """
    layers = config['hazus_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------------
    # Loading
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile,
                              fgeodict,
                              resample=True,
                              method='linear',
                              doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    # Correction factor for moment magnitudes other than M=7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188
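    # (This cubic evaluates to ~1.0 at M = 7.5, so magnitude-7.5 events
    # are left essentially unscaled.)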

    #---------------------------------------------------------------------------
    # Susceptibility from Vs30
    #---------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    p_ml = np.zeros_like(vs30)
    a = np.zeros_like(vs30)
    b = np.zeros_like(vs30)
    for k, v in config['hazus_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        if v[1] == "VH":
            p_ml[ind] = 0.25
            a[ind] = 9.09
            b[ind] = -0.82
        if v[1] == "H":
            p_ml[ind] = 0.2
            a[ind] = 7.67
            b[ind] = -0.92
        if v[1] == "M":
            p_ml[ind] = 0.1
            a[ind] = 6.67
            b[ind] = -1.0
        if v[1] == "L":
            p_ml[ind] = 0.05
            a[ind] = 5.57
            b[ind] = -1.18
        if v[1] == "VL":
            p_ml[ind] = 0.02
            a[ind] = 4.16
            b[ind] = -1.08

    # Conditional liquefaction probability for a given susceptibility category
    # at a specified PGA
    p_liq_pga = a * PGA + b
    p_liq_pga = p_liq_pga.clip(min=0, max=1)

    #---------------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file,
                            fgeodict,
                            resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid.getData()
    tmp = np.nan_to_num(tmp)

    # Convert to ft
    wt_ft = tmp * 3.28084

    # Correction factor for groundwater depths other than five feet
    k_w = 0.022 * wt_ft + 0.93

    #---------------------------------------------------------------------------
    # Combine to get conditional liquefaction probability
    #---------------------------------------------------------------------------
    p_liq_sc = p_liq_pga * p_ml / k_m / k_w

    #---------------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['hazus_liq_cal']['shortref']
    modellref = config['hazus_liq_cal']['longref']
    modeltype = 'Hazus/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(p_liq_sc, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {
                'modeltype': modeltype
            }
        }
    }

    if saveinputs:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {
                'units': 'm/s'
            }
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid.getData(), fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {
                'units': 'm'
            }
        }
    return maplayers
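
A minimal driver for hazus_liq, shown as a hedged sketch: the config mirrors the keys the function reads ('layers', 'parameters', 'shortref', 'longref'), but every path, Vs30 value, and reference string below is an illustrative placeholder, not a shipped default.

# Hedged usage sketch for hazus_liq; all paths and parameter values are
# placeholders that must point at real grid files to actually run.
config = {
    'hazus_liq_cal': {
        'layers': {
            'vs30': {'file': '/data/wills2015_vs30.grd'},
            'watertable': {'file': '/data/fan_wtd.grd',
                           'interpolation': 'linear'},
        },
        # Each parameter maps a Vs30 value to a susceptibility class.
        'parameters': {'p1': [170.0, 'VH'], 'p2': [268.0, 'M']},
        'shortref': 'Hazus (FEMA, 2013)',
        'longref': 'Federal Emergency Management Agency, 2013, Hazus ...',
    }
}

maplayers = hazus_liq('/data/events/northridge/grid.xml', config)
p_liq = maplayers['model']['grid'].getData()  # conditional liq. probability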
Example #2
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.

    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)

    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')

    if args.list_default_paths:
        list_default_paths()
        return

    if args.reset_default_paths:
        reset_default_paths()
        return

    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
        kmz = True
    else:
        gis = args.gis
        hdf5 = args.hdf5
        kmz = args.kmz

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if hdf5 or gis or kmz:
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # Download shakefile if it is a URL
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % shakefile)
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
            if not os.path.exists(outfolder):
                os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in
        #     production the shake grids are not archived, so if we need or
        #     want the exact grid used for a calculation later (if there's
        #     ever a question about how the calculation was done), the
        #     safest thing is to store a copy of it here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        if args.uncertfile is not None:
            uncertfile = os.path.abspath(args.uncertfile)
            unc_copy = os.path.join(outfolder, "uncertainty.xml")
            shutil.copyfile(uncertfile, unc_copy)
        else:
            uncertfile = None

        # Write shakefile to a file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        shake_file = open(shakename, "wt")
        shake_file.write(shake_copy)
        shake_file.close()
        filenames.append(shakename)

        # Check that shakemap bounds do not cross 180/-180 line

        if args.set_bounds is None:
            sd = ShakeGrid.getFileGeoDict(shakefile)
            if sd.xmin > sd.xmax:
                print('\nShakeMap crosses 180/-180 line, setting bounds so '
                      'only side with more land area is run')
                if sd.xmax + 180. > 180 - sd.xmin:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, -180., sd.xmax)
                else:
                    set_bounds = '%s, %s, %s, %s' % (sd.ymin, sd.ymax, sd.xmin,
                                                     180.)
                print('Bounds applied: %s' % set_bounds)
            else:
                set_bounds = args.set_bounds
        else:
            set_bounds = args.set_bounds

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            f = open(config, 'r')
            configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')

        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if set_bounds is not None:
            if 'zoom' in set_bounds:
                temp = set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                temp = eval(set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists

        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                                os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False

            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            notcov, newbnds = check_input_extents(
                conf, shakefile=shakefile,
                bounds=bounds
            )
            if len(notcov) > 0:
                print('\nThe following input layers do not cover'
                      ' the area of interest:\n\t%s' % '\n\t'.join(notcov))
                if newbnds is None:
                    print('\nCannot make bounds that work. '
                          'Skipping to next model\n')
                    continue
                else:
                    pnt = '%s, %s, %s, %s' % (
                        newbnds['xmin'], newbnds['xmax'],
                        newbnds['ymin'], newbnds['ymax'])
                    print('Running model for new bounds that are fully covered'
                          ' by input layer: %s' % pnt)
                    bounds2 = newbnds
            else:
                bounds2 = bounds

            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds2,
                                      trimfile=trimfile)

                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds2,
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname, args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if gis or kmz:
                for key in maplayers:
                    # Rename 'std' key to 'beta_sigma'
                    if key == 'std':
                        key_label = 'beta_sigma'
                    else:
                        key_label = key
                    if gis:
                        filen = os.path.join(outfolder, '%s_%s.bil'
                                             % (filename, key_label))
                        fileh = os.path.join(outfolder, '%s_%s.hdr'
                                             % (filename, key_label))
                        fileg = os.path.join(outfolder, '%s_%s.tif'
                                             % (filename, key_label))

                        GDALGrid.copyFromGrid(
                            maplayers[key]['grid']).save(filen)
                        cflags = '-co COMPRESS=DEFLATE -co predictor=2'
                        srs = '-a_srs EPSG:4326'
                        cmd = 'gdal_translate %s %s -of GTiff %s %s' % (
                            srs, cflags, filen, fileg)
                        rc, so, se = get_command_output(cmd)
                        # Delete bil file and its header
                        os.remove(filen)
                        os.remove(fileh)
                        filenames.append(fileg)
                    if kmz and (not key.startswith('quantile')
                                and not key.startswith('std')):
                        plotorder, logscale, lims, colormaps, maskthresh = \
                            parseConfigLayers(maplayers, conf, keys=['model'])
                        maxprob = np.nanmax(maplayers[key]['grid'].getData())
                        if key == 'model':
                            qdict = {
                                k: maplayers[k] for k in maplayers.keys()
                                if k.startswith('quantile')
                            }
                        else:
                            qdict = None
                        if maskthresh is None:
                            maskthresh = [0.]
                        if maxprob >= maskthresh[0]:
                            filen = os.path.join(outfolder, '%s_%s.kmz'
                                                 % (filename, key_label))
                            filek = create_kmz(maplayers[key], filen,
                                               mask=maskthresh[0],
                                               levels=lims[0],
                                               qdict=qdict)
                            filenames.append(filek)
                        else:
                            print('No unmasked pixels present, skipping kmz '
                                  'file creation')

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                #  # Make binary output for ShakeCast
                #  filef = os.path.join(outfolder, '%s_model.flt'
                #                       % filename)
                #  # And get name of header
                #  filefh = os.path.join(outfolder, '%s_model.hdr'
                #                        % filename)
                #  # Make file
                #  write_floats(filef, maplayers['model']['grid'])
                #  filenames.append(filef)
                #  filenames.append(filefh)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            if len(results) == 0:
                raise Exception('No models were run. Cannot make webpages.')
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode,
                point=point, gf_version=args.gf_version,
                pdlcall=args.pdlcall)
            filenames = filenames + outputs

#        # create transparent png file
#        outputs = create_png(outdir)
#        filenames = filenames + outputs
#
#        # create info file
#        infofile = create_info(outdir)
#        filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)

        return filenames
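
run_gfail can be driven programmatically as well as from bin/gfail, thanks to the dict-to-Namespace conversion at the top of the function. A hedged sketch follows; the dict carries every attribute the non-webpage code path reads, and all paths are placeholders for files that must already exist.

# Hedged sketch: invoking run_gfail directly with a dictionary of
# arguments. Paths are illustrative placeholders.
args = {
    'shakefile': '/events/us1000abcd/grid.xml',
    'config': '/configs/model_a.ini',   # a single .ini model config
    'config_filepath': '/configs',
    'data_path': '/data/gf_inputs',
    'output_filepath': '/output',
    'set_default_paths': False,
    'list_default_paths': False,
    'reset_default_paths': False,
    'make_webpage': False,
    'gis': True,
    'hdf5': True,
    'kmz': False,
    'extract_contents': False,
    'uncertfile': None,
    'set_bounds': None,
    'trimfile': None,
    'finite_fault': None,
    'save_inputs': False,
    'appendname': None,
}
created_files = run_gfail(args)  # converted to a Namespace internally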
Example #3
def getShakefiles(event, outdir, uncert=False, version=None,
                  source='preferred'):
    """
    Download the ShakeMap grid.xml file and, optionally, the
    uncertainty.xml file.

    Args:
        event (str): Event id or URL of the ShakeMap grid file.
        outdir (str): Directory in which to save the downloaded files.
        uncert (bool): Also download uncertainty.xml?
        version (int): Specific ShakeMap version to retrieve; None gets
            the current preferred version.
        source (str): Network source of the ShakeMap, or 'preferred'.

    Returns:
        tuple: (detail, shakefile, uncertfile).
    """
    shakefile = os.path.join(outdir, 'grid.xml')
    if uncert:
        uncertfile = os.path.join(outdir, 'uncertainty.xml')
    else:
        uncertfile = None
    includeSuperseded = version is not None
    # If args.event is a url to a shakemap, download from that url
    if isURL(event):
        if version is not None or source != 'preferred':
            raise Exception('Cannot set shakemap version or source when URL '
                            'of gridfile is provided')
        try:
            shakefile = getGridURL(event, shakefile)
        except Exception as e:
            raise Exception('Could not download shakemap file from provided '
                            'URL: %s' % e)
        # Now get corresponding event detail
        event = getHeaderData(shakefile)[0]['event_id']
        version = getHeaderData(shakefile)[0]['shakemap_version']
        source = getHeaderData(shakefile)[0]['shakemap_originator']
        try:
            detail = get_event_by_id(
                event, includesuperseded=includeSuperseded)
        except BaseException:
            # Maybe originator is missing from event id, try another way

            try:
                temp = getHeaderData(shakefile)[0]
                temp2 = '%s%s' % (
                    temp['shakemap_originator'], temp['shakemap_id'])
                detail = get_event_by_id(
                    temp2, includesuperseded=includeSuperseded)
                event = temp2
            except Exception as e:
                msg = ('Could not get event detail for shakemap at '
                       'provided URL: %s')
                print(msg % e)

    else:
        detail = get_event_by_id(event, includesuperseded=includeSuperseded)

    # Get most recent version
    if version is None:  # Get current preferred
        shakemap = detail.getProducts('shakemap', source=source)[0]
        shakemap.getContent('grid.xml', shakefile)
    # or get version requested
    else:
        allversions = detail.getProducts('shakemap', version='all',
                                         source=source)
        # First try with properties, more reliable
        vers = []
        for allv in allversions:
            if 'version' in allv.properties:
                vers.append(int(allv['version']))
            else:
                vers.append(-999)
        idx = np.where(np.array(vers) == version)[0]
        if len(idx) < 1:
            # Try using libcomcat version, less reliable...
            vers = [allv.version for allv in allversions]
            idx = np.where(np.array(vers) == version)[0]
            if len(idx) == 1:
                # Check info.json to make sure it's right version
                infobytes, url = allversions[idx[0]].getContentBytes(
                    'info.json')
                info = json.loads(infobytes.decode('utf-8'))
                mapver = \
                    info['processing']['shakemap_versions']['map_version']
                if mapver != version:
                    idx = []
        if len(idx) < 1:
            msg = 'Could not find version %d of Shakemap from source %s'
            raise Exception(msg % (version, source))
        if len(idx) > 1:
            msg = ('Found more than one ShakeMap with matching source and '
                   'version. Choosing first one.')
            warnings.warn(msg)
            print(msg)

        shakemap = allversions[idx[0]]
        shakemap.getContent('grid.xml', shakefile)

    if uncert:
        uncertfile = getUncert(shakemap, uncertfile)

    return detail, shakefile, uncertfile
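
A short usage sketch, assuming ComCat connectivity and a writable output directory; the event id here is illustrative.

import tempfile

outdir = tempfile.mkdtemp()
# Current preferred ShakeMap, plus its uncertainty grid:
detail, shakefile, uncertfile = getShakefiles('us1000abcd', outdir,
                                              uncert=True)
# Or pin an earlier version from a specific source:
detail, shakefile, _ = getShakefiles('us1000abcd', outdir, version=2,
                                     source='us')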
Example #4
    def __init__(self, config, shakefile, model):
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config '
                            '%s.' % (model, config))
        # Do everything here short of calculations: parse config, assemble
        # equation strings, load data.
        self.model = model
        cmodel = config['logistic_models'][model]
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs,
                                              self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)

        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')

        # Get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        DAY = eventdict['event_timestamp'].day
        HOUR = eventdict['event_timestamp'].hour

        # Now find the layer that is our base layer and get the largest
        # bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type')

        # Now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                       resample=True, doPadding=True,
                                       adjust='res')

        # Load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile, sampledict,
                                                   resample=True,
                                                   method=interp,
                                                   doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile, sampledict,
                                                    resample=True,
                                                    method=interp,
                                                    doPadding=True)
                            else:
                                msg = ('Layer %s (file %s) does not appear '
                                       'to be a valid GMT or ESRI file.'
                                       % (layername, layerfile))
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                # First, figure out what kind of file we have (or is it a
                # directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                       method=interp, doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile, sampledict, resample=True,
                                        method=interp, doPadding=True)
                else:
                    msg = ('Layer %s (file %s) does not appear to be a '
                           'valid GMT or ESRI file.' % (layername, layerfile))
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]
        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = self.shakemap.getGeoDict()
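
The final block above turns the coefficient dictionary and the term strings into a single evaluable expression. A self-contained sketch of that assembly, with invented coefficients and terms:

# Stand-alone illustration of the nugget/equation assembly; the
# coefficient values and term strings are made up for the demo.
coeffs = {'b0': -7.15, 'b1': 1.0, 'b2': 2.5}
terms = {'b1': 'np.log(pgv)', 'b2': 'slope / 90.'}

nuggets = [str(coeffs['b0'])]
for key in sorted(terms.keys()):
    nuggets.append('(%g * %s)' % (coeffs[key], terms[key]))
equation = ' + '.join(nuggets)
print(equation)
# -7.15 + (1 * np.log(pgv)) + (2.5 * slope / 90.)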
Example #5
def get_event_comcat(shakefile, timewindow=60, degwindow=0.3, magwindow=0.2):
    """
    Find an event in comcat, searching first by event id and if that
    fails searching by magnitude, time, and location.

    Args:
        shakefile (str): path to shakemap .xml file of event to find
        timewindow (float): width of time window to search around time defined
            in shakefile (in seconds)
        degwindow (float): width of area to search around location specified in
            shakefile (in degrees).
        magwindow (float): width of magnitude window to search around the
            magnitude specified in shakefile.

    Returns:
        None if event not found, else tuple (info, detail, shakemap) where:
            * info: json formatted dictionary of info.json for the event
            * detail: event detail from comcat
            * shakemap: shakemap of event found (from comcat)

    """
    header_dicts = getHeaderData(shakefile)
    grid_dict = header_dicts[0]
    event_dict = header_dicts[1]
    version = grid_dict['shakemap_version']
    try:
        eid = event_dict['event_id']
        net = 'us'
        if 'event_network' in event_dict:
            net = event_dict['event_network']
        if not eid.startswith(net):
            eid = net + eid
        detail = get_event_by_id(eid, includesuperseded=True)
    except Exception:
        lat = event_dict['lat']
        lon = event_dict['lon']
        mag = event_dict['magnitude']
        time = event_dict['event_timestamp']
        starttime = time - timedelta(seconds=timewindow)
        endtime = time + timedelta(seconds=timewindow)
        minlat = lat - degwindow
        minlon = lon - degwindow
        maxlat = lat + degwindow
        maxlon = lon + degwindow
        minmag = max(0, mag - magwindow)
        maxmag = min(10, mag + magwindow)
        events = search(starttime=starttime,
                        endtime=endtime,
                        minmagnitude=minmag,
                        maxmagnitude=maxmag,
                        minlatitude=minlat,
                        minlongitude=minlon,
                        maxlatitude=maxlat,
                        maxlongitude=maxlon)
        if not len(events):
            return None
        detail = events[0].getDetailEvent()
    allversions = detail.getProducts('shakemap', version=VersionOption.ALL)
    # Find the right version
    vers = [allv.version for allv in allversions]
    idx = np.where(np.array(vers) == version)[0][0]
    shakemap = allversions[idx]
    infobytes, url = shakemap.getContentBytes('info.json')
    info = json.loads(infobytes.decode('utf-8'))
    return info, detail, shakemap
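
A hedged sketch of calling get_event_comcat with a local grid file; the path is a placeholder, and the info.json key path follows the usage seen elsewhere in this codebase.

returned = get_event_comcat('/events/northridge/grid.xml')
if returned is None:
    print('Event not found in ComCat')
else:
    info, detail, shakemap = returned
    # Map version recorded in the matched product's info.json
    print(info['processing']['shakemap_versions']['map_version'])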
Example #6
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 bounds=None,
                 slopemod=None,
                 trimfile=None):
        """
        Sets up the logistic model

        Args:
            shakefile (str): Path to shakemap grid.xml file for the event.
            config: configobj object defining the model and its inputs. Only
                one model should be described in each config file.
            uncertfile (str): Path to uncertainty.xml file.
            saveinputs (bool): Save input layers as Grid2D objects in addition
                to the model? If false (the default), it will just output the
                model.
            slopefile (str): Optional path to slopefile that will be resampled
                to the other input files for applying thresholds. OVERWRITES
                VALUE IN CONFIG.
            bounds (dict): Default of None uses ShakeMap boundaries, otherwise
                a dictionary of boundaries to cut to like

                .. code-block:: python

                    bounds = {
                        'xmin': lonmin, 'xmax': lonmax,
                        'ymin': latmin, 'ymax': latmax
                    }
            slopemod (str): How slope input should be modified to be in
                degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
                ``slope/100.`` (note that this may be in the config file
                already).
            trimfile (str): shapefile of earth's landmasses to use to cut
                offshore areas.
        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception('No config file found or problem with config '
                            'file format')
        if len(mnames) > 1:
            raise Exception('Config file contains more than one model which '
                            'is no longer allowed, update your config file '
                            'to the newer format')

        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        #self.numstd = numstd
        self.clips = validateClips(cmodel, self.layers, self.gmused)
        self.notes = ''

        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except Exception:
                # print('Slopefile not specified in config, no slope '
                #      'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        if slopemod is None:
            try:
                self.slopemod = cmodel['slopemod']
            except Exception:
                self.slopemod = None
        else:
            self.slopemod = slopemod

        # See if trimfile exists
        if trimfile is not None:
            if not os.path.exists(trimfile):
                print('trimfile defined does not exist: %s\nOcean will not be '
                      'trimmed' % trimfile)
                self.trimfile = None
            elif os.path.splitext(trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, ocean will not be '
                      'trimmed')
                self.trimfile = None
            else:
                self.trimfile = trimfile
        else:
            self.trimfile = None

        # Get month of event
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

        # Figure out how/if need to cut anything
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if geodict.xmin < geodict.xmax:  # only if signs are not opposite
                if (geodict.xmin > bounds['xmin']
                        or geodict.xmax < bounds['xmax']
                        or geodict.ymin > bounds['ymin']
                        or geodict.ymax < bounds['ymax']):
                    print('Specified bounds are outside shakemap area, using '
                          'ShakeMap bounds instead.')
                    bounds = None

        if bounds is not None:
            tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                                  bounds['xmax'],
                                                  bounds['ymin'],
                                                  bounds['ymax'],
                                                  geodict.dx,
                                                  geodict.dy,
                                                  inside=False)
            # If Shakemap geodict crosses the 180/-180 line, fix geodict
            # so things don't break
            if geodict.xmin > geodict.xmax:
                if tempgdict.xmin < 0:
                    geodict._xmin -= 360.
                else:
                    geodict._xmax += 360.
            gdict = geodict.getBoundsWithin(tempgdict)
        else:
            gdict = geodict

        # Now find the layer that is our base layer and get the largest bounds
        # we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type.')

        # Do we need to subdivide baselayer?
        if 'divfactor' in self.config[self.model].keys():
            divfactor = float(self.config[self.model]['divfactor'])
            if divfactor != 1.:
                # adjust sampledict so everything will be resampled
                newxmin = sampledict.xmin - sampledict.dx / \
                    2. + sampledict.dx/(2.*divfactor)
                newymin = sampledict.ymin - sampledict.dy / \
                    2. + sampledict.dy/(2.*divfactor)
                newxmax = sampledict.xmax + sampledict.dx / \
                    2. - sampledict.dx/(2.*divfactor)
                newymax = sampledict.ymax + sampledict.dy / \
                    2. - sampledict.dy/(2.*divfactor)
                newdx = sampledict.dx / divfactor
                newdy = sampledict.dy / divfactor

                sampledict = GeoDict.createDictFromBox(newxmin,
                                                       newxmax,
                                                       newymin,
                                                       newymax,
                                                       newdx,
                                                       newdy,
                                                       inside=True)

        # Find slope thresholds, if applicable
        self.slopemin = 'none'
        self.slopemax = 'none'
        if self.slopefile is not None:
            try:
                self.slopemin = float(config[self.model]['slopemin'])
                self.slopemax = float(config[self.model]['slopemax'])
            except Exception:
                print('Could not find slopemin and/or slopemax in config. '
                      'No slope thresholds will be applied.')
                self.slopemin = 'none'
                self.slopemax = 'none'

        # Make temporary directory for hdf5 pytables file storage
        self.tempdir = tempfile.mkdtemp()

        # now load the shakemap, resampling and padding if necessary
        temp = ShakeGrid.load(shakefile)  # , adjust='res')
        self.shakedict = temp.getShakeDict()
        self.eventdict = temp.getEventDict()
        self.shakemap = {}

        # Read both PGA and PGV in, may need them for thresholds
        for gm in ['pga', 'pgv']:
            junkfile = os.path.join(self.tempdir, 'temp.bil')
            GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
            if gm in self.interpolations.keys():
                intermeth = self.interpolations[gm]
            else:
                intermeth = 'bilinear'
            junkgrid = quickcut(junkfile,
                                sampledict,
                                precise=True,
                                method=intermeth)
            if gm in self.clips:
                junkgrid.setData(
                    np.clip(junkgrid.getData(), self.clips[gm][0],
                            self.clips[gm][1]))
            self.shakemap[gm] = TempHdf(
                junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
            os.remove(junkfile)
        del (temp)

        # get updated geodict
        sampledict = junkgrid.getGeoDict()

        # take uncertainties into account, if available
        if uncertfile is not None:
            self.uncert = {}
            try:
                # Only read in the ones that will be needed
                temp = ShakeGrid.load(uncertfile)
                already = []
                for gm in self.gmused:
                    if 'pgv' in gm:
                        gmsimp = 'pgv'
                    elif 'pga' in gm:
                        gmsimp = 'pga'
                    elif 'mmi' in gm:
                        gmsimp = 'mmi'
                    if gmsimp in already:
                        continue
                    junkfile = os.path.join(self.tempdir, 'temp.bil')
                    GDALGrid.copyFromGrid(temp.getLayer('std%s' %
                                                        gmsimp)).save(junkfile)
                    if gmsimp in self.interpolations.keys():
                        intermeth = self.interpolations[gmsimp]
                    else:
                        intermeth = 'bilinear'
                    junkgrid = quickcut(junkfile,
                                        sampledict,
                                        precise=True,
                                        method=intermeth)
                    if gmsimp in self.clips:
                        junkgrid.setData(
                            np.clip(junkgrid.getData(), self.clips[gmsimp][0],
                                    self.clips[gmsimp][1]))
                    self.uncert['std' + gmsimp] = TempHdf(
                        junkgrid,
                        os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                    already.append(gmsimp)
                    os.remove(junkfile)
                del (temp)
            except Exception:
                print('Could not read uncertainty file, ignoring '
                      'uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        # Load the predictor layers, save as hdf5 temporary files, put file
        # locations into a dictionary.

        # Will be replaced in the next section if a slopefile was defined
        self.nonzero = None

        # key = layer name, value = grid object
        self.layerdict = {}

        didslope = False
        for layername, layerfile in self.layers.items():
            start = timer()
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            temp = quickcut(layerfile,
                                            sampledict,
                                            precise=True,
                                            method=interp)
                            if layername in self.clips:
                                temp.setData(
                                    np.clip(temp.getData(),
                                            self.clips[layername][0],
                                            self.clips[layername][1]))
                            self.layerdict[layername] = TempHdf(
                                temp,
                                os.path.join(self.tempdir,
                                             '%s.hdf5' % layername))
                            del (temp)
            else:
                interp = self.interpolations[layername]
                temp = quickcut(layerfile,
                                sampledict,
                                precise=True,
                                method=interp)
                if layername in self.clips:
                    temp.setData(
                        np.clip(temp.getData(), self.clips[layername][0],
                                self.clips[layername][1]))
                # Convert unconsolidated sediments to a more reasonable
                # coefficient
                if layername == 'rock':
                    sub1 = temp.getData()
                    # Change to mixed sed rock coeff
                    sub1[sub1 <= -3.21] = -1.36
                    temp.setData(sub1)
                    self.notes += ('unconsolidated sediment coefficient '
                                   'changed to -1.36 (weaker) from -3.22 '
                                   'to better reflect that this unit is '
                                   'not actually strong\n')

                self.layerdict[layername] = TempHdf(
                    temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
                td = temp.getGeoDict()
                if td != sampledict:
                    raise Exception(
                        'Geodictionaries of resampled files do not match')

                if layerfile == self.slopefile:
                    flag = 0
                    if self.slopemin == 'none' and self.slopemax == 'none':
                        flag = 1
                    if self.slopemod is None:
                        slope1 = temp.getData().astype(float)
                        slope = 0
                    else:
                        try:
                            slope = temp.getData().astype(float)
                            slope1 = eval(self.slopemod)
                        except Exception:
                            print('slopemod provided not valid, continuing '
                                  'without slope thresholds.')
                            # Fall back to unmodified slope so the zero
                            # mask below still has data to work with.
                            slope1 = temp.getData().astype(float)
                            flag = 1
                    if flag == 0:
                        nonzero = np.array([(slope1 > self.slopemin) &
                                            (slope1 <= self.slopemax)])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                        del (slope)
                    else:
                        # Still remove areas where the slope equals exactly
                        # 0.0 to remove offshore liq areas.
                        nonzero = np.array([slope1 != 0.0])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                    didslope = True
                del (temp)

            print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

        if didslope is False and self.slopefile is not None:
            # Slope didn't get read in yet
            temp = quickcut(self.slopefile,
                            sampledict,
                            precise=True,
                            method='bilinear')
            flag = 0
            if self.slopemin == 'none' and self.slopemax == 'none':
                flag = 1
            if self.slopemod is None:
                slope1 = temp.getData().astype(float)
                slope = 0
            else:
                try:
                    slope = temp.getData().astype(float)
                    slope1 = eval(self.slopemod)
                except Exception:
                    print('slopemod provided not valid, continuing without '
                          'slope thresholds')
                    # Fall back to unmodified slope so the zero mask below
                    # still has data to work with.
                    slope1 = temp.getData().astype(float)
                    flag = 1
            if flag == 0:
                nonzero = np.array([
                    (slope1 > self.slopemin) & (slope1 <= self.slopemax)
                ])
                self.nonzero = nonzero[0, :, :]
                del (slope1)
                del (slope)
            else:
                # Still remove areas where the slope equals exactly
                # 0.0 to remove offshore liq areas.
                nonzero = np.array([slope1 != 0.0])
                self.nonzero = nonzero[0, :, :]
                del (slope1)

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = sampledict
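
The divfactor block above shrinks the cell size while keeping the subdivided grid flush with the original cell edges. A numeric check of that arithmetic with invented values:

# dx = 0.1 deg, divfactor = 2: cells are halved and the new outer cell
# edge coincides with the old one.
xmin, dx, divfactor = 10.0, 0.1, 2.0
newxmin = xmin - dx / 2. + dx / (2. * divfactor)
newdx = dx / divfactor
print(newxmin, newdx)
# 9.975 0.05 -> new first cell spans 9.95-10.0; its left edge matches the
# old cell's left edge at 9.95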
Example #7
def test():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data',
                             'eventdata', event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data',
                             'eventdata', 'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json')
    oceangridfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.bil')
    timezonefile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_timezone.shp')

    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    basename = os.path.join(tdir, 'output')

    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()

    pdffile, pngfile, mapcities = draw_contour(
        shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename)
    shutil.rmtree(tdir)

    popyear = 2012

    shake_tuple = getHeaderData(shakefile)
    tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = '''Red alert level for economic losses. Extensive damage is probable 
    and the disaster is likely widespread. Estimated economic losses are less 
    than 1% of GDP of Italy. Past events with this alert level have required 
    a national or international level response.'''
    impact2 = '''Orange alert level for shaking-related fatalities. Significant 
    casualties are likely.'''
    structcomment = '''Overall, the population in this region resides in structures 
    that are a mix of vulnerable and earthquake resistant construction. The predominant 
    vulnerable building types are unreinforced brick with mud and mid-rise nonductile 
    concrete frame with infill construction.'''
    histeq = [1, 2, 3]
    struct_comment = '''Overall, the population in this region resides
    in structures that are resistant to earthquake
    shaking, though some vulnerable structures
    exist.'''
    secondary_comment = '''Recent earthquakes in this area have caused secondary hazards 
    such as landslides that might have contributed to losses.'''
    hist_comment = ('A magnitude 7.1 earthquake 240 km east of this event '
                    'struck Reventador: Ecuador on March 6, 1987 (UTC), '
                    'with estimated population exposures of 14,000 at '
                    'intensity VIII and 2,000 at intensity IX or greater, '
                    'resulting in a reported 5,000 fatalities.')

    location = 'At the top of the world.'
    is_released = True

    doc = PagerData()
    eventcode = shakegrid.getEventDict()['event_id']
    versioncode = eventcode
    doc.setInputs(shakegrid, timezonefile, pagerversion,
                  versioncode, eventcode, tsunami, location, is_released)
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel,
                        fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment,
                    hist_comment, secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # let's test the property methods
    tdoc(doc, shakegrid, impact1, impact2,
         expdict, struct_comment, hist_comment)

    # see if we can save this to a bunch of files then read them back in
    try:
        tdir = tempfile.mkdtemp()
        doc.saveToJSON(tdir)
        newdoc = PagerData()
        newdoc.loadFromJSON(tdir)
        tdoc(newdoc, shakegrid, impact1, impact2,
             expdict, struct_comment, hist_comment)

        # test the xml saving method
        xmlfile = doc.saveToLegacyXML(tdir)
    except Exception as e:
        raise AssertionError('PagerData save/load round trip failed: '
                             '%s' % e)
    finally:
        shutil.rmtree(tdir)
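
Test modules like this are typically executed directly; a minimal entry point, assuming the data files under ../data are present:

if __name__ == '__main__':
    test()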
Example #8
def getShakefiles(event,
                  outdir,
                  uncert=False,
                  version=None,
                  source='preferred'):
    """
    Download the shakemap grid.xml file and the

    Args:
        event event id or URL
    """
    shakefile = os.path.join(outdir, 'grid.xml')
    if uncert:
        uncertfile = os.path.join(outdir, 'uncertainty.xml')
    else:
        uncertfile = None
    # If args.event is a url to a shakemap, download from that url
    if isURL(event):
        if version is not None or source != 'preferred':
            raise Exception(
                'Cannot set shakemap version or source when URL of '
                'gridfile is provided')
        try:
            shakefile = getGridURL(event, shakefile)
        except Exception as e:
            raise Exception('Could not download shakemap file from provided '
                            'URL: %s' % e)
        # Now get corresponding event detail
        event = getHeaderData(shakefile)[0]['event_id']
        version = getHeaderData(shakefile)[0]['shakemap_version']
        source = getHeaderData(shakefile)[0]['shakemap_originator']
        try:
            detail = get_event_by_id(event, includesuperseded=True)
        except Exception:  # Maybe originator is missing from event id, try another way
            try:
                temp = getHeaderData(shakefile)[0]
                temp2 = '%s%s' % (temp['shakemap_originator'],
                                  temp['shakemap_id'])
                detail = get_event_by_id(temp2, includesuperseded=True)
                event = temp2
            except Exception as e:
                msg = 'Could not get event detail for shakemap at provided URL: %s'
                raise Exception(msg % e)

    else:
        detail = get_event_by_id(event, includesuperseded=True)

    # Get most recent version
    if version is None:  # Get current preferred
        shakemap = detail.getProducts('shakemap', source=source)[0]
        shakemap.getContent('grid.xml', shakefile)
    # or get version requested
    else:
        allversions = detail.getProducts('shakemap',
                                         version='all',
                                         source=source)
        vers = [allv.version for allv in allversions]
        idx = np.where(np.array(vers) == version)[0]
        if len(idx) != 1:
            msg = 'Could not find version %d of Shakemap from source %s'
            raise Exception(msg % (version, source))
        shakemap = allversions[idx[0]]
        shakemap.getContent('grid.xml', shakefile)

    if uncert:
        uncertfile = getUncert(shakemap, uncertfile)

    return detail, shakefile, uncertfile
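
A minimal usage sketch for getShakefiles, assuming the surrounding module is importable; the event id below is a hypothetical placeholder, and any valid ComCat id or grid.xml URL would work:

import tempfile

outdir = tempfile.mkdtemp()
# 'us1000abcd' is a made-up event id used only for illustration
detail, shakefile, uncertfile = getShakefiles('us1000abcd', outdir,
                                              uncert=True)
print(shakefile)    # .../grid.xml
print(uncertfile)   # .../uncertainty.xml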
Example #9
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 slopediv=1.,
                 bounds=None,
                 numstd=1):
        """Set up the logistic model
        # ADD BOUNDS TO THIS MODEL
        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs. Only one
          model should be described in each config file.
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param uncertfile: Full file path to xml file of shakemap uncertainties
        :type uncertfile: string
        :param saveinputs: if True, saves all the input layers as Grid2D objects in addition to the model
          if false, it will just output the model
        :type saveinputs: boolean
        :param slopefile: optional file path to slopefile that will be resampled to the other input files for applying
          thresholds OVERWRITES VALUE IN CONFIG
        :type slopefile: string
        :param slopediv: number to divide slope by to get to degrees (usually will be default
          of 1.)
        :type slopediv: float
        :param numstd: number of +/- standard deviations to use if uncertainty is computed (uncertfile is not None)

        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception(
                'No config file found or problem with config file format')
        if len(mnames) > 1:
            raise Exception(
                'Config file contains more than one model, which is no longer '
                'allowed; update your config file to the newer format')
        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(
            cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        self.numstd = numstd
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception(
                'You must specify a base layer corresponding to one of the files in the layer section.'
            )
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except KeyError:
                print(
                    'Could not find slopefile term in config, no slope '
                    'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        self.slopediv = slopediv

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
            shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception(
                'All predictor variable grids must be a valid GMT or ESRI file type'
            )

        #now load the shakemap, resampling and padding if necessary
        if ShakeGrid.getFileGeoDict(shakefile, adjust='res') == sampledict:
            self.shakemap = ShakeGrid.load(shakefile, adjust='res')
            flag = 1
        else:
            self.shakemap = ShakeGrid.load(shakefile,
                                           samplegeodict=sampledict,
                                           resample=True,
                                           doPadding=True,
                                           adjust='res')
            flag = 0

        # take uncertainties into account
        if uncertfile is not None:
            try:
                if flag == 1:
                    self.uncert = ShakeGrid.load(uncertfile, adjust='res')
                else:
                    self.uncert = ShakeGrid.load(uncertfile,
                                                 samplegeodict=sampledict,
                                                 resample=True,
                                                 doPadding=True,
                                                 adjust='res')
            except Exception:
                print(
                    'Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                if GMTGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GMTGrid.load(layerfile)
                                else:
                                    lyr = GMTGrid.load(layerfile,
                                                       sampledict,
                                                       resample=True,
                                                       method=interp,
                                                       doPadding=True)
                            elif ftype == 'esri':
                                if GDALGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GDALGrid.load(layerfile)
                                else:
                                    lyr = GDALGrid.load(layerfile,
                                                        sampledict,
                                                        resample=True,
                                                        method=interp,
                                                        doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                                    layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    if GMTGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GMTGrid.load(layerfile)
                    else:
                        lyr = GMTGrid.load(layerfile,
                                           sampledict,
                                           resample=True,
                                           method=interp,
                                           doPadding=True)
                elif ftype == 'esri':
                    if GDALGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GDALGrid.load(layerfile)
                    else:
                        lyr = GDALGrid.load(layerfile,
                                            sampledict,
                                            resample=True,
                                            method=interp,
                                            doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                        layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the terms with a shakemap input and swap in versions
            # shifted by +/- numstd standard deviations for these nuggets
            for k, nug in enumerate(self.nuggets):
                for gm in ('pga', 'pgv', 'mmi'):
                    layer_str = "self.shakemap.getLayer('%s').getData()" % gm
                    if layer_str not in nug:
                        continue
                    std_str = "self.uncert.getLayer('std%s').getData()" % gm
                    self.nugmin[k] = self.nugmin[k].replace(
                        layer_str,
                        "(np.exp(np.log(%s) - self.numstd * %s))"
                        % (layer_str, std_str))
                    self.nugmax[k] = self.nugmax[k].replace(
                        layer_str,
                        "(np.exp(np.log(%s) + self.numstd * %s))"
                        % (layer_str, std_str))
                    break  # mirror the original elif chain: one match per nugget
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config[self.model]['slopemin'])
            self.slopemax = float(config[self.model]['slopemax'])
        except KeyError:
            print(
                'Could not find slopemin and/or slopemax in config, '
                'no limits will be applied')
            self.slopemin = 0.
            self.slopemax = 90.
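
A minimal sketch of driving this constructor; the paths are hypothetical, and the calculate() call mirrors how run_gfail (Example #11 below) uses the class:

from configobj import ConfigObj

config = ConfigObj('/path/to/model.ini')   # must describe exactly one model
lm = LogisticModel('/path/to/grid.xml', config,
                   uncertfile=None, saveinputs=False, numstd=1)
print(lm.equation)         # the assembled '(coeff * term) + ...' string
maplayers = lm.calculate()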
Example #10
    def __init__(self, config, shakefile, model, uncertfile=None):
        """Set up the logistic model

        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param model: Name of model defined in config that should be run for the event of interest
        :type model: string
        :param uncertfile: Full file path to xml file of shakemap uncertainties (optional)
        :type uncertfile: string

        """
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.' % (model, config))
        #do everything here short of calculations - parse config, assemble eqn strings, load data.

        self.model = model
        cmodel = config['logistic_models'][model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [value for term, value in cmodel['terms'].items() if 'pga' in value.lower() or 'pgv' in
                       value.lower() or 'mmi' in value.lower()]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to one of the files in the layer section.')

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month)-1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid GMT or ESRI file type')

        #now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict, resample=True, doPadding=True, adjust='res')

        # take uncertainties into account
        if uncertfile is not None:
            try:
                self.uncert = ShakeGrid.load(uncertfile, samplegeodict=sampledict, resample=True, doPadding=True,
                                             adjust='res')
            except Exception:
                print('Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the terms with a shakemap input and swap in the
            # +/- one-standard-deviation versions for these nuggets
            for k, nug in enumerate(self.nuggets):
                for gm in ('pga', 'pgv', 'mmi'):
                    layer_str = "self.shakemap.getLayer('%s').getData()" % gm
                    if layer_str not in nug:
                        continue
                    std_str = "self.uncert.getLayer('std%s').getData()" % gm
                    self.nugmin[k] = self.nugmin[k].replace(
                        layer_str,
                        "(np.exp(np.log(%s) - %s))" % (layer_str, std_str))
                    self.nugmax[k] = self.nugmax[k].replace(
                        layer_str,
                        "(np.exp(np.log(%s) + %s))" % (layer_str, std_str))
                    break  # one match per nugget, as in the elif chain
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config['logistic_models'][model]['slopemin'])
            self.slopemax = float(config['logistic_models'][model]['slopemax'])
        except KeyError:
            print('Could not find slopemin and/or slopemax in config, no limits will be applied')
            self.slopemin = 0.
            self.slopemax = 90.
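
For this older variant the model is selected by name from a 'logistic_models' section of the config. A minimal sketch with hypothetical paths and a made-up model name:

from configobj import ConfigObj

# The .ini is expected to look roughly like:
#   [logistic_models]
#       [[my_model]]
#           gfetype = landslide
#           baselayer = slope
#           ...
config = ConfigObj('/path/to/models.ini')
lm = LogisticModel(config, '/path/to/grid.xml', 'my_model')
print(lm.equation)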
Example #11
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.

    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)

    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')

    if args.list_default_paths:
        list_default_paths()
        return

    if args.reset_default_paths:
        reset_default_paths()
        return

    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
    else:
        gis = args.gis
        hdf5 = args.hdf5

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if (hdf5 or args.make_static_pngs or
                args.make_static_pdfs or
                args.make_interactive_plots or
                gis):
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % (shakefile))
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
            if not os.path.exists(outfolder):
                os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in production
        #     the shake grids are not archived, so if there's ever a question
        #     about how the calculation was done, the safest thing is to store
        #     a copy of the exact grid used here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        # Write the shakefile path to a text file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        with open(shakename, "wt") as shake_file:
            shake_file.write(shake_copy)
        filenames.append(shakename)

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            f = open(config, 'r')
            configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except Exception:
                    conffail.append(conf)

        print('\nRunning the following models:')

        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if args.set_bounds is not None:
            if 'zoom' in args.set_bounds:
                temp = args.set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                temp = eval(args.set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage or args.make_summary:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists

        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except Exception:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    if 'faultfiles' in testjd['input']['event_information']:
                        ffilename = testjd['input']['event_information']['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                                os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False

            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=args.uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds,
                                      numstd=float(args.std),
                                      trimfile=trimfile)

                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=args.uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds,
                                     numstd=float(args.std),
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname, args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if args.make_static_pdfs or args.make_static_pngs:
                plotorder, logscale, lims, colormaps, maskthreshes = \
                    parseConfigLayers(maplayers, conf)
                mapconfig = ConfigObj(args.mapconfig)

                kwargs = parseMapConfig(
                    mapconfig, fileext=args.mapdata_filepath)
                junk, filenames1 = modelMap(
                    maplayers, shakefile,
                    suptitle=conf[modelname]['shortref'],
                    boundaries=None,
                    zthresh=0.,
                    lims=lims,
                    plotorder=plotorder,
                    maskthreshes=maskthreshes,
                    maproads=False,
                    mapcities=True,
                    colormaps=colormaps,
                    savepdf=args.make_static_pdfs,
                    savepng=args.make_static_pngs,
                    printparam=True,
                    inventory_shapefile=None,
                    outputdir=outfolder,
                    outfilename=filename,
                    scaletype='continuous',
                    logscale=logscale, **kwargs)
                for filen in filenames1:
                    filenames.append(filen)

                # make model only plots too
                if len(maplayers) > 1:
                    plotorder, logscale, lims, colormaps, maskthreshes = \
                        parseConfigLayers(maplayers, conf, keys=['model'])
                    junk, filenames1 = modelMap(
                        maplayers, shakefile,
                        suptitle=conf[modelname]['shortref'], boundaries=None,
                        zthresh=0., lims=lims, plotorder=plotorder,
                        maskthreshes=maskthreshes, maproads=False,
                        mapcities=True, savepdf=args.make_static_pdfs,
                        savepng=args.make_static_pngs, printparam=True,
                        inventory_shapefile=None, outputdir=outfolder,
                        outfilename=filename + '-just_model',
                        colormaps=colormaps, scaletype='continuous',
                        logscale=logscale, **kwargs)
                    for filen in filenames1:
                        filenames.append(filen)
            if args.make_interactive_plots:
                plotorder, logscale, lims, colormaps, maskthreshes = \
                    parseConfigLayers(maplayers, conf)
                junk, filenames1 = interactiveMap(
                    maplayers, plotorder=plotorder, shakefile=shakefile,
                    inventory_shapefile=None, maskthreshes=maskthreshes,
                    colormaps=colormaps, isScenario=False,
                    scaletype='continuous', lims=lims, logscale=logscale,
                    ALPHA=0.7, outputdir=outfolder, outfilename=filename,
                    tiletype='Stamen Terrain', separate=True,
                    faultfile=ffault)
                for filen in filenames1:
                    filenames.append(filen)
            if gis:

                for key in maplayers:
                    # Get simplified name of key for file naming
                    RIDOF = r'[+-]?(?=\d*[.eE])(?=\.?\d)' \
                            r'\d*\.?\d*(?:[eE][+-]?\d+)?'
                    OPERATORPAT = r'[\+\-\*\/]*'
                    keyS = re.sub(OPERATORPAT, '', key)
                    # remove floating point numbers
                    keyS = re.sub(RIDOF, '', keyS)
                    # remove parentheses
                    keyS = re.sub('[()]*', '', keyS)
                    # remove any blank spaces
                    keyS = keyS.replace(' ', '')
                    filen = os.path.join(outfolder, '%s_%s.bil'
                                         % (filename, keyS))
                    fileh = os.path.join(outfolder, '%s_%s.hdr'
                                         % (filename, keyS))
                    fileg = os.path.join(outfolder, '%s_%s.tif'
                                         % (filename, keyS))

                    GDALGrid.copyFromGrid(maplayers[key]['grid']).save(filen)
                    cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff %s %s' % (
                        filen, fileg)
                    rc, so, se = get_command_output(cmd)
                    # Delete bil file and its header
                    os.remove(filen)
                    os.remove(fileh)
                    filenames.append(fileg)

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                # Make binary output for ShakeCast
                filef = os.path.join(outfolder, '%s_model.flt'
                                     % filename)
                # And get name of header
                filefh = os.path.join(outfolder, '%s_model.hdr'
                                      % filename)
                # Make file
                write_floats(filef, maplayers['model']['grid'])
                filenames.append(filef)
                filenames.append(filefh)

            if args.make_summary and not args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid
        
        if args.make_webpage:
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode)
            filenames = filenames + outputs

        if args.make_summary:
            outputs = GFSummary(
                results, configs, args.web_template,
                shakefile, outfolder=outfolder, cleanup=True,
                faultfile=ffault, point=point, pop_file=args.popfile)
            filenames = filenames + outputs

#        # create transparent png file
#        outputs = create_png(outdir)
#        filenames = filenames + outputs
#
#        # create info file
#        infofile = create_info(outdir)
#        filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)

        return filenames
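
run_gfail reads many attributes from args, so a dictionary passed in must carry every flag the chosen code paths touch. A minimal sketch with hypothetical paths, running just the HDF5 output path:

args = {
    'shakefile': '/path/to/grid.xml',   # hypothetical path
    'config': '/path/to/model.ini',     # hypothetical path
    'output_filepath': '/tmp/gfail_out',
    'set_default_paths': False,
    'list_default_paths': False,
    'reset_default_paths': False,
    'make_webpage': False,
    'make_summary': False,
    'gis': False,
    'hdf5': True,
    'make_static_pdfs': False,
    'make_static_pngs': False,
    'make_interactive_plots': False,
    'extract_contents': False,
    'config_filepath': None,
    'data_path': None,
    'set_bounds': None,
    'trimfile': None,
    'finite_fault': None,
    'uncertfile': None,
    'save_inputs': False,
    'std': 1,
    'appendname': None,
}
filenames = run_gfail(args)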
Example #12
def holzer_liq(shakefile,
               config,
               uncertfile=None,
               saveinputs=False,
               modeltype=None,
               displmodel=None,
               probtype=None,
               bounds=None):
    """
    Method for computing the probability of liquefaction using the Holzer method
    using the Wills et al. (2015) Vs30 map of California to define the
    susceptibility classes and the Fan et al. global water table model. 
    """
    layers = config['holzer_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------------
    # Loading info
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile,
                              fgeodict,
                              resample=True,
                              method='linear',
                              doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    #---------------------------------------------------------------------------
    # Logistic function parameters from Vs30
    #---------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    a0 = np.zeros_like(vs30)
    b0 = np.zeros_like(vs30)
    c0 = np.zeros_like(vs30)
    a1 = np.zeros_like(vs30)
    b1 = np.zeros_like(vs30)
    c1 = np.zeros_like(vs30)
    for k, v in config['holzer_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        a0[ind] = v[1]
        b0[ind] = v[2]
        c0[ind] = v[3]
        a1[ind] = v[4]
        b1[ind] = v[5]
        c1[ind] = v[6]

    #---------------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file,
                            fgeodict,
                            resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid._data
    tmp = np.nan_to_num(tmp)

    # Compute water weights
    w0, w1 = get_water_weights(tmp)

    #---------------------------------------------------------------------------
    # Compute probability of liquefaction
    #---------------------------------------------------------------------------
    prob0 = get_prob(PGA, a0, b0, c0, mag)
    prob1 = get_prob(PGA, a1, b1, c1, mag)
    prob = prob0 * w0 + prob1 * w1

    #---------------------------------------------------------------------------
    # Turn output and inputs into into grids and put in maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['holzer_liq_cal']['shortref']
    modellref = config['holzer_liq_cal']['longref']
    modeltype = 'Holzer/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(prob, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {
                'modeltype': modeltype
            }
        }
    }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {
                'units': 'm/s'
            }
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid._data, fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {
                'units': 'm'
            }
        }
    return maplayers
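
A minimal usage sketch for holzer_liq; the config path is hypothetical, and its [holzer_liq_cal] section must provide the layer file paths and per-Vs30 logistic parameters read above:

from configobj import ConfigObj

config = ConfigObj('/path/to/holzer_liq_cal.ini')   # hypothetical path
maplayers = holzer_liq('/path/to/grid.xml', config, saveinputs=True)
prob = maplayers['model']['grid'].getData()   # liquefaction probabilities
print(prob.max())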
Example #13
def slhrf_liq(shakefile, config, uncertfile=None, saveinputs=False,
              modeltype=None, displmodel=None,
              probtype=None, bounds=None):
    """
    Method for computing the probability of liquefaction using the SLHRF,
    primarily relying on the Wills et al. (2015) Vs30 map of California and
    HydroSHEDS distance to rivers.
    """
    layers = config['slhrf_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    elev_file = layers['elev']['file']
    dc_file = layers['dc']['file']
    dr_file = layers['dr']['file']
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    
    #---------------------------------------------------------------------------
    # Read in data layers
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']
    vs30_grid = GMTGrid.load(vs30_file)
    vs30 = vs30_grid.getData()
    elev = GDALGrid.load(elev_file, fgeodict, resample=True,
                         method=layers['elev']['interpolation'],
                         doPadding=True).getData()
    dc = GDALGrid.load(dc_file, fgeodict, resample=True,
                       method=layers['dc']['interpolation'],
                       doPadding=True).getData()
    dr = GDALGrid.load(dr_file, fgeodict, resample=True,
                       method=layers['dr']['interpolation'],
                       doPadding=True).getData()
    dw = np.minimum(dr, dc)


    #---------------------------------------------------------------------------
    # Evaluate the different factors
    #---------------------------------------------------------------------------
    Fgeo = np.zeros_like(vs30)
    for k, v in config['slhrf_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        Fgeo[ind] = float(v[1])
    Fz = z_factor(elev)
    Fmag = mag_factor(mag)
    Fpga = pga_factor(PGA)
    Fdw = dw_factor(dw)
    Fnehrp = nehrp_factor(vs30)
    
    #---------------------------------------------------------------------------
    # Combine factors
    #---------------------------------------------------------------------------
    SLHRF = Fz * Fmag * Fpga * Fdw * Fgeo * Fnehrp

    # Transform into a 'probability'
    prob = 0.4 * (1 - np.exp(-0.2 * SLHRF**2))

    #---------------------------------------------------------------------------
    # Turn output and inputs into into grids and put in maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()
    
    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['slhrf_liq_cal']['shortref']
    modellref = config['slhrf_liq_cal']['longref']
    modeltype = 'SLHRF/Wills'
    maplayers['model'] = {'grid': GDALGrid(prob, fgeodict), 
                          'label': 'Probability', 
                          'type': 'output',
                          'description': {'name': modelsref, 
                                          'longref': modellref, 
                                          'units': 'coverage',
                                          'shakemap': shakedetail, 
                                          'parameters': {'modeltype': modeltype}
                                          }
                          }

    if saveinputs is True:
        maplayers['slhrf'] = {'grid': GDALGrid(SLHRF, fgeodict),
                              'label': 'SLHRF', 
                              'type': 'input',
                              'description': {'units': 'none'}}
        maplayers['pga'] = {'grid': GDALGrid(PGA, fgeodict), 
                            'label': 'PGA (g)', 
                            'type': 'input',
                            'description': {'units': 'g', 'shakemap': shakedetail}}
        maplayers['vs30'] = {'grid': GDALGrid(vs30, fgeodict),
                             'label': 'Vs30 (m/s)', 
                             'type': 'input',
                             'description': {'units': 'm/s'}}
        maplayers['dw'] = {'grid': GDALGrid(dw, fgeodict),
                           'label': 'dw (km)', 
                           'type': 'input',
                           'description': {'units': 'km'}}
        maplayers['elev'] = {'grid': GDALGrid(elev, fgeodict),
                             'label': 'elev (m)', 
                             'type': 'input',
                             'description': {'units': 'm'}}
        maplayers['FPGA'] = {'grid': GDALGrid(Fpga, fgeodict),
                             'label': 'Fpga', 
                             'type': 'input',
                             'description': {'units': 'none'}}
        maplayers['FDW'] = {'grid': GDALGrid(Fdw, fgeodict),
                            'label': 'Fdw', 
                            'type': 'input',
                            'description': {'units': 'none'}}
        maplayers['FGEO'] = {'grid': GDALGrid(Fgeo, fgeodict),
                             'label': 'Fgeo', 
                             'type': 'input',
                             'description': {'units': 'none'}}
        maplayers['FZ'] = {'grid': GDALGrid(Fz, fgeodict),
                           'label': 'Fz', 
                           'type': 'input',
                           'description': {'units': 'none'}}
        maplayers['FNEHRP'] = {'grid': GDALGrid(Fnehrp, fgeodict),
                               'label': 'Fnehrp', 
                               'type': 'input',
                               'description': {'units': 'none'}}
    return maplayers
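
The SLHRF score is mapped to a pseudo-probability with prob = 0.4 * (1 - exp(-0.2 * SLHRF**2)), which grows from 0 and saturates at 0.4 for large scores. A quick standalone check of that transform:

import numpy as np

slhrf = np.array([0.0, 1.0, 2.0, 5.0])
prob = 0.4 * (1 - np.exp(-0.2 * slhrf**2))
print(prob)   # approaches 0.4 as the SLHRF score grows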
Example #14
def main(pargs, config):
    # get the users home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # stdout will now be logged as INFO, stderr will be logged as WARNING
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host, debug=pargs.debug)
    logger = plog.getLogger()

    try:
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not len(eid):
            eid = shake_tuple[0]["event_id"].lower()
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # Create a ComcatInfo object to hopefully tell us a number of things about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:  # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # Check to see if user wanted to override default tsunami criteria
        if pargs.tsunami != "auto":
            if pargs.tsunami == "on":
                tsunami = True
            else:
                tsunami = False

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if event is NOT a scenario and event time is in the future,
        # flag the event as a scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # Stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s.  Stopping processing, returning with 1.'
            logger.info(fmt % (event_folder))
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder, "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")

        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # Copy the grid.xml file to the version folder.
        # Sometimes (usually when testing) the input grid isn't called
        # grid.xml, so rename it here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # Check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            with open(tsunami_file, "rt") as f:
                tsunami = tsunami_toggle[f.read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n" % (pop_year, popfile))

        # Get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
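        # fall back to "UK" ("unknown") when the epicenter falls outside any
        # country in the ISO grid; this is a placeholder code, not the
        # United Kingdom (whose ISO2 code is "GB").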
        if cdict is None:
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]

        logger.info("Country code at epicenter is %s" % ccode)

        # get empirical fatality results
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get empirical economic loss results
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
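        # hold on to the ShakeMap grid; it is passed to PagerData.setInputs() below.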
        shakegrid = econexpmodel.getShakeGrid()

        # Get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException("Urban-rural grid file %s does not exist." % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
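        # getLosses returns the total semi-empirical fatality estimate along
        # with residential and non-residential fatality breakdowns.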
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get comment describing vulnerable structures in the region.
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical earthquakes in the region
        historical_comment = get_historical_comment(elat, elon, emag, exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
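        # draw_contour renders shaking contours over the population grid and
        # returns paths to the PDF and PNG maps, plus the list of cities that
        # were actually labeled on the map (passed to setMapInfo() below).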
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
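        # (higher-alert events are typically held for internal review and
        # released manually; _get_release_status encapsulates that decision.)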
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # Create a data object to encapsulate everything we know about the PAGER
        # results, and then serialize that to disk in the form of a number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        logger.info("Setting inputs.")
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment, secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # if we determined that the event is a scenario (either because the
        # ShakeMap event type was SCENARIO or because the origin time is in
        # the future), mark the PagerData object as a scenario.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s" % error)

        # copy the contents.xml file to the version folder
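        # (contents.xml describes the files that make up the PAGER product
        # for downstream consumers of the transferred product.)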
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send pdf as attachment to internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config
        # the PAGER product eventsource and eventsourcecode should
        # match the input ShakeMap settings for these properties.
        # This can possibly cause confusion if a regional ShakeMap is
        # trumped with one from NEIC, but this should happen less often
        # than an NEIC origin being made authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
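        # transfer() pushes the product using the sender(s) specified in the
        # config and returns a success flag plus a status message.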
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg = msg + "\n" + "Error occurred on %s\n" % (hostname)
        if gridfile is not None:
            msg = msg + "\n" + "Error on file: %s\n" % (gridfile)
        if eid is not None:
            msg = msg + "\n" + "Error on event: %s\n" % (eid)
        if pager_version is not None:
            msg = msg + "\n" + "Error on version: %i\n" % (pager_version)
        f.close()
        logger.critical(msg)
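        # the critical-level log handler is assumed to be configured upstream
        # to forward these messages by email.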
        logger.info("Sent error to email")
        return False