def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.

    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)

    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')

    if args.list_default_paths:
        list_default_paths()
        return

    if args.reset_default_paths:
        reset_default_paths()
        return

    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
        kmz = True
    else:
        gis = args.gis
        hdf5 = args.hdf5
        kmz = args.kmz

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if hdf5 or gis or kmz:
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % shakefile)
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
            if not os.path.exists(outfolder):
                os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that, when running in
        #     production, the shake grids are not archived, so if we ever
        #     need/want the exact grid used for the calculation (e.g., if
        #     there's ever a question about how the calculation was done),
        #     the safest thing is to store a copy of it here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        if args.uncertfile is not None:
            uncertfile = os.path.abspath(args.uncertfile)
            unc_copy = os.path.join(outfolder, "uncertainty.xml")
            shutil.copyfile(uncertfile, unc_copy)
        else:
            uncertfile = None

        # Write the path of the copied shakefile to a file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        with open(shakename, "wt") as shake_file:
            shake_file.write(shake_copy)
        filenames.append(shakename)

        # Check that shakemap bounds do not cross 180/-180 line

        if args.set_bounds is None:
            sd = ShakeGrid.getFileGeoDict(shakefile)
            if sd.xmin > sd.xmax:
                print('\nShakeMap crosses 180/-180 line, setting bounds so '
                      'only side with more land area is run')
                if sd.xmax + 180. > 180 - sd.xmin:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, -180., sd.xmax)
                else:
                    set_bounds = '%s, %s, %s, %s' % (sd.ymin, sd.ymax, sd.xmin,
                                                     180.)
                print('Bounds applied: %s' % set_bounds)
            else:
                set_bounds = args.set_bounds
        else:
            set_bounds = args.set_bounds

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            with open(config, 'r') as f:
                configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')

        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if set_bounds is not None:
            if 'zoom' in set_bounds:
                temp = set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                temp = eval(set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('Specified trimfile does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists

        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                                os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False

            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            notcov, newbnds = check_input_extents(
                conf, shakefile=shakefile,
                bounds=bounds
            )
            if len(notcov) > 0:
                print('\nThe following input layers do not cover'
                      ' the area of interest:\n\t%s' % '\n\t'.join(notcov))
                if newbnds is None:
                    print('\nCannot make bounds that work. '
                          'Skipping to next model\n')
                    continue
                else:
                    pnt = '%s, %s, %s, %s' % (
                        newbnds['xmin'], newbnds['xmax'],
                        newbnds['ymin'], newbnds['ymax'])
                    print('Running model for new bounds that are fully covered'
                          ' by input layer: %s' % pnt)
                    bounds2 = newbnds
            else:
                bounds2 = bounds

            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds2,
                                      trimfile=trimfile)

                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds2,
                                     trimfile=trimfile)
            else:
                print('Unknown model function %s specified in config, '
                      'skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname, args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = os.path.join(outfolder, filename + '.hdf5')
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, filenameh)
                filenames.append(filenameh)

            if gis or kmz:
                for key in maplayers:
                    # Rename 'std' key to 'beta_sigma'
                    if key == 'std':
                        key_label = 'beta_sigma'
                    else:
                        key_label = key
                    if gis:
                        filen = os.path.join(outfolder, '%s_%s.bil'
                                             % (filename, key_label))
                        fileh = os.path.join(outfolder, '%s_%s.hdr'
                                             % (filename, key_label))
                        fileg = os.path.join(outfolder, '%s_%s.tif'
                                             % (filename, key_label))

                        GDALGrid.copyFromGrid(
                            maplayers[key]['grid']).save(filen)
                        cflags = '-co COMPRESS=DEFLATE -co predictor=2'
                        srs = '-a_srs EPSG:4326'
                        cmd = 'gdal_translate %s %s -of GTiff %s %s' % (
                            srs, cflags, filen, fileg)
                        rc, so, se = get_command_output(cmd)
                        # Delete bil file and its header
                        os.remove(filen)
                        os.remove(fileh)
                        filenames.append(fileg)
                    if kmz and not (key.startswith('quantile')
                                    or key.startswith('std')):
                        plotorder, logscale, lims, colormaps, maskthresh = \
                            parseConfigLayers(maplayers, conf, keys=['model'])
                        maxprob = np.nanmax(maplayers[key]['grid'].getData())
                        if key == 'model':
                            qdict = {
                                k: maplayers[k] for k in maplayers.keys()
                                if k.startswith('quantile')
                            }
                        else:
                            qdict = None
                        if maskthresh is None:
                            maskthresh = [0.]
                        if maxprob >= maskthresh[0]:
                            filen = os.path.join(outfolder, '%s_%s.kmz'
                                                 % (filename, key_label))
                            filek = create_kmz(maplayers[key], filen,
                                               mask=maskthresh[0],
                                               levels=lims[0],
                                               qdict=qdict)
                            filenames.append(filek)
                        else:
                            print('No unmasked pixels present, skipping kmz '
                                  'file creation')

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                #  # Make binary output for ShakeCast
                #  filef = os.path.join(outfolder, '%s_model.flt'
                #                       % filename)
                #  # And get name of header
                #  filefh = os.path.join(outfolder, '%s_model.hdr'
                #                        % filename)
                #  # Make file
                #  write_floats(filef, maplayers['model']['grid'])
                #  filenames.append(filef)
                #  filenames.append(filefh)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            if len(results) == 0:
                raise Exception('No models were run. Cannot make webpages.')
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode,
                point=point, gf_version=args.gf_version,
                pdlcall=args.pdlcall)
            filenames = filenames + outputs

#        # create transparent png file
#        outputs = create_png(outdir)
#        filenames = filenames + outputs
#
#        # create info file
#        infofile = create_info(outdir)
#        filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)

        return filenames
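
# Usage sketch (not part of the original example): run_gfail accepts either a
# dict or an argparse Namespace. The attribute names below are taken from the
# accesses in the function body above; the file paths are placeholders.
#
#     from argparse import Namespace
#     args = Namespace(
#         shakefile='grid.xml', config='godt_2008.ini',
#         output_filepath='output', config_filepath=None, data_path=None,
#         gis=True, hdf5=False, kmz=False, make_webpage=False,
#         set_default_paths=False, list_default_paths=False,
#         reset_default_paths=False, set_bounds=None, uncertfile=None,
#         extract_contents=False, trimfile=None, finite_fault=None,
#         save_inputs=False, appendname=None)
#     created_files = run_gfail(args)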
Example #2
def test_save():
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir, 'test.xml')
    try:
        print('Testing save/read functionality for shakemap grids...')
        pga = np.arange(0, 16, dtype=np.float32).reshape(4, 4)
        pgv = np.arange(1, 17, dtype=np.float32).reshape(4, 4)
        mmi = np.arange(2, 18, dtype=np.float32).reshape(4, 4)
        geodict = GeoDict({
            'xmin': 0.5,
            'xmax': 3.5,
            'ymin': 0.5,
            'ymax': 3.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 4,
            'nx': 4
        })
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        shakeDict = {
            'event_id': 'usabcd1234',
            'shakemap_id': 'usabcd1234',
            'shakemap_version': 1,
            'code_version': '4.0',
            'process_timestamp': datetime.utcnow(),
            'shakemap_originator': 'us',
            'map_status': 'RELEASED',
            'shakemap_event_type': 'ACTUAL'
        }
        eventDict = {
            'event_id': 'usabcd1234',
            'magnitude': 7.6,
            'depth': 1.4,
            'lat': 2.0,
            'lon': 2.0,
            'event_timestamp': datetime.utcnow(),
            'event_network': 'us',
            'event_description': 'sample event'
        }
        uncDict = {'pga': (0.0, 0), 'pgv': (0.0, 0), 'mmi': (0.0, 0)}
        shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)

        print('Testing save/read functionality...')
        shake.save(testfile, version=3)
        shake2 = ShakeGrid.load(testfile)
        for layer in ['pga', 'pgv', 'mmi']:
            tdata = shake2.getLayer(layer).getData()
            np.testing.assert_almost_equal(tdata, layers[layer])

        print('Passed save/read functionality for shakemap grids.')

        print('Testing getFileGeoDict method...')
        fgeodict = ShakeGrid.getFileGeoDict(testfile)
        print('Passed getFileGeoDict method.')

        print('Testing loading with bounds (no resampling or padding)...')
        sampledict = GeoDict({
            'xmin': -0.5,
            'xmax': 3.5,
            'ymin': -0.5,
            'ymax': 3.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 5,
            'nx': 5
        })
        shake3 = ShakeGrid.load(testfile,
                                samplegeodict=sampledict,
                                resample=False,
                                doPadding=False,
                                padValue=np.nan)
        tdata = shake3.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata, layers['pga'])

        print('Passed loading with bounds (no resampling or padding)...')

        print('Testing loading shakemap with padding, no resampling...')
        newdict = GeoDict({
            'xmin': -0.5,
            'xmax': 4.5,
            'ymin': -0.5,
            'ymax': 4.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 6,
            'nx': 6
        })
        shake4 = ShakeGrid.load(testfile,
                                samplegeodict=newdict,
                                resample=False,
                                doPadding=True,
                                padValue=np.nan)
        output = np.array([[np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
                           [np.nan, 0.0, 1.0, 2.0, 3.0, np.nan],
                           [np.nan, 4.0, 5.0, 6.0, 7.0, np.nan],
                           [np.nan, 8.0, 9.0, 10.0, 11.0, np.nan],
                           [np.nan, 12.0, 13.0, 14.0, 15.0, np.nan],
                           [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]])
        tdata = shake4.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata, output)
        print('Passed loading shakemap with padding, no resampling...')

        # make a bigger grid
        pga = np.arange(0, 36, dtype=np.float32).reshape(6, 6)
        pgv = np.arange(1, 37, dtype=np.float32).reshape(6, 6)
        mmi = np.arange(2, 38, dtype=np.float32).reshape(6, 6)
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        geodict = GeoDict({
            'xmin': 0.5,
            'xmax': 5.5,
            'ymin': 0.5,
            'ymax': 5.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 6,
            'nx': 6
        })
        shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)
        shake.save(testfile, version=3)

        print('Testing resampling, no padding...')
        littledict = GeoDict({
            'xmin': 2.0,
            'xmax': 4.0,
            'ymin': 2.0,
            'ymax': 4.0,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 3,
            'nx': 3
        })
        shake5 = ShakeGrid.load(testfile,
                                samplegeodict=littledict,
                                resample=True,
                                doPadding=False,
                                padValue=np.nan)
        output = np.array([[10.5, 11.5, 12.5], [16.5, 17.5, 18.5],
                           [22.5, 23.5, 24.5]])
        tdata = shake5.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata, output)
        print('Passed resampling, no padding...')

        print('Testing resampling and padding...')
        pga = np.arange(0, 16, dtype=np.float32).reshape(4, 4)
        pgv = np.arange(1, 17, dtype=np.float32).reshape(4, 4)
        mmi = np.arange(2, 18, dtype=np.float32).reshape(4, 4)
        geodict = GeoDict({
            'xmin': 0.5,
            'ymax': 3.5,
            'ymin': 0.5,
            'xmax': 3.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 4,
            'nx': 4
        })
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)
        shake.save(testfile, version=3)
        bigdict = GeoDict({
            'xmin': 0.0,
            'xmax': 4.0,
            'ymin': 0.0,
            'ymax': 4.0,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 5,
            'nx': 5
        })
        shake6 = ShakeGrid.load(testfile,
                                samplegeodict=bigdict,
                                resample=True,
                                doPadding=True,
                                padValue=np.nan)
        tdata = shake6.getLayer('pga').getData()
        output = np.array([[np.nan, np.nan, np.nan, np.nan, np.nan],
                           [np.nan, 2.5, 3.5, 4.5, np.nan],
                           [np.nan, 6.5, 7.5, 8.5, np.nan],
                           [np.nan, 10.5, 11.5, 12.5, np.nan],
                           [np.nan, np.nan, np.nan, np.nan, np.nan]])
        np.testing.assert_almost_equal(tdata, output)
        print('Passed resampling and padding...')
    except Exception as error:
        print('Failed to read grid.xml format file "%s". Error "%s".' %
              (testfile, str(error)))
        assert 0 == 1
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
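
# Minimal driver (an assumption, not shown in the original example) so the
# test can be run as a standalone script:
if __name__ == '__main__':
    test_save()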
Example #3
    def calcExposure(self, shakefile):
        """Calculate population exposure to shaking, per country, plus total
        exposure across all countries.

        :param shakefile:
          Path to ShakeMap grid.xml file.
        :returns:
          Dictionary containing country code (ISO2) keys, and values of
          10 element arrays representing population exposure to MMI 1-10.
          The dictionary will contain an additional key 'TotalExposure',
          whose value is the exposure summed across all countries, and a
          field "maximum_border_mmi" giving the maximum MMI value along
          any edge of the ShakeMap.
        """
        # get shakemap geodict
        shakedict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')

        # get population geodict
        popdict, t = self._pop_class.getFileGeoDict(self._popfile)

        # get country code geodict
        isodict, t = self._iso_class.getFileGeoDict(self._isofile)

        # special case for very high latitude events that may be outside
        # the bounds of our population data...
        if not popdict.intersects(shakedict):
            expdict = {'UK': np.zeros((10,)),
                       'TotalExposure': np.zeros((10,))}
            return expdict
        
        if popdict == shakedict == isodict:
            # special case, probably for testing...
            self._shakegrid = ShakeGrid.load(shakefile, adjust='res')
            self._popgrid = self._pop_class.load(self._popfile)
            self._isogrid = self._iso_class.load(self._isofile)
        else:
            sampledict = popdict.getBoundsWithin(shakedict)
            self._shakegrid = ShakeGrid.load(shakefile,
                                             samplegeodict=sampledict,
                                             resample=True, method='linear',
                                             adjust='res')
            self._popgrid = self._pop_class.load(self._popfile,
                                                 samplegeodict=sampledict,
                                                 resample=False,
                                                 doPadding=True,
                                                 padValue=np.nan)
            self._isogrid = self._iso_class.load(self._isofile,
                                                 samplegeodict=sampledict,
                                                 resample=True,
                                                 method='nearest',
                                                 doPadding=True, padValue=0)

        mmidata = self._shakegrid.getLayer('mmi').getData()
        popdata = self._popgrid.getData()
        isodata = self._isogrid.getData()

        eventyear = self._shakegrid.getEventDict()['event_timestamp'].year

        # To avoid far-future scenarios where PAGER models are probably
        # invalid, check whether the gap between the event year and the
        # population data collection year exceeds either of two thresholds.
        if eventyear > self._popyear:
            tdiff = eventyear - self._popyear
            if SCENARIO_WARNING < tdiff < SCENARIO_ERROR:
                msg = ('The input ShakeMap event year is more than %i years '
                       'from the population date. PAGER results for events '
                       'this far in the future may not be valid.'
                       % SCENARIO_WARNING)
                warnings.warn(msg)
            if tdiff > SCENARIO_ERROR:
                msg = ('The input ShakeMap event year is more than %i years '
                       'from the population date. PAGER results for events '
                       'this far in the future are not valid. Stopping.'
                       % SCENARIO_ERROR)
                raise PagerException(msg)
        
        ucodes = np.unique(isodata)
        for ccode in ucodes:
            cidx = (isodata == ccode)
            popdata[cidx] = self._popgrowth.adjustPopulation(
                popdata[cidx], ccode, self._popyear, eventyear)

        exposure_dict = calc_exposure(mmidata, popdata, isodata)
        newdict = {}
        # Get rolled up exposures
        total = np.zeros((10,), dtype=np.uint32)
        for isocode, value in exposure_dict.items():
            cdict = self._country.getCountry(int(isocode))
            if cdict is None:
                ccode = 'UK'
            else:
                ccode = cdict['ISO2']
            newdict[ccode] = value
            total += value

        newdict['TotalExposure'] = total

        # get the maximum MMI value along any of the four map edges
        nrows, ncols = mmidata.shape
        top = mmidata[0, :].max()
        bottom = mmidata[nrows - 1, :].max()
        left = mmidata[:, 0].max()
        right = mmidata[:, ncols - 1].max()
        newdict['maximum_border_mmi'] = np.array(
            [top, bottom, left, right]).max()
        
        return newdict
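
# Self-contained sketch (with made-up data) of the border-maximum pattern
# used at the end of calcExposure above: take the largest value found along
# any of the four edges of a 2D grid.
import numpy as np

mmi = np.arange(12.0).reshape(3, 4)
border_max = max(mmi[0, :].max(), mmi[-1, :].max(),
                 mmi[:, 0].max(), mmi[:, -1].max())
assert border_max == 11.0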
Example #4
    def __init__(self, config, shakefile, model):
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config '
                            '%s.' % (model, config))
        # do everything here short of calculations - parse config, assemble
        # eqn strings, load data.
        self.model = model
        cmodel = config['logistic_models'][model]
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs,
                                              self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)

        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')

        # get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        DAY = eventdict['event_timestamp'].day
        HOUR = eventdict['event_timestamp'].hour

        # now find the layer that is our base layer and get the largest
        # bounds we can guarantee not to exceed the shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type')

        # now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                       resample=True, doPadding=True,
                                       adjust='res')

        # load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile, sampledict,
                                                   resample=True,
                                                   method=interp,
                                                   doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile, sampledict,
                                                    resample=True,
                                                    method=interp,
                                                    doPadding=True)
                            else:
                                msg = ('Layer %s (file %s) does not appear '
                                       'to be a valid GMT or ESRI file.'
                                       % (layername, layerfile))
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                # first, figure out what kind of file we have (or is it a
                # directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                       method=interp, doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile, sampledict, resample=True,
                                        method=interp, doPadding=True)
                else:
                    msg = ('Layer %s (file %s) does not appear to be a valid '
                           'GMT or ESRI file.' % (layername, layerfile))
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]
        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = self.shakemap.getGeoDict()
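
# Stand-alone sketch of the equation-assembly pattern used in __init__ above:
# coefficient/term pairs are rendered as strings and joined into a single
# expression. The coefficient values below are placeholders, not values from
# any real model.
coeffs = {'b0': -3.649, 'b1': 0.0133, 'b2': 0.0364}
terms = {'b1': 'pga', 'b2': 'slope'}
nuggets = [str(coeffs['b0'])]
for key in sorted(terms):
    nuggets.append('(%g * %s)' % (coeffs[key], terms[key]))
equation = ' + '.join(nuggets)
print(equation)  # -3.649 + (0.0133 * pga) + (0.0364 * slope)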
Example #5
def godt2008(shakefile,
             config,
             uncertfile=None,
             saveinputs=False,
             displmodel=None,
             bounds=None,
             slopediv=100.,
             codiv=10.,
             numstd=None,
             trimfile=None):
    """
    This function runs the Godt and others (2008) global method for a given
    ShakeMap. The Factor of Safety is calculated using infinite slope analysis
    assuming dry conditions. The method uses a threshold Newmark displacement
    and estimates areal coverage by doing the calculations for each slope
    quantile.

    Args:
        shakefile (str): Path to shakemap xml file.
        config (ConfigObj): ConfigObj of config file containing inputs required
            for running the model
        uncertfile (str): Path to shakemap uncertainty xml file (optional).
        saveinputs (bool): Whether or not to return the model input layers,
            False (default) returns only the model output (one layer).
        displmodel (str): Newmark displacement regression model to use

            * ``'J_PGA'`` (default) -- PGA-based model, equation 6 from
              Jibson (2007).
            * ``'J_PGA_M'`` -- PGA and M-based model, equation 7 from
              Jibson (2007).
            * ``'RS_PGA_M'`` -- PGA and M-based model from Rathje and
              Saygili (2009).
            * ``'RS_PGA_PGV'`` -- PGA and PGV-based model, equation 6
              from Saygili and Rathje (2008).

        bounds (dict): Optional dictionary with keys 'xmin', 'xmax', 'ymin',
            'ymax' that defines a subset of the shakemap area to compute.
        slopediv (float): Divide slope by this number to get slope in degrees
            (Verdin datasets need to be divided by 100).
        codiv (float): Divide cohesion input layer by this number
            (For Godt method, need to divide by 10 because that is how it was
            calibrated).
        numstd (float): Number of (+/-) standard deviations to use if
            uncertainty is computed (uncertfile must be supplied).
        trimfile (str): Shapefile of earth's land masses used to trim
            offshore areas of the model.

    Returns:
        dict: Dictionary containing output and input layers (if
        saveinputs=True):

        .. code-block:: python

            {
                'grid': mapio grid2D object,
                'label': 'label for colorbar and top line of subtitle',
                'type': 'output or input to model',
                'description': {'name': 'short reference of model',
                                'longref': 'full model reference',
                                'units': 'units of output',
                                'shakemap': 'information about shakemap used',
                                'event_id': 'shakemap event id',
                                'parameters': 'dictionary of model parameters
                                               used'

                }
            }

    Raises:
         NameError: when unable to parse the config correctly (probably a
             formatting issue in the configfile) or when unable to find the
             shakefile (Shakemap filepath) -- these cause program to end.

    """
    # TODO:
    #    - Add 'all' -- averages Dn from all four equations, add term to
    #      convert PGA and PGV to Ia and use other equations, add Ambraseys and
    #      Menu (1988) option.

    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # See if trimfile exists
    if trimfile is not None:
        if not os.path.exists(trimfile):
            print('Specified trimfile does not exist: %s\n'
                  'Ocean will not be trimmed' % trimfile)
            trimfile = None
        elif os.path.splitext(trimfile)[1] != '.shp':
            print('trimfile must be a shapefile, ocean will not be trimmed')
            trimfile = None

    # Parse config
    try:  # May want to add error handling so that if refs aren't given,
        # 'unknown' is simply kept
        slopefilepath = config['godt_2008']['layers']['slope']['filepath']
        slopeunits = config['godt_2008']['layers']['slope']['units']
        cohesionfile = config['godt_2008']['layers']['cohesion']['file']
        cohesionunits = config['godt_2008']['layers']['cohesion']['units']
        frictionfile = config['godt_2008']['layers']['friction']['file']
        frictionunits = config['godt_2008']['layers']['friction']['units']

        thick = float(config['godt_2008']['parameters']['thick'])
        uwt = float(config['godt_2008']['parameters']['uwt'])
        nodata_cohesion = \
            float(config['godt_2008']['parameters']['nodata_cohesion'])
        nodata_friction = \
            float(config['godt_2008']['parameters']['nodata_friction'])
        dnthresh = float(config['godt_2008']['parameters']['dnthresh'])
        fsthresh = float(config['godt_2008']['parameters']['fsthresh'])
        acthresh = float(config['godt_2008']['parameters']['acthresh'])
        try:
            slopemin = float(config['godt_2008']['parameters']['slopemin'])
        except:
            slopemin = 0.01
            print('No slopemin found in config file, using 0.01 deg '
                  'for slope minimum')
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    if displmodel is None:
        try:
            displmodel = config['godt_2008']['parameters']['displmodel']
        except:
            print('No regression model specified, using default of J_PGA_M')
            displmodel = 'J_PGA_M'

    # TODO: ADD ERROR CATCHING ON UNITS, MAKE SURE THEY ARE WHAT THEY SHOULD
    #       BE FOR THIS MODEL

    try:  # Try to fetch source information from config
        modelsref = config['godt_2008']['shortref']
        modellref = config['godt_2008']['longref']
        slopesref = config['godt_2008']['layers']['slope']['shortref']
        slopelref = config['godt_2008']['layers']['slope']['longref']
        cohesionsref = config['godt_2008']['layers']['cohesion']['shortref']
        cohesionlref = config['godt_2008']['layers']['cohesion']['longref']
        frictionsref = config['godt_2008']['layers']['friction']['shortref']
        frictionlref = config['godt_2008']['layers']['friction']['longref']
    except:
        print('Was not able to retrieve all references from config file. '
              'Continuing')

    # Figure out how/if need to cut anything
    geodict = ShakeGrid.getFileGeoDict(shakefile)  # , adjust='res')
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if geodict.xmin < geodict.xmax:  # only if signs are not opposite
            if (geodict.xmin > bounds['xmin'] or geodict.xmax < bounds['xmax']
                    or geodict.ymin > bounds['ymin']
                    or geodict.ymax < bounds['ymax']):
                print('Specified bounds are outside shakemap area, using '
                      'ShakeMap bounds instead.')
                bounds = None

    if bounds is not None:
        tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                              bounds['xmax'],
                                              bounds['ymin'],
                                              bounds['ymax'],
                                              geodict.dx,
                                              geodict.dy,
                                              inside=False)
        # If the ShakeMap geodict crosses the 180/-180 line, fix the geodict
        # so things don't break
        if geodict.xmin > geodict.xmax:
            if tempgdict.xmin < 0:
                geodict._xmin -= 360.
            else:
                geodict._xmax += 360.
        geodict = geodict.getBoundsWithin(tempgdict)

    basegeodict, firstcol = GDALGrid.getFileGeoDict(
        os.path.join(slopefilepath, 'slope_min.bil'))
    if basegeodict == geodict:
        sampledict = geodict
    else:
        sampledict = basegeodict.getBoundsWithin(geodict)

    # Do we need to subdivide baselayer?
    if 'divfactor' in config['godt_2008'].keys():
        divfactor = float(config['godt_2008']['divfactor'])
        if divfactor != 1.:
            # adjust sampledict so everything will be resampled (cut one cell
            # off each edge so the result stays inside the bounds)
            newxmin = sampledict.xmin - sampledict.dx/2. + \
                sampledict.dx/(2.*divfactor) + sampledict.dx
            newymin = sampledict.ymin - sampledict.dy/2. + \
                sampledict.dy/(2.*divfactor) + sampledict.dy
            newxmax = sampledict.xmax + sampledict.dx/2. - \
                sampledict.dx/(2.*divfactor) - sampledict.dx
            newymax = sampledict.ymax + sampledict.dy/2. - \
                sampledict.dy/(2.*divfactor) - sampledict.dy
            newdx = sampledict.dx / divfactor
            newdy = sampledict.dy / divfactor

            sampledict = GeoDict.createDictFromBox(newxmin,
                                                   newxmax,
                                                   newymin,
                                                   newymax,
                                                   newdx,
                                                   newdy,
                                                   inside=True)

    tmpdir = tempfile.mkdtemp()

    # Load in ShakeMap and get new geodictionary
    temp = ShakeGrid.load(shakefile)  # , adjust='res')
    junkfile = os.path.join(tmpdir, 'temp.bil')
    GDALGrid.copyFromGrid(temp.getLayer('pga')).save(junkfile)
    pga = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    GDALGrid.copyFromGrid(temp.getLayer('pgv')).save(junkfile)
    pgv = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    # Update geodictionary
    sampledict = pga.getGeoDict()

    t2 = temp.getEventDict()
    M = t2['magnitude']
    event_id = t2['event_id']
    shakedict = temp.getShakeDict()
    del temp

    # read in uncertainty if present
    if uncertfile is not None:
        try:
            temp = ShakeGrid.load(uncertfile)  # , adjust='res')
            GDALGrid.copyFromGrid(temp.getLayer('stdpga')).save(junkfile)
            uncertpga = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
            GDALGrid.copyFromGrid(temp.getLayer('stdpgv')).save(junkfile)
            uncertpgv = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
        except:
            print('Could not read uncertainty file, ignoring uncertainties')
            uncertfile = None
        if numstd is None:
            numstd = 1.

    # Read in all the slope files, divide all by 100 to get to slope in
    # degrees (because input files are multiplied by 100.)
    slopes = []
    quantiles = [
        'slope_min.bil', 'slope10.bil', 'slope30.bil', 'slope50.bil',
        'slope70.bil', 'slope90.bil', 'slope_max.bil'
    ]
    for quant in quantiles:
        tmpslp = quickcut(os.path.join(slopefilepath, quant), sampledict)
        tgd = tmpslp.getGeoDict()
        if tgd != sampledict:
            raise Exception('Input layers are not aligned to same geodict')
        else:
            slopes.append(tmpslp.getData() / slopediv)

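    # Stack the seven quantile grids into one (ny, nx, 7) array so the FS/Dn
    # calculations below operate across all quantiles at once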
    slopestack = np.dstack(slopes)

    # Change any zero slopes to a very small number to avoid dividing by
    # zero later
    slopestack[slopestack == 0] = 1e-8

    # Read in the cohesion and friction files and duplicate layers so they
    # are same shape as slope structure

    tempco = quickcut(cohesionfile, sampledict, method='near')
    tempco = tempco.getData()[:, :, np.newaxis] / codiv
    cohesion = np.repeat(tempco, 7, axis=2)
    cohesion[cohesion == -999.9] = nodata_cohesion
    cohesion = np.nan_to_num(cohesion)
    cohesion[cohesion == 0] = nodata_cohesion

    tempfric = quickcut(frictionfile, sampledict, method='near')
    tempfric = tempfric.getData().astype(float)[:, :, np.newaxis]
    friction = np.repeat(tempfric, 7, axis=2)
    friction[friction == -9999] = nodata_friction
    friction = np.nan_to_num(friction)
    friction[friction == 0] = nodata_friction

    # Do the calculations using Jibson (2007) PGA only model for Dn
    FS = (cohesion / (uwt * thick * np.sin(slopestack * (np.pi / 180.))) +
          np.tan(friction * (np.pi / 180.)) / np.tan(slopestack *
                                                     (np.pi / 180.)))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    # This gives ac in g, equations that multiply by g give ac in m/s2
    Ac = (FS - 1) * np.sin(slopestack * (np.pi / 180.)).astype(float)
    Ac[Ac < acthresh] = acthresh

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = np.repeat(pga.getData()[:, :, np.newaxis] / 100., 7,
                    axis=2).astype(float)
    if 'PGV' in displmodel:  # Load in PGV also, in cm/sec
        PGV = np.repeat(pgv.getData()[:, :, np.newaxis], 7,
                        axis=2).astype(float)
    else:
        PGV = None

    if uncertfile is not None:
        stdpga = np.repeat(uncertpga.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        stdpgv = np.repeat(uncertpgv.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        # estimate PGA +- 1std
        PGAmin = np.exp(np.log(PGA * 100) - numstd * stdpga) / 100
        PGAmax = np.exp(np.log(PGA * 100) + numstd * stdpga) / 100
        if 'PGV' in displmodel:
            PGVmin = np.exp(np.log(PGV) - numstd * stdpgv)
            PGVmax = np.exp(np.log(PGV) + numstd * stdpgv)
        else:
            PGVmin = None
            PGVmax = None

    # Ignore errors so still runs when Ac > PGA, just leaves nan instead
    # of crashing.
    np.seterr(invalid='ignore')

    Dn, logDnstd, logtype = NMdisp(Ac, PGA, model=displmodel, M=M, PGV=PGV)
    if uncertfile is not None:
        Dnmin, logDnstdmin, logtype = NMdisp(Ac,
                                             PGAmin,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmin)
        Dnmax, logDnstdmax, logtype = NMdisp(Ac,
                                             PGAmax,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmax)

    PROB = Dn.copy()
    PROB[PROB < dnthresh] = 0.
    PROB[PROB >= dnthresh] = 1.
    PROB = np.sum(PROB, axis=2)
    if uncertfile is not None:
        PROBmin = Dnmin.copy()
        PROBmin[PROBmin <= dnthresh] = 0.
        PROBmin[PROBmin > dnthresh] = 1.
        PROBmin = np.sum(PROBmin, axis=2)
        PROBmax = Dnmax.copy()
        PROBmax[PROBmax <= dnthresh] = 0.
        PROBmax[PROBmax > dnthresh] = 1.
        PROBmax = np.sum(PROBmax, axis=2)

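    # Map the count of slope quantiles whose displacement exceeds the
    # threshold (1-7) to areal coverage proportions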
    PROB[PROB == 1.] = 0.01
    PROB[PROB == 2.] = 0.10
    PROB[PROB == 3.] = 0.30
    PROB[PROB == 4.] = 0.50
    PROB[PROB == 5.] = 0.70
    PROB[PROB == 6.] = 0.90
    PROB[PROB == 7.] = 0.99

    if uncertfile is not None:
        PROBmin[PROBmin == 1.] = 0.01
        PROBmin[PROBmin == 2.] = 0.10
        PROBmin[PROBmin == 3.] = 0.30
        PROBmin[PROBmin == 4.] = 0.50
        PROBmin[PROBmin == 5.] = 0.70
        PROBmin[PROBmin == 6.] = 0.90
        PROBmin[PROBmin == 7.] = 0.99
        PROBmax[PROBmax == 1.] = 0.01
        PROBmax[PROBmax == 2.] = 0.10
        PROBmax[PROBmax == 3.] = 0.30
        PROBmax[PROBmax == 4.] = 0.50
        PROBmax[PROBmax == 5.] = 0.70
        PROBmax[PROBmax == 6.] = 0.90
        PROBmax[PROBmax == 7.] = 0.99

    if slopemin is not None:
        PROB[slopestack[:, :, 6] <= slopemin] = 0.
        # uncert too
        if uncertfile is not None:
            PROBmin[slopestack[:, :, 6] <= slopemin] = 0.
            PROBmax[slopestack[:, :, 6] <= slopemin] = 0.

    # Turn output and inputs into grids and put in the maplayers dictionary
    maplayers = collections.OrderedDict()

    shakedetail = '%s_ver%s' % (shakedict['shakemap_id'],
                                shakedict['shakemap_version'])

    description = {
        'name': modelsref,
        'longref': modellref,
        'units': 'Proportion of Area Affected',
        'shakemap': shakedetail,
        'event_id': event_id,
        'parameters': {
            'displmodel': displmodel,
            'thickness_m': thick,
            'unitwt_kNm3': uwt,
            'dnthresh_cm': dnthresh,
            'acthresh_g': acthresh,
            'fsthresh': fsthresh,
            'modeltype': 'Landslide'
        }
    }
    PROBgrid = GDALGrid(PROB, sampledict)
    if trimfile is not None:
        PROBgrid = trim_ocean(PROBgrid, trimfile)

    maplayers['model'] = {
        'grid': PROBgrid,
        'label': 'Landslide - Proportion of Area Affected',
        'type': 'output',
        'description': description
    }

    if uncertfile is not None:
        PROBmingrid = GDALGrid(PROBmin, sampledict)
        PROBmaxgrid = GDALGrid(PROBmax, sampledict)
        if trimfile is not None:
            PROBmingrid = trim_ocean(PROBmingrid, trimfile)
            PROBmaxgrid = trim_ocean(PROBmaxgrid, trimfile)
        maplayers['modelmin'] = {
            'grid': PROBmingrid,
            'label': 'Landslide Probability-%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }
        maplayers['modelmax'] = {
            'grid': PROBmaxgrid,
            'label': 'Landslide Probability+%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA[:, :, 0], sampledict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        if 'PGV' in displmodel:
            maplayers['pgv'] = {
                'grid': GDALGrid(PGV[:, :, 0], sampledict),
                'label': 'PGV (cm/s)',
                'type': 'input',
                'description': {
                    'units': 'cm/s',
                    'shakemap': shakedetail
                }
            }
        maplayers['minFS'] = {
            'grid': GDALGrid(np.min(FS, axis=2), sampledict),
            'label': 'Min Factor of Safety',
            'type': 'input',
            'description': {
                'units': 'unitless'
            }
        }
        maplayers['max slope'] = {
            'grid': GDALGrid(slopestack[:, :, -1], sampledict),
            'label': r'Maximum slope ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': slopesref,
                'longref': slopelref
            }
        }
        maplayers['cohesion'] = {
            'grid': GDALGrid(cohesion[:, :, 0], sampledict),
            'label': 'Cohesion (kPa)',
            'type': 'input',
            'description': {
                'units': 'kPa (adjusted)',
                'name': cohesionsref,
                'longref': cohesionlref
            }
        }
        maplayers['friction angle'] = {
            'grid': GDALGrid(friction[:, :, 0], sampledict),
            'label': r'Friction angle ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': frictionsref,
                'longref': frictionlref
            }
        }
        if uncertfile is not None:
            maplayers['pgamin'] = {
                'grid': GDALGrid(PGAmin[:, :, 0], sampledict),
                'label': 'PGA - %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
            maplayers['pgamax'] = {
                'grid': GDALGrid(PGAmax[:, :, 0], sampledict),
                'label': 'PGA + %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
        if 'PGV' in displmodel:
            if uncertfile is not None:
                maplayers['pgvmin'] = {
                    'grid': GDALGrid(PGVmin[:, :, 0], sampledict),
                    'label': 'PGV - %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }
                maplayers['pgvmax'] = {
                    'grid': GDALGrid(PGVmax[:, :, 0], sampledict),
                    'label': 'PGV + %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }

    shutil.rmtree(tmpdir)

    return maplayers
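
# Stand-alone sketch of the infinite-slope Factor of Safety computed inside
# godt2008 above (dry conditions), reduced to a single cell. The values in
# the example call are placeholders, not calibrated defaults.
import numpy as np

def factor_of_safety(cohesion, uwt, thick, slope_deg, friction_deg):
    # FS = c / (uwt * t * sin(slope)) + tan(friction) / tan(slope),
    # matching the array expression used for FS in godt2008
    slope = np.radians(slope_deg)
    friction = np.radians(friction_deg)
    return (cohesion / (uwt * thick * np.sin(slope))
            + np.tan(friction) / np.tan(slope))

print(factor_of_safety(10., 15.7, 2.4, 30., 35.))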
Example #6
def kritikos_fuzzygamma(shakefile, config, bounds=None):
    """
    Runs the Kritikos procedure with the fuzzy gamma overlay method.
    """

    cmodel = config['statistical_models']['kritikos_2015']
    gamma = cmodel['gamma_value']

    # This section reads in items from the config file
    # Read in layer files and get data
    layers = cmodel['layers']
    try:
        # Slope
        slope_file = layers['slope']
        # DFF
        dff_file = layers['dff']
        # DFS
        dfs_file = layers['dfs']
        # elev
        elev_file = layers['elev']
    except:
        print('Unable to retrieve grid data.')

    try:
        div = cmodel['divisor']
        # Load in divisors
        MMI_div = div['MMI']
        slope_div = div['slope']
        dff_div = div['dff']
        dfs_div = div['dfs']
        slope_pos_div = div['slope_pos']
    except:
        print('Unable to retrieve divisors.')

    try:
        power = cmodel['power']
        # Load in powers
        MMI_power = power['MMI']
        slope_power = power['slope']
        dff_power = power['dff']
        dfs_power = power['dfs']
        slope_pos_power = power['slope_pos']
    except:
        print('Unable to retrieve powers.')

    # Cut and resample, create geodict
    try:
        bounds = None
        shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        slopedict, duplicated = GDALGrid.getFileGeoDict(slope_file)
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds[
                    'xmax'] or shkgdict.ymin > bounds[
                        'ymin'] or shkgdict.ymax < bounds['ymax']:
                print(
                    'Specified bounds are outside shakemap area, using ShakeMap bounds instead'
                )
                bounds = None
        if bounds is not None:
            tempgdict = GeoDict(
                {
                    'xmin': bounds['xmin'],
                    'ymin': bounds['ymin'],
                    'xmax': bounds['xmax'],
                    'ymax': bounds['ymax'],
                    'dx': 100.,
                    'dy': 100.,
                    'nx': 100.,
                    'ny': 100.
                },
                adjust='res')
            gdict = slopedict.getBoundsWithin(tempgdict)
        else:  # Get boundaries from shakemap if not specified
            gdict = slopedict.getBoundsWithin(shkgdict)
    except:
        raise NameError('Unable to create base geodict.')

    # Load in data
    # TODO: Still need to make DFF and DFS layers
    try:
        # Load in slope data
        slopegrid = GDALGrid.load(slope_file,
                                  samplegeodict=gdict,
                                  resample=False)
        slope_data = slopegrid.getData().astype(float)
        # Load in MMI
        shakemap = ShakeGrid.load(shakefile,
                                  samplegeodict=gdict,
                                  resample=True,
                                  method='linear',
                                  adjust='res')
        MMI_data = shakemap.getLayer('mmi').getData().astype(float)
        # Load in Dff
        # TODO: STILL NEED THIS FILE
        dffgrid = GDALGrid.load(dff_file, samplegeodict=gdict, resample=False)
        dff_data = dffgrid.getData().astype(float)
        # Load in DFS
        # TODO: STILL NEED THIS FILE
        dfsgrid = GDALGrid.load(dfs_file, samplegeodict=gdict, resample=False)
        dfs_data = dfsgrid.getData().astype(float)
        # Load in elevation
        elev_grid = GDALGrid.load(elev_file,
                                  samplegeodict=gdict,
                                  resample=False)
        DEM = elev_grid.getData().astype(float)
    except:
        print('Data could not be retrieved.')

    # Read in classifications
    try:
        mmi_class = cmodel['classification']['MMI']
        slope_class = cmodel['classification']['slope']
        dff_class = cmodel['classification']['dff']
        dfs_class = cmodel['classification']['dfs']
        slope_pos_class = cmodel['classification']['slope_pos']
    except:
        print('Could not recover classifications from config.')

    try:
        slope_pos_data = create_slopePos(slope_data, DEM, cmodel)
    except:
        print('Could not create slope position grid.')

    # Split classification strings into lists containing numbers and
    # classify layers
    # MMI classifications
    # Note: the original code compared whole arrays with `in range(...)`,
    # which fails at runtime; elementwise masking is the presumed intent.
    try:
        mmi_classes = mmi_class.split(',')
        for i in mmi_classes:
            if '-' in i:
                j = i.split('-')
                mask = (MMI_data >= int(j[0])) & (MMI_data < int(j[1]))
                MMI_data[mask] = int(j[0])
    except:
        print('Could not categorize MMI values.')

    # Slope Classifications
    try:
        slope_classes = slope_class.split(',')
        k = 1
        for i in slope_classes:  # the original iterated mmi_classes here
            if '-' in i:
                j = i.split('-')
                mask = (slope_data >= int(j[0])) & (slope_data < int(j[1]))
                slope_data[mask] = k
                k += 1
            else:
                # open-ended top class; presumed intent is the last bin (11)
                slope_data[slope_data >= int(i)] = 11
    except:
        print('Could not recategorize slope values.')

    # DFF classifications
    try:
        dff_classes = dff_class.split(',')
        k = 1
        for i in dff_classes:
            if '-' in i:
                j = i.split('-')
                mask = (dff_data >= int(j[0])) & (dff_data < int(j[1]))
                dff_data[mask] = k
                k += 1
            else:
                # open-ended top class; presumed intent is the last bin (7)
                dff_data[dff_data >= int(i)] = 7
    except:
        print('Could not recategorize DFF values.')

    # DFS classifications
    try:
        dfs_classes = dfs_class.split(',')
        k = 1
        for i in dfs_classes:
            if '-' in i:
                j = i.split('-')
                mask = (dfs_data >= int(j[0])) & (dfs_data < int(j[1]))
                dfs_data[mask] = k
                k += 1
            else:
                # open-ended top class; presumed intent is the last bin (6)
                dfs_data[dfs_data >= int(i)] = 6
    except:
        print('Could not recategorize DFS values.')

    # Slope position classification
    try:
        slope_pos_classes = slope_pos_class.split(',')
        for k, i in enumerate(slope_pos_classes, start=1):
            # Elementwise match; the original compared the whole array to i
            # (and misspelled the list name as slope_poss_classes).
            slope_pos_data[slope_pos_data == i] = k
    except:
        print('Could not recategorize slope position values.')

    ##############
    # This section runs all the calculations
    ##############
    # Run each layer through a membership function
    try:
        # Membership function (presumed from Kritikos et al., 2015):
        #     mu = 1 / (1 + (x / divisor) ** power)
        # The original called np.exp with two arguments, which is invalid;
        # np.power matches the published membership form.
        slope = 1. / (1. + np.power(slope_data / slope_div, slope_power))
        MMI = 1. / (1. + np.power(MMI_data / MMI_div, MMI_power))
        dff = 1. / (1. + np.power(dff_data / dff_div, dff_power))
        dfs = 1. / (1. + np.power(dfs_data / dfs_div, dfs_power))
        slope_pos = 1. / (1. + np.power(slope_pos_data / slope_pos_div,
                                        slope_pos_power))
        # Collect layers (to be used in further calculations)
        layers = [slope, MMI, dff, dfs, slope_pos]
    except:
        print('Layer calculations failed.')

    # Apply final calculations operator
    # From Kritikos paper equation 4 (marked as untested in the original)
    try:
        stack = np.asarray(layers)
        a = np.prod(stack, axis=0)        # fuzzy algebraic product
        b = np.prod(1. - stack, axis=0)   # product of complements
        mu_x = np.power(a, 1. - gamma) * np.power(1. - b, gamma)
    except:
        print('Unable to calculate final product.')
        mu_x = None

    return mu_x
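
# For reference, equation 4 of Kritikos et al. (2015) combines the per-layer
# memberships mu_i with the fuzzy gamma operator:
#     mu_x = (prod(mu_i)) ** (1 - gamma) * (1 - prod(1 - mu_i)) ** gamma
# A self-contained, vectorized sketch of that final step (shapes and values
# below are illustrative, not from the model above):
import numpy as np

def fuzzy_gamma_overlay(memberships, gamma):
    """Fuzzy gamma overlay over a list of equal-shape membership grids
    with values in [0, 1]."""
    stack = np.asarray(memberships)              # (nlayers, ny, nx)
    product = np.prod(stack, axis=0)             # fuzzy algebraic product
    fsum = 1.0 - np.prod(1.0 - stack, axis=0)    # fuzzy algebraic sum
    return np.power(product, 1.0 - gamma) * np.power(fsum, gamma)

mu = fuzzy_gamma_overlay([np.full((2, 2), 0.8), np.full((2, 2), 0.5)], 0.9)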
Example #7
    def getLosses(self, shakefile):
        """Calculate number of fatalities using semi-empirical approach.

        :param shakefile:
          Path to a ShakeMap grid.xml file.
        :returns:
          Tuple of:
            1) Total number of fatalities
            2) Dictionary of residential fatalities per building type, per country.
            3) Dictionary of non-residential fatalities per building type, per country.
        """
        # get shakemap geodict
        shakedict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        # get population geodict
        popdict = get_file_geodict(self._popfile)

        # get country code geodict
        isodict = get_file_geodict(self._isofile)

        # get urban grid geodict
        urbdict = get_file_geodict(self._urbanfile)

        # load all of the grids we need
        if popdict == shakedict == isodict == urbdict:
            # special case, probably for testing...
            shakegrid = ShakeGrid.load(shakefile, adjust='res')
            popgrid = read(self._popfile)
            isogrid = read(self._isofile)
            urbgrid = read(self._urbanfile)
        else:
            sampledict = popdict.getBoundsWithin(shakedict)
            shakegrid = ShakeGrid.load(shakefile,
                                       samplegeodict=sampledict,
                                       resample=True,
                                       method='linear',
                                       adjust='res')
            popgrid = read(self._popfile,
                           samplegeodict=sampledict,
                           resample=False)
            isogrid = read(self._isofile,
                           samplegeodict=sampledict,
                           resample=True,
                           method='nearest',
                           doPadding=True,
                           padValue=0)
            urbgrid = read(self._urbanfile,
                           samplegeodict=sampledict,
                           resample=True,
                           method='nearest',
                           doPadding=True,
                           padValue=RURAL)

        # determine the local apparent time of day (based on longitude)
        edict = shakegrid.getEventDict()
        etime = edict['event_timestamp']
        elon = edict['lon']
        time_of_day, event_year, event_hour = get_time_of_day(etime, elon)

        # round off our MMI data to nearest 0.5 (5.5 should stay 5.5, 5.4
        # should become 5.5, 5.24 should become 5.0, etc.)
        # TODO:  Someday, make this more general to include perhaps grids of all IMT values, or
        # at least the ones we have collapse data for.
        mmidata = np.round(shakegrid.getLayer('mmi').getData() / 0.5) * 0.5

        # get arrays from our other grids
        popdata = popgrid.getData()
        isodata = isogrid.getData()
        urbdata = urbgrid.getData()

        # modify the population values for growth rate by country
        ucodes = np.unique(isodata[~np.isnan(isodata)])
        for ccode in ucodes:
            cidx = (isodata == ccode)
            popdata[cidx] = self._popgrowth.adjustPopulation(
                popdata[cidx], ccode, self._popyear, event_year)

        # create a dictionary containing indoor populations by building type (in cells where MMI >= 6)
        #popbystruct = get_indoor_pop(mmidata,popdata,urbdata,isodata,time_of_day)

        # find all mmi values greater than 9, set them to 9
        mmidata[mmidata > 9.0] = 9.0

        # dictionary containers for sums of fatalities (res/nonres) by building type
        res_fatal_by_ccode = {}
        nonres_fatal_by_ccode = {}

        # fatality sum
        ntotal = 0

        # loop over countries
        ucodes = np.unique(isodata[~np.isnan(isodata)])
        for ucode in ucodes:
            if ucode == 0:
                continue
            res_fatal_by_btype = {}
            nonres_fatal_by_btype = {}

            cdict = self._country.getCountry(int(ucode))
            ccode = cdict['ISO2']
            # get the workforce Series data for the current country
            wforce = self.getWorkforce(ccode)
            if wforce is None:
                logging.info('No workforce data for %s.  Skipping.' %
                             (cdict['Name']))
                continue

            # loop over MMI values 6-9
            for mmi in np.arange(6, 9.5, 0.5):
                c1 = (mmidata == mmi)
                c2 = (isodata == ucode)
                if ucode > 900 and ucode != CALIFORNIA_US_CCODE:
                    ucode = US_CCODE
                for dclass in [URBAN, RURAL]:
                    c3 = (urbdata == dclass)

                    # get the population data in those cells at MMI, in country, and density class
                    # I think I want an AND condition here
                    popcells = popdata[c1 & c2 & c3]

                    # get the population distribution across residential, non-residential, and outdoor.
                    res, nonres, outside = pop_dist(
                        popcells, wforce, time_of_day, dclass)

                    # get the inventory for urban residential
                    resrow, nresrow = self.getInventories(ccode, dclass)

                    # TODO - figure out why this is happening, make the following lines
                    # not necessary
                    if 'Unnamed: 0' in resrow:
                        resrow = resrow.drop('Unnamed: 0')
                    if 'Unnamed: 0' in nresrow:
                        nresrow = nresrow.drop('Unnamed: 0')
                    # now multiply the residential/non-residential population through the inventory data
                    numres = len(resrow)
                    numnonres = len(nresrow)
                    resmat = np.reshape(
                        resrow.values, (numres, 1)).astype(np.float32)
                    nresmat = np.reshape(
                        nresrow.values, (numnonres, 1)).astype(np.float32)
                    popres = np.tile(res, (numres, 1))
                    popnonres = np.tile(nonres, (numnonres, 1))
                    popresbuilding = (popres * resmat)
                    popnonresbuilding = (popnonres * nresmat)

                    # now we have the residential and non-residental population
                    # distributed through the building types for each cell that matches
                    # MMI,country, and density criteria.
                    # popresbuilding rows are building types, columns are population cells

                    # next, we get the collapse rates for these buildings
                    # and multiply them by the population by building.
                    collapse_res = self.getCollapse(ccode, mmi, resrow)
                    collapse_nonres = self.getCollapse(ccode, mmi, nresrow)
                    resrates = np.reshape(
                        collapse_res.values.astype(np.float32), (numres, 1))
                    nonresrates = np.reshape(
                        collapse_nonres.values.astype(np.float32), (numnonres, 1))
                    rescollapse = popresbuilding * resrates
                    nonrescollapse = popnonresbuilding * nonresrates

                    # get the fatality rates given collapse by building type and
                    # multiply through the result of collapse*population per building
                    resfatalcol = self.getFatalityRates(
                        ccode, time_of_day, resrow)
                    nonresfatalcol = self.getFatalityRates(
                        ccode, time_of_day, nresrow)
                    resfatal = np.reshape(
                        resfatalcol.values.astype(np.float32), (numres, 1))
                    nonresfatal = np.reshape(
                        nonresfatalcol.values.astype(np.float32), (numnonres, 1))
                    resfat = rescollapse * resfatal
                    nonresfat = nonrescollapse * nonresfatal

                    # zero out the cells where fatalities are less than 1 or nan
                    try:
                        if len(resfat) and len(resfat[0]):
                            resfat[np.ma.masked_less(resfat, 1).mask] = 0.0
                    except:
                        resfat[np.isnan(resfat)] = 0.0
                    try:
                        if len(nonresfat) and len(nonresfat[0]):
                            nonresfat[np.ma.masked_less(
                                nonresfat, 1).mask] = 0.0
                    except:
                        nonresfat[np.isnan(nonresfat)] = 0.0

                    # sum the fatalities per building through all cells
                    resfatbybuilding = np.nansum(resfat, axis=1)
                    nonresfatbybuilding = np.nansum(nonresfat, axis=1)
                    resfdict = dict(
                        zip(resrow.index, resfatbybuilding.tolist()))
                    nonresfdict = dict(
                        zip(nresrow.index, nonresfatbybuilding.tolist()))
                    res_fatal_by_btype = add_dicts(
                        res_fatal_by_btype, resfdict)
                    nonres_fatal_by_btype = add_dicts(
                        nonres_fatal_by_btype, nonresfdict)

            # add the fatalities by building type to the dictionary containing fatalities by country
            res_fatal_by_ccode[ccode] = res_fatal_by_btype.copy()
            nonres_fatal_by_ccode[ccode] = nonres_fatal_by_btype.copy()

            # increment the total number of fatalities
            ntotal += int(sum(res_fatal_by_btype.values())
                          + sum(nonres_fatal_by_btype.values()))

        return (ntotal, res_fatal_by_ccode, nonres_fatal_by_ccode)
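
# getLosses above accumulates per-building-type sums with an add_dicts helper
# defined elsewhere in this module. A plausible minimal implementation,
# assuming it merges two {building type: count} dicts by summing values
# key-by-key:
def add_dicts(a, b):
    """Merge two {key: number} dicts, summing values for shared keys."""
    out = dict(a)
    for key, value in b.items():
        out[key] = out.get(key, 0.0) + value
    return out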
Example #8
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 slopediv=1.,
                 bounds=None,
                 numstd=1):
        """Set up the logistic model
        # ADD BOUNDS TO THIS MODEL
        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs. Only one
          model should be described in each config file.
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param uncertfile: Full file path to xml file of shakemap uncertainties
        :type uncertfile: string
        :param saveinputs: if True, saves all the input layers as Grid2D objects in addition to the model
          if false, it will just output the model
        :type saveinputs: boolean
        :param slopefile: optional file path to slopefile that will be resampled to the other input files for applying
          thresholds OVERWRITES VALUE IN CONFIG
        :type slopefile: string
        :param slopediv: number to divide slope by to get to degrees (usually will be default
          of 1.)
        :type slopediv: float
        :param numstd: number of +/- standard deviations to use if uncertainty is computed (uncertfile is not None)

        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception(
                'No config file found or problem with config file format')
        if len(mnames) > 1:
            raise Exception(
                'Config file contains more than one model which is no longer '
                'allowed, update your config file to the newer format')
        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(
            cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        self.numstd = numstd
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception(
                'You must specify a base layer corresponding to one of the files in the layer section.'
            )
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                print(
                    'Could not find slopefile term in config, no slope thresholds will be applied\n'
                )
                self.slopefile = None
        else:
            self.slopefile = slopefile
        self.slopediv = slopediv

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
            shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        # now find the layer that is our base layer and get the largest
        # bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception(
                'All predictor variable grids must be a valid GMT or ESRI file type'
            )

        #now load the shakemap, resampling and padding if necessary
        if ShakeGrid.getFileGeoDict(shakefile, adjust='res') == sampledict:
            self.shakemap = ShakeGrid.load(shakefile, adjust='res')
            flag = 1
        else:
            self.shakemap = ShakeGrid.load(shakefile,
                                           samplegeodict=sampledict,
                                           resample=True,
                                           doPadding=True,
                                           adjust='res')
            flag = 0

        # take uncertainties into account
        if uncertfile is not None:
            try:
                if flag == 1:
                    self.uncert = ShakeGrid.load(uncertfile, adjust='res')
                else:
                    self.uncert = ShakeGrid.load(uncertfile,
                                                 samplegeodict=sampledict,
                                                 resample=True,
                                                 doPadding=True,
                                                 adjust='res')
            except:
                print(
                    'Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                if GMTGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GMTGrid.load(layerfile)
                                else:
                                    lyr = GMTGrid.load(layerfile,
                                                       sampledict,
                                                       resample=True,
                                                       method=interp,
                                                       doPadding=True)
                            elif ftype == 'esri':
                                if GDALGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GDALGrid.load(layerfile)
                                else:
                                    lyr = GDALGrid.load(layerfile,
                                                        sampledict,
                                                        resample=True,
                                                        method=interp,
                                                        doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                                    layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    if GMTGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GMTGrid.load(layerfile)
                    else:
                        lyr = GMTGrid.load(layerfile,
                                           sampledict,
                                           resample=True,
                                           method=interp,
                                           doPadding=True)
                elif ftype == 'esri':
                    if GDALGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GDALGrid.load(layerfile)
                    else:
                        lyr = GDALGrid.load(layerfile,
                                            sampledict,
                                            resample=True,
                                            method=interp,
                                            doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                        layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the term with the shakemap input and replace for these nuggets
            for k, nug in enumerate(self.nuggets):
                if "self.shakemap.getLayer('pga').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace(
                        "self.shakemap.getLayer('pga').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pga')"
                        ".getData()) - self.numstd * self.uncert"
                        ".getLayer('stdpga').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace(
                        "self.shakemap.getLayer('pga').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pga')"
                        ".getData()) + self.numstd * self.uncert"
                        ".getLayer('stdpga').getData()))")
                elif "self.shakemap.getLayer('pgv').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace(
                        "self.shakemap.getLayer('pgv').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                        ".getData()) - self.numstd * self.uncert"
                        ".getLayer('stdpgv').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace(
                        "self.shakemap.getLayer('pgv').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                        ".getData()) + self.numstd * self.uncert"
                        ".getLayer('stdpgv').getData()))")
                elif "self.shakemap.getLayer('mmi').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace(
                        "self.shakemap.getLayer('mmi').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                        ".getData()) - self.numstd * self.uncert"
                        ".getLayer('stdmmi').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace(
                        "self.shakemap.getLayer('mmi').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                        ".getData()) + self.numstd * self.uncert"
                        ".getLayer('stdmmi').getData()))")
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config[self.model]['slopemin'])
            self.slopemax = float(config[self.model]['slopemax'])
        except:
            print('Could not find slopemin and/or slopemax in config, '
                  'no limits will be applied.')
            self.slopemin = 0.
            self.slopemax = 90.
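
# The constructor above assembles self.equation (and the min/max variants) as
# a plain string like 'b0 + (c1 * term1) + (c2 * term2)', where each term
# names grid data; the model is later produced by eval-ing that string. A toy
# sketch of the same pattern with hypothetical coefficients and grids (the
# real calculate() method supplies the namespace from self.layerdict and
# self.shakemap):
import numpy as np

nuggets = ['-3.649', '(0.0133 * pga)', '(0.0364 * slope)']  # hypothetical
equation = ' + '.join(nuggets)

namespace = {
    'np': np,
    'pga': np.array([[10.0, 50.0]]),   # illustrative PGA grid
    'slope': np.array([[5.0, 30.0]]),  # illustrative slope grid (degrees)
}
x = eval(equation, {'__builtins__': {}}, namespace)  # linear predictor
prob = 1.0 / (1.0 + np.exp(-x))                      # logistic probability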
Example #9
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 bounds=None,
                 slopemod=None,
                 trimfile=None):
        """
        Sets up the logistic model

        Args:
            shakefile (str): Path to shakemap grid.xml file for the event.
            config: configobj object defining the model and its inputs. Only
                one model should be described in each config file.
            uncertfile (str): Path to uncertainty.xml file.
            saveinputs (bool): Save input layers as Grid2D objects in addition
                to the model? If false (the default), it will just output the
                model.
            slopefile (str): Optional path to slopefile that will be resampled
                to the other input files for applying thresholds. OVERWRITES
                VALUE IN CONFIG.
            bounds (dict): Default of None uses ShakeMap boundaries, otherwise
                a dictionary of boundaries to cut to like

                .. code-block:: python

                    bounds = {
                        'xmin': lonmin, 'xmax': lonmax,
                        'ymin': latmin, 'ymax': latmax
                    }
            slopemod (str): How slope input should be modified to be in
                degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
                ``slope/100.`` (note that this may be in the config file
                already).
            trimfile (str): shapefile of earth's landmasses to use to cut
                offshore areas.
        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception('No config file found or problem with config '
                            'file format')
        if len(mnames) > 1:
            raise Exception('Config file contains more than one model which '
                            'is no longer allowed, update your config file '
                            'to the newer format')

        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        #self.numstd = numstd
        self.clips = validateClips(cmodel, self.layers, self.gmused)
        self.notes = ''

        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                # print('Slopefile not specified in config, no slope '
                #      'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        if slopemod is None:
            try:
                self.slopemod = cmodel['slopemod']
            except:
                self.slopemod = None
        else:
            self.slopemod = slopemod

        # See if trimfile exists
        if trimfile is not None:
            if not os.path.exists(trimfile):
                print('trimfile defined does not exist: %s\nOcean will not be '
                      'trimmed' % trimfile)
                self.trimfile = None
            elif os.path.splitext(trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, ocean will not be '
                      'trimmed')
                self.trimfile = None
            else:
                self.trimfile = trimfile
        else:
            self.trimfile = None

        # Get month of event
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

        # Figure out how/if need to cut anything
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if geodict.xmin < geodict.xmax:  # only if signs are not opposite
                if (geodict.xmin > bounds['xmin']
                        or geodict.xmax < bounds['xmax']
                        or geodict.ymin > bounds['ymin']
                        or geodict.ymax < bounds['ymax']):
                    print('Specified bounds are outside shakemap area, using '
                          'ShakeMap bounds instead.')
                    bounds = None

        if bounds is not None:
            tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                                  bounds['xmax'],
                                                  bounds['ymin'],
                                                  bounds['ymax'],
                                                  geodict.dx,
                                                  geodict.dy,
                                                  inside=False)
            # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
            if geodict.xmin > geodict.xmax:
                if tempgdict.xmin < 0:
                    geodict._xmin -= 360.
                else:
                    geodict._xmax += 360.
            gdict = geodict.getBoundsWithin(tempgdict)
        else:
            gdict = geodict

        # Now find the layer that is our base layer and get the largest bounds
        # we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type.')

        # Do we need to subdivide baselayer?
        if 'divfactor' in self.config[self.model].keys():
            divfactor = float(self.config[self.model]['divfactor'])
            if divfactor != 1.:
                # adjust sampledict so everything will be resampled
                newxmin = sampledict.xmin - sampledict.dx / \
                    2. + sampledict.dx/(2.*divfactor)
                newymin = sampledict.ymin - sampledict.dy / \
                    2. + sampledict.dy/(2.*divfactor)
                newxmax = sampledict.xmax + sampledict.dx / \
                    2. - sampledict.dx/(2.*divfactor)
                newymax = sampledict.ymax + sampledict.dy / \
                    2. - sampledict.dy/(2.*divfactor)
                newdx = sampledict.dx / divfactor
                newdy = sampledict.dy / divfactor

                sampledict = GeoDict.createDictFromBox(newxmin,
                                                       newxmax,
                                                       newymin,
                                                       newymax,
                                                       newdx,
                                                       newdy,
                                                       inside=True)

        # Find slope thresholds, if applicable
        self.slopemin = 'none'
        self.slopemax = 'none'
        if self.slopefile is not None:
            try:
                self.slopemin = float(config[self.model]['slopemin'])
                self.slopemax = float(config[self.model]['slopemax'])
            except:
                print('Could not find slopemin and/or slopemax in config; '
                      'no slope thresholds will be applied.')
                self.slopemin = 'none'
                self.slopemax = 'none'

        # Make temporary directory for hdf5 pytables file storage
        self.tempdir = tempfile.mkdtemp()

        # now load the shakemap, resampling and padding if necessary
        temp = ShakeGrid.load(shakefile)  # , adjust='res')
        self.shakedict = temp.getShakeDict()
        self.eventdict = temp.getEventDict()
        self.shakemap = {}

        # Read both PGA and PGV in, may need them for thresholds
        for gm in ['pga', 'pgv']:
            junkfile = os.path.join(self.tempdir, 'temp.bil')
            GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
            if gm in self.interpolations.keys():
                intermeth = self.interpolations[gm]
            else:
                intermeth = 'bilinear'
            junkgrid = quickcut(junkfile,
                                sampledict,
                                precise=True,
                                method=intermeth)
            if gm in self.clips:
                junkgrid.setData(
                    np.clip(junkgrid.getData(), self.clips[gm][0],
                            self.clips[gm][1]))
            self.shakemap[gm] = TempHdf(
                junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
            os.remove(junkfile)
        del (temp)

        # get updated geodict
        sampledict = junkgrid.getGeoDict()

        # take uncertainties into account, if available
        if uncertfile is not None:
            self.uncert = {}
            try:
                # Only read in the ones that will be needed
                temp = ShakeGrid.load(uncertfile)
                already = []
                for gm in self.gmused:
                    if 'pgv' in gm:
                        gmsimp = 'pgv'
                    elif 'pga' in gm:
                        gmsimp = 'pga'
                    elif 'mmi' in gm:
                        gmsimp = 'mmi'
                    if gmsimp in already:
                        continue
                    junkfile = os.path.join(self.tempdir, 'temp.bil')
                    GDALGrid.copyFromGrid(temp.getLayer('std%s' %
                                                        gmsimp)).save(junkfile)
                    if gmsimp in self.interpolations.keys():
                        intermeth = self.interpolations[gmsimp]
                    else:
                        intermeth = 'bilinear'
                    junkgrid = quickcut(junkfile,
                                        sampledict,
                                        precise=True,
                                        method=intermeth)
                    if gmsimp in self.clips:
                        junkgrid.setData(
                            np.clip(junkgrid.getData(), self.clips[gmsimp][0],
                                    self.clips[gmsimp][1]))
                    self.uncert['std' + gmsimp] = TempHdf(
                        junkgrid,
                        os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                    already.append(gmsimp)
                    os.remove(junkfile)
                del (temp)
            except:
                print('Could not read uncertainty file, ignoring '
                      'uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        # Load the predictor layers, save as hdf5 temporary files, put file
        # locations into a dictionary.

        # Will be replaced in the next section if a slopefile was defined
        self.nonzero = None

        # key = layer name, value = grid object
        self.layerdict = {}

        didslope = False
        for layername, layerfile in self.layers.items():
            start = timer()
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            temp = quickcut(layerfile,
                                            sampledict,
                                            precise=True,
                                            method=interp)
                            if layername in self.clips:
                                temp.setData(
                                    np.clip(temp.getData(),
                                            self.clips[layername][0],
                                            self.clips[layername][1]))
                            self.layerdict[layername] = TempHdf(
                                temp,
                                os.path.join(self.tempdir,
                                             '%s.hdf5' % layername))
                            del (temp)
            else:
                interp = self.interpolations[layername]
                temp = quickcut(layerfile,
                                sampledict,
                                precise=True,
                                method=interp)
                if layername in self.clips:
                    temp.setData(
                        np.clip(temp.getData(), self.clips[layername][0],
                                self.clips[layername][1]))
                if layername == 'rock':  # Convert unconsolidated sediments to a more reasonable coefficient
                    sub1 = temp.getData()
                    # Change to mixed sed rock coeff
                    sub1[sub1 <= -3.21] = -1.36
                    temp.setData(sub1)
                    self.notes += ('unconsolidated sediment coefficient '
                                   'changed to -1.36 (weaker) from -3.22 to '
                                   'better reflect that this unit is not '
                                   'actually strong\n')

                self.layerdict[layername] = TempHdf(
                    temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
                td = temp.getGeoDict()
                if td != sampledict:
                    raise Exception(
                        'Geodictionaries of resampled files do not match')

                if layerfile == self.slopefile:
                    flag = 0
                    if self.slopemin == 'none' and self.slopemax == 'none':
                        flag = 1
                    if self.slopemod is None:
                        slope1 = temp.getData().astype(float)
                        slope = 0
                    else:
                        try:
                            slope = temp.getData().astype(float)
                            slope1 = eval(self.slopemod)
                        except:
                            print('slopemod provided not valid, continuing '
                                  'without slope thresholds.')
                            # fall back to raw slope so slope1 is defined
                            slope1 = temp.getData().astype(float)
                            flag = 1
                    if flag == 0:
                        nonzero = np.array([(slope1 > self.slopemin) &
                                            (slope1 <= self.slopemax)])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                        del (slope)
                    else:
                        # Still remove areas where the slope equals exactly
                        # 0.0 to remove offshore liq areas.
                        nonzero = np.array([slope1 != 0.0])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                    didslope = True
                del (temp)

            print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

        if didslope is False and self.slopefile is not None:
            # Slope didn't get read in yet
            temp = quickcut(self.slopefile,
                            sampledict,
                            precise=True,
                            method='bilinear')
            flag = 0
            if self.slopemin == 'none' and self.slopemax == 'none':
                flag = 1
            if self.slopemod is None:
                slope1 = temp.getData().astype(float)
                slope = 0
            else:
                try:
                    slope = temp.getData().astype(float)
                    slope1 = eval(self.slopemod)
                except:
                    print('slopemod provided not valid, continuing without '
                          'slope thresholds')
                    # fall back to raw slope so slope1 is defined
                    slope1 = temp.getData().astype(float)
                    flag = 1
            if flag == 0:
                nonzero = np.array([
                    (slope1 > self.slopemin) & (slope1 <= self.slopemax)
                ])
                self.nonzero = nonzero[0, :, :]
                del (slope1)
                del (slope)
            else:
                # Still remove areas where the slope equals exactly
                # 0.0 to remove offshore liq areas.
                nonzero = np.array([slope1 != 0.0])
                self.nonzero = nonzero[0, :, :]
                del (slope1)

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = sampledict
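
# The divfactor block above shrinks the cell size by divfactor while leaving
# the outer edges of the grid unchanged: each boundary coordinate moves from
# the old cell center to the new one, i.e., inward by dx/2 - dx/(2*divfactor).
# A short numeric check of that arithmetic (values are illustrative):
dx = 0.008333            # cell size (degrees)
divfactor = 2.0
xmin = 10.0              # cell-center coordinate of the westmost column

newxmin = xmin - dx / 2. + dx / (2. * divfactor)  # as in the code above
newdx = dx / divfactor

old_west_edge = xmin - dx / 2.
new_west_edge = newxmin - newdx / 2.
assert abs(new_west_edge - old_west_edge) < 1e-12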
Example #10
def godt2008(shakefile, config, uncertfile=None, saveinputs=False,
             regressionmodel='J_PGA', bounds=None, slopediv=100.,
             codiv=10.):
    """ This function runs the Godt et al. (2008) global method for a given ShakeMap. The Factor of Safety
    is calculated using infinite slope analysis assumuing dry conditions. The method uses threshold newmark
    displacement and estimates areal coverage by doing the calculations for each slope quantile
    TO DO - add 'all' - averages Dn from all four equations, add term to convert PGA and PGV to Ia and use other equations, add Ambraseys and Menu (1988) option

    :param shakefile: url or filepath to shakemap xml file
    :type shakefile: string
    :param config: ConfigObj of config file containing inputs required for running the model
    :type config: ConfigObj
    :param saveinputs: Whether or not to return the model input layers, False (defeault) returns only the model output (one layer)
    :type saveinputs: boolean
    :param regressionmodel:
        Newmark displacement regression model to use
        'J_PGA' (default) - PGA-based model from Jibson (2007) - equation 6
        'J_PGA_M' - PGA and M-based model from Jibson (2007) - equation 7
        'RS_PGA_M' - PGA and M-based model from from Rathje and Saygili (2009)
        'RS_PGA_PGV' - PGA and PGV-based model from Saygili and Rathje (2008) - equation 6
    :type regressionmodel: string
    :param probtype: Method used to estimate probability. Entering 'jibson2000' uses equation 5 from Jibson et al. (2000) to estimate probability from Newmark displacement. 'threshold' uses a specified threshold of Newmark displacement (defined in config file) and assumes anything greather than this threshold fails
    :type probtype: string
    :param slopediv: Divide slope by this number to get slope in degrees (Verdin datasets need to be divided by 100)
    :type slopediv: float
    :param codiv: Divide cohesion by this number to get reasonable numbers (For Godt method, need to divide by 10 because that is how it was calibrated, but values are reasonable without multiplying for regular analysis)
    :type codiv: float

    :returns maplayers:  Dictionary containing output and input layers (if saveinputs=True) along with metadata formatted like maplayers['layer name']={'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle, potentially including source information'}
    :type maplayers: OrderedDict

    :raises NameError: when unable to parse the config correctly (probably a formatting issue in the configfile) or when unable to find the shakefile (Shakemap URL or filepath) - these cause program to end
    """

    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    if uncertfile is not None:
        print('ground motion uncertainty option not implemented yet')

    # Parse config
    # (may want to add error handling so if refs aren't given, just
    # includes unknown)
    try:
        cfg = config['mechanistic_models']['godt_2008']
        slopefilepath = cfg['layers']['slope']['filepath']
        slopeunits = cfg['layers']['slope']['units']
        cohesionfile = cfg['layers']['cohesion']['file']
        cohesionunits = cfg['layers']['cohesion']['units']
        frictionfile = cfg['layers']['friction']['file']
        frictionunits = cfg['layers']['friction']['units']

        thick = float(cfg['parameters']['thick'])
        uwt = float(cfg['parameters']['uwt'])
        nodata_cohesion = float(cfg['parameters']['nodata_cohesion'])
        nodata_friction = float(cfg['parameters']['nodata_friction'])
        dnthresh = float(cfg['parameters']['dnthresh'])
        fsthresh = float(cfg['parameters']['fsthresh'])
        acthresh = float(cfg['parameters']['acthresh'])
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    # TO DO, ADD ERROR CATCHING ON UNITS, MAKE SURE THEY ARE WHAT THEY SHOULD BE FOR THIS MODEL

    # Try to fetch source information from config
    try:
        modelsref = cfg['shortref']
        modellref = cfg['longref']
        slopesref = cfg['layers']['slope']['shortref']
        slopelref = cfg['layers']['slope']['longref']
        cohesionsref = cfg['layers']['cohesion']['shortref']
        cohesionlref = cfg['layers']['cohesion']['longref']
        frictionsref = cfg['layers']['friction']['shortref']
        frictionlref = cfg['layers']['friction']['longref']
    except:
        print('Was not able to retrieve all references from config file. '
              'Continuing.')

    # Load in shakefile
    if not os.path.isfile(shakefile):
        if isURL(shakefile):
            shakefile = getGridURL(shakefile)  # returns a file object
        else:
            raise NameError('Could not find "%s" as a file or a valid url'
                            % shakefile)

    shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if (shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax']
                or shkgdict.ymin > bounds['ymin']
                or shkgdict.ymax < bounds['ymax']):
            print('Specified bounds are outside shakemap area, using '
                  'ShakeMap bounds instead')
            bounds = None
    if bounds is not None:
        tempgdict = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'],
                             'xmax': bounds['xmax'], 'ymax': bounds['ymax'],
                             'dx': shkgdict.dx, 'dy': shkgdict.dy,
                             'nx': shkgdict.nx, 'ny': shkgdict.ny},
                            adjust='res')
        gdict = shkgdict.getBoundsWithin(tempgdict)
        shakemap = ShakeGrid.load(shakefile, samplegeodict=gdict,
                                  adjust='bounds')
    else:
        shakemap = ShakeGrid.load(shakefile, adjust='res')
    shkgdict = shakemap.getGeoDict()  # Get updated geodict
    M = shakemap.getEventDict()['magnitude']

    # Read in all the slope files, dividing by slopediv to get slope in
    # degrees (because the default input files are multiplied by 100.)
    slopefiles = ['slope_min.bil', 'slope10.bil', 'slope30.bil',
                  'slope50.bil', 'slope70.bil', 'slope90.bil',
                  'slope_max.bil']
    slopes = []
    for sfile in slopefiles:
        slopes.append(GDALGrid.load(os.path.join(slopefilepath, sfile),
                                    samplegeodict=shkgdict,
                                    resample=True,
                                    method='linear').getData() / slopediv)
    slopestack = np.dstack(slopes)

    # Change any zero slopes to a very small number to avoid dividing by zero later
    slopestack[slopestack == 0] = 1e-8

    # Read in the cohesion and friction files and duplicate layers so they
    # are the same shape as the slope structure
    cohesion = np.repeat(
        GDALGrid.load(cohesionfile, samplegeodict=shakemap.getGeoDict(),
                      resample=True,
                      method='nearest').getData()[:, :, np.newaxis] / codiv,
        7, axis=2)
    cohesion[cohesion == -999.9] = nodata_cohesion
    cohesion[cohesion == 0] = nodata_cohesion
    friction = np.repeat(
        GDALGrid.load(frictionfile, samplegeodict=shakemap.getGeoDict(),
                      resample=True, method='nearest').getData().astype(
                          float)[:, :, np.newaxis],
        7, axis=2)
    friction[friction == -9999] = nodata_friction
    friction[friction == 0] = nodata_friction

    # Compute the static factor of safety (infinite slope, dry conditions)
    FS = (cohesion / (uwt * thick * np.sin(slopestack * (np.pi / 180.)))
          + np.tan(friction * (np.pi / 180.))
          / np.tan(slopestack * (np.pi / 180.)))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g; this gives ac in g (equations
    # that multiply by g give ac in m/s2)
    Ac = (FS - 1) * np.sin(slopestack * (np.pi / 180.)).astype(float)
    Ac[Ac < acthresh] = acthresh

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = np.repeat(
        shakemap.getLayer('pga').getData()[:, :, np.newaxis] / 100.,
        7, axis=2).astype(float)

    if 'PGV' in regressionmodel:  # Load in PGV also, in cm/sec
        PGV = np.repeat(shakemap.getLayer('pgv').getData()[:, :, np.newaxis],
                        7, axis=2).astype(float)

    # Ignore invalid-value errors so it still runs when Ac > PGA; this
    # leaves nan instead of crashing
    np.seterr(invalid='ignore')

    if regressionmodel == 'J_PGA':
        Dn = J_PGA(Ac, PGA)
    elif regressionmodel == 'J_PGA_M':
        Dn = J_PGA_M(Ac, PGA, M)
    elif regressionmodel == 'RS_PGA_M':
        Dn = RS_PGA_M(Ac, PGA, M)
    elif regressionmodel == 'RS_PGA_PGV':
        Dn = RS_PGA_PGV(Ac, PGA, PGV)

    PROB = Dn.copy()
    PROB[PROB < dnthresh] = 0.
    PROB[PROB >= dnthresh] = 1.
    PROB = np.sum(PROB, axis=2)
    PROB[PROB == 1.] = 0.01
    PROB[PROB == 2.] = 0.10
    PROB[PROB == 3.] = 0.30
    PROB[PROB == 4.] = 0.50
    PROB[PROB == 5.] = 0.70
    PROB[PROB == 6.] = 0.90
    PROB[PROB == 7.] = 0.99
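    # The masked assignments above are a lookup: the count of slope-quantile
    # layers whose Dn exceeds dnthresh maps to an areal coverage fraction.
    # Equivalent table-based sketch for the integer counts 0-7:
    #   coverage_table = np.array([0., 0.01, 0.10, 0.30, 0.50, 0.70, 0.90, 0.99])
    #   PROB = coverage_table[PROB.astype(int)]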

    # Turn output and inputs into grids and put in the maplayers dictionary
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])

    description = {'name': modelsref, 'longref': modellref, 'units': 'coverage', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'thickness_m': thick, 'unitwt_kNm3': uwt, 'dnthresh_cm': dnthresh, 'acthresh_g': acthresh, 'fsthresh': fsthresh}}

    maplayers['model'] = {'grid': GDALGrid(PROB, shakemap.getGeoDict()), 'label': 'Areal coverage', 'type': 'output', 'description': description}

    if saveinputs is True:
        maplayers['pga'] = {'grid': GDALGrid(PGA[:, :, 0], shakemap.getGeoDict()), 'label': 'PGA (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        if 'PGV' in regressionmodel:
            maplayers['pgv'] = {'grid': GDALGrid(PGV[:, :, 0], shakemap.getGeoDict()), 'label': 'PGV (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
        maplayers['minFS'] = {'grid': GDALGrid(np.min(FS, axis=2), shakemap.getGeoDict()), 'label': 'Min Factor of Safety', 'type': 'input', 'description': {'units': 'unitless'}}
        maplayers['max slope'] = {'grid': GDALGrid(slopestack[:, :, -1], shakemap.getGeoDict()), 'label': r'Maximum slope ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': slopesref, 'longref': slopelref}}
        maplayers['cohesion'] = {'grid': GDALGrid(cohesion[:, :, 0], shakemap.getGeoDict()), 'label': 'Cohesion (kPa)', 'type': 'input', 'description': {'units': 'kPa (adjusted)', 'name': cohesionsref, 'longref': cohesionlref}}
        maplayers['friction angle'] = {'grid': GDALGrid(friction[:, :, 0], shakemap.getGeoDict()), 'label': r'Friction angle ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': frictionsref, 'longref': frictionlref}}

    return maplayers
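
# A minimal sketch (an assumption, not the packaged implementation) of the
# J_PGA helper called above, following Jibson (2007) equation 6, with Ac and
# PGA in g and Dn in cm; cells where Ac >= PGA come out as nan, i.e. no
# predicted displacement.
import numpy as np

def J_PGA_sketch(Ac, PGA):
    ratio = Ac/PGA
    logDn = 0.215 + np.log10((1. - ratio)**2.341 * ratio**-1.438)
    return 10.**logDn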
Example #11
def HAZUS(shakefile, config, uncertfile=None, saveinputs=False, modeltype='coverage', regressionmodel='J_PGA', probtype='jibson2000', bounds=None):
    """
    Runs the HAZUS landslide procedure (FEMA, 2003, Chapter 4) using susceptibility categories (I-X) defined by the HAZUS manual

    :param shakefile: URL or complete file path to the location of the ShakeMap to use as input
    :type shakefile: string
    :param config: Model configuration file object containing locations of input files and other input values config = ConfigObj(configfilepath)
    :type config: ConfigObj
    :param uncertfile: Complete file path to the uncertainty.xml for the shakefile (ground motion uncertainty is not yet implemented for this model)
    :type uncertfile: string
    :param saveinputs: Whether or not to return the model input layers, False (default) returns only the model output (one layer)
    :type saveinputs: boolean
    :param modeltype: 'coverage' (default) - if critical acceleration is exceeded by PGA, this gives the estimated areal coverage of landsliding for that cell
        'dn_hazus' - Outputs Newmark displacement using HAZUS methods without relating to probability of failure
        'dn_prob' - Estimates Newmark displacement using HAZUS methods and relates to probability of failure using param probtype
        'ac_classic_dn' - Uses the critical acceleration defined by HAZUS methodology and uses regression model defined by regressionmodel param to get Newmark displacement without relating to probability of failure
        'ac_classic_prob' - Uses the critical acceleration defined by HAZUS methodology and uses regression model defined by regressionmodel param to get Newmark displacement and probability defined by probtype method
    :type modeltype: string
    :param regressionmodel:
        Newmark displacement regression model to use
        'J_PGA' (default) - PGA-based model from Jibson (2007) - equation 6
        'J_PGA_M' - PGA and M-based model from Jibson (2007) - equation 7
        'RS_PGA_M' - PGA and M-based model from Rathje and Saygili (2009)
        'RS_PGA_PGV' - PGA and PGV-based model from Saygili and Rathje (2008) - equation 6
    :type regressionmodel: string
    :param probtype: Method used to estimate probability. Entering 'jibson2000' uses equation 5 from Jibson et al. (2000) to estimate probability from Newmark displacement. 'threshold' uses a specified threshold of Newmark displacement (defined in config file) and assumes anything greater than this threshold fails
    :type probtype: string
    :param bounds: Boundaries to compute over if different from ShakeMap boundaries as dictionary with keys 'xmin', 'xmax', 'ymin', 'ymax'
    :type bounds: dictionary

    :returns maplayers:  Dictionary containing output and input layers (if saveinputs=True) along with metadata formatted like maplayers['layer name']={'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle, potentially including source information'}
    :type maplayers: OrderedDict
    """

    # Empty refs
    suslref = 'unknown'
    sussref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # Parse config and read in files
    sus = None
    susdat = None

    if uncertfile is not None:
        print('ground motion uncertainty option not implemented yet')

    # Read in susceptibility file
    susfile = config['mechanistic_models']['hazus']['layers']['susceptibility']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    susdict = GDALGrid.getFileGeoDict(susfile)
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax'] or shkgdict.ymin > bounds['ymin'] or shkgdict.ymax < bounds['ymax']:
            print('Specified bounds are outside shakemap area, using ShakeMap bounds instead')
            bounds = None
    if bounds is not None:
        tempgdict1 = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'], 'xmax': bounds['xmax'], 'ymax': bounds['ymax'], 'dx': 100., 'dy': 100., 'nx': 100., 'ny': 100.}, adjust='res')
        tempgdict = susdict.getBoundsWithin(tempgdict1)
    else:
        tempgdict = susdict.getBoundsWithin(shkgdict)
    sus = GDALGrid.load(susfile, samplegeodict=tempgdict, resample=False)
    gdict = sus.getGeoDict()
    susdat = sus.getData()

    try:  # Try to fetch source information from config
        modelsref = config['mechanistic_models']['hazus']['shortref']
        modellref = config['mechanistic_models']['hazus']['longref']
        sussref = config['mechanistic_models']['hazus']['layers']['susceptibility']['shortref']
        suslref = config['mechanistic_models']['hazus']['layers']['susceptibility']['longref']
    except:
        print('Was not able to retrieve all references from config file. Continuing')

    try:
        dnthresh = float(config['mechanistic_models']['hazus']['values']['dnthresh'])
    except:
        if probtype == 'threshold':
            dnthresh = 5.
            print('Unable to find dnthresh in config, using 5 cm')
        else:
            dnthresh = None

    # Load in shakemap, resample to susceptibility file
    shakemap = ShakeGrid.load(shakefile, adjust='res')

    PGA = shakemap.getLayer('pga').subdivide(gdict).getData().astype(float)/100.  # in units of g
    PGV = shakemap.getLayer('pgv').subdivide(gdict).getData().astype(float)  # cm/sec
    M = shakemap.getEventDict()['magnitude']

    # Get critical accelerations in g
    Ac = np.empty(np.shape(susdat))
    Ac[(susdat < 1) | (susdat > 10)] = 9999.
    Ac[susdat == 1] = 0.6
    Ac[susdat == 2] = 0.5
    Ac[susdat == 3] = 0.4
    Ac[susdat == 4] = 0.35
    Ac[susdat == 5] = 0.3
    Ac[susdat == 6] = 0.25
    Ac[susdat == 7] = 0.2
    Ac[susdat == 8] = 0.15
    Ac[susdat == 9] = 0.1
    Ac[susdat == 10] = 0.05
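    # The assignments above implement a lookup from HAZUS susceptibility
    # category (1-10) to critical acceleration in g, e.g. as a table (sketch):
    #   ac_table = {1: 0.6, 2: 0.5, 3: 0.4, 4: 0.35, 5: 0.3,
    #               6: 0.25, 7: 0.2, 8: 0.15, 9: 0.1, 10: 0.05}
    # with 9999. as the sentinel for categories outside 1-10.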

    # Delete sus and susdat now, if they don't need to be output, to free up memory
    if saveinputs is False:
        del susdat, sus

    if modeltype == 'coverage':
        areal = np.zeros(np.shape(PGA))
        # This seems to be slow for large matrices
        areal[(PGA >= Ac) & (Ac == 0.6)] = 0.01
        areal[(PGA >= Ac) & (Ac == 0.5)] = 0.02
        areal[(PGA >= Ac) & (Ac == 0.4)] = 0.03
        areal[(PGA >= Ac) & (Ac == 0.35)] = 0.05
        areal[(PGA >= Ac) & (Ac == 0.3)] = 0.08
        areal[(PGA >= Ac) & (Ac == 0.25)] = 0.1
        areal[(PGA >= Ac) & (Ac == 0.2)] = 0.15
        areal[(PGA >= Ac) & (Ac == 0.15)] = 0.2
        areal[(PGA >= Ac) & (Ac == 0.1)] = 0.25
        areal[(PGA >= Ac) & (Ac == 0.05)] = 0.3
        # (An equivalent element-wise double loop over the grid was tried and
        # was about twice as slow, so the masked assignments above are kept.)

    elif modeltype == 'dn_hazus' or modeltype == 'dn_prob':
        ed_low, ed_high = est_disp(Ac, PGA)
        ed_mean = np.mean((np.dstack((ed_low, ed_high))), axis=2)  # Get mean estimated displacements
        dn = ed_mean * numcycles(M) * PGA
    else:  # Calculate newmark displacement using a regression model
        if regressionmodel == 'J_PGA':
            dn = J_PGA(Ac, PGA)
        elif regressionmodel == 'J_PGA_M':
            dn = J_PGA_M(Ac, PGA, M)
        elif regressionmodel == 'RS_PGA_M':
            dn = RS_PGA_M(Ac, PGA, M)
        elif regressionmodel == 'RS_PGA_PGV':
            dn = RS_PGA_PGV(Ac, PGA, PGV)
        else:
            print('Unrecognized model, using J_PGA\n')
            dn = J_PGA(Ac, PGA)

    # Calculate probability from dn, if necessary for selected model
    if modeltype == 'ac_classic_prob' or modeltype == 'dn_prob':
        if probtype.lower() == 'jibson2000':
            PROB = 0.335*(1-np.exp(-0.048*dn**1.565))
            dnthresh = None
        elif probtype.lower() == 'threshold':
            PROB = dn.copy()
            PROB[PROB <= dnthresh] = 0
            PROB[PROB > dnthresh] = 1
        else:
            print('Invalid probtype, assuming jibson2000')
            PROB = 0.335*(1-np.exp(-0.048*dn**1.565))
            dnthresh = None

    # Turn output and inputs into grids and put in the maplayers dictionary
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])

    if modeltype == 'coverage':
        maplayers['model'] = {'grid': GDALGrid(areal, gdict), 'label': 'Areal coverage', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'coverage', 'shakemap': shakedetail, 'parameters': {'modeltype': modeltype}}}
    elif modeltype == 'dn_hazus':
        maplayers['model'] = {'grid': GDALGrid(dn, gdict), 'label': 'Dn (cm)', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'displacement', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'modeltype': modeltype}}}
    elif modeltype == 'ac_classic_dn':
        maplayers['model'] = {'grid': GDALGrid(dn, gdict), 'label': 'Dn (cm)', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'displacement', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'modeltype': modeltype}}}
    elif modeltype == 'dn_prob':
        maplayers['model'] = {'grid': GDALGrid(PROB, gdict), 'label': 'Landslide Probability', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'probability', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'dnthresh_cm': dnthresh, 'modeltype': modeltype, 'probtype': probtype}}}
    elif modeltype == 'ac_classic_prob':
        maplayers['model'] = {'grid': GDALGrid(PROB, gdict), 'label': 'Landslide Probability', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'probability', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'dnthresh_cm': dnthresh, 'modeltype': modeltype, 'probtype': probtype}}}

    if saveinputs is True:
        maplayers['suscat'] = {'grid': sus, 'label': 'Susceptibility Category', 'type': 'input', 'description': {'name': sussref, 'longref': suslref, 'units': 'Category'}}
        maplayers['Ac'] = {'grid': GDALGrid(Ac, gdict), 'label': 'Ac (g)', 'type': 'output', 'description': {'units': 'g', 'shakemap': shakedetail}}
        maplayers['pga'] = {'grid': GDALGrid(PGA, gdict), 'label': 'PGA (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        if 'pgv' in regressionmodel.lower():
            maplayers['pgv'] = {'grid': GDALGrid(PGV, gdict), 'label': 'PGV (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
        if 'dn' not in modeltype.lower() and modeltype != 'coverage':
            maplayers['dn'] = {'grid': GDALGrid(dn, gdict), 'label': 'Dn (cm)', 'type': 'output', 'description': {'units': 'displacement', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'modeltype': modeltype}}}

    return maplayers
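
# Hypothetical usage sketch for HAZUS() above; the config path and ShakeMap
# grid file are assumptions, not files shipped with the code.
from configobj import ConfigObj

config = ConfigObj('hazus_config.ini')  # assumed config defining input layers
layers = HAZUS('grid.xml', config, modeltype='coverage', saveinputs=True)
coverage = layers['model']['grid'].getData()  # estimated areal coverage array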
Example #12
def classic(shakefile, config, uncertfile=None, saveinputs=False, regressionmodel='J_PGA', probtype='jibson2000', slopediv=1., codiv=1., bounds=None):
    """This function uses the Newmark method to estimate probability of failure at each grid cell.
    Factor of Safety and critcal accelerations are calculated following Jibson et al. (2000) and the
    Newmark displacement is estimated using PGA, PGV, and/or Magnitude (depending on equation used)
    from Shakemap with regression equations from Jibson (2007), Rathje and Saygili (2008) and
    Saygili and Rathje (2009)

    :param shakefile: URL or complete file path to the location of the ShakeMap to use as input
    :type shakefile: string
    :param config: Model configuration file object containing locations of input files and other input values config = ConfigObj(configfilepath)
    :type config: ConfigObj
    :param uncertfile: Complete file path to the uncertainty.xml for the shakefile; if not None, the model is also computed for +/- 1 standard deviation of ground motion in addition to the best estimate
    :type uncertfile: string
    :param saveinputs: Whether or not to return the model input layers, False (default) returns only the model output (one layer)
    :type saveinputs: boolean
    :param regressionmodel:
        Newmark displacement regression model to use
        'J_PGA' (default) - PGA-based model from Jibson (2007) - equation 6
        'J_PGA_M' - PGA and M-based model from Jibson (2007) - equation 7
        'RS_PGA_M' - PGA and M-based model from Rathje and Saygili (2009)
        'RS_PGA_PGV' - PGA and PGV-based model from Saygili and Rathje (2008) - equation 6
    :type regressionmodel: string
    :param probtype: Method used to estimate probability. Entering 'jibson2000' uses equation 5 from Jibson et al. (2000) to estimate probability from Newmark displacement. 'threshold' uses a specified threshold of Newmark displacement (defined in config file) and assumes anything greater than this threshold fails
    :type probtype: string
    :param slopediv: Divide slope by this number to get slope in degrees (Verdin datasets need to be divided by 100)
    :type slopediv: float
    :param codiv: Divide cohesion by this number to get reasonable numbers (for the Godt method, cohesion needs to be divided by 10 because that is how the model was calibrated, but values are reasonable without dividing for regular analysis)
    :type codiv: float
    :param bounds: Boundaries to compute over if different from ShakeMap boundaries as dictionary with keys 'xmin', 'xmax', 'ymin', 'ymax'
    :type bounds: dictionary

    :returns maplayers:  Dictionary containing output and input layers (if saveinputs=True) along with metadata formatted like maplayers['layer name']={'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle, potentially including source information'}
    :type maplayers: OrderedDict

    :raises NameError: when unable to parse the config correctly (probably a formatting issue in the configfile) or when unable to find the shakefile (ShakeMap URL or filepath) - these cause the program to end
    If probtype does not match a predefined probability type, a warning is printed and 'jibson2000' is used

    """
    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # Parse config - should make it so it uses defaults if any are missing...
    try:
        slopefile = config['mechanistic_models']['classic_newmark']['layers']['slope']['file']
        slopeunits = config['mechanistic_models']['classic_newmark']['layers']['slope']['units']
        cohesionfile = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['file']
        cohesionunits = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['units']
        frictionfile = config['mechanistic_models']['classic_newmark']['layers']['friction']['file']
        frictionunits = config['mechanistic_models']['classic_newmark']['layers']['friction']['units']

        thick = float(config['mechanistic_models']['classic_newmark']['parameters']['thick'])
        uwt = float(config['mechanistic_models']['classic_newmark']['parameters']['uwt'])
        nodata_cohesion = float(config['mechanistic_models']['classic_newmark']['parameters']['nodata_cohesion'])
        nodata_friction = float(config['mechanistic_models']['classic_newmark']['parameters']['nodata_friction'])
        try:
            dnthresh = float(config['mechanistic_models']['classic_newmark']['parameters']['dnthresh'])
        except:
            if probtype == 'threshold':
                dnthresh = 5.
                print('Unable to find dnthresh in config, using 5cm')
            else:
                dnthresh = None
        fsthresh = float(config['mechanistic_models']['classic_newmark']['parameters']['fsthresh'])
        acthresh = float(config['mechanistic_models']['classic_newmark']['parameters']['acthresh'])
        slopethresh = float(config['mechanistic_models']['classic_newmark']['parameters']['slopethresh'])
        try:
            m = float(config['mechanistic_models']['classic_newmark']['parameters']['m'])
        except:
            print('no constant saturated thickness specified, m=0 if no watertable file is found')
            m = 0.
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    try:  # Try to fetch source information from config
        modelsref = config['mechanistic_models']['classic_newmark']['shortref']
        modellref = config['mechanistic_models']['classic_newmark']['longref']
        slopesref = config['mechanistic_models']['classic_newmark']['layers']['slope']['shortref']
        slopelref = config['mechanistic_models']['classic_newmark']['layers']['slope']['longref']
        cohesionsref = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['shortref']
        cohesionlref = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['longref']
        frictionsref = config['mechanistic_models']['classic_newmark']['layers']['friction']['shortref']
        frictionlref = config['mechanistic_models']['classic_newmark']['layers']['friction']['longref']
    except:
        print('Was not able to retrieve all references from config file. Continuing')

    # Cut and resample all files
    shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    slpdict = GDALGrid.getFileGeoDict(slopefile)
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax'] or shkgdict.ymin > bounds['ymin'] or shkgdict.ymax < bounds['ymax']:
            print('Specified bounds are outside shakemap area, using ShakeMap bounds instead')
            bounds = None
    if bounds is not None:
        tempgdict = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'], 'xmax': bounds['xmax'], 'ymax': bounds['ymax'], 'dx': 100., 'dy': 100., 'nx': 100., 'ny': 100.}, adjust='res')
        gdict = slpdict.getBoundsWithin(tempgdict)
    else:  # Get boundaries from shakemap if not specified
        gdict = slpdict.getBoundsWithin(shkgdict)

    # Load in slope file
    slopegrid = GDALGrid.load(slopefile, samplegeodict=gdict, resample=False)
    gdict = slopegrid.getGeoDict()  # Get this again just in case it changed
    slope = slopegrid.getData().astype(float)/slopediv  # Adjust slope to degrees, if needed
    # Change any zero slopes to a very small number to avoid dividing by zero later
    slope[slope == 0] = 1e-8

    # Load in shakefile
    if not os.path.isfile(shakefile):
        if isURL(shakefile):
            shakefile = getGridURL(shakefile)  # returns a file object
        else:
            raise NameError('Could not find "%s" as a file or a valid url' % shakefile)

    # Load in shakemap, resample to slope file (this will be important when go to higher res)
    shakemap = ShakeGrid.load(shakefile, samplegeodict=gdict, resample=True, method='linear', adjust='res')
    M = shakemap.getEventDict()['magnitude']
    # Read in uncertainty layer, if present
    if uncertfile is not None:
        try:
            uncert = ShakeGrid.load(uncertfile, samplegeodict=gdict, resample=True, method='linear', adjust='res')
        except:
            print('Could not read uncertainty file, ignoring uncertainties')
            uncertfile = None

    # Read in the cohesion and friction files, resampled to slope grid
    cohesion = GDALGrid.load(cohesionfile, samplegeodict=gdict, resample=True, method='nearest').getData().astype(float)/codiv
    cohesion[np.isnan(cohesion)] = nodata_cohesion
    friction = GDALGrid.load(frictionfile, samplegeodict=gdict, resample=True, method='nearest').getData().astype(float)
    friction[np.isnan(friction)] = nodata_friction

    # See if there is a water table depth file and read it in if there is
    try:
        waterfile = config['mechanistic_models']['classic_newmark']['layers']['watertable']['file']
        watertable = GDALGrid.load(waterfile, samplegeodict=gdict, resample=True, method='linear').getData()  # Needs to be in meters!
        uwtw = float(config['mechanistic_models']['classic_newmark']['parameters']['uwtw'])
        try:
            watersref = config['mechanistic_models']['classic_newmark']['layers']['watertable']['shortref']
            waterlref = config['mechanistic_models']['classic_newmark']['layers']['watertable']['longref']
        except:
            print('Was not able to retrieve water table references from config file. Continuing')

    except:
        print(('Water table file not specified or readable, assuming constant saturated thickness proportion of %0.1f' % m))
        watertable = None
        try:
            uwtw = float(config['mechanistic_models']['classic_newmark']['parameters']['uwtw'])
        except:
            print('Could not read soil wet unit weight, using 18.8 kN/m3')
            uwtw = 18.8

    # Factor of safety
    if watertable is not None:
        watertable[watertable > thick] = thick
        m = (thick - watertable)/thick
    FS = cohesion/(uwt*thick*np.sin(slope*(np.pi/180.))) + np.tan(friction*(np.pi/180.))/np.tan(slope*(np.pi/180.)) - (m*uwtw*np.tan(friction*(np.pi/180.)))/(uwt*np.tan(slope*(np.pi/180.)))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    Ac = (FS-1.)*np.sin(slope*(np.pi/180.))  # This gives ac in g, equations that multiply by g give ac in m/s2
    Ac[Ac < acthresh] = acthresh
    Ac[slope < slopethresh] = float('nan')

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = shakemap.getLayer('pga').getData().astype(float)/100.
    PGV = shakemap.getLayer('pgv').getData().astype(float)
    if uncertfile is not None:
        stdpga = uncert.getLayer('stdpga')
        stdpgv = uncert.getLayer('stdpgv')
        # Estimate PGA +- 1std
        PGAmin = np.exp(np.log(PGA*100.) - stdpga.getData())/100.
        PGAmax = np.exp(np.log(PGA*100.) + stdpga.getData())/100.
        PGVmin = np.exp(np.log(PGV) - stdpgv.getData())
        PGVmax = np.exp(np.log(PGV) + stdpgv.getData())

    np.seterr(invalid='ignore')  # Ignore errors so still runs when Ac > PGA, just leaves nan instead of crashing

    if regressionmodel == 'J_PGA':
        Dn = J_PGA(Ac, PGA)
        if uncertfile is not None:
            Dnmin = J_PGA(Ac, PGAmin)
            Dnmax = J_PGA(Ac, PGAmax)
    elif regressionmodel == 'J_PGA_M':
        Dn = J_PGA_M(Ac, PGA, M)
        if uncertfile is not None:
            Dnmin = J_PGA_M(Ac, PGAmin, M)
            Dnmax = J_PGA_M(Ac, PGAmax, M)
    elif regressionmodel == 'RS_PGA_M':
        Dn = RS_PGA_M(Ac, PGA, M)
        if uncertfile is not None:
            Dnmin = RS_PGA_M(Ac, PGAmin, M)
            Dnmax = RS_PGA_M(Ac, PGAmax, M)
    elif regressionmodel == 'RS_PGA_PGV':
        Dn = RS_PGA_PGV(Ac, PGA, PGV)
        if uncertfile is not None:
            Dnmin = RS_PGA_PGV(Ac, PGAmin, PGVmin)
            Dnmax = RS_PGA_PGV(Ac, PGAmax, PGVmax)
    else:
        print('Unrecognized regression model, aborting')
        return

    units = 'probability'
    label = 'Landslide Probability'
    if probtype.lower() == 'jibson2000':
        PROB = 0.335*(1-np.exp(-0.048*Dn**1.565))
        dnthresh = None
        if uncertfile is not None:
            PROBmin = 0.335*(1-np.exp(-0.048*Dnmin**1.565))
            PROBmax = 0.335*(1-np.exp(-0.048*Dnmax**1.565))
    elif probtype.lower() == 'threshold':
        PROB = Dn.copy()
        PROB[PROB <= dnthresh] = 0
        PROB[PROB > dnthresh] = 1
        units = 'prediction'
        label = 'Predicted Landslides'
        if uncertfile is not None:
            PROBmin = Dnmin.copy()
            PROBmin[PROBmin <= dnthresh] = 0
            PROBmin[PROBmin > dnthresh] = 1
            PROBmax = Dnmax.copy()
            PROBmax[PROBmax <= dnthresh] = 0
            PROBmax[PROBmax > dnthresh] = 1
    else:
        print('Invalid probtype, assuming jibson2000')
        PROB = 0.335*(1-np.exp(-0.048*Dn**1.565))
        dnthresh = None
        if uncertfile is not None:
            PROBmin = 0.335*(1-np.exp(-0.048*Dnmin**1.565))
            PROBmax = 0.335*(1-np.exp(-0.048*Dnmax**1.565))

    # Turn output and inputs into grids and put in the maplayers dictionary
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])

    if watertable is not None:
        des = 'variable'
    else:
        des = m
    description = {'name': modelsref, 'longref': modellref, 'units': units, 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'thickness_m': thick, 'unitwt_kNm3': uwt, 'dnthresh_cm': dnthresh, 'acthresh_g': acthresh, 'fsthresh': fsthresh, 'slopethresh': slopethresh, 'sat_proportion': des}}

    maplayers['model'] = {'grid': GDALGrid(PROB, gdict), 'label': label, 'type': 'output', 'description': description}
    if uncertfile is not None:
        maplayers['modelmin'] = {'grid': GDALGrid(PROBmin, gdict), 'label': label+' -1std', 'type': 'output', 'description': description}
        maplayers['modelmax'] = {'grid': GDALGrid(PROBmax, gdict), 'label': label+' +1std', 'type': 'output', 'description': description}

    if saveinputs is True:
        maplayers['pga'] = {'grid': GDALGrid(PGA, gdict), 'label': 'PGA (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        maplayers['FS'] = {'grid': GDALGrid(FS, gdict), 'label': 'Factor of Safety', 'type': 'input', 'description': {'units': 'unitless'}}
        maplayers['Ac'] = {'grid': GDALGrid(Ac, gdict), 'label': 'Critical acceleration (g)', 'type': 'input'}
        maplayers['Dn'] = {'grid': GDALGrid(Dn, gdict), 'label': 'Newmark Displacement (cm)', 'type': 'input'}
        maplayers['slope'] = {'grid': GDALGrid(slope, gdict), 'label': r'Slope ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': slopesref, 'longref': slopelref}}
        maplayers['cohesion'] = {'grid': GDALGrid(cohesion, gdict), 'label': 'Cohesion (kPa)', 'type': 'input', 'description': {'units': 'kPa (adjusted)', 'name': cohesionsref, 'longref': cohesionlref}}
        maplayers['friction angle'] = {'grid': GDALGrid(friction, gdict), 'label': r'Friction angle ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': frictionsref, 'longref': frictionlref}}
        if uncertfile is not None:
            maplayers['pgamin'] = {'grid': GDALGrid(PGAmin, gdict), 'label': 'PGA - 1std (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
            maplayers['pgamax'] = {'grid': GDALGrid(PGAmax, gdict), 'label': 'PGA + 1std (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        if 'PGV' in regressionmodel:
            maplayers['pgv'] = {'grid': GDALGrid(PGV, gdict), 'label': 'PGV (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
            if uncertfile is not None:
                maplayers['pgvmin'] = {'grid': GDALGrid(PGVmin, gdict), 'label': 'PGV - 1std (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
                maplayers['pgvmax'] = {'grid': GDALGrid(PGVmax, gdict), 'label': 'PGV + 1std (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
        if watertable is not None:
            maplayers['sat thick prop'] = {'grid': GDALGrid(m, gdict), 'label': 'Saturated thickness proportion [0,1]', 'type': 'input', 'description': {'units': 'proportion', 'name': watersref, 'longref': waterlref}}

    return maplayers
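
# Hypothetical usage sketch for classic() above; file names are assumptions.
# Verdin-derived slope grids store slope*100, hence slopediv=100.
from configobj import ConfigObj

config = ConfigObj('classic_newmark.ini')  # assumed config with layer paths
layers = classic('grid.xml', config, probtype='jibson2000', slopediv=100.)
prob = layers['model']['grid'].getData()  # landslide probability array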
Example #13
    def __init__(self, config, shakefile, model, uncertfile=None):
        """Set up the logistic model

        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param model: Name of model defined in config that should be run for the event of interest
        :type model: string
        :param uncertfile: Full file path to the uncertainty.xml file for the event of interest; if provided, the model is also set up for +/- 1 standard deviation of ground motion
        :type uncertfile: string

        """
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.' % (model, config))
        #do everything here short of calculations - parse config, assemble eqn strings, load data.

        self.model = model
        cmodel = config['logistic_models'][model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [value for term, value in cmodel['terms'].items() if 'pga' in value.lower() or 'pgv' in
                       value.lower() or 'mmi' in value.lower()]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to one of the files in the layer section.')

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month)-1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid GMT or ESRI file type')

        #now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict, resample=True, doPadding=True, adjust='res')

        # take uncertainties into account
        if uncertfile is not None:
            try:
                self.uncert = ShakeGrid.load(uncertfile, samplegeodict=sampledict, resample=True, doPadding=True,
                                             adjust='res')
            except:
                print('Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the term with the shakemap input and replace for these nuggets
            for k, nug in enumerate(self.nuggets):
                if "self.shakemap.getLayer('pga').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('pga').getData()", "(np.exp(np.log(self.shakemap.getLayer('pga').getData()) - self.uncert.getLayer('stdpga').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('pga').getData()", "(np.exp(np.log(self.shakemap.getLayer('pga').getData()) + self.uncert.getLayer('stdpga').getData()))")
                elif "self.layerdict['pgv'].getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('pgv').getData()", "(np.exp(np.log(self.shakemap.getLayer('pgv').getData()) - self.uncert.getLayer('stdpgv').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('pgv').getData()", "(np.exp(np.log(self.shakemap.getLayer('pgv').getData()) + self.uncert.getLayer('stdpgv').getData()))")
                elif "self.layerdict['mmi'].getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('mmi').getData()", "(np.exp(np.log(self.shakemap.getLayer('mmi').getData()) - self.uncert.getLayer('stdmmi').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('mmi').getData()", "(np.exp(np.log(self.shakemap.getLayer('mmi').getData()) + self.uncert.getLayer('stdmmi').getData()))")
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config['logistic_models'][model]['slopemin'])
            self.slopemax = float(config['logistic_models'][model]['slopemax'])
        except:
            print('could not find slopemin and/or slopemax in config, no limits will be applied')
            self.slopemin = 0.
            self.slopemax = 90.
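
    # The nugget strings assembled above join into an equation like
    # 'b0 + (c1 * term1) + (c2 * term2) + ...', where each term references a
    # grid, e.g. self.shakemap.getLayer('pga').getData(). A calculate() step
    # elsewhere presumably evaluates this string and applies the logistic
    # link; a minimal sketch of that idea (an assumption, not the class's
    # actual method):
    #   x = eval(self.equation)       # linear predictor over the grids
    #   P = 1. / (1. + np.exp(-x))    # probability of failure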
Example #14
def holzer_liq(shakefile,
               config,
               uncertfile=None,
               saveinputs=False,
               modeltype=None,
               displmodel=None,
               probtype=None,
               bounds=None):
    """
    Method for computing the probability of liquefaction using the Holzer
    method, with the Wills et al. (2015) Vs30 map of California defining the
    susceptibility classes and the Fan et al. global water table model
    providing water table depth.
    """
    layers = config['holzer_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------------
    # Loading info
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile,
                              fgeodict,
                              resample=True,
                              method='linear',
                              doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    #---------------------------------------------------------------------------
    # Logistic function parameters from Vs30
    #---------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    a0 = np.zeros_like(vs30)
    b0 = np.zeros_like(vs30)
    c0 = np.zeros_like(vs30)
    a1 = np.zeros_like(vs30)
    b1 = np.zeros_like(vs30)
    c1 = np.zeros_like(vs30)
    for k, v in config['holzer_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        a0[ind] = v[1]
        b0[ind] = v[2]
        c0[ind] = v[3]
        a1[ind] = v[4]
        b1[ind] = v[5]
        c1[ind] = v[6]

    #---------------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file,
                            fgeodict,
                            resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = np.nan_to_num(wtd_grid.getData())

    # Compute water weights
    w0, w1 = get_water_weights(tmp)

    #---------------------------------------------------------------------------
    # Compute probability of liquefaction
    #---------------------------------------------------------------------------
    prob0 = get_prob(PGA, a0, b0, c0, mag)
    prob1 = get_prob(PGA, a1, b1, c1, mag)
    prob = prob0 * w0 + prob1 * w1

    #---------------------------------------------------------------------------
    # Turn output and inputs into grids and put in the maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['holzer_liq_cal']['shortref']
    modellref = config['holzer_liq_cal']['longref']
    modeltype = 'Holzer/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(prob, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'probability',
            'shakemap': shakedetail,
            'parameters': {
                'modeltype': modeltype
            }
        }
    }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {
                'units': 'm/s'
            }
        }
        maplayers['wtd'] = {
        'grid': GDALGrid(wtd_grid.getData(), fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {
                'units': 'm'
            }
        }
    return maplayers
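
# Note on the combination above: prob = prob0*w0 + prob1*w1 blends two
# logistic curves with water-table-depth weights from get_water_weights(),
# which presumably sum to 1. Worked example: with w0 = 0.75, w1 = 0.25,
# prob0 = 0.2 and prob1 = 0.4, the blended probability is
# 0.75*0.2 + 0.25*0.4 = 0.25.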
Example #15
def check_input_extents(config, shakefile=None, bounds=None):
    """Make sure all input files exist and cover the extent desired

    Args:
        config: configObj of a single model
        shakefile: path to ShakeMap grid.xml file (used for bounds). If not
            provided, bounds must be provided
        bounds: dictionary of bounds with keys: 'xmin', 'xmax', 'ymin', 'ymax'

    Returns:
        tuple containing:
            notcovered: list of files that do not cover the entire area
                defined by bounds or shakefile
            newbounds: new dictionary of bounds of subarea of original
                bounds or shakefile extent that is covered by all input files
    """
    if shakefile is None and bounds is None:
        raise Exception('Must define either a shakemap file or bounds')
    modelname = config.keys()[0]
    # Make dummy geodict to use
    if bounds is None:
        evdict = ShakeGrid.getFileGeoDict(shakefile)
    else:
        evdict = GeoDict.createDictFromBox(
            bounds['xmin'], bounds['xmax'],
            bounds['ymin'], bounds['ymax'],
            0.00001, 0.00001, inside=False)

    # Check extents of all input layers
    notcovered = []
    notcovgdicts = []
    newbounds = None
    for item, value in config[modelname]['layers'].items():
        if 'file' in value.keys():
            filelook = value['file']
            if getFileType(filelook) == 'gmt':
                tmpgd, _ = GMTGrid.getFileGeoDict(filelook)
            else:
                tmpgd, _ = GDALGrid.getFileGeoDict(filelook)
            # See if tempgd contains evdict
            contains = tmpgd.contains(evdict)
            if not contains:
                notcovered.append(filelook)
                notcovgdicts.append(tmpgd)
                # print(filelook)
    if len(notcovered) > 0:
        # Figure out what bounds COULD be run
        xmins = [gd.xmin for gd in notcovgdicts]
        xmaxs = [gd.xmax for gd in notcovgdicts]
        ymins = [gd.ymin for gd in notcovgdicts]
        ymaxs = [gd.ymax for gd in notcovgdicts]

        # Set in by a buffer of 0.05 degrees because mapio doesn't like 
        # when bounds are exactly the same for getboundswithin
        newbounds = dict(xmin=evdict.xmin + 0.05,
                         xmax=evdict.xmax - 0.05,
                         ymin=evdict.ymin + 0.05,
                         ymax=evdict.ymax - 0.05)
        # Which one is the problem?
        if evdict.xmin < np.max(xmins):
            newbounds['xmin'] = np.max(xmins) + 0.05
        if evdict.xmax > np.min(xmaxs):
            newbounds['xmax'] = np.min(xmaxs) - 0.05
        if evdict.ymin < np.max(ymins):
            newbounds['ymin'] = np.max(ymins) + 0.05
        if evdict.ymax > np.min(ymaxs):
            newbounds['ymax'] = np.min(ymaxs) - 0.05

        # See if this is a possible extent
        try:
            test = GeoDict.createDictFromBox(
                newbounds['xmin'], newbounds['xmax'],
                newbounds['ymin'], newbounds['ymax'],
                0.00001, 0.00001, inside=False)
        except BaseException:
            print('Cannot make new bounds that will work')
            newbounds = None

    return notcovered, newbounds
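
# Hypothetical usage sketch for check_input_extents(); the config file name
# and grid path are assumptions.
from configobj import ConfigObj

config = ConfigObj('model_config.ini')  # assumed single-model config
notcovered, newbounds = check_input_extents(config, shakefile='grid.xml')
if notcovered:
    print('Layers not covering the full extent: %s' % notcovered)
    if newbounds is not None:
        print('Largest bounds covered by all inputs: %s' % newbounds)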
Example #16
def test_save():
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir,'test.xml')
    try:
        print('Testing save/read functionality for shakemap grids...')
        pga = np.arange(0,16,dtype=np.float32).reshape(4,4)
        pgv = np.arange(1,17,dtype=np.float32).reshape(4,4)
        mmi = np.arange(2,18,dtype=np.float32).reshape(4,4)
        geodict = GeoDict({'xmin':0.5,'xmax':3.5,
                           'ymin':0.5,'ymax':3.5,
                           'dx':1.0,'dy':1.0,
                           'ny':4,'nx':4})
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        shakeDict = {'event_id':'usabcd1234',
                     'shakemap_id':'usabcd1234',
                     'shakemap_version':1,
                     'code_version':'4.0',
                     'process_timestamp':datetime.utcnow(),
                     'shakemap_originator':'us',
                     'map_status':'RELEASED',
                     'shakemap_event_type':'ACTUAL'}
        eventDict = {'event_id':'usabcd1234',
                     'magnitude':7.6,
                     'depth':1.4,
                     'lat':2.0,
                     'lon':2.0,
                     'event_timestamp':datetime.utcnow(),
                     'event_network':'us',
                     'event_description':'sample event'}
        uncDict = {'pga':(0.0,0),
                   'pgv':(0.0,0),
                   'mmi':(0.0,0)}
        shake = ShakeGrid(layers,geodict,eventDict,shakeDict,uncDict)
        
        print('Testing save/read functionality...')
        shake.save(testfile,version=3)
        shake2 = ShakeGrid.load(testfile)
        for layer in ['pga','pgv','mmi']:
            tdata = shake2.getLayer(layer).getData()
            np.testing.assert_almost_equal(tdata,layers[layer])

        print('Passed save/read functionality for shakemap grids.')

        print('Testing getFileGeoDict method...')
        fgeodict = ShakeGrid.getFileGeoDict(testfile)
        print('Passed getFileGeoDict method.')
        
        print('Testing loading with bounds (no resampling or padding)...')
        sampledict = GeoDict({'xmin':-0.5,'xmax':3.5,
                              'ymin':-0.5,'ymax':3.5,
                              'dx':1.0,'dy':1.0,
                              'ny':5,'nx':5})
        shake3 = ShakeGrid.load(testfile,samplegeodict=sampledict,
                                resample=False,doPadding=False,padValue=np.nan)
        tdata = shake3.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata,layers['pga'])

        print('Passed loading with bounds (no resampling or padding)...')

        print('Testing loading shakemap with padding, no resampling...')
        newdict = GeoDict({'xmin':-0.5,'xmax':4.5,
                           'ymin':-0.5,'ymax':4.5,
                           'dx':1.0,'dy':1.0,
                           'ny':6,'nx':6})
        shake4 = ShakeGrid.load(testfile,samplegeodict=newdict,
                                resample=False,doPadding=True,padValue=np.nan)
        output = np.array([[np.nan,np.nan,np.nan,np.nan,np.nan,np.nan],
                           [np.nan,0.0,1.0,2.0,3.0,np.nan],
                           [np.nan,4.0,5.0,6.0,7.0,np.nan],
                           [np.nan,8.0,9.0,10.0,11.0,np.nan],
                           [np.nan,12.0,13.0,14.0,15.0,np.nan],
                           [np.nan,np.nan,np.nan,np.nan,np.nan,np.nan]])
        tdata = shake4.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata,output)
        print('Passed loading shakemap with padding, no resampling...')

        #make a bigger grid
        pga = np.arange(0,36,dtype=np.float32).reshape(6,6)
        pgv = np.arange(1,37,dtype=np.float32).reshape(6,6)
        mmi = np.arange(2,38,dtype=np.float32).reshape(6,6)
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        geodict = GeoDict({'xmin':0.5,'xmax':5.5,
                           'ymin':0.5,'ymax':5.5,
                           'dx':1.0,'dy':1.0,
                           'ny':6,'nx':6})
        shake = ShakeGrid(layers,geodict,eventDict,shakeDict,uncDict)
        shake.save(testfile,version=3)

        print('Testing resampling, no padding...')
        littledict = GeoDict({'xmin':2.0,'xmax':4.0,
                              'ymin':2.0,'ymax':4.0,
                              'dx':1.0,'dy':1.0,
                              'ny':3,'nx':3})
        shake5 = ShakeGrid.load(testfile,samplegeodict=littledict,resample=True,doPadding=False,padValue=np.nan)
        output = np.array([[10.5,11.5,12.5],
                           [16.5,17.5,18.5],
                           [22.5,23.5,24.5]])
        tdata = shake5.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata,output)
        print('Passed resampling, no padding...')

        print('Testing resampling and padding...')
        pga = np.arange(0,16,dtype=np.float32).reshape(4,4)
        pgv = np.arange(1,17,dtype=np.float32).reshape(4,4)
        mmi = np.arange(2,18,dtype=np.float32).reshape(4,4)
        geodict = GeoDict({'xmin':0.5,'ymax':3.5,
                           'ymin':0.5,'xmax':3.5,
                           'dx':1.0,'dy':1.0,
                           'ny':4,'nx':4})
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        shake = ShakeGrid(layers,geodict,eventDict,shakeDict,uncDict)
        shake.save(testfile,version=3)
        bigdict = GeoDict({'xmin':0.0,'xmax':4.0,
                           'ymin':0.0,'ymax':4.0,
                           'dx':1.0,'dy':1.0,
                           'ny':5,'nx':5})
        shake6 = ShakeGrid.load(testfile,samplegeodict=bigdict,resample=True,doPadding=True,padValue=np.nan)
        tdata = shake6.getLayer('pga').getData()
        output = np.array([[np.nan,np.nan,np.nan,np.nan,np.nan],
                           [np.nan,2.5,3.5,4.5,np.nan],
                           [np.nan,6.5,7.5,8.5,np.nan],
                           [np.nan,10.5,11.5,12.5,np.nan],
                           [np.nan,np.nan,np.nan,np.nan,np.nan]])
        np.testing.assert_almost_equal(tdata,output)
        print('Passed resampling and padding...')
    except Exception as error:
        print('Failed to read grid.xml format file "%s". Error "%s".' % (testfile, str(error)))
        assert 0 == 1
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
Example #17
def hazus_liq(shakefile,
              config,
              uncertfile=None,
              saveinputs=False,
              modeltype=None,
              displmodel=None,
              probtype=None,
              bounds=None):
    """
    Method for computing the probability of liquefaction using the HAZUS
    method, with the Wills et al. (2015) Vs30 map of California defining the
    susceptibility classes and the Fan et al. global water table model
    providing water table depth.
    """
    layers = config['hazus_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------------
    # Loading
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile,
                              fgeodict,
                              resample=True,
                              method='linear',
                              doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    # Correction factor for moment magnitudes other than M=7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188

    #---------------------------------------------------------------------------
    # Susceptibility from Vs30
    #---------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    p_ml = np.zeros_like(vs30)
    a = np.zeros_like(vs30)
    b = np.zeros_like(vs30)
    for k, v in config['hazus_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        if v[1] == "VH":
            p_ml[ind] = 0.25
            a[ind] = 9.09
            b[ind] = -0.82
        if v[1] == "H":
            p_ml[ind] = 0.2
            a[ind] = 7.67
            b[ind] = -0.92
        if v[1] == "M":
            p_ml[ind] = 0.1
            a[ind] = 6.67
            b[ind] = -1.0
        if v[1] == "L":
            p_ml[ind] = 0.05
            a[ind] = 5.57
            b[ind] = -1.18
        if v[1] == "VL":
            p_ml[ind] = 0.02
            a[ind] = 4.16
            b[ind] = -1.08

    # Conditional liquefaction probability for a given susceptibility category
    # at a specified PGA
    p_liq_pga = a * PGA + b
    p_liq_pga = p_liq_pga.clip(min=0, max=1)

    #---------------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file,
                            fgeodict,
                            resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = np.nan_to_num(wtd_grid.getData())

    # Convert to ft
    wt_ft = tmp * 3.28084

    # Correction factor for groundwater depths other than five feet
    k_w = 0.022 * wt_ft + 0.93

    #---------------------------------------------------------------------------
    # Combine to get conditional liquefaction probability
    #---------------------------------------------------------------------------
    p_liq_sc = p_liq_pga * p_ml / k_m / k_w
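    # i.e. P[liquefaction] = P[liq | PGA] * p_ml / (k_m * k_w), combining the
    # conditional probability with the class proportion and the magnitude and
    # water-depth corrections.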

    #---------------------------------------------------------------------------
    # Turn output and inputs into into grids and put in maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['hazus_liq_cal']['shortref']
    modellref = config['hazus_liq_cal']['longref']
    modeltype = 'Hazus/Wills'  # note: overrides the unused modeltype argument
    maplayers['model'] = {
        'grid': GDALGrid(p_liq_sc, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {
                'modeltype': modeltype
            }
        }
    }

    if saveinputs:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {
                'units': 'm/s'
            }
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid.getData(), fgeodict),
            'label': 'Water table depth (m)',
            'type': 'input',
            'description': {
                'units': 'm'
            }
        }
    return maplayers
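
A minimal usage sketch for the function above. The config structure mirrors
the keys the function reads, but the file names and parameter values here are
placeholders, not real package defaults:

# Hypothetical configuration; 'parameters' maps each Vs30 value that appears
# in the Wills et al. map to a (vs30 value, class code) pair, with class
# codes VH, H, M, L, VL as handled in the loop above.
config = {
    'hazus_liq_cal': {
        'shortref': 'HAZUS-MH',
        'longref': 'FEMA HAZUS-MH Technical Manual',
        'layers': {
            'vs30': {'file': 'california_vs30.grd'},
            'watertable': {'file': 'global_wtd.grd',
                           'interpolation': 'linear'},
        },
        'parameters': {
            'p1': (170.0, 'VH'),
            'p2': (270.0, 'H'),
            'p3': (390.0, 'M'),
        },
    }
}

maplayers = hazus_liq('grid.xml', config, saveinputs=True)
print(maplayers['model']['label'])  # 'Probability'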
Example #18
def kritikos_fuzzygamma(shakefile, config, bounds=None):
    """
    Runs kritikos procedure with fuzzy gamma
    """

    cmodel = config['statistic_models']['kritikos_2014']
    gamma = cmodel['gamma_value']

    ## Read in layer files and get data
    layers = cmodel['layers']
    try:
        # Slope
        slope_file = layers['slope']
        # DFF
        dff_file = layers['dff']
        # DFS
        dfs_file = layers['dfs']
        # Slope position
        slope_pos_file = layers['slope_pos']
    except KeyError:
        print('Unable to retrieve grid data.')
        raise

    try:
        # Load in divisors
        div = cmodel['divisor']
        MMI_div = div['MMI']
        slope_div = div['slope']
        dff_div = div['dff']
        dfs_div = div['dfs']
        slope_pos_div = div['slope_pos']
    except KeyError:
        print('Unable to retrieve divisors.')
        raise

    try:
        # Load in powers
        power = cmodel['power']
        MMI_power = power['MMI']
        slope_power = power['slope']
        dff_power = power['dff']
        dfs_power = power['dfs']
        slope_pos_power = power['slope_pos']
    except KeyError:
        print('Unable to retrieve powers.')
        raise

    # Cut and resample all files
    try:
        shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        slpdict = GDALGrid.getFileGeoDict(slope_file)
        if bounds is not None:  # Make sure bounds are within ShakeMap grid
            if (shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax']
                    or shkgdict.ymin > bounds['ymin']
                    or shkgdict.ymax < bounds['ymax']):
                print('Specified bounds are outside shakemap area, using '
                      'ShakeMap bounds instead')
                bounds = None
        if bounds is not None:
            tempgdict = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'],
                                 'xmax': bounds['xmax'], 'ymax': bounds['ymax'],
                                 'dx': 100., 'dy': 100., 'nx': 100., 'ny': 100.},
                                adjust='res')
            gdict = slpdict.getBoundsWithin(tempgdict)
        else:  # Get boundaries from shakemap if not specified
            gdict = slpdict.getBoundsWithin(shkgdict)
    except Exception:
        print('Unable to create base geodict.')
        raise

    # Load in data
    try:
        # Load in slope data
        slopegrid = GDALGrid.load(slope_file, samplegeodict=gdict,
                                  resample=False)
        slope_data = slopegrid.getData().astype(float)
        # Load in MMI
        shakemap = ShakeGrid.load(shakefile, samplegeodict=gdict,
                                  resample=True, method='linear', adjust='res')
        MMI_data = shakemap.getLayer('mmi').getData().astype(float)
        # Load in DFF
        dffgrid = GDALGrid.load(dff_file, samplegeodict=gdict, resample=False)
        dff_data = dffgrid.getData().astype(float)
        # Load in DFS
        dfsgrid = GDALGrid.load(dfs_file, samplegeodict=gdict, resample=False)
        dfs_data = dfsgrid.getData().astype(float)
        # Load in slope position
        slope_pos_grid = GDALGrid.load(slope_pos_file, samplegeodict=gdict,
                                       resample=False)
        slope_pos_data = slope_pos_grid.getData().astype(float)
    except Exception:
        print('Data could not be retrieved.')
        raise

    # Classification bins expected in the model config (ConfigObj syntax),
    # each reclassified to integer categories 1, 2, 3, ...:
    #     [[[classification]]]
    #         MMI = 5, 6, 7, 8, 9
    #         slope = 0-4, 5-9, 10-14, 15-19, 20-24, 25-29, 30-34, 35-39, 40-44, 45-49, 50+
    #         dff = 0-4, 5-9, 10-19, 20-29, 30-39, 40-49, 50+
    #         dfs = 0-0.49, 0.5-0.99, 1.0-1.49, 1.5-1.99, 2.0-2.49, 2.5+
    #         slope_pos = 'Flat', 'Valley', 'Mid-Slope', 'Ridge'
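
The example above does not reach the step where the reclassified layers are
actually combined. For reference, a fuzzy gamma overlay of the kind the
function name refers to combines per-layer membership grids mu_i as
(fuzzy algebraic sum)^gamma * (fuzzy algebraic product)^(1 - gamma). Below is
a minimal NumPy sketch of that operator; the function name and the assumption
that the membership arrays are already computed are mine, not the package's:

import numpy as np

def fuzzy_gamma_overlay(memberships, gamma):
    """Combine fuzzy membership grids with the fuzzy gamma operator.

    memberships: list of arrays with values in [0, 1]; gamma in [0, 1].
    A generic sketch of the operator, not the package's implementation.
    """
    # Fuzzy algebraic product: prod(mu_i); tends to suppress output.
    product = np.ones_like(memberships[0])
    for mu in memberships:
        product *= mu
    # Fuzzy algebraic sum: 1 - prod(1 - mu_i); tends to inflate output.
    algebraic_sum = np.ones_like(memberships[0])
    for mu in memberships:
        algebraic_sum *= (1.0 - mu)
    algebraic_sum = 1.0 - algebraic_sum
    # gamma balances the increasive sum against the decreasive product.
    return algebraic_sum**gamma * product**(1.0 - gamma)

# e.g. combined = fuzzy_gamma_overlay([mu_mmi, mu_slope, mu_dff,
#                                      mu_dfs, mu_slope_pos], gamma)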