Example #1
def make_map(adict):

    imtype = adict['imtype']

    for key in ('std', 'phi', 'tau'):
        if key not in adict['imtdict'] or adict['imtdict'][key] is None:
            continue

        fig1 = draw_uncertainty_map(adict, key)

        if key == 'std':
            ext = '_sigma'
        elif key == 'phi':
            ext = '_phi'
        else:
            ext = '_tau'

        if imtype == 'MMI':
            fileimt = 'intensity'
        else:
            fileimt = oq_to_file(imtype)

        # save to pdf/jpeg
        pdf_file = os.path.join(adict['datadir'], fileimt + ext + '.pdf')
        jpg_file = os.path.join(adict['datadir'], fileimt + ext + '.jpg')

        fig1.savefig(pdf_file, bbox_inches='tight', dpi=adict['pdf_dpi'])
        fig1.savefig(jpg_file, bbox_inches='tight', dpi=adict['img_dpi'])
        plt.close(fig1)
Example #2
def make_map(adict):

    imtype = adict['imtype']

    if imtype == 'thumbnail':
        make_pin_thumbnail(adict)
        return
    elif imtype == 'overlay':
        make_overlay(adict)
        return

    fig1, fig2 = draw_map(adict)

    if imtype == 'MMI':
        # save to pdf/jpeg
        pdf_file = os.path.join(adict['datadir'], 'intensity.pdf')
        jpg_file = os.path.join(adict['datadir'], 'intensity.jpg')
        # save the legend file
        legend_file = os.path.join(adict['datadir'], 'mmi_legend.png')
        fig2.gca().xaxis.set_major_locator(NullLocator())
        fig2.gca().yaxis.set_major_locator(NullLocator())
        fig2.savefig(legend_file, bbox_inches='tight', pad_inches=0)
    else:
        fileimt = oq_to_file(imtype)
        pdf_file = os.path.join(adict['datadir'], '%s.pdf' % (fileimt))
        jpg_file = os.path.join(adict['datadir'], '%s.jpg' % (fileimt))

    fig1.savefig(pdf_file, bbox_inches='tight')
    fig1.savefig(jpg_file, bbox_inches='tight')
Example #3
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTArrays(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')
        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            data = datadict[myimt]
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            axa[0].plot(data['lons'], data['mean'], color='k', label='mean')
            axa[0].plot(data['lons'],
                        data['mean'] + data['std'],
                        '--b',
                        label='mean +/- stddev')
            axa[0].plot(data['lons'], data['mean'] - data['std'], '--b')
            axa[1].plot(data['lons'], data['std'], '-.r', label='stddev')
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(bottom=0)
            fileimt = oq_to_file(myimt)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile)
            plt.close()
Example #4
def do_raster(evid, datapath, oc):

    check_failures(evid, datapath, RasterModule)

    mod = RasterModule(evid)
    mod.execute()
    mod.writeContents()

    driver = gdal.GetDriverByName('ENVI')
    driver.Register()

    imts = oc.getIMTs()

    rzip = os.path.join(datapath, evid, 'current', 'products', 'raster.zip')
    with tempfile.TemporaryDirectory() as tmpdirname:
        with zipfile.ZipFile(rzip, 'r') as zip_ref:
            zip_ref.extractall(tmpdirname)
            for imt in imts:
                component, imt = imt.split('/')
                fname = oq_to_file(imt)
                fname = os.path.join(tmpdirname, fname + '_mean.flt')
                rin = gdal.Open(fname, GA_ReadOnly)
                if rin is None:
                    raise RuntimeError("Couldn't open %s" % fname)
                cols = rin.RasterXSize
                rows = rin.RasterYSize
                band = rin.GetRasterBand(1)
                rgrid = band.ReadAsArray(0, 0, cols, rows)
                comp = oc.getComponents(imt)
                cdata = oc.getIMTGrids(imt, comp[0])['mean']
                assert np.allclose(cdata, rgrid)

                fname = oq_to_file(imt)
                fname = os.path.join(tmpdirname, fname + '_std.flt')
                rin = gdal.Open(fname, GA_ReadOnly)
                if rin is None:
                    raise RuntimeError("Couldn't open %s" % fname)
                cols = rin.RasterXSize
                rows = rin.RasterYSize
                band = rin.GetRasterBand(1)
                rgrid = band.ReadAsArray(0, 0, cols, rows)
                comp = oc.getComponents(imt)
                cdata = oc.getIMTGrids(imt, comp[0])['std']
                assert np.allclose(cdata, rgrid)
Example #5
def test_imt():
    """Test the imt string functions.

    """
    assert oq_to_file('SA(1.0)') == 'psa1p0'
    assert oq_to_file('SA(0.3)') == 'psa0p3'
    assert oq_to_file('SA(15.0)') == 'psa15p0'
    assert oq_to_file('SA(3)') == 'psa3p0'
    assert oq_to_file('SA(.5)') == 'psa0p5'

    try:
        _ = oq_to_file('SA()')
        assert False, "oq_to_file('SA()') should raise ValueError"
    except ValueError:
        pass

    assert file_to_oq('psa1p0') == 'SA(1.0)'
    assert file_to_oq('psa0p3') == 'SA(0.3)'
    assert file_to_oq('psa15p0') == 'SA(15.0)'

    try:
        _ = file_to_oq('psa2')
        assert False, "file_to_oq('psa2') should raise ValueError"
    except ValueError:
        pass

    try:
        _ = file_to_oq('psa2p')
        assert False, "file_to_oq('psa2p') should raise ValueError"
    except ValueError:
        pass

    # Test that a fileimt corresponding to one of PGA, PGV, or MMI
    # is simply mapped back to its uppercase IMT name
    assert file_to_oq('pga') == 'PGA'
    assert file_to_oq('pgv') == 'PGV'
    assert file_to_oq('mmi') == 'MMI'
Example #6
def test_imt():
    """Test the imt string functions.

    """
    assert oq_to_file('SA(1.0)') == 'PSA1p0'
    assert oq_to_file('SA(0.3)') == 'PSA0p3'
    assert oq_to_file('SA(15.0)') == 'PSA15p0'
    assert oq_to_file('SA(3)') == 'PSA3p0'
    assert oq_to_file('SA(.5)') == 'PSA0p5'

    try:
        _ = oq_to_file('SA()')
        assert False, "oq_to_file('SA()') should raise ValueError"
    except ValueError:
        pass

    assert file_to_oq('PSA1p0') == 'SA(1.0)'
    assert file_to_oq('PSA0p3') == 'SA(0.3)'
    assert file_to_oq('PSA15p0') == 'SA(15.0)'

    try:
        _ = file_to_oq('PSA2')
        assert False, "file_to_oq('PSA2') should raise ValueError"
    except ValueError:
        pass

    try:
        _ = file_to_oq('PSA2p')
        assert False, "file_to_oq('PSA2p') should raise ValueError"
    except ValueError:
        pass
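
Taken together, the two test variants above pin down the naming convention: 'SA(T)' maps to a 'psa'/'PSA' file tag with the period's decimal point replaced by 'p', non-spectral IMTs keep their names, and malformed tags raise ValueError. Below is a minimal sketch of conversion functions satisfying the lowercase variant of these tests; the actual shakemap implementations may differ.

import re


def oq_to_file(oqimt):
    """Convert an OpenQuake IMT string such as 'SA(1.0)' to a file tag."""
    if not oqimt.startswith('SA('):
        return oqimt.lower()  # PGA, PGV, MMI pass through lowercased
    period = oqimt[3:-1]  # the text between the parentheses
    if not period:
        raise ValueError('Empty period in %s' % oqimt)
    # float() normalizes 'SA(3)' -> 3.0 and 'SA(.5)' -> 0.5
    return 'psa' + str(float(period)).replace('.', 'p')


def file_to_oq(fileimt):
    """Convert a file tag such as 'psa1p0' back to an OpenQuake IMT string."""
    if fileimt in ('pga', 'pgv', 'mmi'):
        return fileimt.upper()
    match = re.fullmatch(r'psa(\d+)p(\d+)', fileimt)
    if match is None:  # rejects malformed tags such as 'psa2' or 'psa2p'
        raise ValueError('%s is not a valid file IMT' % fileimt)
    return 'SA(%s.%s)' % match.groups()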
Example #7
def make_map(adict):

    imtype = adict['imtype']

    fig1 = draw_uncertainty_map(adict)

    if imtype == 'MMI':
        # save to pdf/jpeg
        pdf_file = os.path.join(adict['datadir'], 'intensity_uncertainty.pdf')
        jpg_file = os.path.join(adict['datadir'], 'intensity_uncertainty.jpg')
    else:
        fileimt = oq_to_file(imtype)
        pdf_file = os.path.join(adict['datadir'],
                                '%s_uncertainty.pdf' % (fileimt))
        jpg_file = os.path.join(adict['datadir'],
                                '%s_uncertainty.jpg' % (fileimt))

    fig1.savefig(pdf_file, bbox_inches='tight', dpi=adict['pdf_dpi'])
    fig1.savefig(jpg_file, bbox_inches='tight', dpi=adict['img_dpi'])
    plt.close(fig1)
Example #8
    def execute(self):
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.info('Creating GIS grids...')
        layers = config['products']['raster']['layers']
        for layer in layers:
            fileimt = oq_to_file(layer)
            imtdict = container.getIMTGrids(layer, 'Larger')
            mean_grid = imtdict['mean']
            std_grid = imtdict['std']
            mean_gdal = GDALGrid.copyFromGrid(mean_grid)
            std_gdal = GDALGrid.copyFromGrid(std_grid)
            mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
            std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
            self.logger.info('Saving %s...' % mean_fname)
            mean_gdal.save(mean_fname)
            self.logger.info('Saving %s...' % std_fname)
            std_gdal.save(std_fname)
Example #9
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Mapping...')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the mapping functions
        layers = config['products']['mapping']['layers']
        if 'topography' in layers and layers['topography'] != '':
            topofile = layers['topography']
        else:
            topofile = None
        if 'roads' in layers and layers['roads'] != '':
            roadfile = layers['roads']
        else:
            roadfile = None
        if 'faults' in layers and layers['faults'] != '':
            faultfile = layers['faults']
        else:
            faultfile = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        if topofile:
            topogrid = read(topofile, samplegeodict=sampledict, resample=False)
        else:
            tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
            topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        extent = (float(xmin), float(ymin), float(xmax), float(ymax))
        if 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            states_provs = list(states_provs.intersecting_geometries(extent))
            if len(states_provs) > 300:
                states_provs = None
            else:
                states_provs = cfeature.NaturalEarthFeature(
                    category='cultural',
                    name='admin_1_states_provinces_lines',
                    scale='10m',
                    facecolor='none')

            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')

            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)

            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)

        if faultfile is not None:
            faults = ShapelyFeature(Reader(faultfile).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor='none')
        else:
            faults = None

        if roadfile is not None:
            roads = ShapelyFeature(Reader(roadfile).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor='none')
            if len(list(roads.intersecting_geometries(extent))) > 200:
                roads = None
            else:
                roads = ShapelyFeature(Reader(roadfile).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        else:
            roads = None

        alist = []
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'faults': faults,
                'datadir': datadir,
                'operator': operator,
                'filter_size': filter_size,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict
            }
            alist.append(d)
            if imtype == 'MMI':
                g = copy.deepcopy(d)
                g['imtype'] = 'thumbnail'
                alist.append(g)
                h = copy.deepcopy(d)
                h['imtype'] = 'overlay'
                alist.append(h)
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.jpg', 'image/jpeg')
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.pdf', 'application/pdf')
                self.contents.addFile('intensityThumbnail',
                                      'Intensity Thumbnail',
                                      'Thumbnail of intensity map.',
                                      'pin-thumbnail.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.pngw', 'text/plain')
            else:
                fileimt = oq_to_file(imtype)
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.jpg', 'image/jpeg')
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
Example #10
def contour_to_files(container, config, output_dir, logger):
    """
    Generate contours of all configured IMT values.

    Args:
      container (ShakeMapOutputContainer): ShakeMapOutputContainer with
          ShakeMap output data.
      config (dict): Product configuration information (from product.conf).
      output_dir (str): Path to directory where output files will be written.
      logger (logging.Logger): Python logging Logger instance.

    Raises:
        LookupError: When configured file format is not supported, or
            when configured IMT is not found in container.

    """
    jsonstr = container.getString('info.json')
    infojson = json.loads(jsonstr)
    event_info = {
        'event_id': infojson['input']['event_information']['event_id'],
        'longitude': infojson['input']['event_information']['longitude'],
        'latitude': infojson['input']['event_information']['latitude']
    }

    imtlist = config['products']['contours']['IMTS'].keys()

    file_format = config['products']['contours']['format']
    # open a file for writing
    if file_format not in FORMATS:
        raise LookupError(
            'File format %s not supported for contours.' % file_format)
    driver, extension = FORMATS[file_format]
    schema = {'geometry': 'MultiLineString',
              'properties': {'value': 'float',
                             'units': 'str'}}
    crs = {'no_defs': True, 'ellps': 'WGS84',
           'datum': 'WGS84', 'proj': 'longlat'}

    for imtype in imtlist:
        fileimt = oq_to_file(imtype)
        try:
            components = container.getComponents(imtype)
        except LookupError:
            fmt = 'No IMT called %s in container %s. Skipping.'
            logger.warning(fmt % (imtype, container.getFileName()))
            continue
        imtype_spec = config['products']['contours']['IMTS'][imtype]
        filter_size = int(imtype_spec['filter_size'])
        for component in components:
            if component == 'GREATER_OF_TWO_HORIZONTAL':
                fname = 'cont_%s.%s' % (fileimt, extension)
            else:
                fname = 'cont_%s_%s.%s' % (fileimt, component, extension)
            filename = os.path.join(output_dir, fname)
            if os.path.isfile(filename):
                fpath, fext = os.path.splitext(filename)
                flist = glob.glob(fpath + '.*')
                for fname in flist:
                    os.remove(fname)

            # fiona spews a warning here when driver is geojson
            # this warning appears to be un-catchable using
            # with warnings.catch_warnings()
            # or
            # logging.captureWarning()
            # or
            # even redirecting stderr/stdout to IO streams
            # not sure where the warning is coming from,
            # but there appears to be no way to stop it...
            with fiona.drivers():
                vector_file = fiona.open(filename, 'w',
                                         driver=driver,
                                         schema=schema,
                                         crs=crs)

                intervals = None
                if 'intervals' in imtype_spec:
                    intervals = [float(i) for i in imtype_spec['intervals']]

                line_strings = contour(container, imtype, component, intervals)
                for feature in line_strings:
                    vector_file.write(feature)

                logger.debug('Writing contour file %s' % filename)
                vector_file.close()
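
The loop above unpacks a module-level FORMATS table into a fiona driver name and a file extension. That table is not part of this excerpt; a plausible sketch, assuming GeoJSON and shapefile support (an assumption, not the actual shakemap table):

# Assumed layout of the FORMATS table used above: format name ->
# (fiona/OGR driver name, file extension). The real table may differ.
FORMATS = {
    'geojson': ('GeoJSON', 'json'),
    'shapefile': ('ESRI Shapefile', 'shp'),
}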
Example #11
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        event_paths = glob.glob(os.path.join(data_path, "%s*" % self._eventid))
        datalist = []
        sigmas = []
        for path in event_paths:
            datadir = os.path.join(path, 'current', 'products')
            if not os.path.isdir(datadir):
                raise NotADirectoryError('%s is not a valid directory.' %
                                         datadir)
            datafile = os.path.join(datadir, 'shake_result.hdf')
            if not os.path.isfile(datafile):
                raise FileNotFoundError('%s does not exist.' % datafile)

            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            if container.getDataType() != 'points':
                raise NotImplementedError('xtestplot_multi module can only '
                                          'operate on sets of points, not '
                                          'gridded data')

            stas = container.getStationDict()
            ampd = stas['features'][0]['properties'][
                        'channels'][0]['amplitudes'][0]
            if 'ln_sigma' in ampd:
                sigmas.append(ampd['ln_sigma'])
            else:
                sigmas.append(0)
            datadict = {}
            imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
            for myimt in imtlist:
                datadict[myimt] = container.getIMTArrays(
                    myimt, 'GREATER_OF_TWO_HORIZONTAL')
            datalist.append(datadict)
            container.close()

        #
        # Make plots
        #
        colors = ['k', 'b', 'g', 'r', 'c', 'm']
        for myimt in imtlist:
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            for ix, dd in enumerate(datalist):
                data = dd[myimt]
                axa[0].plot(data['lons'],
                            data['mean'],
                            color=colors[ix],
                            label=r'$\sigma_\epsilon = %.2f$' %
                            sigmas[ix])
                axa[1].plot(data['lons'],
                            data['std'],
                            '-.', color=colors[ix],
                            label=r'$\sigma_\epsilon = %.2f$' %
                            sigmas[ix])
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(bottom=0)
            fileimt = oq_to_file(myimt)
            pfile = os.path.join(event_paths[0], 'current', 'products',
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            pfile = os.path.join(event_paths[0], 'current', 'products',
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()
Example #12
    def execute(self):
        """
        Write raster.zip file containing ESRI Raster files of all the IMTs
        in shake_result.hdf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.debug('Creating GIS grids...')
        layers = container.getIMTs()

        # Package up all of these files into one zip file.
        zfilename = os.path.join(datadir, 'rasters.zip')
        zfile = zipfile.ZipFile(zfilename,
                                mode='w',
                                compression=zipfile.ZIP_DEFLATED)

        files_written = []
        for layer in layers:
            fileimt = oq_to_file(layer)
            # This is a bit hacky -- we only produce the raster for the
            # first IMC returned. It should work as long as we only have
            # one IMC produced per ShakeMap run.
            imclist = container.getComponents(layer)
            imtdict = container.getIMTGrids(layer, imclist[0])
            mean_grid = imtdict['mean']
            std_grid = imtdict['std']
            mean_gdal = GDALGrid.copyFromGrid(mean_grid)
            std_gdal = GDALGrid.copyFromGrid(std_grid)
            mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
            mean_hdr = os.path.join(datadir, '%s_mean.hdr' % fileimt)
            std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
            std_hdr = os.path.join(datadir, '%s_std.hdr' % fileimt)
            self.logger.debug('Saving %s...' % mean_fname)
            mean_gdal.save(mean_fname)
            files_written.append(mean_fname)
            files_written.append(mean_hdr)
            self.logger.debug('Saving %s...' % std_fname)
            std_gdal.save(std_fname)
            files_written.append(std_fname)
            files_written.append(std_hdr)
            zfile.write(mean_fname, '%s_mean.flt' % fileimt)
            zfile.write(mean_hdr, '%s_mean.hdr' % fileimt)
            zfile.write(std_fname, '%s_std.flt' % fileimt)
            zfile.write(std_hdr, '%s_std.hdr' % fileimt)

        zfile.close()
        container.close()

        # nuke all of the copies of the files we just put in the zipfile
        for file_written in files_written:
            os.remove(file_written)
Example #13
def create_polygons(container, datadir, logger, max_workers):
    """ Generates a set of closed polygons (with or without holes) using
    the pcontour function, and uses fiona to convert the resulting GeoJSON
    objects into ESRI-style shape files which are then zipped into an
    archive along with .prj, .lyr, and metadata .xml files. A warning will
    be emitted if .lyr, or .xml files cannot be found for the ground motion
    parameter in question.

    Args:
        container (ShakeMapOutputContainer): An open ShakeMap output
            container object.
        datadir (str): The products directory for the event in question.
        logger (logger): This module's logger object.
        max_workers (int): The maximum number of parallel worker processes
            to use when writing shape files (0 disables parallelism).

    Returns:
        (nothing): Nothing.
    """

    component = list(container.getComponents())[0]
    imts = container.getIMTs(component)

    schema = {
        'properties':
        OrderedDict([('AREA', 'float:13.3'), ('PERIMETER', 'float:14.3'),
                     ('PGAPOL_', 'int:12'), ('PGAPOL_ID', 'int:12'),
                     ('GRID_CODE', 'int:12'), ('PARAMVALUE', 'float:14.4')]),
        'geometry':
        'Polygon'
    }

    smdata = os.path.join(get_data_path(), 'gis')
    # Make a directory for the files to live in prior to being zipped
    alist = []
    with tempfile.TemporaryDirectory(dir=datadir) as tdir:
        for imt in imts:
            gdict = container.getIMTGrids(imt, component)
            fgrid = gdict['mean']
            if imt == 'MMI':
                contour_levels = np.arange(0.1, 10.2, 0.2)
                fname = 'mi'
            elif imt == 'PGV':
                fgrid = np.exp(fgrid)
                cont_max = np.ceil(np.max(fgrid)) + 2.0
                contour_levels = np.arange(1.0, cont_max, 2.0)
                if contour_levels.size == 0:
                    contour_levels = np.array([1.0])
                fname = 'pgv'
            else:
                fgrid = np.exp(fgrid)
                cont_max = (np.ceil(100 * np.max(fgrid)) + 2.0) / 100.0
                contour_levels = np.arange(0.01, cont_max, 0.02)
                if contour_levels.size == 0:
                    contour_levels = np.array([0.01])
                fname = oq_to_file(imt)
            a = {
                'fgrid': fgrid,
                'dx': gdict['mean_metadata']['dx'],
                'dy': gdict['mean_metadata']['dy'],
                'xmin': gdict['mean_metadata']['xmin'],
                'ymax': gdict['mean_metadata']['ymax'],
                'contour_levels': contour_levels,
                'tdir': tdir,
                'fname': fname,
                'schema': schema
            }
            alist.append(a)
            copyfile(os.path.join(smdata, 'WGS1984.prj'),
                     os.path.join(tdir, fname + '.prj'))
            lyrfile = os.path.join(smdata, fname + '.lyr')
            if not os.path.isfile(lyrfile):
                logger.warning("No " + fname + ".lyr file in " + smdata)
            else:
                copyfile(lyrfile, os.path.join(tdir, fname + '.lyr'))
            xmlfile = os.path.join(smdata, fname + '.shp.xml')
            if not os.path.isfile(xmlfile):
                logger.warning("No " + fname + ".shp.xml file in " + smdata)
            else:
                copyfile(xmlfile, os.path.join(tdir, fname + '.shp.xml'))

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_shape_files, alist)
                list(results)
        else:
            for adict in alist:
                make_shape_files(adict)

        zfilename = os.path.join(datadir, 'shape.zip')
        zfile = zipfile.ZipFile(zfilename,
                                mode='w',
                                compression=zipfile.ZIP_DEFLATED)
        filelist = []
        for (dirpath, dirnames, filenames) in os.walk(tdir):
            filelist.extend(filenames)
            break
        for sfile in filelist:
            zfile.write(os.path.join(tdir, sfile), sfile)
        zfile.close()
Example #14
def contour_to_files(container, config, output_dir, logger):
    """
    Generate contours of all configured IMT values.

    Args:
      container (ShakeMapOutputContainer): ShakeMapOutputContainer with
          ShakeMap output data.
      config (dict): Product configuration information (from product.conf).
      output_dir (str): Path to directory where output files will be written.
      logger (logging.Logger): Python logging Logger instance.

    Raises:
        LookupError: When configured file format is not supported, or
            when configured IMT is not found in container.

    """
    jsonstr = container.getString('info.json')
    infojson = json.loads(jsonstr)
    event_info = {
        'event_id': infojson['input']['event_information']['event_id'],
        'longitude': infojson['input']['event_information']['longitude'],
        'latitude': infojson['input']['event_information']['latitude']
    }

    imtlist = config['products']['contours']['IMTS'].keys()

    file_format = config['products']['contours']['format']
    # open a file for writing
    if file_format not in FORMATS:
        raise LookupError('File format %s not supported for contours.' %
                          file_format)
    driver, extension = FORMATS[file_format]
    schema = {
        'geometry': 'MultiLineString',
        'properties': {
            'value': 'float',
            'units': 'str'
        }
    }
    crs = {
        'no_defs': True,
        'ellps': 'WGS84',
        'datum': 'WGS84',
        'proj': 'longlat'
    }

    for imtype in imtlist:
        fileimt = oq_to_file(imtype)
        try:
            components = container.getComponents(imtype)
        except LookupError:
            fmt = 'No IMT called %s in container %s. Skipping.'
            logger.warning(fmt % (imtype, container.getFileName()))
            continue
        imtype_spec = config['products']['contours']['IMTS'][imtype]
        filter_size = int(imtype_spec['filter_size'])
        for component in components:
            fname = '%s_%s.%s' % (fileimt, component, extension)
            filename = os.path.join(output_dir, fname)
            if os.path.isfile(filename):
                fpath, fext = os.path.splitext(filename)
                flist = glob.glob(fpath + '.*')
                for fname in flist:
                    os.remove(fname)

            # fiona spews a warning here when driver is geojson
            # this warning appears to be un-catchable using
            # with warnings.catch_warnings()
            # or
            # logging.captureWarning()
            # or
            # even redirecting stderr/stdout to IO streams
            # not sure where the warning is coming from,
            # but there appears to be no way to stop it...
            with fiona.drivers():
                vector_file = fiona.open(filename,
                                         'w',
                                         driver=driver,
                                         schema=schema,
                                         crs=crs)

                intervals = None
                if 'intervals' in imtype_spec:
                    intervals = [float(i) for i in imtype_spec['intervals']]

                line_strings = contour(container, imtype, component, intervals)
                for feature in line_strings:
                    vector_file.write(feature)

                logger.debug('Writing contour file %s' % filename)
                vector_file.close()
Example #15
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        oc = ShakeMapOutputContainer.load(datafile)
        if oc.getDataType() != 'grid':
            raise NotImplementedError('plotregr module can only operate on '
                                      'gridded data not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # If mapping runs in parallel, then we want this module to run in
        # parallel as well. Otherwise we get weird errors from matplotlib
        max_workers = config['products']['mapping']['max_workers']

        #
        # Cheating here a bit by assuming that the IMTs are the same
        # as the regression IMTs
        #
        rockgrid = {}
        soilgrid = {}
        rocksd = {}
        soilsd = {}
        imtlist = oc.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            rockgrid[myimt], _ = oc.getArray(['attenuation', 'rock', myimt],
                                             'mean')
            soilgrid[myimt], _ = oc.getArray(['attenuation', 'soil', myimt],
                                             'mean')
            rocksd[myimt], _ = oc.getArray(['attenuation', 'rock', myimt],
                                           'std')
            soilsd[myimt], _ = oc.getArray(['attenuation', 'soil', myimt],
                                           'std')
        distances, _ = oc.getArray(['attenuation', 'distances'], 'rrup')

        stations = oc.getStationDict()

        #
        # Make plots
        #
        alist = []
        for myimt in imtlist:
            a = {
                'myimt': myimt,
                'rockgrid': rockgrid,
                'soilgrid': soilgrid,
                'rocksd': rocksd,
                'soilsd': soilsd,
                'stations': stations,
                'distances': distances,
                'eventid': self._eventid,
                'datadir': datadir
            }
            alist.append(a)
            if myimt == 'MMI':
                self.contents.addFile(
                    'miRegr', 'Intensity Regression',
                    'Regression plot of macroseismic '
                    'intensity.', 'mmi_regr.png', 'image/png')
            elif myimt == 'PGA':
                self.contents.addFile(
                    'pgaRegr', 'PGA Regression', 'Regression plot of peak '
                    'ground acceleration (%g).', 'pga_regr.png', 'image/png')
            elif myimt == 'PGV':
                self.contents.addFile(
                    'pgvRegr', 'PGV Regression',
                    'Regression plot of peak ground '
                    'velocity (cm/s).', 'pgv_regr.png', 'image/png')
            else:
                oqimt = imt.from_string(myimt)
                period = str(oqimt.period)
                filebase = oq_to_file(myimt)
                psacap = 'Regression plot of ' + period + ' sec 5% damped ' \
                         'pseudo-spectral acceleration (%g).'
                self.contents.addFile(filebase + 'Regr',
                                      'PSA ' + period + ' sec Regression',
                                      psacap, filebase + '_regr.png',
                                      'image/png')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_plots, alist)
                list(results)
        else:
            for adict in alist:
                make_plots(adict)

        #
        # Make attenuation_curves.json
        #
        jdict = {'eventid': self._eventid}
        jdict['gmpe'] = {}
        for site in ['soil', 'rock']:
            jdict['gmpe'][site] = {}
            for myimt in imtlist:
                jdict['gmpe'][site][myimt] = {}
                jdict['gmpe'][site][myimt]['mean'] = oc.getArray(
                    ['attenuation', site, myimt],
                    'mean')[0].round(decimals=5).tolist()
                jdict['gmpe'][site][myimt]['stddev'] = oc.getArray(
                    ['attenuation', site, myimt],
                    'std')[0].round(decimals=5).tolist()
        jdict['distances'] = {}
        for dtype in ['repi', 'rhypo', 'rjb', 'rrup']:
            jdict['distances'][dtype] = oc.getArray(
                ['attenuation', 'distances'],
                dtype)[0].round(decimals=5).tolist()
        jdict['mean_bias'] = {}
        info = oc.getMetadata()
        for myimt in imtlist:
            jdict['mean_bias'][myimt] = info['output']['ground_motions'][
                myimt]['bias']
        jstring = json.dumps(jdict, allow_nan=False)
        jfile = os.path.join(datadir, 'attenuation_curves.json')
        with open(jfile, 'wt') as f:
            f.write(jstring)
        oc.close()
        cap = "Nominal attenuation curves"
        self.contents.addFile('attenuationCurves', 'Attenuation Curves', cap,
                              'attenuation_curves.json', 'application/json')
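
For reference, a hypothetical consumer of the attenuation_curves.json file written above; the key layout follows the jdict construction in the snippet, and the datadir path is a placeholder:

import json
import os

# Placeholder path; substitute the event's actual products directory.
datadir = '/path/to/<eventid>/current/products'
with open(os.path.join(datadir, 'attenuation_curves.json')) as f:
    curves = json.load(f)

print(curves['eventid'])
print(sorted(curves['gmpe']['rock']))  # the IMTs with attenuation curves
print(len(curves['distances']['rrup']), 'rupture-distance samples')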
Example #16
def make_plots(adict):
    myimt = adict['myimt']
    eventid = adict['eventid']
    datadir = adict['datadir']
    rockgrid = adict['rockgrid']
    soilgrid = adict['soilgrid']
    rocksd = adict['rocksd']
    soilsd = adict['soilsd']
    stations = adict['stations']
    distances = adict['distances']

    plt.figure(figsize=(10, 10))

    plt.semilogx(distances, rockgrid[myimt], 'r', label='rock')
    plt.semilogx(distances, soilgrid[myimt], 'g', label='soil')
    plt.semilogx(distances,
                 rockgrid[myimt] + rocksd[myimt],
                 'r--',
                 label='rock +/- stddev')
    plt.semilogx(distances, rockgrid[myimt] - rocksd[myimt], 'r--')
    plt.semilogx(distances,
                 soilgrid[myimt] + soilsd[myimt],
                 'g--',
                 label='soil +/- stddev')
    plt.semilogx(distances, soilgrid[myimt] - soilsd[myimt], 'g--')

    for station in stations['features']:
        dist = station['properties']['distances']['rrup']
        if dist > distances[-1]:
            continue
        if station['properties']['station_type'] == 'seismic':
            symbol = '^'
            if myimt == 'MMI':
                value = station['properties']['intensity']
                if value != 'null':
                    plt.semilogx(dist, value, symbol + 'k', mfc='none')
            else:
                imtstr = myimt.lower()
                value = np.nan
                for chan in station['properties']['channels']:
                    if chan['name'].endswith('Z') or \
                       chan['name'].endswith('U'):
                        continue
                    for amp in chan['amplitudes']:
                        if amp['name'] != imtstr:
                            continue
                        if amp['flag'] != '' and amp['flag'] != '0':
                            break
                        if amp['value'] is None or \
                                amp['value'] == 'null':
                            break
                        if isinstance(amp['value'], str):
                            thisamp = float(amp['value'])
                        else:
                            thisamp = amp['value']
                        if thisamp <= 0:
                            break
                        if myimt == 'PGV':
                            tmpval = np.log(thisamp)
                        else:
                            tmpval = np.log(thisamp / 100.)
                        if np.isnan(value) or tmpval > value:
                            value = tmpval
                        break
                if not np.isnan(value):
                    plt.semilogx(dist, value, symbol + 'k', mfc='none')
        else:
            symbol = 'o'
            if myimt == 'MMI':
                amp = station['properties']['intensity']
                flag = station['properties']['intensity_flag']
                if flag == '' or flag == '0':
                    if amp is not None and amp != 'null':
                        if isinstance(amp, str):
                            value = float(amp)
                        else:
                            value = amp
                        plt.semilogx(dist, value, symbol + 'k', mfc='none')
            else:
                imtstr = myimt.lower()
                for thing in station['properties']['pgm_from_mmi']:
                    if thing['name'] != imtstr:
                        continue
                    amp = thing['value']
                    if amp is not None and amp != 'null' and amp != 0:
                        if myimt == 'PGV':
                            amp = np.log(amp)
                        else:
                            amp = np.log(amp / 100.)
                        plt.semilogx(dist, amp, symbol + 'k', mfc='none')
                    break

    plt.title(eventid + ': ' + myimt + ' mean')
    plt.xlabel('Rrup (km)')
    if myimt == 'MMI':
        plt.ylabel('MMI')
    elif myimt == 'PGV':
        plt.ylabel('PGV ln(cm/s)')
    else:
        plt.ylabel(myimt + ' ln(g)')
    plt.legend()

    fileimt = oq_to_file(myimt)
    pfile = os.path.join(datadir, fileimt + '_regr.png')
    plt.savefig(pfile)
    plt.close()
Example #17
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('uncertaintymaps module can only '
                                      'operate on gridded data, not sets of '
                                      'points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Uncertainty mapping...')

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the uncertainty mapping functions
        layers = config['products']['mapping']['layers']
        if 'countries' in layers and layers['countries'] != '':
            countries_file = layers['countries']
        else:
            countries_file = None
        if 'states_provs' in layers and layers['states_provs'] != '':
            states_provs_file = layers['states_provs']
        else:
            states_provs_file = None
        if 'oceans' in layers and layers['oceans'] != '':
            oceans_file = layers['oceans']
        else:
            oceans_file = None
        if 'lakes' in layers and layers['lakes'] != '':
            lakes_file = layers['lakes']
        else:
            lakes_file = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
        topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        faults = None
        roads = None
        if states_provs_file is not None:
            states_provs = ShapelyFeature(
                Reader(states_provs_file).geometries(),
                ccrs.PlateCarree(),
                facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            # The feature constructor doesn't necessarily download the
            # data, but we want it to so that multiple threads don't
            # try to do it at once when they actually access the data.
            # So below we just call the geometries() method to trigger
            # the download if necessary.
            _ = states_provs.geometries()

        if countries_file is not None:
            countries = ShapelyFeature(Reader(countries_file).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')
            _ = countries.geometries()

        if oceans_file is not None:
            oceans = ShapelyFeature(Reader(oceans_file).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)
            _ = oceans.geometries()

        if lakes_file is not None:
            lakes = ShapelyFeature(Reader(lakes_file).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)
            _ = lakes.geometries()

        alist = []
        llogo = config['products']['mapping'].get('license_logo') or None
        ltext = config['products']['mapping'].get('license_text') or None
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'roadcolor': layers['roadcolor'],
                'roadwidth': layers['roadwidth'],
                'faults': faults,
                'faultcolor': layers['faultcolor'],
                'faultwidth': layers['faultwidth'],
                'datadir': datadir,
                'operator': operator,
                'filter_size': 0,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict,
                'display_magnitude': self.display_magnitude,
                'pdf_dpi': config['products']['mapping']['pdf_dpi'],
                'img_dpi': config['products']['mapping']['img_dpi'],
                'license_logo': llogo,
                'license_text': ltext,
            }
            alist.append(d)

            #
            # Populate the contents.xml
            #
            for key in ('std', 'phi', 'tau'):
                if key not in d['imtdict'] or d['imtdict'][key] is None:
                    continue

                if key == 'std':
                    ext = '_sigma'
                    utype = ' Total'
                elif key == 'phi':
                    ext = '_phi'
                    utype = ' Within-event'
                else:
                    ext = '_tau'
                    utype = ' Between-event'

                if imtype == 'MMI':
                    fileimt = 'intensity'
                else:
                    fileimt = oq_to_file(imtype)

                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.jpg', 'image/jpeg')
                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
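# The contents.xml bookkeeping above assembles the uncertainty-map file names
# from the IMT and the uncertainty layer. A minimal sketch of that mapping,
# assuming oq_to_file() converts an OpenQuake IMT string such as 'SA(1.0)'
# into a file-safe token like 'psa1p0' (the exact token is oq_to_file's
# business and an assumption here):

UNCERTAINTY_LAYERS = {'std': ('_sigma', ' Total'),
                      'phi': ('_phi', ' Within-event'),
                      'tau': ('_tau', ' Between-event')}


def uncertainty_products(fileimt):
    """Yield (contents key, title, jpg name, pdf name) per layer."""
    for ext, utype in UNCERTAINTY_LAYERS.values():
        yield (fileimt + ext + 'UncertaintyMap',
               fileimt.upper() + utype + ' Uncertainty Map',
               fileimt + ext + '.jpg',
               fileimt + ext + '.pdf')


# e.g. list(uncertainty_products('intensity'))[0] ==
# ('intensity_sigmaUncertaintyMap', 'INTENSITY Total Uncertainty Map',
#  'intensity_sigma.jpg', 'intensity_sigma.pdf')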
Beispiel #18
0
def create_polygons(container,
                    datadir,
                    logger,
                    max_workers,
                    method='pcontour'):
    """ Generates a set of closed polygons (with or without holes) using the
    specified method (either pcontour or skimage), and uses fiona to convert
    the resulting GeoJSON objects into ESRI-style shape files which are then
    zipped into an archive along with .prj, .lyr, and metadata .xml files. A
    warning will be emitted if .lyr, or .xml files cannot be found for the
    ground motion parameter in question.

    Args:
        container (ShakeMapOutputContainer): An open ShakeMap output
            container object.
        datadir (str): The products directory for the event in question.
        logger (logger): This module's logger object.
        method (str): Contouring implementation to use (either 'pcontour' or
            'skimage')

    Returns:
        (nothing): Nothing.
    """

    # gmice info for shakelib.plotting.contour
    config = container.getConfig()
    gmice = get_object_from_config('gmice', 'modeling', config)
    gmice_imts = gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES
    gmice_pers = gmice.DEFINED_FOR_SA_PERIODS

    component = list(container.getComponents())[0]
    imts = container.getIMTs(component)

    if method == 'pcontour':
        schema = {
            'properties': OrderedDict([
                ('AREA', 'float:13.3'), ('PERIMETER', 'float:14.3'),
                ('PGAPOL_', 'int:12'), ('PGAPOL_ID', 'int:12'),
                ('GRID_CODE', 'int:12'), ('PARAMVALUE', 'float:14.4')
            ]),
            'geometry': 'Polygon'
        }
    elif method == 'skimage':
        schema = {
            'properties': OrderedDict([
                ('value', 'float:2.1'), ('units', 'str'),
                ('color', 'str'), ('weight', 'float:13.3')
            ]),
            'geometry': 'MultiLineString'
        }
    else:
        raise ValueError('Unknown contouring method {}'.format(method))

    smdata = os.path.join(get_data_path(), 'gis')
    # Make a directory for the files to live in prior to being zipped
    alist = []
    with tempfile.TemporaryDirectory(dir=datadir) as tdir:
        for imt in imts:
            gdict = container.getIMTGrids(imt, component)
            fgrid = gdict['mean']
            if imt == 'MMI':
                fname = 'mi'
            elif imt == 'PGV':
                fname = 'pgv'
            else:
                fname = oq_to_file(imt)

            if method == 'pcontour':
                my_gmice = None
                if imt == 'MMI':
                    contour_levels = np.arange(0.1, 10.2, 0.2)
                elif imt == 'PGV':
                    fgrid = np.exp(fgrid)
                    cont_max = np.ceil(np.max(fgrid)) + 2.0
                    contour_levels = np.arange(1.0, cont_max, 2.0)
                    if contour_levels.size == 0:
                        contour_levels = np.array([1.0])
                else:
                    fgrid = np.exp(fgrid)
                    cont_max = (np.ceil(100 * np.max(fgrid)) + 2.0) / 100.0
                    contour_levels = np.arange(0.01, cont_max, 0.02)
                    if contour_levels.size == 0:
                        contour_levels = np.array([0.01])
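                    # Worked example (illustrative): if the PGA grid peaks at
                    # exp(max(fgrid)) = 0.237 g, then cont_max =
                    # (ceil(23.7) + 2) / 100 = 0.26 and the levels run
                    # 0.01, 0.03, ..., 0.25 g.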
            else:
                # skimage method chooses its own levels
                contour_levels = None
                # but wants gmice info
                oqimt = OQIMT.from_string(imt)
                if imt == 'MMI' or not isinstance(oqimt, tuple(gmice_imts)) or \
                   (isinstance(oqimt, OQIMT.SA) and oqimt.period not in gmice_pers):
                    my_gmice = None
                else:
                    my_gmice = gmice
            a = {
                'fgrid': fgrid,
                'dx': gdict['mean_metadata']['dx'],
                'dy': gdict['mean_metadata']['dy'],
                'xmin': gdict['mean_metadata']['xmin'],
                'ymax': gdict['mean_metadata']['ymax'],
                'contour_levels': contour_levels,
                'tdir': tdir,
                'fname': fname,
                'schema': schema,
                'imt': imt,
                'gmice': my_gmice,
                'gdict': gdict
            }
            alist.append(a)
            copyfile(os.path.join(smdata, 'WGS1984.prj'),
                     os.path.join(tdir, fname + '.prj'))
            lyrfile = os.path.join(smdata, fname + '.lyr')
            if not os.path.isfile(lyrfile):
                logger.warning("No " + fname + ".lyr file in " + smdata)
            else:
                copyfile(lyrfile, os.path.join(tdir, fname + '.lyr'))
            xmlfile = os.path.join(smdata, fname + '.shp.xml')
            if not os.path.isfile(xmlfile):
                logger.warning("No " + fname + ".shp.xml file in " + smdata)
            else:
                copyfile(xmlfile, os.path.join(tdir, fname + '.shp.xml'))

        worker = partial(make_shape_files, method=method)

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(worker, alist)
                list(results)
        else:
            for adict in alist:
                worker(adict)

        zfilename = os.path.join(datadir, 'shape.zip')
        zfile = zipfile.ZipFile(zfilename,
                                mode='w',
                                compression=zipfile.ZIP_DEFLATED)
        filelist = []
        for (dirpath, dirnames, filenames) in os.walk(tdir):
            filelist.extend(filenames)
            break
        for sfile in filelist:
            zfile.write(os.path.join(tdir, sfile), sfile)
        zfile.close()
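
# A minimal driving sketch (the paths and event id are hypothetical;
# 'container' must be an open ShakeMapOutputContainer as documented above):
#
#     import logging
#     container = ShakeMapOutputContainer.load(
#         '/data/shakemap/ci12345/current/products/shake_result.hdf')
#     create_polygons(container, '/data/shakemap/ci12345/current/products',
#                     logging.getLogger(__name__), max_workers=4,
#                     method='pcontour')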
Beispiel #19
0
    def execute(self):
        """Create high, medium, and low resolution coverage of the mapped
        parameters.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('coverage module can only function on '
                                      'gridded data, not sets of points')

        imtlist = container.getIMTs()
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            fileimt = oq_to_file(imtype)
            oqimt = imt.from_string(imtype)

            imtdict = container.getIMTGrids(imtype, component)
            grid_data = imtdict['mean']
            metadata = imtdict['mean_metadata']

            # property_id is referenced in the coverage document for every
            # IMT, so set it in every branch (only MMI has a canonical URL)
            if imtype == 'MMI':
                description = 'Modified Mercalli Intensity'
                property_id = "https://earthquake.usgs.gov/learn/topics/mercalli.php"  # noqa
                decimals = 1
            elif imtype == 'PGA':
                description = 'Peak Ground Acceleration'
                property_id = None
                units = 'natural logarithm of "g"'
                symbol = 'ln(g)'
                decimals = 2
            elif imtype == 'PGV':
                description = 'Peak Ground Velocity'
                property_id = None
                units = 'natural logarithm of centimeters per second'
                symbol = 'ln(cm/s)'
                decimals = 2
            elif imtype.startswith('SA'):
                description = str(oqimt.period) + \
                    '-second Spectral Acceleration'
                property_id = None
                units = 'natural logarithm of "g"'
                symbol = 'ln(g)'
                decimals = 2
            else:
                raise TypeError("Unknown IMT in coverage module")

            for i in range(3):
                if i == 0:
                    resolution = 'high'
                    fgrid = grid_data
                    decimation = 1
                elif i == 1:
                    resolution = 'medium'
                    fgrid = gaussian_filter(grid_data, sigma=1)
                    decimation = 2
                elif i == 2:
                    resolution = 'low'
                    fgrid = gaussian_filter(grid_data, sigma=2)
                    decimation = 4

                rgrid = fgrid[::decimation, ::decimation]
                ny, nx = rgrid.shape
                rnd_grd = np.flipud(np.around(rgrid,
                                              decimals=decimals)).flatten()
                if imtype == 'MMI':
                    rnd_grd = np.clip(rnd_grd, 1.0, 10.0)
                xstart = metadata["xmin"]
                xstop = metadata["xmin"] + \
                    (nx - 1) * decimation * metadata["dx"]
                ystart = metadata["ymin"]
                ystop = metadata["ymin"] + \
                    (ny - 1) * decimation * metadata["dy"]
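                # Worked example (illustrative): with decimation = 2,
                # metadata["dx"] = 0.0167 degrees, and nx = 100 retained
                # columns, xstop = xstart + 99 * 2 * 0.0167, i.e. roughly
                # 3.3 degrees east of xstart.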

                coverage = {
                    "type": "Coverage",
                    "domain": {
                        "type": "Domain",
                        "domainType": "Grid",
                        "axes": {
                            "x": {"start": xstart, "stop": xstop, "num": nx},
                            "y": {"start": ystart, "stop": ystop, "num": ny}
                        },
                        "referencing": [{
                            "coordinates": ["x", "y"],
                            "system": {
                                "type": "GeographicCRS",
                                "id": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"  # noqa
                            }
                        }]
                    },
                    "parameters": {
                        imtype: {
                            "type": "Parameter",
                            "description": {"en": description},
                            "observedProperty": {
                                "id": property_id,
                                "label": {"en": imtype}
                            }
                        }
                    },
                    "ranges": {
                        imtype: {
                            "type": "NdArray",
                            "dataType": "float",
                            "axisNames": ["y", "x"],
                            "shape": [ny, nx],
                            "values": rnd_grd.tolist()
                        }
                    }
                }
                if imtype == 'MMI':
                    coverage["parameters"]["MMI"]["preferredPalette"] = {
                        "colors": [
                            "rgb(255, 255, 255)", "rgb(255, 255, 255)",
                            "rgb(191, 204, 255)", "rgb(160, 230, 255)",
                            "rgb(128, 255, 255)", "rgb(122, 255, 147)",
                            "rgb(255, 255, 0)", "rgb(255, 200, 0)",
                            "rgb(255, 145, 0)", "rgb(255, 0, 0)",
                            "rgb(200, 0, 0)"
                        ],
                        "extent": [0, 10],
                        "interpolation":
                        "linear"
                    }
                else:
                    coverage["parameters"][imtype]["unit"] = {
                        'label': {
                            "en": units
                        },
                        "symbol": {
                            'value': symbol,
                            'type': "http://www.opengis.net/def/uom/UCUM/"
                        }
                    }

                if component == 'GREATER_OF_TWO_HORIZONTAL':
                    fname = 'coverage_%s_%s_res.covjson' % (fileimt,
                                                            resolution)
                else:
                    fname = 'coverage_%s_%s_%s_res.covjson' % (
                        fileimt, resolution, component)
                filepath = os.path.join(datadir, fname)
                with open(filepath, 'w') as outfile:
                    json.dump(coverage, outfile, separators=(',', ':'))
                self.contents.addFile(
                    imtype + "_" + resolution + '_res_coverage',
                    resolution + '-res ' + imtype.upper() + ' Coverage',
                    'Coverage of ' + resolution + ' resolution ' + imtype,
                    fname, 'application/json')
        container.close()
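# A minimal read-back sketch (not from the source): reconstructing the grid
# from one of the CovJSON files written above. The file name assumes
# oq_to_file('MMI') yields 'mmi'; the 'shape' and 'axisNames' fields follow
# the structure built in execute().

import json

import numpy as np

with open('coverage_mmi_high_res.covjson') as f:
    cov = json.load(f)
rng = cov['ranges']['MMI']
grid = np.array(rng['values']).reshape(rng['shape'])  # rows are y, cols are x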
Beispiel #20
0
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        ic = ShakeMapOutputContainer.load(datafile)
        if ic.getDataType() != 'grid':
            raise NotImplementedError('plotregr module can only operate on '
                                      'gridded data, not sets of points')

        #
        # Cheating here a bit by assuming that the IMTs are the same
        # as the regression IMTs
        #
        rockgrid = {}
        soilgrid = {}
        rocksd = {}
        soilsd = {}
        imtlist = ic.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            rockgrid[myimt], _ = ic.getArray('regression_' + myimt +
                                             '_rock_mean')
            soilgrid[myimt], _ = ic.getArray('regression_' + myimt +
                                             '_soil_mean')
            rocksd[myimt], _ = ic.getArray('regression_' + myimt + '_rock_sd')
            soilsd[myimt], _ = ic.getArray('regression_' + myimt + '_soil_sd')
        distances, _ = ic.getArray('regression_distances')

        stations = ic.getStationDict()

        #
        # Make plots
        #
        for myimt in imtlist:
            plt.figure(figsize=(10, 10))

            plt.semilogx(distances, rockgrid[myimt], 'r', label='rock')
            plt.semilogx(distances, soilgrid[myimt], 'g', label='soil')
            plt.semilogx(distances,
                         rockgrid[myimt] + rocksd[myimt],
                         'r--',
                         label='rock +/- stddev')
            plt.semilogx(distances, rockgrid[myimt] - rocksd[myimt], 'r--')
            plt.semilogx(distances,
                         soilgrid[myimt] + soilsd[myimt],
                         'g--',
                         label='soil +/- stddev')
            plt.semilogx(distances, soilgrid[myimt] - soilsd[myimt], 'g--')

            for station in stations['features']:
                dist = station['properties']['distance']
                if dist > distances[-1]:
                    continue
                if station['properties']['station_type'] == 'seismic':
                    symbol = '^'
                    if myimt == 'MMI':
                        value = station['properties']['intensity']
                        if value != 'null':
                            plt.semilogx(dist, value, symbol + 'k', mfc='none')
                    else:
                        imtstr = myimt.lower()
                        value = np.nan
                        for chan in station['properties']['channels']:
                            if chan['name'].endswith('Z') or \
                               chan['name'].endswith('U'):
                                continue
                            for amp in chan['amplitudes']:
                                if amp['name'] != imtstr:
                                    continue
                                if amp['flag'] != '' and amp['flag'] != '0':
                                    break
                                if amp['value'] == 'null':
                                    break
                                if isinstance(amp['value'], str):
                                    thisamp = float(amp['value'])
                                else:
                                    thisamp = amp['value']
                                if thisamp <= 0:
                                    break
                                # PGV, IA, PGD, and IH amplitudes are stored
                                # in linear units; the rest (PGA, SA) appear
                                # to be in percent-g, hence the division by
                                # 100 before taking the log
                                if myimt in ('PGV', 'IA', 'PGD', 'IH'):
                                    tmpval = np.log(thisamp)
                                else:
                                    tmpval = np.log(thisamp / 100.)
                                if np.isnan(value) or tmpval > value:
                                    value = tmpval
                                break
                        if not np.isnan(value):
                            plt.semilogx(dist, value, symbol + 'k', mfc='none')
                else:
                    symbol = 'o'
                    if myimt == 'MMI':
                        amp = station['properties']['channels'][0][
                            'amplitudes'][0]
                        if amp['flag'] == '' or amp['flag'] == '0':
                            if amp['value'] != 'null':
                                if isinstance(amp['value'], str):
                                    value = float(amp['value'])
                                else:
                                    value = amp['value']
                                plt.semilogx(dist,
                                             value,
                                             symbol + 'k',
                                             mfc='none')
                    else:
                        imtstr = myimt.lower()
                        if imtstr in station['properties']['pgm_from_mmi']:
                            amp = station['properties']['pgm_from_mmi'][
                                imtstr]['value']
                            if amp != 'null' and amp != 0:
                                if myimt in ('PGV', 'IA', 'PGD', 'IH'):
                                    amp = np.log(amp)
                                else:
                                    amp = np.log(amp / 100.)
                                plt.semilogx(dist,
                                             amp,
                                             symbol + 'k',
                                             mfc='none')

            plt.title(self._eventid + ': ' + myimt + ' mean')
            plt.xlabel('Rrup (km)')
            if myimt == 'MMI':
                plt.ylabel('MMI')
            elif myimt == 'PGV':
                plt.ylabel('PGV ln(cm/s)')
            elif myimt == 'IA':
                plt.ylabel('IA ln(cm/s)')
            elif myimt == 'PGD':
                plt.ylabel('PGD ln(cm)')
            elif myimt == 'IH':
                plt.ylabel('IH ln(cm)')
            else:
                plt.ylabel(myimt + ' ln(g)')
            plt.legend()

            fileimt = oq_to_file(myimt)
            pfile = os.path.join(datadir, fileimt + '_regr.png')
            plt.savefig(pfile)
            plt.close()
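
# Worked example (illustrative): a seismic-station PGA amplitude of 15 %g
# plots at ln(15 / 100) = -1.90 on the ln(g) axis, while a PGV amplitude of
# 12 cm/s plots at ln(12) = 2.48 on the ln(cm/s) axis.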
Beispiel #21
0
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('xtestimage module can only operate on '
                                      'gridded data, not sets of points')

        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTGrids(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')

        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            if myimt == 'MMI':
                yunits = '(MMI)'
            elif myimt == 'PGV':
                yunits = '[ln(cm/s)]'
            else:
                yunits = '[ln(g)]'

            fileimt = oq_to_file(myimt)

            #
            # Do the ground motion plots
            #
            data = datadict[myimt]
            grddata = data['mean']
            metadata = data['mean_metadata']

            fig = plt.figure(figsize=(10, 10))
            gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
            ax0 = fig.add_subplot(gs[:-1, 1:])
            plt.title(self._eventid + ': ' + myimt + ' mean')
            im1 = ax0.imshow(grddata,
                             extent=(metadata['xmin'], metadata['xmax'],
                                     metadata['ymin'], metadata['ymax']))
            cbax = fig.add_axes([0.915, .34, .02, .5])
            plt.colorbar(im1, ax=ax0, cax=cbax)
            ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)
            xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)
            rows, cols = grddata.shape
            midrow = int(rows / 2)
            midcol = int(cols / 2)
            xvals = np.linspace(metadata['xmin'], metadata['xmax'], cols)
            yvals = np.linspace(metadata['ymin'], metadata['ymax'], rows)
            ycut.plot(grddata[:, midcol], yvals)
            xcut.plot(xvals, grddata[midrow, :])
            ycut.set(xlabel=myimt + ' ' + yunits, ylabel='Latitude')
            xcut.set(xlabel='Longitude', ylabel=myimt + ' ' + yunits)
            ycut.set_ylim((metadata['ymin'], metadata['ymax']))
            xcut.set_xlim((metadata['xmin'], metadata['xmax']))
            ax0.label_outer()

            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()

            #
            # Do the stddev plots
            #
            grddata = data['std']

            fig = plt.figure(figsize=(10, 10))
            gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
            ax0 = fig.add_subplot(gs[:-1, 1:])
            plt.title(self._eventid + ': ' + myimt + ' stddev')
            im1 = ax0.imshow(grddata,
                             extent=(metadata['xmin'], metadata['xmax'],
                                     metadata['ymin'], metadata['ymax']))
            cbax = fig.add_axes([0.915, .34, .02, .5])
            plt.colorbar(im1, ax=ax0, cax=cbax)
            ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)
            xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)
            rows, cols = grddata.shape
            midrow = int(rows / 2)
            midcol = int(cols / 2)
            xvals = np.linspace(metadata['xmin'], metadata['xmax'], cols)
            yvals = np.linspace(metadata['ymin'], metadata['ymax'], rows)
            ycut.plot(grddata[:, midcol], yvals)
            xcut.plot(xvals, grddata[midrow, :])
            ycut.set(xlabel='stddev ' + yunits, ylabel='Latitude')
            xcut.set(xlabel='Longitude', ylabel='stddev ' + yunits)
            xcut.set_xlim((metadata['xmin'], metadata['xmax']))
            xcut.set_ylim(bottom=0, top=np.max(grddata[midrow, :]) * 1.1)
            ycut.set_xlim(left=0, right=np.max(grddata[:, midcol]) * 1.1)
            ycut.set_ylim((metadata['ymin'], metadata['ymax']))
            ax0.label_outer()

            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '_sd.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()
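
# A self-contained sketch of the figure layout used above: a central image
# panel with cross-section ("cut") panels that share its axes. The data here
# is random; only the GridSpec wiring mirrors the code above.

import matplotlib.pyplot as plt
import numpy as np

data = np.random.rand(50, 60)
fig = plt.figure(figsize=(6, 6))
gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
ax0 = fig.add_subplot(gs[:-1, 1:])               # main image panel
ax0.imshow(data, extent=(0, 60, 0, 50))
ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)   # vertical cut on the left
xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)   # horizontal cut below
ycut.plot(data[:, 30], np.linspace(0, 50, 50))
xcut.plot(np.linspace(0, 60, 60), data[25, :])
ax0.label_outer()                                # hide shared tick labels
plt.show()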
Beispiel #22
0
    def drawContourMap(self, imt, outfolder, cmin=None, cmax=None):
        """
        Render IMT data as contours over topography, with oceans, coastlines,
        etc.

        Args:
            imt (str): The intensity measure type to render.
            outfolder (str): Path to directory where output map should be
                saved.
            cmin (float): Optional lower bound of the data range (not used
                here).
            cmax (float): Optional upper bound of the data range (not used
                here).

        Returns:
            str: Path to output IMT map.
        """
        if self.contour_colormap is None:
            raise Exception('MapMaker.setGMTColormap() has not been called.')
        t0 = time.time()
        # resample shakemap to topogrid
        # get the geodict for the topo file
        topodict = GMTGrid.getFileGeoDict(self.topofile)[0]
        # get the geodict for the ShakeMap
        comp = self.container.getComponents(imt)[0]
        imtdict = self.container.getIMTGrids(imt, comp)
        imtgrid = imtdict['mean']
        smdict = imtgrid.getGeoDict()
        # get a geodict that is aligned with topo, but inside shakemap
        sampledict = topodict.getBoundsWithin(smdict)

        imtgrid = imtgrid.interpolateToGrid(sampledict)

        gd = imtgrid.getGeoDict()

        # establish the basemap object
        m = self._setMap(gd)

        # get topo layer and project it
        topogrid = GMTGrid.load(
            self.topofile, samplegeodict=sampledict, resample=False)
        topodata = topogrid.getData().copy()
        ptopo = self._projectGrid(topodata, m, gd)

        # get contour layer and project it
        imtdata = imtgrid.getData().copy()

        # convert units if necessary
        if imt == 'MMI':
            pass
        elif imt == 'PGV':
            imtdata = np.exp(imtdata)
        else:
            imtdata = np.exp(imtdata) * 100

        pimt = self._projectGrid(imtdata, m, gd)

        # get the draped intensity data
        hillshade = self._getShaded(ptopo)

        # draw the draped intensity data
        m.imshow(hillshade, interpolation='none', zorder=IMG_ZORDER)

        # draw the contours of imt data
        # if the map crosses the 180 meridian, xmax will be smaller than
        # xmin; shift the left edge down by 360 degrees so the longitudes
        # increase monotonically
        xmin = gd.xmin
        if gd.xmax < gd.xmin:
            xmin -= 360
        lons = np.linspace(xmin, gd.xmax, gd.nx)
        # backwards so it plots right side up
        lats = np.linspace(gd.ymax, gd.ymin, gd.ny)
        x, y = m(*np.meshgrid(lons, lats))
        pimt = gaussian_filter(pimt, 5.0)
        dmin = pimt.min()
        dmax = pimt.max()
        levels = self.getContourLevels(dmin, dmax, imt)
        cs = m.contour(x, y, np.flipud(pimt), colors='w',
                       cmap=None, levels=levels, zorder=CONTOUR_ZORDER)
        clabels = plt.clabel(cs, colors='k', fmt='%.1f',
                             fontsize=8.0, zorder=CONTOUR_ZORDER)
        for cl in clabels:
            bbox = dict(boxstyle="round", facecolor='white', edgecolor='w')
            cl.set_bbox(bbox)
            cl.set_zorder(CONTOUR_ZORDER)

        # draw country/state boundaries
        self._drawBoundaries(m)

        # draw lakes
        self._drawLakes(m, gd)

        # draw oceans (pre-processed with islands taken out)
        t1 = time.time()
        self._drawOceans(m, gd)
        t2 = time.time()
        self.logger.debug('%.1f seconds to render oceans.' % (t2 - t1))

        # draw coastlines
        self._drawCoastlines(m, gd)

        # draw meridians, parallels, labels, ticks
        self._drawGraticules(m, gd)

        # draw filled symbols for MMI and instrumented measures
        self._drawStations(m, fill=True, imt=imt)

        # draw map scale
        self._drawMapScale(m, gd)

        # draw fault polygon, if present
        self._drawFault(m)  # get the fault loaded

        # draw epicenter
        origin = self.fault.getOrigin()
        hlon = origin.lon
        hlat = origin.lat
        m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none',
               markersize=22, mew=1.2, zorder=EPICENTER_ZORDER)

        # draw cities
        # reduce the number of cities to those whose labels don't collide
        # set up cities
        if self.city_cols is not None:
            self.cities = self.cities.limitByBounds(
                (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
            self.cities = self.cities.limitByGrid(
                nx=self.city_cols, ny=self.city_rows,
                cities_per_grid=self.cities_per_grid)
            if 'Times New Roman' in self.cities._fontlist:
                font = 'Times New Roman'
            else:
                font = 'DejaVu Sans'
            self.cities = self.cities.limitByMapCollision(m, fontname=font)
        self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

        # draw title and supertitle
        self._drawTitle(imt)

        # save plot to file
        fileimt = oq_to_file(imt)
        plt.draw()
        outfile = os.path.join(outfolder, 'contour_%s.pdf' %
                               (fileimt))
        plt.savefig(outfile)
        tn = time.time()
        self.logger.debug('%.1f seconds to render entire map.' % (tn - t0))
        return outfile
Beispiel #23
0
def contour_to_files(container,
                     output_dir,
                     logger,
                     filter_size=DEFAULT_FILTER_SIZE):
    """
    Generate contours of all IMT values.

    Args:
      container (ShakeMapOutputContainer): ShakeMapOutputContainer with
          ShakeMap output data.
      output_dir (str): Path to directory where output files will be written.
      logger (logging.Logger): Python logging Logger instance.
      filter_size (int): Size of the smoothing filter applied to the grid
          before contouring (defaults to DEFAULT_FILTER_SIZE).

    Raises:
        LookupError: When configured file format is not supported
    """

    # Right now geojson is all we support; if that changes, we'll have
    # to add a configuration or command-line option
    file_format = 'geojson'
    # open a file for writing
    driver, extension = FORMATS[file_format]
    # The two schemas are currently identical; they are kept separate,
    # presumably so the MMI contour properties can diverge from the
    # ground-motion ones later
    sa_schema = {
        'geometry': 'MultiLineString',
        'properties': {
            'value': 'float',
            'units': 'str',
            'color': 'str',
            'weight': 'int'
        }
    }
    mmi_schema = {
        'geometry': 'MultiLineString',
        'properties': {
            'value': 'float',
            'units': 'str',
            'color': 'str',
            'weight': 'int'
        }
    }
    crs = {
        'no_defs': True,
        'ellps': 'WGS84',
        'datum': 'WGS84',
        'proj': 'longlat'
    }

    config = container.getConfig()
    gmice = get_object_from_config('gmice', 'modeling', config)
    gmice_imts = gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES
    gmice_pers = gmice.DEFINED_FOR_SA_PERIODS

    imtlist = container.getIMTs()
    for imtype in imtlist:
        component, imtype = imtype.split('/')
        fileimt = oq_to_file(imtype)
        oqimt = imt.from_string(imtype)
        if component == 'GREATER_OF_TWO_HORIZONTAL':
            fname = 'cont_%s.%s' % (fileimt, extension)
        else:
            fname = 'cont_%s_%s.%s' % (fileimt, component, extension)
        filename = os.path.join(output_dir, fname)
        if os.path.isfile(filename):
            fpath, fext = os.path.splitext(filename)
            flist = glob.glob(fpath + '.*')
            for fname in flist:
                os.remove(fname)

        if imtype == 'MMI' or not isinstance(oqimt, tuple(gmice_imts)) or \
           (isinstance(oqimt, imt.SA) and oqimt.period not in gmice_pers):
            my_gmice = None
        else:
            my_gmice = gmice

        # fiona spews a warning here when the driver is geojson. The
        # warning appears to be un-catchable with warnings.catch_warnings(),
        # logging.captureWarnings(), or even by redirecting stderr/stdout
        # to IO streams. It isn't clear where the warning comes from, but
        # there appears to be no way to suppress it.
        with fiona.drivers():
            if imtype == 'MMI':
                selected_schema = mmi_schema
            else:
                selected_schema = sa_schema
            vector_file = fiona.open(filename,
                                     'w',
                                     driver=driver,
                                     schema=selected_schema,
                                     crs=crs)

            line_strings = contour(container, imtype, component, filter_size,
                                   my_gmice)

            for feature in line_strings:
                vector_file.write(feature)

            # Grab some metadata
            meta = container.getMetadata()
            event_info = meta['input']['event_information']
            mdict = {
                'eventid': event_info['event_id'],
                'longitude': float(event_info['longitude']),
                'latitude': float(event_info['latitude'])
            }

            logger.debug('Writing contour file %s' % filename)
            vector_file.close()

            # Get bounds
            tmp = fiona.open(filename)
            bounds = tmp.bounds
            tmp.close()

            # Read back in to add metadata/bounds
            with open(filename, 'r') as infile:
                data = json.load(infile)
            data['metadata'] = mdict
            data['bbox'] = bounds
            with open(filename, 'w') as outfile:
                json.dump(data, outfile)

            #####################################
            # Make an extra version of the MMI contour file
            # so that the current web rendering code can find it.
            # Delete this file once everyone has moved to new version
            # of ComCat code.

            if imtype == 'MMI':
                old_file = os.path.join(output_dir, 'cont_mi.json')
                shutil.copy(filename, old_file)
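
# Quick round-trip check (illustrative, run from within output_dir): the
# metadata and bbox injected above should survive in the legacy MMI copy:
#
#     import json
#     with open('cont_mi.json') as f:
#         data = json.load(f)
#     print(data['metadata']['eventid'], data['bbox'])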
Beispiel #24
0
    def execute(self):
        """
        Write raster.zip file containing ESRI Raster files of all the IMTs
        in shake_result.hdf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.debug('Creating GIS grids...')
        layers = container.getIMTs()

        # Package up all of these files into one zip file.
        zfilename = os.path.join(datadir, 'raster.zip')
        zfile = zipfile.ZipFile(zfilename,
                                mode='w',
                                compression=zipfile.ZIP_DEFLATED)

        files_written = []
        for layer in layers:
            _, layer = layer.split('/')
            fileimt = oq_to_file(layer)
            # This is a bit hacky -- we only produce the raster for the
            # first IMC returned. It should work as long as we only have
            # one IMC produced per ShakeMap run.
            imclist = container.getComponents(layer)
            imtdict = container.getIMTGrids(layer, imclist[0])
            mean_grid = Grid2D(imtdict['mean'],
                               GeoDict(imtdict['mean_metadata']))
            std_grid = Grid2D(imtdict['std'], GeoDict(imtdict['std_metadata']))
            mean_gdal = GDALGrid.copyFromGrid(mean_grid)
            std_gdal = GDALGrid.copyFromGrid(std_grid)
            mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
            mean_hdr = os.path.join(datadir, '%s_mean.hdr' % fileimt)
            std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
            std_hdr = os.path.join(datadir, '%s_std.hdr' % fileimt)
            self.logger.debug('Saving %s...' % mean_fname)
            mean_gdal.save(mean_fname)
            files_written.append(mean_fname)
            files_written.append(mean_hdr)
            self.logger.debug('Saving %s...' % std_fname)
            std_gdal.save(std_fname)
            files_written.append(std_fname)
            files_written.append(std_hdr)
            zfile.write(mean_fname, '%s_mean.flt' % fileimt)
            zfile.write(mean_hdr, '%s_mean.hdr' % fileimt)
            zfile.write(std_fname, '%s_std.flt' % fileimt)
            zfile.write(std_hdr, '%s_std.hdr' % fileimt)

        zfile.close()

        # nuke all of the copies of the files we just put in the zipfile
        for file_written in files_written:
            os.remove(file_written)

        # make a transparent PNG of intensity and a world file
        imclist = container.getComponents('MMI')
        mmidict = container.getIMTGrids('MMI', imclist[0])
        mmi_array = mmidict['mean']
        geodict = GeoDict(mmidict['mean_metadata'])
        palette = ColorPalette.fromPreset('mmi')
        mmi_rgb = palette.getDataColor(mmi_array, color_format='array')
        img = Image.fromarray(mmi_rgb)
        pngfile = os.path.join(datadir, 'intensity_overlay.png')
        img.save(pngfile, "PNG")

        # write out a world file
        # https://en.wikipedia.org/wiki/World_file
        worldfile = os.path.join(datadir, 'intensity_overlay.pngw')
        with open(worldfile, 'wt') as f:
            f.write('%.4f\n' % geodict.dx)
            f.write('0.0\n')
            f.write('0.0\n')
            f.write('-%.4f\n' % geodict.dy)
            f.write('%.4f\n' % geodict.xmin)
            f.write('%.4f\n' % geodict.ymax)
        container.close()
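
# Illustrative use of the world file written above: with the rotation terms
# zero, a pixel (row, col) of intensity_overlay.png maps to geographic
# coordinates as
#
#     lon = geodict.xmin + col * geodict.dx
#     lat = geodict.ymax - row * geodict.dy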