Example #1
    def execute(self):
        """
        Output the version history of an event.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        backups = glob.glob(os.path.join(data_path, self._eventid, 'backup*'))
        backups.sort(reverse=True)
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        # First try the current results file...
        datafile = os.path.join(datadir, 'products', 'shake_result.hdf')
        if os.path.isfile(datafile):
            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            try:
                metadata = container.getMetadata()
            except LookupError:
                print("\nNo version history available for this event.\n")
                return
            history = (metadata['processing']['shakemap_versions']
                       ['map_data_history'])
            final = False
            if len(backups) > 0:
                last_ver = int(backups[0][-4:])
                last_hist = history[-1][2]
                if last_ver == last_hist:
                    final = True
            print_history(history, final=final)
            return

        # Nope. Are there any backup files?
        if len(backups) == 0:
            print("\nNo version history available for this event.\n")
            return

        # There should be a results file in the backup directory...
        datafile = os.path.join(data_path, self._eventid, backups[0],
                                'products', 'shake_result.hdf')
        if os.path.isfile(datafile):
            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            try:
                metadata = container.getMetadata()
            except LookupError:
                print("\nNo version history available for this event.\n")
                return
            history = (metadata['processing']['shakemap_versions']
                       ['map_data_history'])
            print_history(history, final=True)
            return

        print("\nNo version history available for this event.\n")
        return
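Every snippet on this page repeats the same open/extract/close pattern
around ShakeMapOutputContainer. As a minimal sketch, assuming only the
load(), getMetadata(), and close() methods shown in these examples (and a
shakelib-style import path, which may differ by version), the pattern can
be factored into a context manager:

import contextlib

from shakelib.utils.containers import ShakeMapOutputContainer


@contextlib.contextmanager
def open_shake_result(datafile):
    # Yield a loaded container, guaranteeing close() even if the
    # caller raises while reading from it.
    container = ShakeMapOutputContainer.load(datafile)
    try:
        yield container
    finally:
        container.close()


# Usage sketch:
# with open_shake_result(datafile) as container:
#     metadata = container.getMetadata()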
Example #2
    def execute(self):
        """
        Write rupture.json file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # create ShakeMap rupture file
        for fformat in ALLOWED_FORMATS:
            if fformat == 'json':
                self.logger.info('Writing rupture.json file...')
                rupture_dict = container.getRuptureDict()
                rupture_file = os.path.join(datadir, 'rupture.json')
                with open(rupture_file, 'w') as f:
                    json.dump(rupture_dict, f)

        container.close()

        self.contents.addFile('ruptureJSON', 'Fault Rupture',
                              'JSON Representation of Fault Rupture.',
                              'rupture.json', 'application/json')
Example #3
def dummy_test_transfer():
    homedir = os.path.dirname(os.path.abspath(__file__))
    cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                         'northridge', 'shake_result.hdf')
    products_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                                'northridge', 'current')
    pdl_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                           'northridge', 'current', 'pdl')
    eventid = 'ci3144585'

    container = ShakeMapOutputContainer.load(cfile)
    # Create the temp directory before the try block so the finally
    # clause cannot hit a NameError if mkdtemp() fails.
    tdir = tempfile.mkdtemp()
    try:
        remote_dir = os.path.join(tdir, eventid)
        config = {'copy': {'local': {'remote_directory': tdir}}}
        # transfermod = TransferModule(eventid)
        _transfer(config, container.getMetadata(), pdl_dir, products_dir)
        nfiles = len(os.listdir(remote_dir))
        nsrcfiles = len(os.listdir(products_dir))
        assert nfiles == nsrcfiles
    except Exception as e:
        print('Exception: %s' % str(e))
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
Example #4
def test_macros():
    homedir = os.path.dirname(os.path.abspath(__file__))
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    info = container.getMetadata()
    macros = get_macros(info)
    test_dict = {
        'LON': '-118.5357',
        'LAT': '34.213',
        'DATE': 'Jan 17, 1994',
        'DEP': '18.0',
        'MAG': '6.7',
        'LOC': 'Northridge',
        'NETID': 'ci',
        'DATETIME': '1994-01-17T12:30:55.000000Z',
        'EVENTID': 'northridge',
        'VERSION': '1',
        'PRODUCT_CODE': 'northridge',
        'TIME': '12:30:55'
    }
    assert list(sorted(macros.keys())) == list(sorted(test_dict.keys()))
    for key, mvalue in macros.items():
        tvalue = test_dict[key]
        print('Testing key %s: %s vs %s.' % (key, mvalue, tvalue))
        assert mvalue == tvalue
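Because the macro dict maps bare keys to plain strings, it can be dropped
straight into str.format-style substitution. A sketch reusing the macros
dict built in the test above (the template itself is illustrative, not an
actual ShakeMap template):

template = 'M{MAG} {LOC}, {DATE} {TIME} UTC (version {VERSION})'
print(template.format(**macros))
# -> M6.7 Northridge, Jan 17, 1994 12:30:55 UTC (version 1)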
Example #5
    def execute(self):
        """
        Create KML files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('kml module can only contour '
                                      'gridded data, not sets of points')

        # call create_kmz function
        create_kmz(container, datadir, self.logger, self.contents)

        container.close()
Example #6
def test_masking():
    install_path, data_path = get_config_paths()
    event_path = os.path.join(data_path, 'masking_test', 'current')
    set_files(
        event_path, {
            'event.xml': 'event.xml',
            'model.conf': 'model.conf',
            'au_continental_shelf.geojson': 'au_continental_shelf.geojson',
        })
    assemble = AssembleModule('masking_test', comment='Test comment.')
    assemble.execute()
    model = ModelModule('masking_test')
    model.execute()
    clear_files(event_path)
    hdf_file = os.path.join(event_path, 'products', 'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(hdf_file)
    sa3 = oc.getIMTGrids('SA(3.0)', 'GREATER_OF_TWO_HORIZONTAL')['mean']
    removed = np.isnan(sa3).astype(int)
    assert (removed[240, 240] == 1)
    assert (removed[260, 240] == 0)
    np.testing.assert_equal(
        removed[::100, ::100],
        [[1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 0, 1],
         [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]])
    oc.close()
Example #7
def test_directivity():

    #
    # Turned on directivity in model.conf
    #
    install_path, data_path = get_config_paths()
    event_path = os.path.join(data_path, 'directivity_test', 'current')
    set_files(
        event_path, {
            'event.xml': 'event.xml',
            'model.conf': 'model.conf',
            'dir_fault.txt': 'dir_fault.txt'
        })
    assemble = AssembleModule('directivity_test', comment='Test comment.')
    assemble.execute()
    model = ModelModule('directivity_test')
    model.execute()
    clear_files(event_path)
    hdf_file = os.path.join(event_path, 'products', 'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(hdf_file)
    sa3 = np.exp(
        oc.getIMTGrids('SA(3.0)', 'GREATER_OF_TWO_HORIZONTAL')['mean'])
    # np.testing.assert_allclose(np.max(sa3), 1.15864273)
    np.testing.assert_allclose(np.max(sa3), 1.1567265149442174)
    # np.testing.assert_allclose(np.min(sa3), 0.9278920)
    np.testing.assert_allclose(np.min(sa3), 0.88508818541678)
    oc.close()
Example #8
def test_mapmaker_intensity():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile, samplegeodict=sampledict, resample=False)

    outpath = mkdtemp()

    model_config = container.getConfig()
    comp = container.getComponents('MMI')[0]
    textfile = os.path.join(get_data_path(), 'mapping', 'map_strings.en')
    text_dict = get_text_strings(textfile)

    cities = Cities.fromDefault()
    d = {
        'imtype': 'MMI',
        'topogrid': topogrid,
        'allcities': cities,
        'states_provinces': None,
        'countries': None,
        'oceans': None,
        'lakes': None,
        'roads': None,
        'faults': None,
        'datadir': outpath,
        'operator': 'NEIC',
        'filter_size': 10,
        'info': info,
        'component': comp,
        'imtdict': container.getIMTGrids('MMI', comp),
        'ruptdict': copy.deepcopy(container.getRuptureDict()),
        'stationdict': container.getStationDict(),
        'config': model_config,
        'tdict': text_dict
    }

    try:
        fig1, fig2 = draw_map(d)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
Example #9
    def execute(self):
        """Write stationlist.json file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # create ShakeMap station data file
        for fformat in ALLOWED_FORMATS:
            if fformat == 'json':
                self.logger.debug('Writing stationlist.json file...')
                station_dict = container.getStationDict()
                station_file = os.path.join(datadir, 'stationlist.json')
                with open(station_file, 'w') as f:
                    json.dump(station_dict, f)

        container.close()
Example #10
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTArrays(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')
        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            data = datadict[myimt]
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            axa[0].plot(data['lons'], data['mean'], color='k', label='mean')
            axa[0].plot(data['lons'],
                        data['mean'] + data['std'],
                        '--b',
                        label='mean +/- stddev')
            axa[0].plot(data['lons'], data['mean'] - data['std'], '--b')
            axa[1].plot(data['lons'], data['std'], '-.r', label='stddev')
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(bottom=0)
            fileimt = oq_to_file(myimt)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile)
            plt.close()
Example #11
def test_create_kmz():
    tempdir = tempfile.mkdtemp()
    try:
        homedir = os.path.dirname(os.path.abspath(__file__))
        cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                             'northridge', 'shake_result.hdf')
        container = ShakeMapOutputContainer.load(cfile)
        install_path, data_path = get_config_paths()

        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        pconfig = configobj.ConfigObj(product_config_file,
                                      configspec=spec_file)
        results = pconfig.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(pconfig, results)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']

        logger = logging.getLogger(__name__)
        kmzfile = create_kmz(container, tempdir, oceanfile, logger)
        myzip = zipfile.ZipFile(kmzfile, mode='r')
        kmlstr = myzip.read('shakemap.kml').decode('utf-8')
        root = minidom.parseString(kmlstr)
        document = root.getElementsByTagName('Document')[0]
        folders = document.getElementsByTagName('Folder')
        names = []
        nstations = 0
        nmmi = 0
        for folder in folders:
            name = folder.getElementsByTagName('name')[0].firstChild.data
            names.append(name)
            if name == 'Instrumented Stations':
                nstations = len(folder.getElementsByTagName('Placemark'))
            elif name == 'Macroseismic Stations':
                nmmi = len(folder.getElementsByTagName('Placemark'))
        assert sorted(names) == [
            'Contours', 'Instrumented Stations', 'MMI 4 Polygons',
            'MMI 5 Polygons', 'MMI 6 Polygons', 'MMI 7 Polygons',
            'MMI 8 Polygons', 'MMI 8.5 Polygons', 'MMI Contours', 'MMI Labels',
            'MMI Polygons', 'Macroseismic Stations', 'PGA Contours',
            'PGV Contours', 'SA(0.3) Contours', 'SA(1.0) Contours',
            'SA(3.0) Contours'
        ]
        assert nstations == 185
        assert nmmi == 547
        myzip.close()

    except Exception as e:
        print(str(e))
        assert 1 == 2
    finally:
        shutil.rmtree(tempdir)
Example #12
    def execute(self):
        """
        Transfer ShakeMap products to any configured remote destinations,
        backing up the event data directory the first time it is run.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the transfer.conf or shake_result HDF
                files do not exist.
        """
        install_path, data_path = get_config_paths()
        self.datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(self.datadir):
            raise NotADirectoryError('%s is not a valid directory.' %
                                     self.datadir)

        # look for the presence of a NO_TRANSFER file in the datadir.
        notransfer = os.path.join(self.datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info('Event has a %s file blocking transfer.' %
                             NO_TRANSFER)
            return

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(self.datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(install_path, 'config',
                                         'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        self.config = ConfigObj(transfer_conf, configspec=configspec)
        results = self.config.validate(Validator())
        if not isinstance(results, bool) or not results:
            config_error(self.config, results)

        # get the output container with all the things in it
        products_dir = os.path.join(self.datadir, 'products')
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        # extract the info.json object from the container
        self.info = container.getMetadata()
        container.close()

        # check for the presence of a .saved file. If found, do nothing.
        # Otherwise, create the backup directory.
        save_file = os.path.join(self.datadir, SAVE_FILE)
        if not os.path.isfile(save_file):
            logging.info('Making backup directory...')
            self._make_backup(data_path)
            with open(save_file, 'wt') as f:
                tnow = datetime.utcnow().strftime(constants.TIMEFMT)
                f.write('Saved %s by %s\n' % (tnow, self.command_name))
Example #13
def run_event(evid):
    installpath, datapath = get_config_paths()
    assemble = AssembleModule(evid, comment='Test comment.')
    assemble.execute()
    model = ModelModule(evid)
    model.execute()
    res_file = os.path.join(datapath, evid, 'current', 'products',
                            'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(res_file)
    imts = oc.getIMTs()
    imt = imts[0].split('/')[1]
    comps = oc.getComponents(imt)
    imtdict = oc.getIMTArrays(imt, comps[0])
    return imtdict
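A usage sketch; the event id here is hypothetical, and getIMTArrays()
only works when the output container holds sets of points rather than
grids (see the xtestplot example above):

imtdict = run_event('points_test')  # hypothetical event id
# Keys follow the arrays stored by setIMTArrays() in Example #23:
print(sorted(imtdict.keys()))  # expect at least ids, lats, lons, mean, std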
Example #14
    def execute(self):
        """
        Create shape files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('shape module can only contour '
                                      'gridded data, not sets of points')

        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        max_workers = config['products']['mapping']['max_workers']
        method = config['products']['shape']['method']

        create_polygons(container,
                        datadir,
                        self.logger,
                        max_workers,
                        method=method)

        container.close()

        self.contents.addFile('shakemap_shapefiles', 'ShakeMap Shape Files',
                              'Shape Files.', 'shape.zip', 'application/zip')
Example #15
def test_create_kmz():
    tempdir = tempfile.mkdtemp()
    try:
        homedir = os.path.dirname(os.path.abspath(__file__))
        cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                             'northridge', 'shake_result.hdf')
        container = ShakeMapOutputContainer.load(cfile)
        install_path, data_path = get_config_paths()

        logger = logging.getLogger(__name__)
        contents = Contents(None, None, 'northridge')
        kmzfile = create_kmz(container, tempdir, logger, contents)
        myzip = zipfile.ZipFile(kmzfile, mode='r')
        kmlstr = myzip.read('shakemap.kml').decode('utf-8')
        root = minidom.parseString(kmlstr)
        document = root.getElementsByTagName('Document')[0]
        folders = document.getElementsByTagName('Folder')
        names = []
        nstations = 0
        nmmi = 0
        for folder in folders:
            name = folder.getElementsByTagName('name')[0].firstChild.data
            names.append(name)
            if name == 'Instrumented Stations':
                nstations = len(folder.getElementsByTagName('Placemark'))
            elif name == 'Macroseismic Stations':
                nmmi = len(folder.getElementsByTagName('Placemark'))
        assert sorted(names) == [
            'Contours', 'Instrumented Stations', 'MMI 4 Polygons',
            'MMI 5 Polygons', 'MMI 6 Polygons', 'MMI 7 Polygons',
            'MMI 8 Polygons', 'MMI 8.5 Polygons', 'MMI Contours', 'MMI Labels',
            'MMI Polygons', 'Macroseismic Stations', 'PGA Contours',
            'PGV Contours', 'SA(0.3) Contours', 'SA(1.0) Contours',
            'SA(3.0) Contours'
        ]
        assert nstations == 185
        assert nmmi == 547
        myzip.close()

    except Exception as e:
        print(str(e))
        assert 1 == 2
    finally:
        shutil.rmtree(tempdir)
Example #16
def test_mapmaker_contour():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data',
                            'containers', 'northridge',
                            'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']
               ['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']
               ['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile,
                            samplegeodict=sampledict,
                            resample=False)

    oceanfile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                             'mapping', 'northridge_ocean.json')
    outpath = mkdtemp()
    filter_size = 10
    try:
        pdf, png = draw_contour(container, 'PGA', topogrid, oceanfile,
                                outpath, 'NEIC', filter_size)
        print(pdf)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
Example #17
    def execute(self):
        """
        Write info.json metadata file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # Create ShakeMap metadata file
        self.logger.debug('Writing info.json file...')
        info = container.getMetadata()

        # Clean up strec results to be valid json
        if 'strec' in info:
            for k, v in info['strec'].items():
                if isinstance(v, float):
                    if not np.isfinite(v):
                        info['strec'][k] = None

        infostring = json.dumps(info, allow_nan=False)
        info_file = os.path.join(datadir, 'info.json')
        with open(info_file, 'wt') as f:
            f.write(infostring)
        container.close()
        cap = 'ShakeMap processing parameters and map summary information.'
        self.contents.addFile('supplementalInformation',
                              'Supplemental Information', cap,
                              'info.json', 'application/json')
Example #18
    def execute(self):
        """
        Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        if container.getDataType() != 'grid':
            raise NotImplementedError('contour module can only contour '
                                      'gridded data, not sets of points')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # create contour files
        self.logger.debug('Contouring to files...')
        contour_to_files(container, datadir, self.logger, self.contents,
                         filter_size)
        container.close()
Example #19
def dummy_scp_test(remote_host, remote_directory, private_key):
    homedir = os.path.dirname(os.path.abspath(__file__))
    cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                         'northridge', 'shake_result.hdf')
    products_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                                'northridge', 'current')
    pdl_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                           'northridge', 'current', 'pdl')

    container = ShakeMapOutputContainer.load(cfile)
    config = {
        'ssh': {
            'dest1': {
                'remote_host': remote_host,
                'remote_directory': remote_directory,
                'private_key': private_key
            }
        }
    }
    # transfermod = TransferModule(eventid)
    _transfer(config, container.getMetadata(), pdl_dir, products_dir)
Example #20
    def execute(self):
        """
        Create KML files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('kml module can only contour '
                                      'gridded data, not sets of points')

        # find the low res ocean vector dataset
        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        pconfig = configobj.ConfigObj(product_config_file,
                                      configspec=spec_file)
        results = pconfig.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(pconfig, results)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']

        # call create_kmz function
        create_kmz(container, datadir, oceanfile, self.logger)

        container.close()
Example #21
def dummy_pdl_test(java, jarfile, keyfile, configfile):
    homedir = os.path.dirname(os.path.abspath(__file__))
    cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                         'northridge', 'shake_result.hdf')
    products_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                                'northridge', 'current')
    pdl_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                           'northridge', 'current', 'pdl')

    container = ShakeMapOutputContainer.load(cfile)
    config = {
        'pdl': {
            'dest1': {
                'java': java,
                'jarfile': jarfile,
                'privatekey': keyfile,
                'configfile': configfile,
                'source': 'us'
            }
        }
    }
    # transfermod = TransferModule(eventid)
    _transfer(config, container.getMetadata(), pdl_dir, products_dir)
Example #22
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('xtestimage module can only operate on '
                                      'gridded data not sets of points')

        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTGrids(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')

        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            if myimt == 'MMI':
                yunits = '(MMI)'
            elif myimt == 'PGV':
                yunits = '[ln(cm/s)]'
            else:
                yunits = '[ln(g)]'

            fileimt = oq_to_file(myimt)

            #
            # Do the ground motion plots
            #
            data = datadict[myimt]
            grddata = data['mean']
            metadata = data['mean_metadata']

            fig = plt.figure(figsize=(10, 10))
            gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
            ax0 = fig.add_subplot(gs[:-1, 1:])
            plt.title(self._eventid + ': ' + myimt + ' mean')
            im1 = ax0.imshow(grddata,
                             extent=(metadata['xmin'], metadata['xmax'],
                                     metadata['ymin'], metadata['ymax']))
            cbax = fig.add_axes([0.915, .34, .02, .5])
            plt.colorbar(im1, ax=ax0, cax=cbax)
            ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)
            xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)
            rows, cols = grddata.shape
            midrow = int(rows / 2)
            midcol = int(cols / 2)
            xvals = np.linspace(metadata['xmin'], metadata['xmax'], cols)
            yvals = np.linspace(metadata['ymin'], metadata['ymax'], rows)
            ycut.plot(grddata[:, midcol], yvals)
            xcut.plot(xvals, grddata[midrow, :])
            ycut.set(xlabel=myimt + ' ' + yunits, ylabel='Latitude')
            xcut.set(xlabel='Longitude', ylabel=myimt + ' ' + yunits)
            ycut.set_ylim((metadata['ymin'], metadata['ymax']))
            xcut.set_xlim((metadata['xmin'], metadata['xmax']))
            ax0.label_outer()

            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()

            #
            # Do the stddev plots
            #
            grddata = data['std']

            fig = plt.figure(figsize=(10, 10))
            gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
            ax0 = fig.add_subplot(gs[:-1, 1:])
            plt.title(self._eventid + ': ' + myimt + ' stddev')
            im1 = ax0.imshow(grddata,
                             extent=(metadata['xmin'], metadata['xmax'],
                                     metadata['ymin'], metadata['ymax']))
            cbax = fig.add_axes([0.915, .34, .02, .5])
            plt.colorbar(im1, ax=ax0, cax=cbax)
            ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)
            xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)
            rows, cols = grddata.shape
            midrow = int(rows / 2)
            midcol = int(cols / 2)
            xvals = np.linspace(metadata['xmin'], metadata['xmax'], cols)
            yvals = np.linspace(metadata['ymin'], metadata['ymax'], rows)
            ycut.plot(grddata[:, midcol], yvals)
            xcut.plot(xvals, grddata[midrow, :])
            ycut.set(xlabel='stddev ' + yunits, ylabel='Latitude')
            xcut.set(xlabel='Longitude', ylabel='stddev ' + yunits)
            xcut.set_xlim((metadata['xmin'], metadata['xmax']))
            xcut.set_ylim(bottom=0, top=np.max(grddata[midrow, :]) * 1.1)
            ycut.set_xlim(left=0, right=np.max(grddata[:, midcol] * 1.1))
            ycut.set_ylim((metadata['ymin'], metadata['ymax']))
            ax0.label_outer()

            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '_sd.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()
Example #23
def test_output_arrays():

    f, datafile = tempfile.mkstemp()
    os.close(f)

    try:
        container = ShakeMapOutputContainer.create(datafile)
        #
        # Test that no data type is set
        #
        assert container.getDataType() is None

        #
        # Make some array data and metadata
        #
        mean = np.random.rand(100)
        std = np.random.rand(100)
        lats = np.random.rand(100)
        lons = np.random.rand(100)
        ids = np.array([randomword(4).encode('ascii') for x in range(100)])
        metadata = {'units': '%g', 'digits': 4}
        #
        # Put the data in the container
        #
        container.setIMTArrays('PGA', lons, lats, ids, mean, metadata, std,
                               metadata, 'Larger')
        #
        # Now extract it and compare it to what we put in there
        #
        dout = container.getIMTArrays('PGA', 'Larger')
        assert all(dout['lons'] == lons)
        assert all(dout['lats'] == lats)
        assert all(dout['ids'] == ids)
        assert all(dout['mean'] == mean)
        assert all(dout['std'] == std)
        #
        # Check the data type
        #
        assert container.getDataType() == 'points'
        #
        # Try raising some exceptions
        #
        # Shouldn't be able to find this IMT
        with pytest.raises(LookupError):
            container.getIMTArrays('JUNK', 'Larger')
        # Shapes of inputs not the same
        with pytest.raises(ValueError):
            empty = np.array([])
            container.setIMTArrays('PGV', empty, lats, ids, mean, metadata,
                                   std, metadata, 'Larger')
        # IMT already exists
        with pytest.raises(LookupError):
            container.setIMTArrays('PGA', lons, lats, ids, mean, metadata, std,
                                   metadata, 'Larger')
        # Trying to set a grid in a file with points
        with pytest.raises(TypeError):
            container.setIMTGrids('PGV', mean, metadata, std, metadata,
                                  'Larger')
        # Trying to get a grid in a file with points
        with pytest.raises(TypeError):
            container.getIMTGrids('PGA', 'Larger')

        container.close()

    finally:
        if os.path.isfile(datafile):
            os.remove(datafile)
Example #24
def test_output_container():
    nrows = 400
    ncols = 230
    # create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows, ncols)
    mean_mmi_maximum_metadata = {
        'xmin': -118.5,
        'xmax': -114.5,
        'ymin': 32.1,
        'ymax': 36.7,
        'dx': 0.01,
        'dy': 0.02,
        'nx': 400,
        'ny': 230,
        'name': 'Gandalf',
        'color': 'white',
        'powers': 'magic'
    }

    # create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data / 10
    std_mmi_maximum_metadata = {
        'xmin': -118.5,
        'xmax': -114.5,
        'ymin': 32.1,
        'ymax': 36.7,
        'dx': 0.01,
        'dy': 0.02,
        'nx': 400,
        'ny': 230,
        'name': 'Legolas',
        'color': 'green',
        'powers': 'good hair'
    }

    # create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows, ncols)
    mean_mmi_rotd50_metadata = {
        'xmin': -118.5,
        'xmax': -114.5,
        'ymin': 32.1,
        'ymax': 36.7,
        'dx': 0.01,
        'dy': 0.02,
        'nx': 400,
        'ny': 230,
        'name': 'Gimli',
        'color': 'brown',
        'powers': 'axing'
    }

    # create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data / 10
    std_mmi_rotd50_metadata = {
        'xmin': -118.5,
        'xmax': -114.5,
        'ymin': 32.1,
        'ymax': 36.7,
        'dx': 0.01,
        'dy': 0.02,
        'nx': 400,
        'ny': 230,
        'name': 'Aragorn',
        'color': 'white',
        'powers': 'scruffiness'
    }

    # create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows, ncols)
    mean_pga_maximum_metadata = {
        'xmin': -118.5,
        'xmax': -114.5,
        'ymin': 32.1,
        'ymax': 36.7,
        'dx': 0.01,
        'dy': 0.02,
        'nx': 400,
        'ny': 230,
        'name': 'Pippin',
        'color': 'purple',
        'powers': 'rashness'
    }

    # create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data / 10
    std_pga_maximum_metadata = {
        'xmin': -118.5,
        'xmax': -114.5,
        'ymin': 32.1,
        'ymax': 36.7,
        'dx': 0.01,
        'dy': 0.02,
        'nx': 400,
        'ny': 230,
        'name': 'Merry',
        'color': 'grey',
        'powers': 'hunger'
    }

    f, datafile = tempfile.mkstemp()
    os.close(f)

    try:
        container = ShakeMapOutputContainer.create(datafile)
        # LookupError raised if trying to dropIMTs if there are none
        with pytest.raises(LookupError):
            container.dropIMT('mmi')

        # Add imts
        container.setIMTGrids('mmi',
                              mean_mmi_maximum_data,
                              mean_mmi_maximum_metadata,
                              std_mmi_maximum_data,
                              std_mmi_maximum_metadata,
                              component='maximum')
        container.setIMTGrids('mmi',
                              mean_mmi_rotd50_data,
                              mean_mmi_rotd50_metadata,
                              std_mmi_rotd50_data,
                              std_mmi_rotd50_metadata,
                              component='rotd50')
        container.setIMTGrids('pga',
                              mean_pga_maximum_data,
                              mean_pga_maximum_metadata,
                              std_pga_maximum_data,
                              std_pga_maximum_metadata,
                              component='maximum')

        # get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi', component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'],
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'],
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        # get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi', component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'],
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'], std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        # get list of all imts
        imts = container.getIMTs()

        # get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi', 'pga']

        # get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum', 'rotd50']

        # Test dropIMT
        imts = container.getIMTs('maximum')
        assert imts == ['mmi', 'pga']
        container.dropIMT('mmi')
        imts = container.getIMTs('maximum')
        assert imts == ['pga']
        container.close()

    finally:
        os.remove(datafile)
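A sketch of enumerating everything in an open grid container, using only
the getIMTs(), getComponents(), and getIMTGrids() calls exercised above;
note that getIMTs() with no argument returns 'component/imt' strings:

def dump_contents(container):
    # Walk every IMT/component pair and report the grid shapes.
    for full_imt in container.getIMTs():
        component, imt_name = full_imt.split('/')
        grids = container.getIMTGrids(imt_name, component)
        print(imt_name, component, grids['mean'].shape)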
Example #25
    def execute(self):
        """Create high, medium, and low resolution coverage of the mapped
        parameters.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('coverage module can only function on '
                                      'gridded data, not sets of points')

        imtlist = container.getIMTs()
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            fileimt = oq_to_file(imtype)
            oqimt = imt.from_string(imtype)

            imtdict = container.getIMTGrids(imtype, component)
            grid_data = imtdict['mean']
            metadata = imtdict['mean_metadata']

            # Only MMI has a linked observed-property definition; for the
            # other IMTs property_id is left as None.
            property_id = None
            if imtype == 'MMI':
                description = 'Modified Mercalli Intensity'
                property_id = \
                    'https://earthquake.usgs.gov/learn/topics/mercalli.php'
                decimals = 1
            elif imtype == 'PGA':
                description = 'Peak Ground Acceleration'
                units = 'natural logarithm of "g"'
                symbol = 'ln(g)'
                decimals = 2
            elif imtype == 'PGV':
                description = 'Peak Ground Velocity'
                units = 'natural logarithm of centimeters per second'
                symbol = 'ln(cm/s)'
                decimals = 2
            elif imtype.startswith('SA'):
                description = (str(oqimt.period) +
                               '-second Spectral Acceleration')
                units = 'natural logarithm of "g"'
                symbol = 'ln(g)'
                decimals = 2
            else:
                raise TypeError("Unknown IMT in coverage module")

            for i in range(3):
                if i == 0:
                    resolution = 'high'
                    fgrid = grid_data
                    decimation = 1
                elif i == 1:
                    resolution = 'medium'
                    fgrid = gaussian_filter(grid_data, sigma=1)
                    decimation = 2
                elif i == 2:
                    resolution = 'low'
                    fgrid = gaussian_filter(grid_data, sigma=2)
                    decimation = 4

                rgrid = fgrid[::decimation, ::decimation]
                ny, nx = rgrid.shape
                rnd_grd = np.flipud(np.around(rgrid,
                                              decimals=decimals)).flatten()
                if imtype == 'MMI':
                    rnd_grd = np.clip(rnd_grd, 1.0, 10.0)
                xstart = metadata["xmin"]
                xstop = metadata["xmin"] + \
                    (nx - 1) * decimation * metadata["dx"]
                ystart = metadata["ymin"]
                ystop = metadata["ymin"] + \
                    (ny - 1) * decimation * metadata["dy"]

                coverage = {
                    "type": "Coverage",
                    "domain": {
                        "type":
                        "Domain",
                        "domainType":
                        "Grid",
                        "axes": {
                            "x": {
                                "start": xstart,
                                "stop": xstop,
                                "num": nx
                            },
                            "y": {
                                "start": ystart,
                                "stop": ystop,
                                "num": ny
                            }
                        },
                        "referencing": [{
                            "coordinates": ["x", "y"],
                            "system": {
                                "type":
                                "GeographicCRS",
                                "id":
                                "http://www.opengis.net/def/crs/OGC/1.3/CRS84"  # noqa
                            }
                        }]
                    },
                    "parameters": {
                        imtype: {
                            "type": "Parameter",
                            "description": {
                                "en": description
                            },
                            "observedProperty": {
                                "id": property_id,
                                "label": {
                                    "en": imtype
                                }
                            },
                        }
                    },
                    "ranges": {
                        imtype: {
                            "type": "NdArray",
                            "dataType": "float",
                            "axisNames": ["y", "x"],
                            "shape": [ny, nx],
                            "values": rnd_grd.tolist()
                        }
                    }
                }
                if imtype == 'MMI':
                    coverage["parameters"]["MMI"]["preferredPalette"] = {
                        "colors": [
                            "rgb(255, 255, 255)", "rgb(255, 255, 255)",
                            "rgb(191, 204, 255)", "rgb(160, 230, 255)",
                            "rgb(128, 255, 255)", "rgb(122, 255, 147)",
                            "rgb(255, 255, 0)", "rgb(255, 200, 0)",
                            "rgb(255, 145, 0)", "rgb(255, 0, 0)",
                            "rgb(200, 0, 0)"
                        ],
                        "extent": [0, 10],
                        "interpolation":
                        "linear"
                    }
                else:
                    coverage["parameters"][imtype]["unit"] = {
                        'label': {
                            "en": units
                        },
                        "symbol": {
                            'value': symbol,
                            'type': "http://www.opengis.net/def/uom/UCUM/"
                        }
                    }

                if component == 'GREATER_OF_TWO_HORIZONTAL':
                    fname = 'coverage_%s_%s_res.covjson' % (fileimt,
                                                            resolution)
                else:
                    fname = 'coverage_%s_%s_%s_res.covjson' % (
                        fileimt, resolution, component)
                filepath = os.path.join(datadir, fname)
                with open(filepath, 'w') as outfile:
                    json.dump(coverage, outfile, separators=(',', ':'))
                self.contents.addFile(
                    imtype + "_" + resolution + '_res_coverage',
                    resolution + '-res ' + imtype.upper() + ' Coverage',
                    'Coverage of ' + resolution + ' resolution ' + imtype,
                    fname, 'application/json')
        container.close()
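A quick sanity check one might run on an emitted CoverageJSON file; the
field names follow the coverage dict constructed above:

import json


def check_coverage(path, imt_name):
    # The flattened values array must match the declared grid shape.
    with open(path) as f:
        cov = json.load(f)
    nx = cov['domain']['axes']['x']['num']
    ny = cov['domain']['axes']['y']['num']
    rng = cov['ranges'][imt_name]
    assert rng['shape'] == [ny, nx]
    assert len(rng['values']) == ny * nx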
Example #26
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Mapping...')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the mapping functions
        layers = config['products']['mapping']['layers']
        if 'topography' in layers and layers['topography'] != '':
            topofile = layers['topography']
        else:
            topofile = None
        if 'roads' in layers and layers['roads'] != '':
            roadfile = layers['roads']
        else:
            roadfile = None
        if 'faults' in layers and layers['faults'] != '':
            faultfile = layers['faults']
        else:
            faultfile = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        if topofile:
            topogrid = read(topofile, samplegeodict=sampledict, resample=False)
        else:
            # no topography file configured; substitute a flat grid of zeros
            tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
            topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        extent = (float(xmin), float(ymin), float(xmax), float(ymax))
        if 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            states_provs = list(states_provs.intersecting_geometries(extent))
            if len(states_provs) > 300:
                # too many features would make the map unreadably busy
                states_provs = None
            else:
                # the name above was rebound to a geometry list, so
                # re-create the cartopy feature for plotting
                states_provs = cfeature.NaturalEarthFeature(
                    category='cultural',
                    name='admin_1_states_provinces_lines',
                    scale='10m',
                    facecolor='none')

            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')

            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)

            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)

        if faultfile is not None:
            faults = ShapelyFeature(Reader(faultfile).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor='none')
        else:
            faults = None

        if roadfile is not None:
            roads = ShapelyFeature(Reader(roadfile).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor='none')
            # as with states_provs: skip the roads layer when it would add
            # too many features to the map
            if len(list(roads.intersecting_geometries(extent))) > 200:
                roads = None
            else:
                roads = ShapelyFeature(Reader(roadfile).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        else:
            roads = None

        alist = []
        for imtype in imtlist:
            # IMT identifiers come back as 'component/imtype'
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'faults': faults,
                'datadir': datadir,
                'operator': operator,
                'filter_size': filter_size,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict
            }
            alist.append(d)
            if imtype == 'MMI':
                # MMI also gets a pin thumbnail and a raster overlay
                g = copy.deepcopy(d)
                g['imtype'] = 'thumbnail'
                alist.append(g)
                h = copy.deepcopy(d)
                h['imtype'] = 'overlay'
                alist.append(h)
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.jpg', 'image/jpeg')
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.pdf', 'application/pdf')
                self.contents.addFile('intensityThumbnail',
                                      'Intensity Thumbnail',
                                      'Thumbnail of intensity map.',
                                      'pin-thumbnail.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.pngw', 'text/plain')
            else:
                fileimt = oq_to_file(imtype)
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.jpg', 'image/jpeg')
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
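The serial/parallel dispatch at the bottom of this module recurs in several of these examples: build a list of argument dicts, then either fan them out across a ProcessPoolExecutor or loop in-process. A self-contained sketch of the pattern, where work() is a hypothetical stand-in for make_map():

import concurrent.futures as cf

def work(d):
    # stand-in for make_map(); real workers must be picklable,
    # module-level functions
    return d['imtype']

def run_all(alist, max_workers):
    if max_workers > 0:
        with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
            # list() drains the iterator so worker exceptions surface here
            return list(ex.map(work, alist))
    return [work(d) for d in alist]

if __name__ == '__main__':
    print(run_all([{'imtype': 'MMI'}, {'imtype': 'PGA'}], max_workers=0))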
Example 27
    def execute(self):
        """Create grid.xml and uncertainty.xml files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        logger = logging.getLogger(__name__)
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('gridxml module can only function on '
                                      'gridded data, not sets of points')
        gridnames = container.getIMTs(COMPONENT)
        xml_types = ['grid', 'uncertainty']
        for xml_type in xml_types:
            layers = OrderedDict()
            field_keys = OrderedDict()
            for gridname in gridnames:
                imt_field = _oq_to_gridxml(gridname)
                imtdict = container.getIMTGrids(gridname, COMPONENT)
                if xml_type == 'grid':
                    grid_data = imtdict['mean']
                    metadata = imtdict['mean_metadata']
                elif xml_type == 'uncertainty':
                    grid_data = imtdict['std']
                    metadata = imtdict['std_metadata']

                units = metadata['units']
                digits = metadata['digits']
                # convert from HDF units to legacy grid.xml units
                if xml_type == 'grid':
                    if units == 'ln(cm/s)':
                        grid_data = np.exp(grid_data)
                        units = 'cm/s'
                    elif units == 'ln(g)':
                        # back to g via exp(), then scale to percent-g
                        grid_data = np.exp(grid_data) * 100
                        units = '%g'

                if xml_type == 'grid':
                    layers[imt_field] = grid_data
                    field_keys[imt_field] = (units, digits)
                else:
                    layers['STD' + imt_field] = grid_data
                    field_keys['STD' + imt_field] = (units, digits)

            if xml_type == 'grid':
                grid_data, _ = container.getArray([], 'vs30')
                units = 'm/s'
                digits = metadata['digits']
                layers['SVEL'] = grid_data
                field_keys['SVEL'] = (units, digits)

            geodict = GeoDict(metadata)

            config = container.getConfig()

            # event dictionary
            info = container.getMetadata()
            event_info = info['input']['event_information']
            event_dict = {}
            event_dict['event_id'] = event_info['event_id']
            event_dict['magnitude'] = float(event_info['magnitude'])
            event_dict['depth'] = float(event_info['depth'])
            event_dict['lat'] = float(event_info['latitude'])
            event_dict['lon'] = float(event_info['longitude'])
            try:
                event_dict['event_timestamp'] = datetime.strptime(
                    event_info['origin_time'], constants.TIMEFMT)
            except ValueError:
                event_dict['event_timestamp'] = datetime.strptime(
                    event_info['origin_time'], constants.ALT_TIMEFMT)
            event_dict['event_description'] = event_info['location']
            event_dict['event_network'] = \
                info['input']['event_information']['eventsource']
            event_dict['intensity_observations'] =\
                info['input']['event_information']['intensity_observations']
            event_dict['seismic_stations'] =\
                info['input']['event_information']['seismic_stations']
            if info['input']['event_information']['fault_ref'] == 'Origin':
                event_dict['point_source'] = 'True'
            else:
                event_dict['point_source'] = 'False'

            # shake dictionary
            shake_dict = {}
            shake_dict['event_id'] = event_dict['event_id']
            shake_dict['shakemap_id'] = event_dict['event_id']
            shake_dict['shakemap_version'] = \
                info['processing']['shakemap_versions']['map_version']
            shake_dict['code_version'] = shakemap.__version__
            ptime = info['processing']['shakemap_versions']['process_time']
            try:
                shake_dict['process_timestamp'] = datetime.strptime(
                    ptime, constants.TIMEFMT)
            except ValueError:
                shake_dict['process_timestamp'] = datetime.strptime(
                    ptime, constants.ALT_TIMEFMT)

            shake_dict['shakemap_originator'] = \
                config['system']['source_network']
            shake_dict['map_status'] = config['system']['map_status']
            shake_dict['shakemap_event_type'] = 'ACTUAL'
            if event_dict['event_id'].endswith('_se'):
                shake_dict['shakemap_event_type'] = 'SCENARIO'

            shake_grid = ShakeGrid(layers,
                                   geodict,
                                   event_dict,
                                   shake_dict, {},
                                   field_keys=field_keys)
            fname = os.path.join(datadir, '%s.xml' % xml_type)
            logger.debug('Saving IMT grids to %s' % fname)
            shake_grid.save(fname)  # TODO - set grid version number

        self.contents.addFile('xmlGrids', 'XML Grid',
                              'XML grid of ground motions', 'grid.xml',
                              'text/xml')
        self.contents.addFile('uncertaintyGrid', 'Uncertainty Grid',
                              'XML grid of uncertainties', 'uncertainty.xml',
                              'text/xml')

        container.close()
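One detail of the loop above is worth a worked check: the container stores ground-motion means in natural-log units, so grid.xml values are recovered with exp(), and accelerations are additionally scaled from g to %g. A quick numeric sketch:

import numpy as np

ln_pga = np.array([-2.0, 0.0])    # ln(g), as stored in shake_result.hdf
pga_pct_g = np.exp(ln_pga) * 100  # '%g', as written to grid.xml
print(pga_pct_g)                  # ~[13.53, 100.0]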
Example 28
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        event_paths = glob.glob(os.path.join(data_path, "%s*" % self._eventid))
        datalist = []
        sigmas = []
        for path in event_paths:
            datadir = os.path.join(path, 'current', 'products')
            if not os.path.isdir(datadir):
                raise NotADirectoryError('%s is not a valid directory.' %
                                         datadir)
            datafile = os.path.join(datadir, 'shake_result.hdf')
            if not os.path.isfile(datafile):
                raise FileNotFoundError('%s does not exist.' % datafile)

            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            if container.getDataType() != 'points':
                raise NotImplementedError('xtestplot_multi module can only '
                                          'operate on sets of points, not '
                                          'gridded data')

            stas = container.getStationDict()
            ampd = stas['features'][0]['properties']['channels'][0][
                'amplitudes'][0]
            if 'ln_sigma' in ampd:
                sigmas.append(ampd['ln_sigma'])
            else:
                sigmas.append(0)
            datadict = {}
            imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
            for myimt in imtlist:
                datadict[myimt] = container.getIMTArrays(
                    myimt, 'GREATER_OF_TWO_HORIZONTAL')
            datalist.append(datadict)
            # close each container as we go; a single close() after the
            # loop would only release the last one opened
            container.close()

        #
        # Make plots
        #
        colors = ['k', 'b', 'g', 'r', 'c', 'm']
        # imtlist here is from the last event read above; all of the
        # events are assumed to share the same set of IMTs
        for myimt in imtlist:
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            for ix, dd in enumerate(datalist):
                data = dd[myimt]
                axa[0].plot(data['lons'],
                            data['mean'],
                            color=colors[ix],
                            label=r'$\sigma_\epsilon = %.2f$' % sigmas[ix])
                axa[1].plot(data['lons'],
                            data['std'],
                            '-.',
                            color=colors[ix],
                            label=r'$\sigma_\epsilon = %.2f$' % sigmas[ix])
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(bottom=0)
            fileimt = oq_to_file(myimt)
            pfile = os.path.join(event_paths[0], 'current', 'products',
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile)
            pfile = os.path.join(event_paths[0], 'current', 'products',
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile)
            plt.close()
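The ln_sigma lookup near the top of this module reaches four levels into the station GeoJSON. When that layout can't be guaranteed, a small helper with an explicit fallback keeps the intent readable; a sketch, assuming the same features/channels/amplitudes structure (first_amplitude is a hypothetical helper, not part of ShakeMap):

def first_amplitude(stationdict):
    """Return the first amplitude dict from a station GeoJSON, or {}."""
    try:
        return (stationdict['features'][0]['properties']
                ['channels'][0]['amplitudes'][0])
    except (KeyError, IndexError):
        return {}

# defaults to 0 when ln_sigma is absent, matching the loop above
sigma = first_amplitude({'features': []}).get('ln_sigma', 0)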
Example 29
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        oc = ShakeMapOutputContainer.load(datafile)
        if oc.getDataType() != 'grid':
            raise NotImplementedError('plotregr module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # If mapping runs in parallel, then we want this module to run in
        # parallel as well; otherwise we get weird errors from matplotlib
        max_workers = config['products']['mapping']['max_workers']

        #
        # Cheating here a bit by assuming that the IMTs are the same
        # as the regression IMTs
        #
        rockgrid = {}
        soilgrid = {}
        rocksd = {}
        soilsd = {}
        imtlist = oc.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            rockgrid[myimt], _ = oc.getArray(['attenuation', 'rock', myimt],
                                             'mean')
            soilgrid[myimt], _ = oc.getArray(['attenuation', 'soil', myimt],
                                             'mean')
            rocksd[myimt], _ = oc.getArray(['attenuation', 'rock', myimt],
                                           'std')
            soilsd[myimt], _ = oc.getArray(['attenuation', 'soil', myimt],
                                           'std')
        distances, _ = oc.getArray(['attenuation', 'distances'], 'rrup')

        stations = oc.getStationDict()

        #
        # Make plots
        #
        alist = []
        for myimt in imtlist:
            a = {
                'myimt': myimt,
                'rockgrid': rockgrid,
                'soilgrid': soilgrid,
                'rocksd': rocksd,
                'soilsd': soilsd,
                'stations': stations,
                'distances': distances,
                'eventid': self._eventid,
                'datadir': datadir
            }
            alist.append(a)
            if myimt == 'MMI':
                self.contents.addFile(
                    'miRegr', 'Intensity Regression',
                    'Regression plot of macroseismic '
                    'intensity.', 'mmi_regr.png', 'image/png')
            elif myimt == 'PGA':
                self.contents.addFile(
                    'pgaRegr', 'PGA Regression', 'Regression plot of peak '
                    'ground acceleration (%g).', 'pga_regr.png', 'image/png')
            elif myimt == 'PGV':
                self.contents.addFile(
                    'pgvRegr', 'PGV Regression',
                    'Regression plot of peak ground '
                    'velocity (cm/s).', 'pgv_regr.png', 'image/png')
            else:
                oqimt = imt.from_string(myimt)
                period = str(oqimt.period)
                filebase = oq_to_file(myimt)
                psacap = 'Regression plot of ' + period + ' sec 5% damped ' \
                         'pseudo-spectral acceleration (%g).'
                self.contents.addFile(filebase + 'Regr',
                                      'PSA ' + period + ' sec Regression',
                                      psacap, filebase + '_regr.png',
                                      'image/png')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_plots, alist)
                list(results)
        else:
            for adict in alist:
                make_plots(adict)

        #
        # Make attenuation_curves.json
        #
        jdict = {'eventid': self._eventid}
        jdict['gmpe'] = {}
        for site in ['soil', 'rock']:
            jdict['gmpe'][site] = {}
            for myimt in imtlist:
                jdict['gmpe'][site][myimt] = {}
                jdict['gmpe'][site][myimt]['mean'] = oc.getArray(
                    ['attenuation', site, myimt],
                    'mean')[0].round(decimals=5).tolist()
                jdict['gmpe'][site][myimt]['stddev'] = oc.getArray(
                    ['attenuation', site, myimt],
                    'std')[0].round(decimals=5).tolist()
        jdict['distances'] = {}
        for dtype in ['repi', 'rhypo', 'rjb', 'rrup']:
            jdict['distances'][dtype] = oc.getArray(
                ['attenuation', 'distances'],
                dtype)[0].round(decimals=5).tolist()
        jdict['mean_bias'] = {}
        info = oc.getMetadata()
        for myimt in imtlist:
            jdict['mean_bias'][myimt] = info['output']['ground_motions'][
                myimt]['bias']
        jstring = json.dumps(jdict, allow_nan=False)
        jfile = os.path.join(datadir, 'attenuation_curves.json')
        with open(jfile, 'wt') as f:
            f.write(jstring)
        oc.close()
        cap = "Nominal attenuation curves"
        self.contents.addFile('attenuationCurves', 'Attenuation Curves', cap,
                              'attenuation_curves.json', 'application/json')
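The attenuation_curves.json written above has a predictable shape: eventid, gmpe.{rock,soil}.<IMT>.{mean,stddev}, distances.{repi,rhypo,rjb,rrup}, and mean_bias.<IMT>. That makes it easy to plot downstream without any ShakeMap code. A sketch, assuming the file exists locally and PGA was among the IMTs written:

import json

import matplotlib.pyplot as plt

with open('attenuation_curves.json') as f:
    curves = json.load(f)

# mean values are in natural-log units, so use a log scale on x only
plt.semilogx(curves['distances']['rrup'],
             curves['gmpe']['rock']['PGA']['mean'])
plt.xlabel('Rupture distance')
plt.ylabel('Mean ln(PGA)')
plt.savefig('rock_pga_curve.png')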
Example 30
def test_products():

    installpath, datapath = get_config_paths()

    #
    # Use a real event for checks of products against the contents of
    # the output container
    #
    evid = 'integration_test_0001'
    try:
        #
        # Make sure an output file exists
        #
        assemble = AssembleModule(evid, comment='Test comment.')
        assemble.execute()
        del assemble
        model = ModelModule(evid)
        model.execute()
        del model

        res_file = os.path.join(datapath, evid, 'current', 'products',
                                'shake_result.hdf')
        oc = ShakeMapOutputContainer.load(res_file)

        #
        # The history module just outputs some info to the operator, so
        # here we just run it to make sure it doesn't crash anything.
        # Actual testing should be done via bug reports/feature requests
        # from users.
        #
        history = HistoryModule(evid)
        history.execute()
        del history

        #
        # Test the creation of products -- currently not checking results
        # for validity or consistency, but probably should
        #

        #
        # TODO: The stationlist.json should be validated, but we need a
        # function that will read it and convert it to something
        # we can test against.
        #
        check_failures(evid, datapath, StationModule)
        mod = StationModule(evid)
        mod.execute()
        mod.writeContents()

        check_failures(evid, datapath, MappingModule)
        mod = MappingModule(evid)
        mod.execute()
        mod.writeContents()

        check_failures(evid, datapath, PlotRegr)
        #
        # PlotRegr gets tested in the model tests for event 72282711
        #
#        mod = PlotRegr(evid)
#        mod.execute()
#        mod.writeContents()

        check_failures(evid, datapath, KMLModule)
        mod = KMLModule(evid)
        mod.execute()
        mod.writeContents()
        del mod

        # This just exercises the ShapeModule code without actually
        # checking for valid results.
        mod = ShapeModule(evid)
        mod.execute()
        mod.writeContents()
        del mod

        #
        # These check that the results are consistent with the output
        # container
        #
        do_rupture(evid, datapath, oc)

        # do_info(evid, datapath, oc)

        do_raster(evid, datapath, oc)

        do_gridxml(evid, datapath, oc)

        oc.close()
        #
        # Checks contours against saved versions; if something
        # changes, will need to update the files in
        # data/integration_test_0001
        #
        # TODO: (7/28/2021) I am disabling this test for a little while until
        # the float32 HDF version of the impactutils containers are in
        # wide distribution. At that point, I'll update the data files
        # and the test results.
        # do_contour(evid, datapath)
#        do_contour_command_line(evid, datapath)

    finally:
        data_file = os.path.join(datapath, evid, 'current', 'shake_data.hdf')
        if os.path.isfile(data_file):
            os.remove(data_file)