Exemple #1
0
def test_output_repr():
    """Check that repr() of a loaded grid container matches the expected text."""
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    shake_result = ShakeMapOutputContainer.load(out_file)
    container_str = repr(shake_result)
    # The expected text must match byte-for-byte, including the leading
    # four-space indentation inside the literal.
    assert container_str == '''Data type: grid
    use "getIMTGrids" method to access interpolated IMTs
Rupture: <class 'shakelib.rupture.quad_rupture.QuadRupture'>
    locstring: 1km NNW of Reseda, CA
    magnitude: 6.7
    time: 1994-01-17T12:30:55.000000Z
Config: use 'getConfig' method
Stations: use 'getStationDict' method
    # instrumental stations: 185
    # macroseismic stations: 977
Metadata: use 'getMetadata' method
Available IMTs (components):
    MMI (GREATER_OF_TWO_HORIZONTAL)
    PGA (GREATER_OF_TWO_HORIZONTAL)
    PGV (GREATER_OF_TWO_HORIZONTAL)
    SA(0.3) (GREATER_OF_TWO_HORIZONTAL)
    SA(1.0) (GREATER_OF_TWO_HORIZONTAL)
    SA(3.0) (GREATER_OF_TWO_HORIZONTAL)
'''

    # Smoke-check that the accessor advertised by the repr works for one IMT.
    shake_result.getIMTGrids("SA(1.0)", "GREATER_OF_TWO_HORIZONTAL")
Exemple #2
0
    def execute(self):
        """
        Write info.json metadata file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # create ShakeMap metadata file
        self.logger.debug('Writing info.json file...')
        info = container.getMetadata()
        infostring = json.dumps(info)
        info_file = os.path.join(datadir, 'info.json')
        # Fix: use a context manager so the file handle is released even if
        # the write raises (original used bare open/close with no finally).
        with open(info_file, 'wt') as f:
            f.write(infostring)
        container.close()
Exemple #3
0
    def execute(self):
        """Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create contour files
        self.logger.info('Contouring to files...')
        contour_to_files(container, config, datadir, self.logger)
        # Fix: release the HDF handle; the original never closed the
        # container, unlike the other modules in this codebase.
        container.close()
Exemple #4
0
    def execute(self):
        """Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        product_dir = os.path.join(
            data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(product_dir):
            raise NotADirectoryError(
                '%s is not a valid directory.' % product_dir)
        hdf_file = os.path.join(product_dir, 'shake_result.hdf')
        if not os.path.isfile(hdf_file):
            raise FileNotFoundError('%s does not exist.' % hdf_file)

        # Load the output container holding the interpolated results.
        result = ShakeMapOutputContainer.load(hdf_file)

        # Read the product configuration from the install tree.
        cfg = ConfigObj(
            os.path.join(install_path, 'config', 'products.conf'))

        # Hand the actual contouring work off to the shared helper.
        self.logger.info('Contouring to files...')
        contour_to_files(result, cfg, product_dir, self.logger)
Exemple #5
0
    def execute(self):
        """
        Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than grids.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            # Fix: close before raising so the error path does not leak
            # the open HDF handle.
            container.close()
            raise NotImplementedError('contour module can only contour '
                                      'gridded data, not sets of points')

        # create contour files
        self.logger.debug('Contouring to files...')
        contour_to_files(container, datadir, self.logger, self.filter_size)
        # Fix: release the HDF handle when done (original leaked it).
        container.close()
Exemple #6
0
    def execute(self):
        """
        Create KML files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than grids.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            # Fix: close before raising so the error path does not leak
            # the open HDF handle.
            container.close()
            raise NotImplementedError('kml module can only contour '
                                      'gridded data, not sets of points')

        # find the low res ocean vector dataset
        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        pconfig = configobj.ConfigObj(product_config_file)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']
        oceanfile = path_macro_sub(oceanfile, install_path, data_path)

        # call create_kmz function
        create_kmz(container, datadir, oceanfile, self.logger)
        # Fix: release the HDF handle when done (original leaked it).
        container.close()
Exemple #7
0
    def execute(self):
        """
        Write rupture.json file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # create ShakeMap rupture file; only the 'json' format is
        # implemented, other entries of ALLOWED_FORMATS are skipped.
        for fformat in ALLOWED_FORMATS:
            if fformat == 'json':
                self.logger.info('Writing rupture.json file...')
                rupture_dict = container.getRuptureDict()
                rupture_file = os.path.join(datadir, 'rupture.json')
                # Fix: context manager replaces the open/close pair that
                # leaked the handle if json.dump raised.
                with open(rupture_file, 'w') as f:
                    json.dump(rupture_dict, f)
        # Fix: release the HDF handle (original never closed the container).
        container.close()
Exemple #8
0
    def execute(self):
        """
        Plot mean and stddev versus longitude for every available IMT.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        # Pull all IMT arrays out of the container up front so it can be
        # closed before the plotting loop begins.
        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTArrays(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')
        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            # assumes each entry has 'lons', 'mean', 'std' arrays of equal
            # length — TODO confirm against getIMTArrays
            data = datadict[myimt]
            # Two stacked panels sharing the longitude axis: mean (with a
            # +/- stddev band) on top, stddev alone below.
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            axa[0].plot(data['lons'], data['mean'], color='k', label='mean')
            axa[0].plot(data['lons'],
                        data['mean'] + data['std'],
                        '--b',
                        label='mean +/- stddev')
            axa[0].plot(data['lons'], data['mean'] - data['std'], '--b')
            axa[1].plot(data['lons'], data['std'], '-.r', label='stddev')
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(ymin=0)
            # Map the OpenQuake IMT name (e.g. 'SA(1.0)') to a
            # filesystem-safe token for the output filenames.
            fileimt = oq_to_file(myimt)
            # Save both PDF and PNG renderings of the same figure.
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile, tight_layout=True)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile, tight_layout=True)
            plt.close()
Exemple #9
0
    def execute(self):
        """
        Cancel ShakeMap products using methods configured in transfer.conf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        products_dir = os.path.join(datadir, 'products')
        if not os.path.isdir(products_dir):
            raise NotADirectoryError('%s does not exist.' % products_dir)

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(
                install_path, 'config', 'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        config = ConfigObj(transfer_conf, configspec=configspec)

        # get the output container with all the things in it
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # call the transfer method; cancel=True makes it send a cancel
        # message instead of a product update
        self.logger.info('Sending cancel message...')
        _transfer(config, container, products_dir, cancel=True)

        # Create a file called CANCEL in the data directory. The
        # shake program will look for this and not run if present.
        self.logger.info('Creating cancel file...')
        cancelfile = os.path.join(datadir, 'CANCEL')
        with open(cancelfile, 'wt') as cfile:
            cfile.write('Event cancelled at %s\n' %
                        datetime.utcnow().strftime(TIMEFMT))

        # delete the event from the database
        handler = AmplitudeHandler(install_path, data_path)
        handler.deleteEvent(self._eventid)
        container.close()
Exemple #10
0
    def execute(self):
        """
        Transfer ShakeMap products using methods configured in transfer.conf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        # look for the presence of a NO_TRANSFER file in the datadir; its
        # presence blocks transfer entirely for this event.
        notransfer = os.path.join(datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info(
                'Event has a %s file blocking transfer.' % NO_TRANSFER)
            return

        products_dir = os.path.join(datadir, 'products')
        if not os.path.isdir(products_dir):
            raise NotADirectoryError('%s does not exist.' % products_dir)

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(
                install_path, 'config', 'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        config = ConfigObj(transfer_conf, configspec=configspec)

        # get the output container with all the things in it
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # call the transfer method
        _transfer(config, container, products_dir)

        # copy the current folder to a new backup directory
        self._make_backup(data_path)

        container.close()
Exemple #11
0
    def execute(self):
        """
        Draw the intensity map and the configured IMT contour maps.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Load the output container; mapping only works on gridded output.
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # Load the products configuration from the install tree.
        config = ConfigObj(
            os.path.join(install_path, 'config', 'products.conf'))

        self.logger.info('Mapping...')

        # Resolve each map layer path, expanding install/data macros.
        layers = config['products']['mapping']['layers']
        layerdict = {}
        for dest_key, cfg_key in (('coast', 'coasts'),
                                  ('ocean', 'oceans'),
                                  ('lake', 'lakes'),
                                  ('country', 'countries'),
                                  ('state', 'states')):
            layerdict[dest_key] = path_macro_sub(
                layers[cfg_key], install_path, data_path)
        topofile = path_macro_sub(
            layers['topography'], install_path, data_path)
        cities = path_macro_sub(layers['cities'], install_path, data_path)

        # Draw the intensity map, then one contour map per configured IMT.
        mapmaker = MapMaker(container, topofile, layerdict, cities,
                            self.logger)
        self.logger.info('Drawing intensity map...')
        intensity_map = mapmaker.drawIntensityMap(datadir)
        self.logger.info('Created intensity map %s' % intensity_map)
        for imt in config['products']['mapping']['imts']:
            self.logger.info('Drawing %s contour map...' % imt)
            contour_file = mapmaker.drawContourMap(imt, datadir)
            self.logger.info('Created contour map %s' % contour_file)
Exemple #12
0
    def execute(self):
        """
        Plot mean/stddev versus longitude for each IMT in the point output.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        # NOTE(review): the container is never closed in this variant —
        # confirm whether a container.close() is intended here.
        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTArrays(myimt,
                    'GREATER_OF_TWO_HORIZONTAL')

        #
        # Make plots
        #
        for myimt in imtlist:
            # assumes 'lons', 'mean', 'std' arrays of equal length — TODO
            # confirm against getIMTArrays
            data = datadict[myimt]
            fig = plt.figure(figsize=(10, 8))
            plt.plot(data['lons'],
                     data['mean'],
                     color='k', label='mean')
            plt.plot(data['lons'],
                     data['mean'] + data['std'],
                     '--b', label='mean +/- stddev')
            plt.plot(data['lons'],
                     data['mean'] - data['std'],
                     '--b')
            plt.plot(data['lons'],
                     data['std'],
                     '-.r', label='stddev')
            plt.xlabel('Longitude')
            plt.ylabel('ln(%s) (g)' % myimt)
            plt.legend(loc='best')
            plt.title(self._eventid)
            plt.grid()
            # One PDF per IMT; unlike the other plot module this one does
            # not sanitize the IMT name with oq_to_file.
            pfile = os.path.join(datadir, self._eventid + '_' + myimt + '.pdf')
            plt.savefig(pfile)
Exemple #13
0
def run_event(evid):
    """Assemble and model one event, returning the first IMT's arrays."""
    installpath, datapath = get_config_paths()
    # Run the assemble and model stages back to back.
    AssembleModule(evid, comment='Test comment.').execute()
    ModelModule(evid).execute()
    # Load the freshly-written result container.
    result_path = os.path.join(datapath, evid, 'current', 'products',
                               'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(result_path)
    imt_names = oc.getIMTs()
    components = oc.getComponents(imt_names[0])
    return oc.getIMTArrays(imt_names[0], components[0])
Exemple #14
0
def test_create_kmz():
    """Build a KMZ from the Northridge container and verify its contents."""
    tempdir = tempfile.mkdtemp()
    # Fix: dropped the `except Exception: print(e); assert 1 == 2` wrapper —
    # it hid the real traceback from pytest. Exceptions now propagate; the
    # finally clause still guarantees cleanup.
    try:
        homedir = os.path.dirname(os.path.abspath(__file__))
        cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                             'northridge', 'shake_result.hdf')
        container = ShakeMapOutputContainer.load(cfile)
        install_path, data_path = get_config_paths()
        global_data_path = os.path.join(os.path.expanduser('~'),
                                        'shakemap_data')
        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        pconfig = configobj.ConfigObj(product_config_file)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']
        oceanfile = path_macro_sub(oceanfile, install_path, data_path,
                                   global_data_path)
        logger = logging.getLogger(__name__)
        kmzfile = create_kmz(container, tempdir, oceanfile, logger)
        # Inspect the KML inside the archive: count placemarks per folder.
        myzip = zipfile.ZipFile(kmzfile, mode='r')
        kmlstr = myzip.read('shakemap.kml').decode('utf-8')
        root = minidom.parseString(kmlstr)
        document = root.getElementsByTagName('Document')[0]
        folders = document.getElementsByTagName('Folder')
        names = []
        nstations = 0
        nmmi = 0
        for folder in folders:
            name = folder.getElementsByTagName('name')[0].firstChild.data
            names.append(name)
            if name == 'Instrumented Stations':
                nstations = len(folder.getElementsByTagName('Placemark'))
            elif name == 'Macroseismic Stations':
                nmmi = len(folder.getElementsByTagName('Placemark'))
        assert sorted(names) == [
            'Contours', 'Instrumented Stations', 'Macroseismic Stations'
        ]
        assert nstations == 185
        assert nmmi == 977
        myzip.close()
    finally:
        # Always remove the scratch directory, pass or fail.
        shutil.rmtree(tempdir)
Exemple #15
0
def scp_test(remote_host, remote_directory, private_key):
    """Exercise the ssh transfer method against a live remote host."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    container_file = os.path.join(this_dir, '..', '..', 'data', 'containers',
                                  'northridge', 'shake_result.hdf')
    products_dir = os.path.join(this_dir, '..', '..', 'data', 'eventdata',
                                'northridge', 'current')
    eventid = 'ci3144585'

    container = ShakeMapOutputContainer.load(container_file)
    # Single ssh destination built from the caller-supplied credentials.
    destination = {
        'remote_host': remote_host,
        'remote_directory': remote_directory,
        'private_key': private_key,
    }
    config = {'ssh': {'dest1': destination}}
    TransferModule(eventid)._transfer(config, container, products_dir)
Exemple #16
0
def pdl_test(java, jarfile, keyfile, configfile):
    """Exercise the PDL transfer method with caller-supplied PDL settings."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    container_file = os.path.join(this_dir, '..', '..', 'data', 'containers',
                                  'northridge', 'shake_result.hdf')
    products_dir = os.path.join(this_dir, '..', '..', 'data', 'eventdata',
                                'northridge', 'current')
    eventid = 'ci3144585'

    container = ShakeMapOutputContainer.load(container_file)
    # Single PDL destination built from the caller-supplied settings.
    destination = {
        'java': java,
        'jarfile': jarfile,
        'privatekey': keyfile,
        'configfile': configfile,
        'source': 'us',
    }
    config = {'pdl': {'dest1': destination}}
    TransferModule(eventid)._transfer(config, container, products_dir)
Exemple #17
0
    def execute(self):
        """
        Write info.json metadata file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create ShakeMap metadata file
        formats = config['products']['info']['formats']
        for fformat in formats:
            # NOTE(review): ALLLOWED_FORMATS (triple L) kept as-is — it is
            # presumably the module constant's actual spelling; confirm.
            if fformat not in ALLLOWED_FORMATS:
                # Fix: Logger.warn is deprecated; warning() is the
                # supported method name.
                self.logger.warning(
                    'Specified format %s not in list of defined '
                    'formats.  Skipping.' % fformat)
                continue
            if fformat == 'json':
                self.logger.info('Writing info.json file...')
                infostring = container.getString('info.json')
                info_file = os.path.join(datadir, 'info.json')
                # Fix: context manager guarantees the handle closes even
                # if the write raises (original used bare open/close).
                with open(info_file, 'wt') as f:
                    f.write(infostring)
        # Fix: release the HDF handle (original never closed the container).
        container.close()
Exemple #18
0
def test_transfer():
    """Copy products to a temp dir via the 'copy' method and compare counts."""
    homedir = os.path.dirname(os.path.abspath(__file__))
    cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                         'northridge', 'shake_result.hdf')
    products_dir = os.path.join(homedir, '..', '..', 'data', 'eventdata',
                                'northridge', 'current')
    eventid = 'ci3144585'

    container = ShakeMapOutputContainer.load(cfile)
    # Fix: create the temp dir before the try block — in the original, a
    # mkdtemp failure left `tdir` unbound when the finally clause ran.
    tdir = tempfile.mkdtemp()
    try:
        remote_dir = os.path.join(tdir, eventid)
        config = {'copy': {'local': {'remote_directory': tdir}}}
        transfermod = TransferModule(eventid)
        transfermod._transfer(config, container, products_dir)
        # Every source product file should have been copied.
        nfiles = len(os.listdir(remote_dir))
        nsrcfiles = len(os.listdir(products_dir))
        assert nfiles == nsrcfiles
    finally:
        # Fix: removed `except Exception: pass`, which silently swallowed
        # failures (including the assert) and made the test vacuous.
        shutil.rmtree(tdir)
Exemple #19
0
    def execute(self):
        """Create GIS-readable .flt rasters of each layer's mean and stddev.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than grids.
        """
        install_path, data_path = get_config_paths()
        # Fix: the original computed datadir twice on consecutive lines
        # with identical arguments; the duplicate is removed.
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            # Fix: close before raising so the error path does not leak
            # the open HDF handle.
            container.close()
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.info('Creating GIS grids...')
        layers = config['products']['raster']['layers']
        for layer in layers:
            # Filesystem-safe name for the IMT (e.g. 'SA(1.0)' -> 'psa1p0').
            fileimt = oq_to_file(layer)
            imtdict = container.getIMTGrids(layer, 'Larger')
            mean_grid = imtdict['mean']
            std_grid = imtdict['std']
            mean_gdal = GDALGrid.copyFromGrid(mean_grid)
            std_gdal = GDALGrid.copyFromGrid(std_grid)
            mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
            std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
            self.logger.info('Saving %s...' % mean_fname)
            mean_gdal.save(mean_fname)
            self.logger.info('Saving %s...' % std_fname)
            std_gdal.save(std_fname)
        # Fix: release the HDF handle (original never closed the container).
        container.close()
Exemple #20
0
import matplotlib.pyplot as plt
from matplotlib import cm
import cartopy.crs as ccrs
import cartopy.feature as cfeat

# Local imports
import gmpe

# Map projection used for plotting.
data_crs = ccrs.PlateCarree()

# Location of the ShakeMap 4 results container on disk.
file = '/Users/tnye/PROJECTS/Duration/data/events/usp000a1b0/shakemap/shake_result.hdf'

# Load the output container and pull the event magnitude off its rupture.
oc = ShakeMapOutputContainer.load(file)
rupture = oc.getRuptureObject()
mag = rupture.getOrigin().mag

# Extract the PGA mean/stddev grids and their underlying data arrays.
pga_dict = oc.getIMTGrids('PGA', 'GREATER_OF_TWO_HORIZONTAL')
pga_mean_grid2d = pga_dict['mean']
pga_stddev_grid2d = pga_dict['std']
pga_mean = pga_mean_grid2d.getData()
pga_stddev = pga_stddev_grid2d.getData()

# Geographic bounds of the grid, read once from its geodict.
geodict = pga_mean_grid2d.getGeoDict()
min_lon = geodict.xmin
max_lon = geodict.xmax
min_lat = geodict.ymin
max_lat = geodict.ymax
Exemple #21
0
    def execute(self):
        """
        Plot mean and stddev vs. longitude for every IMT across all events
        matching this event ID prefix, one PDF and one PNG per IMT.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        # One data directory per event whose ID starts with self._eventid.
        event_paths = glob.glob(os.path.join(data_path, "%s*" % self._eventid))
        datalist = []
        sigmas = []
        for path in event_paths:
            datadir = os.path.join(path, 'current', 'products')
            if not os.path.isdir(datadir):
                raise NotADirectoryError('%s is not a valid directory.' %
                                         datadir)
            datafile = os.path.join(datadir, 'shake_result.hdf')
            if not os.path.isfile(datafile):
                raise FileNotFoundError('%s does not exist.' % datafile)

            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            if container.getDataType() != 'points':
                raise NotImplementedError('xtestplot_multi module can only '
                                          'operate on sets of points, not '
                                          'gridded data')

            # Use the first amplitude of the first station to recover the
            # run's ln_sigma; 0 when the field is absent.
            stas = container.getStationDict()
            ampd = stas['features'][0]['properties'][
                        'channels'][0]['amplitudes'][0]
            if 'ln_sigma' in ampd:
                sigmas.append(ampd['ln_sigma'])
            else:
                sigmas.append(0)
            # Collect every IMT's point arrays for this event.
            datadict = {}
            imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
            for myimt in imtlist:
                datadict[myimt] = container.getIMTArrays(
                    myimt, 'GREATER_OF_TWO_HORIZONTAL')
            datalist.append(datadict)

        # NOTE(review): only the last loop iteration's container is closed
        # here; earlier containers are left to garbage collection — confirm
        # whether each should be closed inside the loop.
        container.close()
        #
        # Make plots
        #
        # One color per event; assumes no more than six matching events.
        colors = ['k', 'b', 'g', 'r', 'c', 'm']
        for myimt in imtlist:
            # Upper panel: mean; lower panel: stddev, sharing the x axis.
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            for ix, dd in enumerate(datalist):
                data = dd[myimt]
                axa[0].plot(data['lons'],
                            data['mean'],
                            color=colors[ix],
                            label=r'$\sigma_\epsilon = %.2f$' %
                            sigmas[ix])
                axa[1].plot(data['lons'],
                            data['std'],
                            '-.', color=colors[ix],
                            label=r'$\sigma_\epsilon = %.2f$' %
                            sigmas[ix])
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(ymin=0)
            fileimt = oq_to_file(myimt)
            # Write both PDF and PNG versions into the first event's
            # products directory.
            # NOTE(review): `tight_layout` is not a documented savefig
            # kwarg (bbox_inches='tight' is the usual spelling) — confirm
            # against the pinned matplotlib version.
            pfile = os.path.join(event_paths[0], 'current', 'products',
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile, tight_layout=True)
            pfile = os.path.join(event_paths[0], 'current', 'products',
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile, tight_layout=True)
            plt.close()
Exemple #22
0
    def execute(self):
        """
        Write rasters.zip file containing ESRI Raster files of all the IMTs
        in shake_result.hdf.

        The intermediate .flt/.hdr files are removed after being packed
        into the zip archive.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not exist.
            NotImplementedError: When the container holds point data rather
                than gridded data.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.debug('Creating GIS grids...')
        layers = container.getIMTs()

        # Package up all of these files into one zip file; the context
        # manager guarantees the archive is finalized even if a grid
        # conversion below raises.
        zfilename = os.path.join(datadir, 'rasters.zip')
        files_written = []
        with zipfile.ZipFile(zfilename,
                             mode='w',
                             compression=zipfile.ZIP_DEFLATED) as zfile:
            for layer in layers:
                fileimt = oq_to_file(layer)
                # This is a bit hacky -- we only produce the raster for the
                # first IMC returned. It should work as long as we only have
                # one IMC produced per ShakeMap run.
                imclist = container.getComponents(layer)
                imtdict = container.getIMTGrids(layer, imclist[0])
                mean_grid = imtdict['mean']
                std_grid = imtdict['std']
                mean_gdal = GDALGrid.copyFromGrid(mean_grid)
                std_gdal = GDALGrid.copyFromGrid(std_grid)
                mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
                mean_hdr = os.path.join(datadir, '%s_mean.hdr' % fileimt)
                std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
                std_hdr = os.path.join(datadir, '%s_std.hdr' % fileimt)
                self.logger.debug('Saving %s...' % mean_fname)
                mean_gdal.save(mean_fname)
                # GDALGrid.save also emits the companion .hdr file, so both
                # are tracked for the zip and later cleanup.
                files_written.append(mean_fname)
                files_written.append(mean_hdr)
                self.logger.debug('Saving %s...' % std_fname)
                std_gdal.save(std_fname)
                files_written.append(std_fname)
                files_written.append(std_hdr)
                zfile.write(mean_fname, '%s_mean.flt' % fileimt)
                zfile.write(mean_hdr, '%s_mean.hdr' % fileimt)
                zfile.write(std_fname, '%s_std.flt' % fileimt)
                zfile.write(std_hdr, '%s_std.hdr' % fileimt)

        container.close()

        # nuke all of the copies of the files we just put in the zipfile
        for file_written in files_written:
            os.remove(file_written)
Exemple #23
0
    def execute(self):
        """
        Plot the regression (rock/soil mean +/- stddev vs. distance) for
        each IMT, overlay the station observations, and save one
        <imt>_regr.png per IMT in the event's products directory.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        ic = ShakeMapOutputContainer.load(datafile)
        if ic.getDataType() != 'grid':
            raise NotImplementedError('plotregr module can only operate on '
                                      'gridded data not sets of points')

        #
        # Cheating here a bit by assuming that the IMTs are the same
        # as the regression IMTs
        #
        # Regression curves stored in the container as flat arrays keyed
        # by 'regression_<IMT>_{rock,soil}_{mean,sd}'.
        rockgrid = {}
        soilgrid = {}
        rocksd = {}
        soilsd = {}
        imtlist = ic.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            rockgrid[myimt], _ = ic.getArray('regression_' + myimt +
                                             '_rock_mean')
            soilgrid[myimt], _ = ic.getArray('regression_' + myimt +
                                             '_soil_mean')
            rocksd[myimt], _ = ic.getArray('regression_' + myimt + '_rock_sd')
            soilsd[myimt], _ = ic.getArray('regression_' + myimt + '_soil_sd')
        distances, _ = ic.getArray('regression_distances')

        stations = ic.getStationDict()

        #
        # Make plots
        #
        for myimt in imtlist:
            plt.figure(figsize=(10, 10))

            # Regression curves: solid mean, dashed +/- one stddev.
            plt.semilogx(distances, rockgrid[myimt], 'r', label='rock')
            plt.semilogx(distances, soilgrid[myimt], 'g', label='soil')
            plt.semilogx(distances,
                         rockgrid[myimt] + rocksd[myimt],
                         'r--',
                         label='rock +/- stddev')
            plt.semilogx(distances, rockgrid[myimt] - rocksd[myimt], 'r--')
            plt.semilogx(distances,
                         soilgrid[myimt] + soilsd[myimt],
                         'g--',
                         label='soil +/- stddev')
            plt.semilogx(distances, soilgrid[myimt] - soilsd[myimt], 'g--')

            # Overlay observations: '^' for seismic stations, 'o' for
            # macroseismic; stations beyond the last regression distance
            # are skipped.
            for station in stations['features']:
                dist = station['properties']['distance']
                if dist > distances[-1]:
                    continue
                if station['properties']['station_type'] == 'seismic':
                    symbol = '^'
                    if myimt == 'MMI':
                        value = station['properties']['intensity']
                        if value != 'null':
                            plt.semilogx(dist, value, symbol + 'k', mfc='none')
                    else:
                        imtstr = myimt.lower()
                        # Take the largest valid horizontal-channel
                        # amplitude; vertical (Z/U) channels are skipped.
                        value = np.nan
                        for chan in station['properties']['channels']:
                            if chan['name'].endswith('Z') or \
                               chan['name'].endswith('U'):
                                continue
                            for amp in chan['amplitudes']:
                                if amp['name'] != imtstr:
                                    continue
                                # Flagged, null, or non-positive amplitudes
                                # disqualify this channel's reading.
                                if amp['flag'] != '' and amp['flag'] != '0':
                                    break
                                if amp['value'] == 'null':
                                    break
                                if isinstance(amp['value'], str):
                                    thisamp = float(amp['value'])
                                else:
                                    thisamp = amp['value']
                                if thisamp <= 0:
                                    break
                                # Convert to natural log; PGV/IA/PGD/IH are
                                # kept in their native units, the rest are
                                # presumably in %g and divided by 100 to g
                                # — TODO confirm against station schema.
                                if myimt == 'PGV' or myimt == 'IA' or myimt == 'PGD' or myimt == 'IH':
                                    tmpval = np.log(thisamp)
                                else:
                                    tmpval = np.log(thisamp / 100.)
                                if np.isnan(value) or tmpval > value:
                                    value = tmpval
                                break
                        if not np.isnan(value):
                            plt.semilogx(dist, value, symbol + 'k', mfc='none')
                else:
                    symbol = 'o'
                    if myimt == 'MMI':
                        # Macroseismic MMI comes straight from the first
                        # amplitude of the first channel.
                        amp = station['properties']['channels'][0][
                            'amplitudes'][0]
                        if amp['flag'] == '' or amp['flag'] == '0':
                            if amp['value'] != 'null':
                                if isinstance(amp['value'], str):
                                    value = float(amp['value'])
                                else:
                                    value = amp['value']
                                plt.semilogx(dist,
                                             value,
                                             symbol + 'k',
                                             mfc='none')
                    else:
                        # Non-MMI values for macroseismic stations are the
                        # ground motions derived from MMI, when present.
                        imtstr = myimt.lower()
                        if imtstr in station['properties']['pgm_from_mmi']:
                            amp = station['properties']['pgm_from_mmi'][
                                imtstr]['value']
                            if amp != 'null' and amp != 0:
                                if myimt == 'PGV' or myimt == 'IA' or myimt == 'PGD' or myimt == 'IH':
                                    amp = np.log(amp)
                                else:
                                    amp = np.log(amp / 100.)
                                plt.semilogx(dist,
                                             amp,
                                             symbol + 'k',
                                             mfc='none')

            plt.title(self._eventid + ': ' + myimt + ' mean')
            plt.xlabel('Rrup (km)')
            # Y-axis label depends on the IMT's native units.
            if myimt == 'MMI':
                plt.ylabel('MMI')
            elif myimt == 'PGV':
                plt.ylabel('PGV ln(cm/s)')
            elif myimt == 'IA':
                plt.ylabel('IA ln(cm/s)')
            elif myimt == 'PGD':
                plt.ylabel('PGD ln(cm)')
            elif myimt == 'IH':
                plt.ylabel('IH ln(cm)')
            else:
                plt.ylabel(myimt + ' ln(g)')
            plt.legend()

            fileimt = oq_to_file(myimt)
            pfile = os.path.join(datadir, fileimt + '_regr.png')
            plt.savefig(pfile)
            plt.close()
Exemple #24
0
    def execute(self):
        """Create grid.xml and uncertainty.xml files.

        Reads shake_result.hdf from the event's products directory and
        writes the two legacy ShakeMap XML grid files: grid.xml carries the
        mean IMT grids, uncertainty.xml their standard deviations.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        logger = logging.getLogger(__name__)
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('gridxml module can only function on '
                                      'gridded data, not sets of points')
        gridnames = container.getIMTs(COMPONENT)
        layers = {}
        field_keys = {}
        xml_types = ['grid', 'uncertainty']
        for xml_type in xml_types:
            for gridname in gridnames:
                imt_field = _oq_to_gridxml(gridname)
                imtdict = container.getIMTGrids(gridname, COMPONENT)
                if xml_type == 'grid':
                    grid = imtdict['mean']
                    metadata = imtdict['mean_metadata']
                elif xml_type == 'uncertainty':
                    # BUG FIX: this branch previously reused the 'mean'
                    # grid/metadata, so uncertainty.xml duplicated grid.xml.
                    # The uncertainty file must carry the std grids.
                    grid = imtdict['std']
                    metadata = imtdict['std_metadata']

                units = metadata['units']
                digits = metadata['digits']
                grid_data = grid.getData()
                # convert from HDF units to legacy grid.xml units
                if units == 'ln(cm/s)':
                    grid_data = np.exp(grid_data)
                    units = 'cm/s'
                elif units == 'ln(g)':
                    grid_data = np.exp(grid_data) * 100
                    units = '%g'
                else:
                    pass
                layers[imt_field] = grid_data

                field_keys[imt_field] = (units, digits)
            geodict = grid.getGeoDict()

            config = container.getConfig()

            # event dictionary
            info_data = container.getString('info.json')
            info = json.loads(info_data)
            event_info = info['input']['event_information']
            event_dict = {}
            event_dict['event_id'] = event_info['event_id']
            event_dict['magnitude'] = float(event_info['magnitude'])
            event_dict['depth'] = float(event_info['depth'])
            event_dict['lat'] = float(event_info['latitude'])
            event_dict['lon'] = float(event_info['longitude'])
            event_dict['event_timestamp'] = datetime.strptime(
                event_info['origin_time'], TIMEFMT)
            event_dict['event_description'] = event_info['location']
            # TODO the following is SUPER-SKETCHY - we need to save the event
            # network info!!!
            event_dict['event_network'] = event_dict['event_id'][0:2]

            # shake dictionary
            shake_dict = {}
            shake_dict['event_id'] = event_dict['event_id']
            shake_dict['shakemap_id'] = event_dict['event_id']
            # TODO - where are we supposed to get shakemap version
            shake_dict['shakemap_version'] = 1
            shake_dict['code_version'] = shakemap.__version__
            shake_dict['process_timestamp'] = datetime.utcnow()
            shake_dict['shakemap_originator'] = config['system']['source_network']
            shake_dict['map_status'] = config['system']['map_status']
            # TODO - we need a source for this!!!
            shake_dict['shakemap_event_type'] = 'ACTUAL'

            shake_grid = ShakeGrid(
                layers, geodict, event_dict,
                shake_dict, {}, field_keys=field_keys)
            fname = os.path.join(datadir, '%s.xml' % xml_type)
            logger.info('Saving IMT grids to %s' % fname)
            shake_grid.save(fname)  # TODO - set grid version number
        # release the HDF handle (consistent with the other product modules)
        container.close()
Exemple #25
0
def test_output_container():
    """Exercise ShakeMapOutputContainer grid storage and retrieval.

    Stores MMI and PGA grids for multiple components, reads them back,
    checks the repr, the IMT/component listings, and dropIMT.
    """
    geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7, 0.01, 0.02)
    nrows, ncols = geodict.ny, geodict.nx

    # create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows, ncols)
    mean_mmi_maximum_metadata = {
        'name': 'Gandalf',
        'color': 'white',
        'powers': 'magic'
    }
    mean_mmi_maximum_grid = Grid2D(mean_mmi_maximum_data, geodict)

    # create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data / 10
    std_mmi_maximum_metadata = {
        'name': 'Legolas',
        'color': 'green',
        'powers': 'good hair'
    }
    std_mmi_maximum_grid = Grid2D(std_mmi_maximum_data, geodict)

    # create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows, ncols)
    mean_mmi_rotd50_metadata = {
        'name': 'Gimli',
        'color': 'brown',
        'powers': 'axing'
    }
    mean_mmi_rotd50_grid = Grid2D(mean_mmi_rotd50_data, geodict)

    # create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data / 10
    std_mmi_rotd50_metadata = {
        'name': 'Aragorn',
        'color': 'white',
        'powers': 'scruffiness'
    }
    std_mmi_rotd50_grid = Grid2D(std_mmi_rotd50_data, geodict)

    # create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows, ncols)
    mean_pga_maximum_metadata = {
        'name': 'Pippin',
        'color': 'purple',
        'powers': 'rashness'
    }
    mean_pga_maximum_grid = Grid2D(mean_pga_maximum_data, geodict)

    # create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data / 10
    std_pga_maximum_metadata = {
        'name': 'Merry',
        'color': 'grey',
        'powers': 'hunger'
    }
    std_pga_maximum_grid = Grid2D(std_pga_maximum_data, geodict)

    f, datafile = tempfile.mkstemp()
    os.close(f)
    # try/finally (rather than the previous except/raise anti-pattern,
    # which re-raised and obscured the traceback) ensures the temp file
    # is removed on any outcome.
    try:
        container = ShakeMapOutputContainer.create(datafile)
        container.setIMTGrids('mmi',
                              mean_mmi_maximum_grid,
                              mean_mmi_maximum_metadata,
                              std_mmi_maximum_grid,
                              std_mmi_maximum_metadata,
                              component='maximum')
        container.setIMTGrids('mmi',
                              mean_mmi_rotd50_grid,
                              mean_mmi_rotd50_metadata,
                              std_mmi_rotd50_grid,
                              std_mmi_rotd50_metadata,
                              component='rotd50')
        container.setIMTGrids('pga',
                              mean_pga_maximum_grid,
                              mean_pga_maximum_metadata,
                              std_pga_maximum_grid,
                              std_pga_maximum_metadata,
                              component='maximum')

        # get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi', component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'].getData(),
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'].getData(),
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        # get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi', component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'].getData(),
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'].getData(),
                                      std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        # Check repr method
        assert repr(container) == '''Data type: grid
    use "getIMTGrids" method to access interpolated IMTs
Rupture: None
Config: None
Stations: None
Metadata: None
Available IMTs (components):
    mmi (maximum, rotd50)
    pga (maximum)
'''

        # get list of all imts
        imts = container.getIMTs()

        # get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi', 'pga']

        # get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum', 'rotd50']

        # Test dropIMT
        imts = container.getIMTs('maximum')
        assert imts == ['mmi', 'pga']
        container.dropIMT('mmi')
        imts = container.getIMTs('maximum')
        assert imts == ['pga']
        container.close()

    finally:
        os.remove(datafile)
Exemple #26
0
def test_output_container():
    """Exercise ShakeMapOutputContainer grid storage and retrieval.

    Stores MMI and PGA grids for multiple components, reads them back,
    and checks the IMT/component listings.
    """
    geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7, 0.01, 0.02)
    nrows, ncols = geodict.ny, geodict.nx

    # create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows, ncols)
    mean_mmi_maximum_metadata = {'name': 'Gandalf',
                                 'color': 'white',
                                 'powers': 'magic'}
    mean_mmi_maximum_grid = Grid2D(mean_mmi_maximum_data, geodict)

    # create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data / 10
    std_mmi_maximum_metadata = {'name': 'Legolas',
                                'color': 'green',
                                'powers': 'good hair'}
    std_mmi_maximum_grid = Grid2D(std_mmi_maximum_data, geodict)

    # create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows, ncols)
    mean_mmi_rotd50_metadata = {'name': 'Gimli',
                                'color': 'brown',
                                'powers': 'axing'}
    mean_mmi_rotd50_grid = Grid2D(mean_mmi_rotd50_data, geodict)

    # create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data / 10
    std_mmi_rotd50_metadata = {'name': 'Aragorn',
                               'color': 'white',
                               'powers': 'scruffiness'}
    std_mmi_rotd50_grid = Grid2D(std_mmi_rotd50_data, geodict)

    # create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows, ncols)
    mean_pga_maximum_metadata = {'name': 'Pippin',
                                 'color': 'purple',
                                 'powers': 'rashness'}
    mean_pga_maximum_grid = Grid2D(mean_pga_maximum_data, geodict)

    # create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data / 10
    std_pga_maximum_metadata = {'name': 'Merry',
                                'color': 'grey',
                                'powers': 'hunger'}
    std_pga_maximum_grid = Grid2D(std_pga_maximum_data, geodict)

    f, datafile = tempfile.mkstemp()
    os.close(f)
    # try/finally (rather than the previous except/raise anti-pattern,
    # which re-raised and obscured the traceback) ensures the temp file
    # is removed on any outcome.
    try:
        container = ShakeMapOutputContainer.create(datafile)
        container.setIMTGrids('mmi',
                              mean_mmi_maximum_grid, mean_mmi_maximum_metadata,
                              std_mmi_maximum_grid, std_mmi_maximum_metadata,
                              component='maximum')
        container.setIMTGrids('mmi',
                              mean_mmi_rotd50_grid, mean_mmi_rotd50_metadata,
                              std_mmi_rotd50_grid, std_mmi_rotd50_metadata,
                              component='rotd50')
        container.setIMTGrids('pga',
                              mean_pga_maximum_grid, mean_pga_maximum_metadata,
                              std_pga_maximum_grid, std_pga_maximum_metadata,
                              component='maximum')

        # get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi', component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'].getData(),
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'].getData(),
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        # get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi', component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'].getData(),
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'].getData(),
                                      std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        # get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi', 'pga']

        # get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum', 'rotd50']
    finally:
        os.remove(datafile)
def test_products():
    """Run the product-generation modules against a real integration event.

    Assembles and models integration_test_0001, then exercises each
    product module and checks results against the output container.
    """

    installpath, datapath = get_config_paths()

    #
    # Use a real event for checks of products against the contents of
    # the output container
    #
    evid = 'integration_test_0001'
    try:
        #
        # Make sure an output file exists
        #
        assemble = AssembleModule(evid, comment='Test comment.')
        assemble.execute()
        del assemble
        model = ModelModule(evid)
        model.execute()
        del model

        res_file = os.path.join(datapath, evid, 'current', 'products',
                                'shake_result.hdf')
        oc = ShakeMapOutputContainer.load(res_file)

        #
        # Test the creation of products -- currently not checking results
        # for validity or consistency, but probably should
        #

        #
        # TODO: The stationlist.json should be validated, but we need a
        # function that will read it and convert it to something
        # we can test against.
        #
        check_failures(evid, datapath, StationModule)
        mod = StationModule(evid)
        mod.execute()
        mod.writeContents()

        check_failures(evid, datapath, MappingModule)
        mod = MappingModule(evid)
        mod.execute()
        mod.writeContents()

        check_failures(evid, datapath, PlotRegr)
        #
        # PlotRegr gets tested in the model tests for event 72282711
        #
#        mod = PlotRegr(evid)
#        mod.execute()
#        mod.writeContents()

        check_failures(evid, datapath, KMLModule)
        mod = KMLModule(evid)
        mod.execute()
        mod.writeContents()

        del mod

        #
        # These check that the results are consistent with the output
        # container
        #
        do_rupture(evid, datapath, oc)

        do_info(evid, datapath, oc)

        do_raster(evid, datapath, oc)

        do_gridxml(evid, datapath, oc)

        oc.close()
        #
        # Checks contours against saved versions; if something
        # changes, will need to update the files in
        # data/integration_test_0001
        #
        do_contour(evid, datapath)
#        do_contour_command_line(evid, datapath)

        check_failures(evid, datapath, TransferModule)
        mod = TransferModule(evid)
        mod.execute()
        bufiles = glob.glob(os.path.join(datapath, evid, 'backup*'))
        for bufile in bufiles:
            shutil.rmtree(bufile)

    finally:
        # Always remove the assembled input file so reruns start clean.
        # (A stray dead `pass` statement was removed here.)
        data_file = os.path.join(datapath, evid, 'current', 'shake_data.hdf')
        if os.path.isfile(data_file):
            os.remove(data_file)
Exemple #28
0
    def execute(self):
        """
        Plot the mean SA spectrum (and its stddev) at a single test point
        versus period, saving PDF and PNG copies to the products directory.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds gridded data
                rather than points.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        # Collect mean/stddev at the first point for every SA(T) IMT,
        # parsing the period T out of the IMT name.
        datalist = []
        stddevlist = []
        periodlist = []
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            if not myimt.startswith('SA('):
                continue
            ddict = container.getIMTArrays(myimt, 'GREATER_OF_TWO_HORIZONTAL')
            datalist.append(ddict['mean'][0])
            stddevlist.append(ddict['std'][0])
            periodlist.append(float(myimt.replace('SA(', '').replace(')', '')))
            # BUG FIX: was `self.logger.debug(myimt, datalist[-1])`, which
            # treats myimt as a %-format string with no placeholders and
            # fails when the record is emitted; use lazy %-args instead.
            self.logger.debug('%s: %f', myimt, datalist[-1])
        datalist = np.array(datalist)
        stddevlist = np.array(stddevlist)
        periodlist = np.array(periodlist)
        # Sort everything by period for a monotone x axis.
        indxx = np.argsort(periodlist)

        container.close()

        #
        # Make plots
        #
        fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
        plt.subplots_adjust(hspace=0.1)
        axa[0].semilogx(periodlist[indxx],
                        datalist[indxx],
                        color='k',
                        label='mean')
        axa[0].semilogx(periodlist[indxx],
                        datalist[indxx] + stddevlist[indxx],
                        '--b',
                        label='mean +/- stddev')
        axa[0].semilogx(periodlist[indxx], datalist[indxx] - stddevlist[indxx],
                        '--b')
        axa[1].semilogx(periodlist[indxx],
                        stddevlist[indxx],
                        '-.r',
                        label='stddev')
        axa[1].set_xlabel('Period (s)')
        axa[0].set_ylabel('Mean ln(SA) (g)')
        axa[1].set_ylabel('Stddev ln(SA) (g)')
        axa[0].legend(loc='best')
        axa[1].legend(loc='best')
        axa[0].set_title(self._eventid)
        axa[0].grid()
        axa[1].grid()
        # NOTE(review): `ymin=` was removed from set_ylim in newer
        # matplotlib (use `bottom=`) — confirm the pinned version.
        axa[1].set_ylim(ymin=0)
        pfile = os.path.join(datadir, self._eventid + '_spectra_plot.pdf')
        plt.savefig(pfile)
        pfile = os.path.join(datadir, self._eventid + '_spectra_plot.png')
        plt.savefig(pfile)
        plt.close()
Exemple #29
0
def test_output_arrays():
    """Exercise ShakeMapOutputContainer point-array storage.

    Creates a temporary HDF container, stores random point arrays for
    PGA, reads them back, and verifies both the round trip and the
    error conditions (unknown IMT, mismatched shapes, duplicate IMT,
    grid/point data-type mismatch).
    """
    f, datafile = tempfile.mkstemp()
    # mkstemp returns an open OS-level descriptor; close it so the
    # container can reopen the path itself
    os.close(f)

    try:
        container = ShakeMapOutputContainer.create(datafile)
        #
        # A fresh container has no data type until data are added
        #
        assert container.getDataType() is None

        #
        # Make some array data and metadata
        #
        mean = np.random.rand(100)
        std = np.random.rand(100)
        lats = np.random.rand(100)
        lons = np.random.rand(100)
        ids = np.array([randomword(4).encode('ascii') for x in range(100)])
        metadata = {'units': '%g',
                    'digits': 4}
        #
        # Put the data in the container
        #
        container.setIMTArrays('PGA', lons, lats, ids,
                               mean, metadata,
                               std, metadata, 'Larger')
        #
        # Now extract it and compare it to what we put in there
        #
        dout = container.getIMTArrays('PGA', 'Larger')
        assert all(dout['lons'] == lons)
        assert all(dout['lats'] == lats)
        assert all(dout['ids'] == ids)
        assert all(dout['mean'] == mean)
        assert all(dout['std'] == std)
        #
        # Storing arrays should have set the data type to 'points'
        #
        assert container.getDataType() == 'points'
        #
        # Try raising some exceptions
        #
        # Shouldn't be able to find this IMT
        with pytest.raises(LookupError):
            container.getIMTArrays('JUNK', 'Larger')
        # Shapes of inputs not the same
        with pytest.raises(ValueError):
            empty = np.array([])
            container.setIMTArrays('PGV', empty, lats, ids,
                                   mean, metadata,
                                   std, metadata, 'Larger')
        # IMT already exists
        with pytest.raises(ValueError):
            container.setIMTArrays('PGA', lons, lats, ids,
                                   mean, metadata,
                                   std, metadata, 'Larger')
        # Trying to set a grid in a file with points
        with pytest.raises(TypeError):
            container.setIMTGrids('PGV', mean, metadata,
                                  std, metadata, 'Larger')
        # Trying to get a grid in a file with points
        with pytest.raises(TypeError):
            container.getIMTGrids('PGA', 'Larger')

        # Close the HDF handle before removing the backing file
        container.close()
    finally:
        # Clean up the temp file whether or not the test succeeded
        os.remove(datafile)
Exemple #30
0
def test_output_arrays():
    """Round-trip point-array storage through a ShakeMapOutputContainer.

    Stores random point arrays for PGA in a temporary HDF container,
    reads them back, and checks the error paths (unknown IMT, mismatched
    shapes, duplicate IMT, grid/point data-type mismatch).
    """
    f, datafile = tempfile.mkstemp()
    # mkstemp returns an open OS-level descriptor; close it so the
    # container can reopen the path itself
    os.close(f)

    try:
        container = ShakeMapOutputContainer.create(datafile)
        #
        # A fresh container has no data type until data are added
        #
        assert container.getDataType() is None

        #
        # Make some array data and metadata
        #
        mean = np.random.rand(100)
        std = np.random.rand(100)
        lats = np.random.rand(100)
        lons = np.random.rand(100)
        ids = np.array([randomword(4).encode('ascii') for x in range(100)])
        metadata = {'units': '%g', 'digits': 4}
        #
        # Put the data in the container
        #
        container.setIMTArrays('PGA', lons, lats, ids, mean, metadata, std,
                               metadata, 'Larger')
        #
        # Now extract it and compare it to what we put in there
        #
        dout = container.getIMTArrays('PGA', 'Larger')
        assert all(dout['lons'] == lons)
        assert all(dout['lats'] == lats)
        assert all(dout['ids'] == ids)
        assert all(dout['mean'] == mean)
        assert all(dout['std'] == std)
        #
        # Storing arrays should have set the data type to 'points'
        #
        assert container.getDataType() == 'points'
        #
        # Try raising some exceptions
        #
        # Shouldn't be able to find this IMT
        with pytest.raises(LookupError):
            container.getIMTArrays('JUNK', 'Larger')
        # Shapes of inputs not the same
        with pytest.raises(ValueError):
            empty = np.array([])
            container.setIMTArrays('PGV', empty, lats, ids, mean, metadata,
                                   std, metadata, 'Larger')
        # IMT already exists
        with pytest.raises(ValueError):
            container.setIMTArrays('PGA', lons, lats, ids, mean, metadata, std,
                                   metadata, 'Larger')
        # Trying to set a grid in a file with points
        with pytest.raises(TypeError):
            container.setIMTGrids('PGV', mean, metadata, std, metadata,
                                  'Larger')
        # Trying to get a grid in a file with points
        with pytest.raises(TypeError):
            container.getIMTGrids('PGA', 'Larger')

        # Close the HDF handle before removing the backing file
        container.close()
    finally:
        # Clean up the temp file whether or not the test succeeded
        os.remove(datafile)
    def execute(self):
        """
        Create diagnostic plots of the mean and stddev grids for each IMT.

        For every IMT stored in shake_result.hdf, writes two PDF files to
        the event's products directory: <eventid>_<imt>.pdf (the mean
        field rendered as an image with mid-row and mid-column cross
        sections) and <eventid>_<imt>_sd.pdf (the same layout for the
        standard-deviation field).

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than gridded data.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('xtestimage module can only operate on '
                                      'gridded data not sets of points')

        # Pull every IMT grid into memory up front so the HDF file handle
        # can be released before the (slow) plotting work begins.
        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTGrids(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')
        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            # Axis labels: MMI is in intensity units, PGV in ln(cm/s),
            # everything else (PGA, SA) in ln(g).
            if myimt == 'MMI':
                yunits = '(MMI)'
            elif myimt == 'PGV':
                yunits = '[ln(cm/s)]'
            else:
                yunits = '[ln(g)]'

            fileimt = oq_to_file(myimt)

            #
            # Do the ground motion plots
            #
            data = datadict[myimt]
            gridobj = data['mean']
            grddata = gridobj.getData()
            metadata = gridobj.getGeoDict().asDict()

            fig = plt.figure(figsize=(10, 10))
            # 4x4 grid: main image in the upper-right 3x3 cells, latitude
            # cut down the left column, longitude cut along the bottom row
            gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
            ax0 = fig.add_subplot(gs[:-1, 1:])
            plt.title(self._eventid + ': ' + myimt + ' mean')
            im1 = ax0.imshow(grddata,
                             extent=(metadata['xmin'], metadata['xmax'],
                                     metadata['ymin'], metadata['ymax']))
            cbax = fig.add_axes([0.915, .34, .02, .5])
            plt.colorbar(im1, ax=ax0, cax=cbax)
            ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)
            xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)
            rows, cols = grddata.shape
            # Cross sections pass through the center of the grid
            midrow = int(rows / 2)
            midcol = int(cols / 2)
            xvals = np.linspace(metadata['xmin'], metadata['xmax'], cols)
            yvals = np.linspace(metadata['ymin'], metadata['ymax'], rows)
            ycut.plot(grddata[:, midcol], yvals)
            xcut.plot(xvals, grddata[midrow, :])
            ycut.set(xlabel=myimt + ' ' + yunits, ylabel='Latitude')
            xcut.set(xlabel='Longitude', ylabel=myimt + ' ' + yunits)
            ycut.set_ylim((metadata['ymin'], metadata['ymax']))
            xcut.set_xlim((metadata['xmin'], metadata['xmax']))
            ax0.label_outer()

            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()

            #
            # Do the stddev plots (same layout; the geodict metadata is
            # shared with the mean grid, so it is reused here)
            #
            gridobj = data['std']
            grddata = gridobj.getData()

            fig = plt.figure(figsize=(10, 10))
            gs = plt.GridSpec(4, 4, hspace=0.2, wspace=0.1)
            ax0 = fig.add_subplot(gs[:-1, 1:])
            plt.title(self._eventid + ': ' + myimt + ' stddev')
            im1 = ax0.imshow(grddata,
                             extent=(metadata['xmin'], metadata['xmax'],
                                     metadata['ymin'], metadata['ymax']))
            cbax = fig.add_axes([0.915, .34, .02, .5])
            plt.colorbar(im1, ax=ax0, cax=cbax)
            ycut = fig.add_subplot(gs[:-1, 0], sharey=ax0)
            xcut = fig.add_subplot(gs[-1, 1:], sharex=ax0)
            rows, cols = grddata.shape
            midrow = int(rows / 2)
            midcol = int(cols / 2)
            xvals = np.linspace(metadata['xmin'], metadata['xmax'], cols)
            yvals = np.linspace(metadata['ymin'], metadata['ymax'], rows)
            ycut.plot(grddata[:, midcol], yvals)
            xcut.plot(xvals, grddata[midrow, :])
            ycut.set(xlabel='stddev ' + yunits, ylabel='Latitude')
            xcut.set(xlabel='Longitude', ylabel='stddev ' + yunits)
            xcut.set_xlim((metadata['xmin'], metadata['xmax']))
            # Stddev is non-negative; anchor the cut plots at zero with a
            # 10% headroom above the cross-section maximum
            xcut.set_ylim(bottom=0, top=np.max(grddata[midrow, :]) * 1.1)
            ycut.set_xlim(left=0, right=np.max(grddata[:, midcol] * 1.1))
            ycut.set_ylim((metadata['ymin'], metadata['ymax']))
            ax0.label_outer()

            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '_sd.pdf')
            plt.savefig(pfile, bbox_inches='tight')
            plt.close()
Exemple #32
0
    def execute(self):
        """Create grid.xml and uncertainty.xml files.

        Reads the interpolated grids out of shake_result.hdf, converts
        mean values from the HDF natural-log units back to the legacy
        grid.xml units (cm/s and %g), and writes one ShakeGrid XML file
        per type: 'grid' for the means, 'uncertainty' for the stddevs.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than gridded data.
        """
        logger = logging.getLogger(__name__)
        # Only the data path is needed here; the install path is unused
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('gridxml module can only function on '
                                      'gridded data, not sets of points')
        gridnames = container.getIMTs(COMPONENT)
        xml_types = ['grid', 'uncertainty']
        for xml_type in xml_types:
            layers = OrderedDict()
            field_keys = OrderedDict()
            for gridname in gridnames:
                imt_field = _oq_to_gridxml(gridname)
                imtdict = container.getIMTGrids(gridname, COMPONENT)
                # 'grid' files carry the means, 'uncertainty' the stddevs
                if xml_type == 'grid':
                    grid = imtdict['mean']
                    metadata = imtdict['mean_metadata']
                elif xml_type == 'uncertainty':
                    grid = imtdict['std']
                    metadata = imtdict['std_metadata']

                units = metadata['units']
                digits = metadata['digits']
                grid_data = grid.getData()
                # convert from HDF units to legacy grid.xml units:
                # ln(cm/s) -> cm/s, ln(g) -> %g; stddevs stay in log space
                if xml_type == 'grid':
                    if units == 'ln(cm/s)':
                        grid_data = np.exp(grid_data)
                        units = 'cm/s'
                    elif units == 'ln(g)':
                        grid_data = np.exp(grid_data) * 100
                        units = '%g'
                    else:
                        pass

                if xml_type == 'grid':
                    layers[imt_field] = grid_data
                    field_keys[imt_field] = (units, digits)
                else:
                    # uncertainty layers carry the legacy 'STD' prefix
                    layers['STD' + imt_field] = grid_data
                    field_keys['STD' + imt_field] = (units, digits)

            # All grids share one geodict; the last grid's will do
            geodict = grid.getGeoDict()

            config = container.getConfig()

            # event dictionary
            info = container.getMetadata()
            event_info = info['input']['event_information']
            event_dict = {}
            event_dict['event_id'] = event_info['event_id']
            event_dict['magnitude'] = float(event_info['magnitude'])
            event_dict['depth'] = float(event_info['depth'])
            event_dict['lat'] = float(event_info['latitude'])
            event_dict['lon'] = float(event_info['longitude'])
            event_dict['event_timestamp'] = datetime.strptime(
                event_info['origin_time'], TIMEFMT)
            event_dict['event_description'] = event_info['location']
            event_dict['event_network'] = event_info['eventsource']

            # shake dictionary
            shake_dict = {}
            shake_dict['event_id'] = event_dict['event_id']
            shake_dict['shakemap_id'] = event_dict['event_id']
            shake_dict['shakemap_version'] = \
                info['processing']['shakemap_versions']['map_version']
            shake_dict['code_version'] = shakemap.__version__
            ptime = info['processing']['shakemap_versions']['process_time']
            shake_dict['process_timestamp'] = datetime.strptime(ptime, TIMEFMT)
            shake_dict['shakemap_originator'] = \
                config['system']['source_network']
            shake_dict['map_status'] = config['system']['map_status']
            # scenario events are flagged by the '_se' event-id suffix
            shake_dict['shakemap_event_type'] = 'ACTUAL'
            if event_dict['event_id'].endswith('_se'):
                shake_dict['shakemap_event_type'] = 'SCENARIO'

            shake_grid = ShakeGrid(
                layers, geodict, event_dict,
                shake_dict, {}, field_keys=field_keys)
            fname = os.path.join(datadir, '%s.xml' % xml_type)
            logger.debug('Saving IMT grids to %s' % fname)
            shake_grid.save(fname)  # TODO - set grid version number

        container.close()
Exemple #33
0
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        datalist = []
        stddevlist = []
        periodlist = []
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            if not myimt.startswith('SA('):
                continue
            ddict = container.getIMTArrays(myimt, 'GREATER_OF_TWO_HORIZONTAL')
            datalist.append(ddict['mean'][0])
            stddevlist.append(ddict['std'][0])
            periodlist.append(float(myimt.replace('SA(', '').replace(')', '')))
            self.logger.debug(myimt, datalist[-1])
        datalist = np.array(datalist)
        stddevlist = np.array(stddevlist)
        periodlist = np.array(periodlist)
        indxx = np.argsort(periodlist)

        #
        # Make plots
        #
        fig = plt.figure(figsize=(10, 8))
        plt.semilogx(periodlist[indxx],
                 datalist[indxx], 
                 color='k', label='mean')
        plt.semilogx(periodlist[indxx],
                 datalist[indxx] + stddevlist[indxx], 
                 '--b', label='mean +/- stddev')
        plt.semilogx(periodlist[indxx],
                 datalist[indxx] - stddevlist[indxx], 
                 '--b')
        plt.semilogx(periodlist[indxx],
                 stddevlist[indxx], 
                 '-.r', label='stddev')
        plt.xlabel('Period (s)')
        plt.ylabel('ln(SA) (g)')
        plt.legend(loc='best')
        plt.title(self._eventid)
        plt.grid()
        pfile = os.path.join(datadir, self._eventid + '_spectra_plot.pdf')
        plt.savefig(pfile)