Example #1
    def execute(self):
        """Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create contour files
        self.logger.info('Contouring to files...')
        contour_to_files(container, config, datadir, self.logger)
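The directory-and-file lookup at the top of this execute() recurs in most of the examples below. As a minimal sketch of the pattern factored into a helper (the helper name is illustrative, not ShakeMap API):

import os

def resolve_shake_result(data_path, eventid):
    """Illustrative helper: locate an event's products directory and its
    shake_result.hdf file, raising the same errors as the modules above."""
    datadir = os.path.join(data_path, eventid, 'current', 'products')
    if not os.path.isdir(datadir):
        raise NotADirectoryError('%s is not a valid directory.' % datadir)
    datafile = os.path.join(datadir, 'shake_result.hdf')
    if not os.path.isfile(datafile):
        raise FileNotFoundError('%s does not exist.' % datafile)
    return datadir, datafile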
Example #2
    def execute(self):
        """
        Create KML files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('kml module can only contour '
                                      'gridded data, not sets of points')

        # call create_kmz function
        create_kmz(container, datadir, self.logger, self.contents)

        container.close()
Example #3
    def execute(self):
        """Write stationlist.json file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # create ShakeMap station data file
        for fformat in ALLOWED_FORMATS:
            if fformat == 'json':
                self.logger.debug('Writing stationlist.json file...')
                station_dict = container.getStationDict()
                station_file = os.path.join(datadir, 'stationlist.json')
                with open(station_file, 'w') as f:
                    json.dump(station_dict, f)

        container.close()
Example #4
    def execute(self):
        """
        Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('contour module can only contour '
                                      'gridded data, not sets of points')

        # create contour files
        self.logger.debug('Contouring to files...')
        contour_to_files(container, datadir, self.logger, self.filter_size)
Example #5
def test_masking():
    install_path, data_path = get_config_paths()
    event_path = os.path.join(data_path, 'masking_test', 'current')
    set_files(
        event_path, {
            'event.xml': 'event.xml',
            'model.conf': 'model.conf',
            'au_continental_shelf.geojson': 'au_continental_shelf.geojson',
        })
    assemble = AssembleModule('masking_test', comment='Test comment.')
    assemble.execute()
    model = ModelModule('masking_test')
    model.execute()
    clear_files(event_path)
    hdf_file = os.path.join(event_path, 'products', 'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(hdf_file)
    sa3 = oc.getIMTGrids('SA(3.0)', 'GREATER_OF_TWO_HORIZONTAL')['mean']
    removed = np.isnan(sa3).astype(int)
    assert (removed[240, 240] == 1)
    assert (removed[260, 240] == 0)
    np.testing.assert_equal(
        removed[::100, ::100],
        [[1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 0, 1],
         [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]])
    oc.close()
Example #6
    def execute(self):
        """
        Write info.json metadata file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            os.makedirs(datadir)

        # try to find the event by our event id
        try:
            detail = get_event_by_id(self._eventid)
            dataframe, msg = _get_dyfi_dataframe(detail)
        except Exception as e:
            fmt = 'Could not retrieve DYFI data for %s - error "%s"'
            self.logger.warning(fmt % (self._eventid, str(e)))
            return

        if dataframe is None:
            self.logger.info(msg)
            return

        reference = 'USGS Did You Feel It? System'
        xmlfile = os.path.join(datadir, 'dyfi_dat.xml')
        dataframe_to_xml(dataframe, xmlfile, reference)
        self.logger.info('Wrote %i DYFI records to %s' %
                         (len(dataframe), xmlfile))
Example #7
def test_directivity():

    #
    # Turned on directivity in model.conf
    #
    install_path, data_path = get_config_paths()
    event_path = os.path.join(data_path, 'directivity_test', 'current')
    set_files(
        event_path, {
            'event.xml': 'event.xml',
            'model.conf': 'model.conf',
            'dir_fault.txt': 'dir_fault.txt'
        })
    assemble = AssembleModule('directivity_test', comment='Test comment.')
    assemble.execute()
    model = ModelModule('directivity_test')
    model.execute()
    clear_files(event_path)
    hdf_file = os.path.join(event_path, 'products', 'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(hdf_file)
    sa3 = np.exp(
        oc.getIMTGrids('SA(3.0)', 'GREATER_OF_TWO_HORIZONTAL')['mean'])
    # np.testing.assert_allclose(np.max(sa3), 1.15864273)
    np.testing.assert_allclose(np.max(sa3), 1.1567265149442174)
    # np.testing.assert_allclose(np.min(sa3), 0.9278920)
    np.testing.assert_allclose(np.min(sa3), 0.88508818541678)
    oc.close()
Example #8
def migrate_gmpe(old_gmpe, config=None):
    """Return the GMPE that should be used to replace SM3.5 GMPE

    By default, this uses the migrate.conf file found in the ShakeMap
    repository. Users can optionally pass in their own config.

    Args:
        old_gmpe (str):
            ShakeMap 3.5 GMPE string
        config (dict):
            Input configobj dict or None.

    Returns:
        tuple: New GMPE string, and GMPE reference string.
    """
    if config is None:
        install_path, _ = get_config_paths()
        if not os.path.isdir(install_path):
            raise OSError('%s is not a valid directory.' % install_path)
        config_file = os.path.join(install_path, 'config', 'migrate.conf')
        if os.path.isfile(config_file):
            config = ConfigObj(config_file)
        else:
            raise OSError('%s not found.' % config_file)
    if old_gmpe not in config['modules']:
        raise KeyError('ShakeMap 3.5 GMPE %s not found in migrate.conf.' %
                       old_gmpe)
    new_gmpe = config['modules'][old_gmpe]['openquake']
    reference = config['modules'][old_gmpe]['reference']
    return (new_gmpe, reference)
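A minimal usage sketch: passing a config dict bypasses the migrate.conf lookup entirely. The GMPE mapping below is invented for illustration and is not the shipped migrate.conf content.

config = {'modules': {'CB14': {'openquake': 'CampbellBozorgnia2014',
                               'reference': 'Campbell and Bozorgnia (2014)'}}}
new_gmpe, reference = migrate_gmpe('CB14', config=config)
# new_gmpe -> 'CampbellBozorgnia2014'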
Example #9
    def execute(self):
        """
        Write ugroundmotion_dat.json metadata file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            os.makedirs(datadir)

        # try to find the event by our event id
        try:
            detail = get_event_by_id(self._eventid, host=HOST)
            if not detail.hasProduct('ground-motion'):
                return
            groundmotion = detail.getProducts('ground-motion')[0]
            fname = 'groundmotions_dat.json'
            gbytes, gurl = groundmotion.getContentBytes(fname)
            outname = os.path.join(datadir, 'ugroundmotions_dat.json')
            with open(outname, 'wt') as f:
                f.write(gbytes.decode('utf-8'))
            self.logger.info('Created ground motions data file %s' % outname)
        except Exception as e:
            fmt = 'Could not retrieve ground motion data for %s - error "%s"'
            self.logger.warning(fmt % (self._eventid, str(e)))
            return
Example #10
    def execute(self):
        """
        Create KML files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('kml module can only contour '
                                      'gridded data, not sets of points')

        # find the low res ocean vector dataset
        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        pconfig = configobj.ConfigObj(product_config_file)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']
        oceanfile = path_macro_sub(oceanfile, install_path, data_path)

        # call create_kmz function
        create_kmz(container, datadir, oceanfile, self.logger)
Example #11
    def execute(self):
        """
        Write rupture.json file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # create ShakeMap rupture file
        for fformat in ALLOWED_FORMATS:
            if fformat == 'json':
                self.logger.info('Writing rupture.json file...')
                rupture_dict = container.getRuptureDict()
                rupture_file = os.path.join(datadir, 'rupture.json')
                with open(rupture_file, 'w') as f:
                    json.dump(rupture_dict, f)

        container.close()

        self.contents.addFile('ruptureJSON', 'Fault Rupture',
                              'JSON Representation of Fault Rupture.',
                              'rupture.json', 'application/json')
Example #12
def test_model_2():

    #
    # This is a small grid with station data and dyfi data (should succeed)
    #
    install_path, data_path = get_config_paths()
    event_path = os.path.join(data_path, 'nc72282711', 'current')
    set_files(
        event_path, {
            'event.xml': 'event.xml',
            'stationlist.xml.small': 'stationlist.xml',
            'dyfi_dat.xml.small': 'dyfi_dat.xml',
            'model.conf': 'model.conf',
            'boat_fault.txt': 'boat_fault.txt'
        })
    assemble = AssembleModule('nc72282711', comment='Test comment.')
    assemble.execute()
    model = ModelModule('nc72282711')
    model.execute()
    #
    # Since we've done this, we might as well run plotregr, too
    #
    plotregr = PlotRegr('nc72282711')
    plotregr.execute()
    plotregr.writeContents()
    clear_files(event_path)
Example #13
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTArrays(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')
        container.close()

        #
        # Make plots
        #
        for myimt in imtlist:
            data = datadict[myimt]
            fig, axa = plt.subplots(2, sharex=True, figsize=(10, 8))
            plt.subplots_adjust(hspace=0.1)
            axa[0].plot(data['lons'], data['mean'], color='k', label='mean')
            axa[0].plot(data['lons'],
                        data['mean'] + data['std'],
                        '--b',
                        label='mean +/- stddev')
            axa[0].plot(data['lons'], data['mean'] - data['std'], '--b')
            axa[1].plot(data['lons'], data['std'], '-.r', label='stddev')
            plt.xlabel('Longitude')
            axa[0].set_ylabel('Mean ln(%s) (g)' % myimt)
            axa[1].set_ylabel('Stddev ln(%s) (g)' % myimt)
            axa[0].legend(loc='best')
            axa[1].legend(loc='best')
            axa[0].set_title(self._eventid)
            axa[0].grid()
            axa[1].grid()
            axa[1].set_ylim(bottom=0)
            fileimt = oq_to_file(myimt)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.pdf')
            plt.savefig(pfile)
            pfile = os.path.join(datadir,
                                 self._eventid + '_' + fileimt + '.png')
            plt.savefig(pfile)
            plt.close()
Example #14
    def execute(self):
        """
        Output the version history of an event.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        backups = glob.glob(os.path.join(data_path, self._eventid, 'backup*'))
        backups.sort(reverse=True)
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        # First try the current results file...
        datafile = os.path.join(datadir, 'products', 'shake_result.hdf')
        if os.path.isfile(datafile):
            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            try:
                metadata = container.getMetadata()
            except LookupError:
                print("\nNo version history available for this event.\n")
                return
            history = (metadata['processing']['shakemap_versions']
                       ['map_data_history'])
            final = False
            if len(backups) > 0:
                last_ver = int(backups[0][-4:])
                last_hist = history[-1][2]
                if last_ver == last_hist:
                    final = True
            print_history(history, final=final)
            return

        # Nope. Are there any backup files?
        if len(backups) == 0:
            print("\nNo version history available for this event.\n")
            return

        # There should be a results file in the backup directory...
        datafile = os.path.join(data_path, self._eventid, backups[0],
                                'products', 'shake_result.hdf')
        if os.path.isfile(datafile):
            # Open the ShakeMapOutputContainer and extract the data
            container = ShakeMapOutputContainer.load(datafile)
            try:
                metadata = container.getMetadata()
            except LookupError:
                print("\nNo version history available for this event.\n")
                return
            history = (metadata['processing']['shakemap_versions']
                       ['map_data_history'])
            print_history(history, final=True)
            return

        print("\nNo version history available for this event.\n")
        return
Example #15
def test_get_logger():
    install_path, _ = get_config_paths()
    logpath = os.path.join(install_path, 'logs')
    logger = queue.get_logger(logpath, False)
    logger.info('Testing the logger')
    with open(os.path.join(logpath, 'queue.log'), 'r') as fd:
        lines = fd.readlines()
    assert 'Testing the logger' in lines[-1]
Example #16
def test_magnitude_too_small():
    install_path, _ = get_config_paths()
    config = queue.get_config(install_path)
    assert queue.magnitude_too_small(2.0, -118.25, 34.0, config) is True
    assert queue.magnitude_too_small(3.6, -118.25, 34.0, config) is False
    assert queue.magnitude_too_small(2.0, -119.25, 35.0, config) is True
    assert queue.magnitude_too_small(3.9, -119.25, 34.6, config) is False
    assert queue.magnitude_too_small(2.0, -129.25, 39.0, config) is True
    assert queue.magnitude_too_small(4.1, -129.25, 39.0, config) is False
Example #17
    def execute(self):
        """
        Cancel ShakeMap products using methods configured in transfer.conf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        products_dir = os.path.join(datadir, 'products')
        if not os.path.isdir(products_dir):
            raise NotADirectoryError('%s does not exist.' % products_dir)

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(
                install_path, 'config', 'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        config = ConfigObj(transfer_conf, configspec=configspec)

        # get the output container with all the things in it
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # call the transfer method
        self.logger.info('Sending cancel message...')
        _transfer(config, container, products_dir, cancel=True)

        # Create a file called CANCEL in the data directory. The
        # shake program will look for this and not run if present.
        self.logger.info('Creating cancel file...')
        cancelfile = os.path.join(datadir, 'CANCEL')
        with open(cancelfile, 'wt') as cfile:
            cfile.write('Event cancelled at %s\n' %
                        datetime.utcnow().strftime(TIMEFMT))

        # delete the event from the database
        handler = AmplitudeHandler(install_path, data_path)
        handler.deleteEvent(self._eventid)
        container.close()
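The comment above notes that the shake program looks for the CANCEL file and refuses to run when it is present. A hedged sketch of such a guard, assuming only the file convention shown here (illustrative, not ShakeMap's actual check):

import os

def is_cancelled(datadir):
    # CANCEL is written by the cancel module above; its presence
    # should block further processing of this event.
    return os.path.isfile(os.path.join(datadir, 'CANCEL'))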
Example #18
def test_model_1():

    installpath, datapath = get_config_paths()
    #
    # This is Northridge for a set of output points (not a grid)
    #
    assemble = AssembleModule('northridge_points', comment='Test comment.')
    assemble.execute()
    model = ModelModule('northridge_points')
    model.execute()
Example #19
    def execute(self):
        """
        Transfer ShakeMap products using methods configured in transfer.conf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        # look for the presence of a NO_TRANSFER file in the datadir.
        notransfer = os.path.join(datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info(
                'Event has a %s file blocking transfer.' % NO_TRANSFER)
            return

        products_dir = os.path.join(datadir, 'products')
        if not os.path.isdir(products_dir):
            raise NotADirectoryError('%s does not exist.' % products_dir)

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(
                install_path, 'config', 'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        config = ConfigObj(transfer_conf, configspec=configspec)

        # get the output container with all the things in it
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # call the transfer method
        _transfer(config, container, products_dir)

        # copy the current folder to a new backup directory
        self._make_backup(data_path)

        container.close()
Example #20
def test_model_sim():

    #
    # Run with no data and no fault, and use the default extent.
    #
    install_path, data_path = get_config_paths()
    # event_path = os.path.join(data_path, 'planet9', 'current')
    assemble = AssembleModule('planet9', comment='Test comment.')
    assemble.execute()
    model = ModelModule('planet9')
    model.execute()
Example #21
def test_event_queue():
    install_path, _ = get_config_paths()

    db_file = os.path.join(install_path, 'data', 'event_queue.db')
    if os.path.isfile(db_file):
        os.remove(db_file)

    eq = EventQueue(install_path)

    events = eq.getQueuedEvents()
    assert len(events) == 0
    eq.queueEvent('firstevent', ['This', 'is', 'the', 'first', 'event'], 6)
    eq.queueEvent('secondevent', ['This', 'is', 'the', 'second', 'event'], 4.5)
    eq.queueEvent('thirdevent', ['This', 'is', 'the', 'third', 'event'], 6.6)
    events = eq.getQueuedEvents()
    assert events[0][0] == 'thirdevent'
    assert events[0][1] == ['This', 'is', 'the', 'third', 'event']
    assert events[1][0] == 'firstevent'
    assert events[1][1] == ['This', 'is', 'the', 'first', 'event']
    assert events[2][0] == 'secondevent'
    assert events[2][1] == ['This', 'is', 'the', 'second', 'event']
    eq.dequeueEvent('firstevent')
    events = eq.getQueuedEvents()
    assert events[0][0] == 'thirdevent'
    assert events[0][1] == ['This', 'is', 'the', 'third', 'event']
    assert events[1][0] == 'secondevent'
    assert events[1][1] == ['This', 'is', 'the', 'second', 'event']
    eq.dequeueEvent('thirdevent')
    eq.dequeueEvent('secondevent')
    events = eq.getQueuedEvents()
    assert len(events) == 0

    events = eq.getRunningEvents()
    assert len(events) == 0
    eq.insertRunningEvent('firstevent',
                          ['This', 'is', 'the', 'first', 'event'])
    eq.insertRunningEvent('secondevent',
                          ['This', 'is', 'the', 'second', 'event'])
    events = eq.getRunningEvents()
    assert len(events) == 2
    assert events.index(
        ('firstevent', ['This', 'is', 'the', 'first', 'event'])) >= 0
    assert events.index(
        ('secondevent', ['This', 'is', 'the', 'second', 'event'])) >= 0
    eq.deleteRunningEvent('firstevent')
    events = eq.getRunningEvents()
    assert len(events) == 1
    eq.deleteRunningEvent('secondevent')
    events = eq.getRunningEvents()
    assert len(events) == 0

    db_file = eq.db_file
    del eq
    os.remove(db_file)
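The assertions above imply that getQueuedEvents() returns events ordered by magnitude, highest first (6.6, then 6, then 4.5). A standalone reproduction of that ordering, independent of EventQueue:

queued = [('firstevent', 6.0), ('secondevent', 4.5), ('thirdevent', 6.6)]
order = [eid for eid, mag in sorted(queued, key=lambda t: t[1], reverse=True)]
assert order == ['thirdevent', 'firstevent', 'secondevent']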
Example #22
def test_create_kmz():
    tempdir = tempfile.mkdtemp()
    try:
        homedir = os.path.dirname(os.path.abspath(__file__))
        cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                             'northridge', 'shake_result.hdf')
        container = ShakeMapOutputContainer.load(cfile)
        install_path, data_path = get_config_paths()

        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        pconfig = configobj.ConfigObj(product_config_file,
                                      configspec=spec_file)
        results = pconfig.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(pconfig, results)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']

        logger = logging.getLogger(__name__)
        kmzfile = create_kmz(container, tempdir, oceanfile, logger)
        myzip = zipfile.ZipFile(kmzfile, mode='r')
        kmlstr = myzip.read('shakemap.kml').decode('utf-8')
        root = minidom.parseString(kmlstr)
        document = root.getElementsByTagName('Document')[0]
        folders = document.getElementsByTagName('Folder')
        names = []
        nstations = 0
        nmmi = 0
        for folder in folders:
            name = folder.getElementsByTagName('name')[0].firstChild.data
            names.append(name)
            if name == 'Instrumented Stations':
                nstations = len(folder.getElementsByTagName('Placemark'))
            elif name == 'Macroseismic Stations':
                nmmi = len(folder.getElementsByTagName('Placemark'))
        assert sorted(names) == [
            'Contours', 'Instrumented Stations', 'MMI 4 Polygons',
            'MMI 5 Polygons', 'MMI 6 Polygons', 'MMI 7 Polygons',
            'MMI 8 Polygons', 'MMI 8.5 Polygons', 'MMI Contours', 'MMI Labels',
            'MMI Polygons', 'Macroseismic Stations', 'PGA Contours',
            'PGV Contours', 'SA(0.3) Contours', 'SA(1.0) Contours',
            'SA(3.0) Contours'
        ]
        assert nstations == 185
        assert nmmi == 547
        myzip.close()

    except Exception as e:
        print(str(e))
        assert 1 == 2
    finally:
        shutil.rmtree(tempdir)
Example #23
    def execute(self):
        install_path, data_path = get_config_paths()
        self.datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(self.datadir):
            raise NotADirectoryError('%s is not a valid directory.' %
                                     self.datadir)

        # look for the presence of a NO_TRANSFER file in the datadir.
        notransfer = os.path.join(self.datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info('Event has a %s file blocking transfer.' %
                             NO_TRANSFER)
            return

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(self.datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(install_path, 'config',
                                         'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        self.config = ConfigObj(transfer_conf, configspec=configspec)
        results = self.config.validate(Validator())
        if not isinstance(results, bool) or not results:
            config_error(self.config, results)

        # get the output container with all the things in it
        products_dir = os.path.join(self.datadir, 'products')
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        # extract the info.json object from the container
        self.info = container.getMetadata()
        container.close()

        # check for the presence of a .saved file. If found, do nothing.
        # Otherwise, create the backup directory.
        save_file = os.path.join(self.datadir, SAVE_FILE)
        if not os.path.isfile(save_file):
            logging.info('Making backup directory...')
            self._make_backup(data_path)
            with open(save_file, 'wt') as f:
                tnow = datetime.utcnow().strftime(constants.TIMEFMT)
                f.write('Saved %s by %s\n' % (tnow, self.command_name))
Example #24
def test_verification_0005():
    installpath, datapath = get_config_paths()
    evid = 'verification_test_0005'
    try:
        imtdict = run_event(evid)
        assert np.allclose(np.zeros_like(imtdict['mean']), imtdict['mean'])
        assert np.allclose([np.min(imtdict['std'])], [0], atol=0.0001)
        assert np.allclose([np.max(imtdict['std'])], [0.933], atol=0.0001)
    finally:
        data_file = os.path.join(datapath, evid, 'current', 'shake_data.hdf')
        if os.path.isfile(data_file):
            os.remove(data_file)
Example #25
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create contour files
        self.logger.info('Mapping...')

        # get all of the pieces needed for the mapmaker
        layerdict = {}
        layers = config['products']['mapping']['layers']
        layerdict['coast'] = path_macro_sub(
            layers['coasts'], install_path, data_path)
        layerdict['ocean'] = path_macro_sub(
            layers['oceans'], install_path, data_path)
        layerdict['lake'] = path_macro_sub(
            layers['lakes'], install_path, data_path)
        layerdict['country'] = path_macro_sub(
            layers['countries'], install_path, data_path)
        layerdict['state'] = path_macro_sub(
            layers['states'], install_path, data_path)
        topofile = path_macro_sub(
            layers['topography'], install_path, data_path)
        cities = path_macro_sub(layers['cities'], install_path, data_path)
        mapmaker = MapMaker(container, topofile, layerdict, cities,
                            self.logger)
        self.logger.info('Drawing intensity map...')
        intensity_map = mapmaker.drawIntensityMap(datadir)
        self.logger.info('Created intensity map %s' % intensity_map)
        for imt in config['products']['mapping']['imts']:
            self.logger.info('Drawing %s contour map...' % imt)
            contour_file = mapmaker.drawContourMap(imt, datadir)
            self.logger.info('Created contour map %s' % contour_file)
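path_macro_sub is used here and in Example #10 to expand placeholders in configured layer paths. Judging from the literal replacements visible in Example #37 (the vs30file handling), a plausible sketch of the substitution is below; the real function lives in ShakeMap's utilities and may handle additional macros:

def path_macro_sub_sketch(path, install_path, data_path):
    # Expand the <INSTALL_DIR> and <DATA_DIR> macros seen in the configs.
    return (path.replace('<INSTALL_DIR>', install_path)
                .replace('<DATA_DIR>', data_path))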
Example #26
    def writeContents(self):

        if not len(self.contents):
            return

        # create or update the contents.xml file
        _, data_path = get_config_paths()
        pdldir = os.path.join(data_path, self._eventid, 'current', 'pdl')
        if not os.path.isdir(pdldir):
            os.makedirs(pdldir)
        contents_file = os.path.join(pdldir, 'contents.xml')
        if os.path.isfile(contents_file):
            old_contents = self.readContents(contents_file)
            # TODO: should we ensure that keys are globally unique?
            old_contents.update(self.contents)
            contents = old_contents
        else:
            contents = self.contents

        root = etree.Element("contents")

        pages = {}  # dictionary with slugs as keys
        for key, cdict in contents.items():
            file_el = etree.SubElement(root, "file")
            file_el.set('title', cdict['title'])
            file_el.set('id', key)
            caption = etree.SubElement(file_el, "caption")
            caption.text = etree.CDATA(cdict['caption'])
            for fmt in cdict['formats']:
                format_el = etree.SubElement(file_el, "format")
                format_el.set('href', fmt['filename'])
                format_el.set('type', fmt['type'])
            if 'page' in cdict:
                slug = cdict['page']['slug']
                page_title = cdict['page']['title']
                if slug in pages:
                    pages[slug]['files'].append(key)
                else:
                    page = {'title': page_title, 'files': [key]}
                    pages[slug] = page

        for slug, page_dict in pages.items():
            page_el = etree.SubElement(root, "page")
            page_el.set('title', page_dict['title'])
            page_el.set('slug', slug)
            for filekey in page_dict['files']:
                file_el = etree.SubElement(page_el, 'file')
                file_el.set('refid', filekey)

        xmlstr = etree.tostring(root, xml_declaration=True)
        with open(contents_file, 'wt') as f:
            f.write(xmlstr.decode('utf-8'))
Example #27
def test_verification_0001():
    installpath, datapath = get_config_paths()
    evid = 'verification_test_0001'
    try:
        imtdict = run_event(evid)
        assert np.allclose(np.zeros_like(imtdict['mean']), imtdict['mean'])
        np.testing.assert_almost_equal(np.min(imtdict['std']), 0)
        assert np.max(imtdict['std']) > 0.8 and np.max(imtdict['std']) < 1.0
    finally:
        data_file = os.path.join(datapath, evid, 'current', 'shake_data.hdf')
        if os.path.isfile(data_file):
            os.remove(data_file)
Example #28
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        datadict = {}
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            datadict[myimt] = container.getIMTArrays(
                myimt, 'GREATER_OF_TWO_HORIZONTAL')

        #
        # Make plots
        #
        for myimt in imtlist:
            data = datadict[myimt]
            plt.figure(figsize=(10, 8))
            plt.plot(data['lons'], data['mean'], color='k', label='mean')
            plt.plot(data['lons'], data['mean'] + data['std'], '--b',
                     label='mean +/- stddev')
            plt.plot(data['lons'], data['mean'] - data['std'], '--b')
            plt.plot(data['lons'], data['std'], '-.r', label='stddev')
            plt.xlabel('Longitude')
            plt.ylabel('ln(%s) (g)' % myimt)
            plt.legend(loc='best')
            plt.title(self._eventid)
            plt.grid()
            pfile = os.path.join(datadir, self._eventid + '_' + myimt + '.pdf')
            plt.savefig(pfile)
            plt.close()
Example #29
def test_select():

    installpath, datapath = get_config_paths()

    # Process a non-existent event (should fail)
    smod = SelectModule('not_an_event')
    with pytest.raises(NotADirectoryError):
        smod.execute()

    # Normal event (should succeed)
    event_path = os.path.join(datapath, 'nc72282711', 'current')
    set_files(event_path, {'event.xml': 'event.xml'})
    conf_file = os.path.join(datapath, 'nc72282711', 'current',
                             'model_select.conf')
    smod = SelectModule('nc72282711')
    smod.execute()
    failed = False
    if not os.path.isfile(conf_file):
        failed = True
    clear_files(event_path)
    if failed:
        assert False

    # Subduction event (not over slab)
    conf_file = os.path.join(datapath, 'usp0004bxs', 'current',
                             'model_select.conf')
    if os.path.isfile(conf_file):
        os.remove(conf_file)
    try:
        smod = SelectModule('usp0004bxs')
        smod.execute()
    finally:
        if not os.path.isfile(conf_file):
            print('select failed!')
            assert False
        else:
            os.remove(conf_file)

    # Northridge, with moment tensor file
    conf_file = os.path.join(datapath, 'northridge2', 'current',
                             'model_select.conf')
    if os.path.isfile(conf_file):
        os.remove(conf_file)
    try:
        smod = SelectModule('northridge2')
        smod.execute()
    finally:
        if not os.path.isfile(conf_file):
            print('select failed!')
            assert False
        else:
            os.remove(conf_file)
Example #30
def test_model_6():

    installpath, datapath = get_config_paths()
    #
    # This event exists, but we hide the input hdf file (should fail)
    #
    hdf_file = os.path.join(datapath, 'nc72282711_dyfi', 'current',
                            'shake_data.hdf')
    if os.path.isfile(hdf_file):
        os.remove(hdf_file)
    model = ModelModule('nc72282711_dyfi')
    with pytest.raises(FileNotFoundError):
        model.execute()
Example #31
def run_event(evid):
    installpath, datapath = get_config_paths()
    assemble = AssembleModule(evid, comment='Test comment.')
    assemble.execute()
    model = ModelModule(evid)
    model.execute()
    res_file = os.path.join(datapath, evid, 'current', 'products',
                            'shake_result.hdf')
    oc = ShakeMapOutputContainer.load(res_file)
    imts = oc.getIMTs()
    comps = oc.getComponents(imts[0])
    imtdict = oc.getIMTArrays(imts[0], comps[0])
    return imtdict
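An illustrative call to run_event(), following the verification tests above; the event ID matches those tests and the printed statistics are only a sanity check:

imtdict = run_event('verification_test_0001')
print(imtdict['mean'].mean(), imtdict['std'].max())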
Example #32
def test_model_1():

    installpath, datapath = get_config_paths()
    #
    # This is Northridge for a set of output points (not a grid)
    # Remove the products directory to hit the code that makes it
    # (should succeed)
    #
    assemble = AssembleModule('northridge_points')
    assemble.execute()
    products_dir = os.path.join(datapath, 'northridge_points', 'current',
                                'products')
    if os.path.isdir(products_dir):
        shutil.rmtree(products_dir)
    model = ModelModule('northridge_points')
    model.execute()
Example #33
def test_products():

    installpath, datapath = get_config_paths()
    try:
        #
        # Make sure an output file exists
        #
        assemble = AssembleModule('nc72282711')
        assemble.execute()
        model = ModelModule('nc72282711')
        model.execute()

        #
        # Test the creation of products -- currently not checking results
        # for validity or consistency, but probably should
        #
        mod = ContourModule('nc72282711')
        mod.execute()
        mod = GridXMLModule('nc72282711')
        mod.execute()
        mod = InfoModule('nc72282711')
        mod.execute()
        mod = RasterModule('nc72282711')
        mod.execute()
        mod = RuptureModule('nc72282711')
        mod.execute()
        mod = StationModule('nc72282711')
        mod.execute()
        mod = MappingModule('nc72282711')
        mod.execute()
    finally:
        data_file = os.path.join(datapath, 'nc72282711', 'current',
                                 'shake_data.hdf')
        if os.path.isfile(data_file):
            os.remove(data_file)
        res_file = os.path.join(datapath, 'nc72282711', 'current',
                                'products', 'shake_result.hdf')
        if os.path.isfile(res_file):
            os.remove(res_file)
Example #34
    def execute(self):
        """
        Write info.json metadata file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create ShakeMap metadata file
        formats = config['products']['info']['formats']
        for fformat in formats:
            if fformat not in ALLOWED_FORMATS:
                self.logger.warning('Specified format %s not in list of '
                                    'defined formats. Skipping.' % fformat)
                continue
            if fformat == 'json':
                self.logger.info('Writing info.json file...')
                infostring = container.getString('info.json')
                info_file = os.path.join(datadir, 'info.json')
                with open(info_file, 'wt') as f:
                    f.write(infostring)
Example #35
def test_select():

    installpath, datapath = get_config_paths()

    # Process a non-existent event (should fail)
    smod = SelectModule('not_an_event')
    with pytest.raises(NotADirectoryError):
        smod.execute()

    # Normal event (should succeed)
    conf_file = os.path.join(datapath, 'nc72282711', 'current',
                             'model_zc.conf')
    if os.path.isfile(conf_file):
        os.remove(conf_file)
    try:
        smod = SelectModule('nc72282711')
        smod.execute()
    finally:
        if not os.path.isfile(conf_file):
            print('select failed!')
            assert False
        else:
            os.remove(conf_file)
Example #36
    def execute(self):
        """Create grid.xml and uncertainty.xml files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        logger = logging.getLogger(__name__)
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('gridxml module can only function on '
                                      'gridded data, not sets of points')
        gridnames = container.getIMTs(COMPONENT)
        layers = {}
        field_keys = {}
        xml_types = ['grid', 'uncertainty']
        for xml_type in xml_types:
            for gridname in gridnames:
                imt_field = _oq_to_gridxml(gridname)
                imtdict = container.getIMTGrids(gridname, COMPONENT)
                if xml_type == 'grid':
                    grid = imtdict['mean']
                    metadata = imtdict['mean_metadata']
                elif xml_type == 'uncertainty':
                    # use the stddev layer for uncertainty.xml
                    grid = imtdict['std']
                    metadata = imtdict['std_metadata']

                units = metadata['units']
                digits = metadata['digits']
                grid_data = grid.getData()
                # convert from HDF units to legacy grid.xml units
                if units == 'ln(cm/s)':
                    grid_data = np.exp(grid_data)
                    units = 'cm/s'
                elif units == 'ln(g)':
                    grid_data = np.exp(grid_data) * 100
                    units = '%g'
                else:
                    pass
                layers[imt_field] = grid_data

                field_keys[imt_field] = (units, digits)
            geodict = grid.getGeoDict()

            config = container.getConfig()

            # event dictionary
            info_data = container.getString('info.json')
            info = json.loads(info_data)
            event_info = info['input']['event_information']
            event_dict = {}
            event_dict['event_id'] = event_info['event_id']
            event_dict['magnitude'] = float(event_info['magnitude'])
            event_dict['depth'] = float(event_info['depth'])
            event_dict['lat'] = float(event_info['latitude'])
            event_dict['lon'] = float(event_info['longitude'])
            event_dict['event_timestamp'] = datetime.strptime(
                event_info['origin_time'], TIMEFMT)
            event_dict['event_description'] = event_info['location']
            # TODO the following is SUPER-SKETCHY - we need to save the event
            # network info!!!
            event_dict['event_network'] = event_dict['event_id'][0:2]

            # shake dictionary
            shake_dict = {}
            shake_dict['event_id'] = event_dict['event_id']
            shake_dict['shakemap_id'] = event_dict['event_id']
            # TODO - where are we supposed to get shakemap version
            shake_dict['shakemap_version'] = 1
            shake_dict['code_version'] = shakemap.__version__
            shake_dict['process_timestamp'] = datetime.utcnow()
            shake_dict['shakemap_originator'] = config['system']['source_network']
            shake_dict['map_status'] = config['system']['map_status']
            # TODO - we need a source for this!!!
            shake_dict['shakemap_event_type'] = 'ACTUAL'

            shake_grid = ShakeGrid(
                layers, geodict, event_dict,
                shake_dict, {}, field_keys=field_keys)
            fname = os.path.join(datadir, '%s.xml' % xml_type)
            logger.info('Saving IMT grids to %s' % fname)
            shake_grid.save(fname)  # TODO - set grid version number
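A worked instance of the unit conversion above: grids stored as ln(g) are exponentiated and scaled to percent g for the legacy grid.xml format.

import numpy as np

ln_g = np.array([-2.0, -1.0])   # natural-log ground motion in g
pct_g = np.exp(ln_g) * 100      # approx [13.53, 36.79] in %g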
Example #37
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.debug('Looking for configuration files...')
        modules = ConfigObj(
            os.path.join(install_path, 'config', 'modules.conf'),
            configspec=spec_file)
        gmpe_sets = ConfigObj(
            os.path.join(install_path, 'config', 'gmpe_sets.conf'),
            configspec=spec_file)
        global_config = ConfigObj(
            os.path.join(install_path, 'config', 'model.conf'),
            configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        #
        # The vs30 file may have macros in it
        #
        vs30file = global_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
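            # e.g. '<INSTALL_DIR>/data/vs30.grd' would become
            # '/home/shake/install/data/vs30.grd' (hypothetical paths)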
            if not os.path.isfile(vs30file):
                raise FileNotFoundError("vs30 file '%s' is not a valid file" %
                                        vs30file)
            global_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location']['file'] = loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))

        self.logger.debug('Looking for rupture files...')
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(
            glob.glob(os.path.join(datadir, '..', '.backup*')),
            reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(
                backup_dirs[0].replace(
                    os.path.join(datadir, '..', '.backup'), ''))
            version += 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            new_line = [timestamp, originator, history['history'][-1][2]]
            history['history'][-1] = new_line
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1]
            history['history'].append(new_line)
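        # At this point 'history' holds one [timestamp, originator, version]
        # triple per run, e.g. (hypothetical values):
        #   {'history': [['2018-05-01T12:00:00Z', 'us', 1],
        #                ['2018-05-02T08:30:00Z', 'us', 2]]}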

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
                hdf_file,
                config,
                eventxml,
                history,
                rupturefile=rupturefile,
                datafiles=datafiles)
        self.logger.debug('Created HDF5 input container in %s' %
                          shake_data.getFileName())
        shake_data.close()
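In practice this module is driven through ShakeMap's coremods interface. A minimal usage sketch, assuming the AssembleModule class exercised in the tests below (the import path is an assumption) and an event directory containing event.xml:

    from shakemap.coremods.assemble import AssembleModule

    amod = AssembleModule('nc72282711')  # event id must exist under the data path
    amod.execute()                       # writes <event>/current/shake_data.hdf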
Example #40
def test_config():

    #
    # get_logger()
    #
    logger = config.get_logger('nc72282711', log_option='debug')

    #
    # Some stuff we just call and see if it bombs out
    #
    mydatapath = config.get_data_path()
    myinstall, mydata = config.get_config_paths()

    myspec = config.get_configspec()
    myvalid = config.get_custom_validator()

    c1 = ConfigObj(os.path.join(mydatapath, "model.conf"),
                   configspec=myspec)
    c2 = ConfigObj(os.path.join(mydatapath, "modules.conf"),
                   configspec=myspec)
    c3 = ConfigObj(os.path.join(mydatapath, "gmpe_sets.conf"),
                   configspec=myspec)
    c4 = ConfigObj(os.path.join(mydatapath, "northridge_model.conf"),
                   configspec=myspec)
    c5 = ConfigObj(os.path.join(mydatapath, "products.conf"),
                   configspec=myspec)
    c1.merge(c2)
    c1.merge(c3)
    c1.merge(c4)
    c1.merge(c5)

    results = c1.validate(myvalid, preserve_errors=True)

    assert isinstance(results, bool) and results

    config.check_config(c1, logger)
    #
    # Break the config
    #
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ccf'] = 'NotACCF'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ipe'] = 'NotAnIPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmice'] = 'NotAGMICE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 'NotAGMPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)

    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 47
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    ctest = copy.deepcopy(c1)
    del ctest['interp']
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    #
    # annotatedfloat_type()
    #
    res = config.annotatedfloat_type('4.0')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0d')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0m')
    assert isinstance(res, float)
    assert res == 4.0 / 60.0
    res = config.annotatedfloat_type('4.0c')
    assert isinstance(res, float)
    assert res == 4.0 / 3600.0
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('4.0caweoifaw')
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('')
    #
    # weight_list()
    #
    res = config.weight_list(['0.2', '0.3', '0.5'], min=0)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    res = config.weight_list('None', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list(['0.2', '0.3', '0.5'], min=3)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    with pytest.raises(ValidateError):
        res = config.weight_list([], min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[None]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('None', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.2', '0.3', '0.5'], min=4)
    with pytest.raises(ValidateError):
        res = config.weight_list(['-0.2', '0.3', '0.5'], min=3)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.1', '0.3', '0.5'], min=3)
    #
    # gmpe_list()
    #
    res = config.gmpe_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.gmpe_list('[]', min=1)
    res = config.gmpe_list('thing1', min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1', 'thing2'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1', 'thing2']
    with pytest.raises(ValidateError):
        res = config.gmpe_list(['thing1', 'thing2'], min=3)
    with pytest.raises(ValidateError):
        res = config.gmpe_list(7, min=0)
    with pytest.raises(ValidateError):
        res = config.gmpe_list([7], min=0)
    #
    # extent_list()
    #
    res = config.extent_list('[]')
    assert isinstance(res, list)
    assert res == []
    res = config.extent_list([])
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.extent_list(7)
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', 'thing'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', '1000.0'])
    res = config.extent_list(['-20.0', '-10.0', '20.0', '10.0'])
    assert isinstance(res, list)
    assert res == [-20.0, -10.0, 20.0, 10.0]
    #
    # file_type()
    #
    res = config.file_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.file_type('/home/xxxyyyzzz/awefawe')
    res = config.file_type(os.path.abspath(__file__))
    assert isinstance(res, str)
    assert res == os.path.abspath(__file__)
    #
    # directory_type()
    #
    res = config.directory_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.directory_type('/home/xxxyyyzzz/awefawe')
    res = config.directory_type(os.path.dirname(os.path.abspath(__file__)))
    assert isinstance(res, str)
    assert res == os.path.dirname(os.path.abspath(__file__))
    #
    # status_string()
    #
    res = config.status_string('', min=1)
    assert res == 'automatic'
    res = config.status_string('automatic', min=1)
    assert res == 'automatic'
    with pytest.raises(ValidateError):
        res = config.status_string('thing', min=1)
    #
    # cfg_float_list()
    #
    res = config.cfg_float_list(['2.0', '3.0', '4.0'])
    assert res == [2.0, 3.0, 4.0]
    res = config.cfg_float_list('2.0')
    assert res == [2.0]
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('')
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list([])
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('thing')
    #
    # cfg_float()
    #
    res = config.cfg_float('2.0')
    assert res == 2.0
    with pytest.raises(ValidateError):
        res = config.cfg_float(['2.0'])
    with pytest.raises(ValidateError):
        res = config.cfg_float('')
    with pytest.raises(ValidateError):
        res = config.cfg_float('None')
    with pytest.raises(ValidateError):
        res = config.cfg_float('thing')
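The custom check functions exercised above plug into configobj's validate machinery. Below is a minimal sketch of annotatedfloat_type reconstructed from the assertions above; this is an assumed re-implementation, not the shakemap source:

    from validate import Validator, ValidateError

    def annotatedfloat_type(value):
        # bare floats pass through; 'd'/'m'/'c' suffixes mean degrees,
        # minutes, and seconds of arc, converted to decimal degrees
        try:
            if value.endswith('d'):
                return float(value[:-1])
            if value.endswith('m'):
                return float(value[:-1]) / 60.0
            if value.endswith('c'):
                return float(value[:-1]) / 3600.0
            return float(value)
        except (ValueError, AttributeError):
            raise ValidateError("'%s' is not an annotated float" % value)

    # register the check so a configspec can say "annotatedfloat_type()"
    vtor = Validator({'annotatedfloat_type': annotatedfloat_type})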
Example #41
    def execute(self):
        """
        Augment a ShakeMap input data file with local configs, data, rupture,
        etc. The version history will only be incremented if the originator
        differs from the originator in the previous line of the history.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')
        if not os.path.isfile(hdf_file):
            raise FileNotFoundError('%s does not exist. Use assemble.' %
                                    hdf_file)
        shake_data = ShakeMapInputContainer.load(hdf_file)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Get the config from the HDF file and merge in the local configs
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        shake_config = shake_data.getConfig()
        shake_config = ConfigObj(shake_config, configspec=spec_file)

        modules_file = os.path.join(install_path, 'config', 'modules.conf')
        if os.path.isfile(modules_file):
            self.logger.info('Found a modules file.')
            modules = ConfigObj(modules_file, configspec=spec_file)
            shake_config.merge(modules)
        gmpe_file = os.path.join(install_path, 'config', 'gmpe_sets.conf')
        if os.path.isfile(gmpe_file):
            self.logger.info('Found a gmpe file.')
            gmpe_sets = ConfigObj(gmpe_file, configspec=spec_file)
            shake_config.merge(gmpe_sets)
        config_file = os.path.join(install_path, 'config', 'model.conf')
        if os.path.isfile(config_file):
            self.logger.info('Found a global config file.')
            global_config = ConfigObj(config_file, configspec=spec_file)
            shake_config.merge(global_config)
        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            self.logger.info('Found an event specific model.conf file.')
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        elif os.path.isfile(event_config_zc_file):
            self.logger.info('Found an event specific model_zc file.')
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        #
        # Validate the resulting config
        #
        results = shake_config.validate(validator)
        if not results or isinstance(results, dict):
            config_error(shake_config, results)
        check_config(shake_config, self.logger)
        #
        # The vs30 file may have macros in it
        #
        vs30file = shake_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError('vs30 file "%s" is not a '
                                        'valid file' % vs30file)
            shake_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in shake_config['interp']['prediction_location']:
            loc_file = shake_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError('prediction file "%s" is not a '
                                            'valid file' % loc_file)
                shake_config['interp']['prediction_location']['file'] = loc_file
        #
        # Put the updated config back into shake_data.hdf
        #
        config = shake_config.dict()
        shake_data.setConfig(config)
        #
        # Look for additional data files and update the stationlist if found
        #
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        if datafiles:
            self.logger.info('Found additional data files...')
            shake_data.addStationData(datafiles)
        #
        # Look for a rupture file and replace the existing one if found
        #
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        eventxml = os.path.join(datadir, 'event.xml')
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        if not os.path.isfile(eventxml):
            eventxml = None
        if rupturefile is not None or eventxml is not None:
            self.logger.info('Updating rupture/origin information.')
            shake_data.updateRupture(
                eventxml=eventxml, rupturefile=rupturefile)

        #
        # Sort out the version history. We're working with an existing
        # HDF file, so: if we are the originator, just update the timestamp,
        # otherwise add a new line.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']

        history = shake_data.getVersionHistory()
        if history['history'][-1][1] == originator:
            history['history'][-1][0] = timestamp
        else:
            version = int(history['history'][-1][2]) + 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        shake_data.setVersionHistory(history)

        shake_data.close()
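A minimal usage sketch for this module, assuming an AugmentModule counterpart to the AssembleModule above (class name and import path are assumptions); assemble must have run first, since augment refuses to start without shake_data.hdf:

    from shakemap.coremods.augment import AugmentModule

    AugmentModule('nc72282711').execute()  # updates shake_data.hdf in place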
Example #42
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'points':
            raise NotImplementedError('xtestplot module can only operate on '
                                      'sets of points, not gridded data')

        datalist = []
        stddevlist = []
        periodlist = []
        imtlist = container.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            if not myimt.startswith('SA('):
                continue
            ddict = container.getIMTArrays(myimt, 'GREATER_OF_TWO_HORIZONTAL')
            datalist.append(ddict['mean'][0])
            stddevlist.append(ddict['std'][0])
            periodlist.append(float(myimt.replace('SA(', '').replace(')', '')))
            self.logger.debug('%s: %s', myimt, datalist[-1])
        datalist = np.array(datalist)
        stddevlist = np.array(stddevlist)
        periodlist = np.array(periodlist)
        indxx = np.argsort(periodlist)
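        # indxx orders the IMTs by period so the spectra curves below are
        # plotted monotonically along the x axis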

        #
        # Make plots
        #
        plt.figure(figsize=(10, 8))
        plt.semilogx(periodlist[indxx],
                     datalist[indxx],
                     color='k', label='mean')
        plt.semilogx(periodlist[indxx],
                     datalist[indxx] + stddevlist[indxx],
                     '--b', label='mean +/- stddev')
        plt.semilogx(periodlist[indxx],
                     datalist[indxx] - stddevlist[indxx],
                     '--b')
        plt.semilogx(periodlist[indxx],
                     stddevlist[indxx],
                     '-.r', label='stddev')
        plt.xlabel('Period (s)')
        plt.ylabel('ln(SA) (g)')
        plt.legend(loc='best')
        plt.title(self._eventid)
        plt.grid()
        pfile = os.path.join(datadir, self._eventid + '_spectra_plot.pdf')
        plt.savefig(pfile)
Example #43
    def execute(self):
        '''
        Parses the output of STREC in accordance with the
        configuration file, creates a new GMPE set specific to the event,
        and writes model_zc.conf in the event's 'current' directory.

        Configuration file: select.conf

        Raises:
            NotADirectoryError -- the event's current directory doesn't exist
            FileNotFoundError -- the event.xml file doesn't exist
            ValidateError -- problems with the configuration file
            RuntimeError -- various problems matching the event to a gmpe set
        '''

        # ---------------------------------------------------------------------
        # Get the install and data paths and verify that the event directory
        # exists
        # ---------------------------------------------------------------------
        install_path, data_path = cfg.get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory' % datadir)
        # ---------------------------------------------------------------------
        # Open event.xml and make an Origin object
        # ---------------------------------------------------------------------
        eventxml = os.path.join(datadir, 'event.xml')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)
        org = Origin.fromFile(eventxml)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        # ---------------------------------------------------------------------
        # Get config file from install_path/config, parse and
        # validate it
        # ---------------------------------------------------------------------
        config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
        validate_config(config, install_path)
        # ---------------------------------------------------------------------
        # Get the strec results
        # ---------------------------------------------------------------------
        strec_out = get_tectonic_regions(org.lon, org.lat, org.depth,
                                         self._eventid)
        # ---------------------------------------------------------------------
        # Get the default weighting for this event
        # ---------------------------------------------------------------------
        cfg_tr = config['tectonic_regions']
        str_tr = strec_out['tectonic_regions']
        gmpe_list, weight_list = get_gmpes_by_region(str_tr, cfg_tr, org)

        # ---------------------------------------------------------------------
        # Now look at the geographic layers to see if we need to modify or
        # replace the gmpe list
        # ---------------------------------------------------------------------
        #
        # Find the first configured layer the event is within (if any) or the
        # closest layer
        #
        min_dist_to_layer = 999999.9
        nearest_layer_name = None
        if 'layers' in config and 'layer_dir' in config['layers']:
            layer_dir = config['layers']['layer_dir']
            if layer_dir and layer_dir != 'None':
                geo_layers = get_layer_distances(org.lon, org.lat, layer_dir)
            else:
                geo_layers = {}
            for layer in config['layers']:
                if layer == 'layer_dir':
                    continue
                if layer not in geo_layers:
                    self.logger.warning('Error: cannot find layer %s in %s' %
                                        (layer, layer_dir))
                    continue
                ldist = geo_layers[layer]
                if ldist < min_dist_to_layer:
                    min_dist_to_layer = ldist
                    nearest_layer_name = layer
                    if min_dist_to_layer == 0:
                        break
        #
        # If we are in or near a geographic layer, update the gmpe and weight
        # lists
        #
        if nearest_layer_name is not None and \
           (min_dist_to_layer == 0 or
            min_dist_to_layer <=
                config['layers'][nearest_layer_name]['horizontal_buffer']):

            lcfg = config['layers'][nearest_layer_name]
            #
            # Overwrite the tectonic regions with the layer's custom region
            # settings
            #
            for thing in lcfg:
                if thing == 'horizontal_buffer':
                    layer_buff = lcfg[thing]
                    continue
                layer = thing
                for element in lcfg[layer]:
                    cfg_tr[layer][element] = lcfg[layer][element]
            #
            # Now get the gmpes and weights for the custom layer
            #
            layer_gmpes, layer_weights = get_gmpes_by_region(
                str_tr, cfg_tr, org)
            if layer_buff == 0:
                #
                # If we're here, min_dist_to_layer must be 0,
                # so the weight is 1
                #
                lwgt = 1.0
            else:
                lwgt = 1.0 - min_dist_to_layer / layer_buff
            #
            # If we're inside the region's boundaries, we just use the custom
            # gmpe and weights. If we are outside the region (but still inside
            # the buffer), we blend the custom gmpe and weights with the
            # generic ones we computed earlier.
            #
            if min_dist_to_layer == 0:
                gmpe_list = layer_gmpes
                weight_list = layer_weights
            else:
                gmpe_list = np.append(gmpe_list, layer_gmpes)
                weight_list = np.append(weight_list * (1.0 - lwgt),
                                        layer_weights * lwgt)
        # ---------------------------------------------------------------------
        # Create ConfigObj object for output to model_zc.conf
        # ---------------------------------------------------------------------
        zc_file = os.path.join(datadir, 'model_zc.conf')
        zc_conf = ConfigObj(indent_type='    ')
        zc_conf.filename = zc_file
        #
        # Add the new gmpe set to the object
        #
        gmpe_set = 'gmpe_' + str(self._eventid) + '_custom'
        zc_conf['gmpe_sets'] = OrderedDict([
            (gmpe_set, OrderedDict([
                ('gmpes', list(gmpe_list)),
                ('weights', list(weight_list)),
                ('weights_large_dist', 'None'),
                ('dist_cutoff', 'nan'),
                ('site_gmpes', 'None'),
                ('weights_site_gmpes', 'None')
            ]))
        ])
        #
        # Set gmpe to use the new gmpe set
        #
        zc_conf['modeling'] = OrderedDict([
            ('gmpe', gmpe_set),
            ('mechanism', strec_out['focal_mech'])
        ])

        zc_conf.write()
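The resulting model_zc.conf looks roughly like this (hypothetical event id, GMPE names, weights, and mechanism; the section layout follows the OrderedDicts built above):

    [gmpe_sets]
        [[gmpe_us1000abcd_custom]]
            gmpes = gmpe_one, gmpe_two
            weights = 0.75, 0.25
            weights_large_dist = None
            dist_cutoff = nan
            site_gmpes = None
            weights_site_gmpes = None
    [modeling]
        gmpe = gmpe_us1000abcd_custom
        mechanism = RS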
Example #44
def test_assemble():

    installpath, datapath = get_config_paths()

    # Process a non-existent event (should fail)
    amod = AssembleModule('not_an_event')
    with pytest.raises(NotADirectoryError):
        amod.execute()

    # Would succeed but we remove event.xml (should fail)
    event_file = os.path.join(datapath, 'wenchuan', 'current', 'event.xml')
    os.rename(event_file, event_file + '_safe')
    try:
        amod = AssembleModule('wenchuan')
        with pytest.raises(FileNotFoundError):
            amod.execute()
    finally:
        os.rename(event_file + '_safe', event_file)

    # Normal event (should succeed)
    data_file = os.path.join(datapath, 'wenchuan', 'current', 'shake_data.hdf')
    if os.path.isfile(data_file):
        os.remove(data_file)
    try:
        amod = AssembleModule('wenchuan')
        amod.execute()
        #
        # Run a second time to exercise a different branch of the code
        #
        amod.execute()
    finally:
        if os.path.isfile(data_file):
            os.remove(data_file)

    # Do an event with model.conf (not model_zc.conf) and no zoneinfo
    # (should succeed)
    data_file = os.path.join(datapath, 'nc72282711',
                             'current', 'shake_data.hdf')
    if os.path.isfile(data_file):
        os.remove(data_file)
    try:
        amod = AssembleModule('nc72282711')
        amod.execute()
    finally:
        if os.path.isfile(data_file):
            os.remove(data_file)

    # Try not having an event-specific config (should succeed)
    model_file = os.path.join(datapath, 'nc72282711', 'current',
                              'model.conf')
    os.rename(model_file, model_file + '_safe')
    data_file = os.path.join(datapath, 'nc72282711',
                             'current', 'shake_data.hdf')
    if os.path.isfile(data_file):
        os.remove(data_file)
    try:
        amod = AssembleModule('nc72282711')
        amod.execute()
    finally:
        os.rename(model_file + '_safe', model_file)
        if os.path.isfile(data_file):
            os.remove(data_file)

    # Do an event with DYFI data (should succeed)
    hdf_file = os.path.join(datapath, 'nc72282711_dyfi', 'current',
                            'shake_data.hdf')
    if os.path.isfile(hdf_file):
        os.rename(hdf_file, hdf_file + '_safe')
    try:
        amod = AssembleModule('nc72282711_dyfi')
        amod.execute()
    finally:
        if os.path.isfile(hdf_file + '_safe'):
            os.rename(hdf_file + '_safe', hdf_file)

    #
    # Try some bad config files
    #
    # Should fail validation
    model_file = os.path.join(datapath, 'nc72282711_nodata_nofault',
                              'current', 'model_zc.conf')
    os.rename(model_file, model_file + '_safe')
    shutil.copyfile(model_file + '.bad0', model_file)
    try:
        amod = AssembleModule('nc72282711_nodata_nofault')
        with pytest.raises(RuntimeError):
            amod.execute()
    finally:
        os.rename(model_file + '_safe', model_file)

    # Should fail vs30 filename check
    model_file = os.path.join(datapath, 'nc72282711_nodata_nofault',
                              'current', 'model_zc.conf')
    os.rename(model_file, model_file + '_safe')
    shutil.copyfile(model_file + '.bad1', model_file)
    try:
        amod = AssembleModule('nc72282711_nodata_nofault')
        with pytest.raises(FileNotFoundError):
            amod.execute()
    finally:
        os.rename(model_file + '_safe', model_file)

    # Should fail prediction locations filename check
    model_file = os.path.join(datapath, 'nc72282711_nodata_nofault',
                              'current', 'model_zc.conf')
    os.rename(model_file, model_file + '_safe')
    shutil.copyfile(model_file + '.bad2', model_file)
    try:
        amod = AssembleModule('nc72282711_nodata_nofault')
        with pytest.raises(FileNotFoundError):
            amod.execute()
    finally:
        os.rename(model_file + '_safe', model_file)
    #
    # Make sure the location file substitutions work (should succeed)
    #
    data_file = os.path.join(
        datapath, 'northridge_points', 'current', 'shake_data.hdf')
    if os.path.isfile(data_file):
        os.remove(data_file)
    try:
        amod = AssembleModule('northridge_points')
        amod.execute()
    finally:
        if os.path.isfile(data_file):
            os.remove(data_file)