Example #1
def test_create_kmz():
    tempdir = tempfile.mkdtemp()
    try:
        homedir = os.path.dirname(os.path.abspath(__file__))
        cfile = os.path.join(homedir, '..', '..', 'data', 'containers',
                             'northridge', 'shake_result.hdf')
        container = ShakeMapOutputContainer.load(cfile)
        install_path, data_path = get_config_paths()

        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        pconfig = configobj.ConfigObj(product_config_file,
                                      configspec=spec_file)
        results = pconfig.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(pconfig, results)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']

        logger = logging.getLogger(__name__)
        kmzfile = create_kmz(container, tempdir, oceanfile, logger)
        myzip = zipfile.ZipFile(kmzfile, mode='r')
        kmlstr = myzip.read('shakemap.kml').decode('utf-8')
        root = minidom.parseString(kmlstr)
        document = root.getElementsByTagName('Document')[0]
        folders = document.getElementsByTagName('Folder')
        names = []
        nstations = 0
        nmmi = 0
        for folder in folders:
            name = folder.getElementsByTagName('name')[0].firstChild.data
            names.append(name)
            if name == 'Instrumented Stations':
                nstations = len(folder.getElementsByTagName('Placemark'))
            elif name == 'Macroseismic Stations':
                nmmi = len(folder.getElementsByTagName('Placemark'))
        assert sorted(names) == [
            'Contours', 'Instrumented Stations', 'MMI 4 Polygons',
            'MMI 5 Polygons', 'MMI 6 Polygons', 'MMI 7 Polygons',
            'MMI 8 Polygons', 'MMI 8.5 Polygons', 'MMI Contours', 'MMI Labels',
            'MMI Polygons', 'Macroseismic Stations', 'PGA Contours',
            'PGV Contours', 'SA(0.3) Contours', 'SA(1.0) Contours',
            'SA(3.0) Contours'
        ]
        assert nstations == 185
        assert nmmi == 547
        myzip.close()

    except Exception as e:
        print(str(e))
        assert 1 == 2
    finally:
        shutil.rmtree(tempdir)
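
The test above uses the configuration idiom that recurs throughout these examples: load a .conf file against its configspec, validate it with the custom validator, and hand any failure to config_error(). A minimal standalone sketch of that idiom, with a placeholder path and assuming the helpers are importable from shakemap.utils.config as in the ShakeMap codebase:

from configobj import ConfigObj
from shakemap.utils.config import (config_error, get_configspec,
                                   get_custom_validator)

# Placeholder path; the examples above build it from get_config_paths().
config_file = '/path/to/install/config/products.conf'

spec_file = get_configspec('products')   # configspec bundled with ShakeMap
validator = get_custom_validator()       # Validator with custom check functions
config = ConfigObj(config_file, configspec=spec_file)

results = config.validate(validator)
if not isinstance(results, bool) or not results:
    # config_error() raises RuntimeError describing the failed items
    # (see the pytest.raises(RuntimeError) checks in the test examples below).
    config_error(config, results)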
Example #2
    def execute(self):
        install_path, data_path = get_config_paths()
        self.datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(self.datadir):
            raise NotADirectoryError('%s is not a valid directory.' %
                                     self.datadir)

        # look for the presence of a NO_TRANSFER file in the datadir.
        notransfer = os.path.join(self.datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info('Event has a %s file blocking transfer.' %
                             NO_TRANSFER)
            return

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(self.datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(install_path, 'config',
                                         'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        self.config = ConfigObj(transfer_conf, configspec=configspec)
        results = self.config.validate(Validator())
        if not isinstance(results, bool) or not results:
            config_error(self.config, results)

        # get the output container with all the things in it
        products_dir = os.path.join(self.datadir, 'products')
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        # extract the info.json object from the container
        self.info = container.getMetadata()
        container.close()

        # check for the presence of a .saved file. If found, do nothing.
        # Otherwise, create the backup directory.
        save_file = os.path.join(self.datadir, SAVE_FILE)
        if not os.path.isfile(save_file):
            logging.info('Making backup directory...')
            self._make_backup(data_path)
            with open(save_file, 'wt') as f:
                tnow = datetime.utcnow().strftime(constants.TIMEFMT)
                f.write('Saved %s by %s\n' % (tnow, self.command_name))
Example #3
def get_config(install_path):
    """Read the config and get it into a usable state.

    Args:
        install_path (str): The install path of the current profile.

    Returns:
        dict: A dictionary of configuration data.
    """
    config_file = os.path.join(install_path, 'config', 'queue.conf')
    configspec = get_configspec('queue')
    config = ConfigObj(config_file, configspec=configspec)
    results = config.validate(Validator())
    if not isinstance(results, bool) or not results:
        config_error(config, results)
    config = parse_config(config.dict())
    return config
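
A short usage sketch for get_config() above; the import path for get_config_paths follows the usual ShakeMap layout and is an assumption here:

from shakemap.utils.config import get_config_paths

# get_config() is the function defined above; it returns a plain dict
# (ConfigObj.dict() passed through parse_config()).
install_path, _ = get_config_paths()
queue_config = get_config(install_path)

for key, value in queue_config.items():
    print(key, value)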
Example #4
    def execute(self):
        """
        Create shape files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('shape module can only contour '
                                      'gridded data, not sets of points')

        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        max_workers = config['products']['mapping']['max_workers']
        method = config['products']['shape']['method']

        create_polygons(container,
                        datadir,
                        self.logger,
                        max_workers,
                        method=method)

        container.close()

        self.contents.addFile('shakemap_shapefiles', 'ShakeMap Shape Files',
                              'Shape Files.', 'shape.zip', 'application/zip')
Example #5
    def execute(self):
        """
        Create contour files for all configured IMT values.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        if container.getDataType() != 'grid':
            raise NotImplementedError('contour module can only contour '
                                      'gridded data, not sets of points')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # create contour files
        self.logger.debug('Contouring to files...')
        contour_to_files(container, datadir, self.logger, self.contents,
                         filter_size)
        container.close()
Example #6
    def execute(self):
        """
        Create KML files.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        if container.getDataType() != 'grid':
            raise NotImplementedError('kml module can only contour '
                                      'gridded data, not sets of points')

        # find the low res ocean vector dataset
        product_config_file = os.path.join(install_path, 'config',
                                           'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        pconfig = configobj.ConfigObj(product_config_file,
                                      configspec=spec_file)
        results = pconfig.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(pconfig, results)
        oceanfile = pconfig['products']['mapping']['layers']['lowres_oceans']

        # call create_kmz function
        create_kmz(container, datadir, oceanfile, self.logger)

        container.close()
Example #7
    def __init__(self, pargs):

        current_time = int(time.time())
        self.MEMORY_UPDATE_TIME = current_time
        self.ASSOCIATE_UPDATE_TIME = current_time
        self.DB_MAINTENANCE_TIME = current_time

        self.children = {}
        self.attached = pargs.attached

        self.install_path, self.data_path = get_config_paths()

        self.config = get_config(self.install_path)
        #
        # Get shake.conf for the autorun modules
        #
        config_file = os.path.join(self.install_path, 'config', 'shake.conf')
        spec_file = get_configspec('shake')
        shake_config = ConfigObj(config_file, configspec=spec_file)
        results = shake_config.validate(Validator())
        if not isinstance(results, bool) or not results:
            config_error(shake_config, results)
        self.shake_cmds = shlex.split(shake_config['autorun_modules'])
        #
        # Turn this process into a daemon
        #
        self.logpath = os.path.join(self.install_path, 'logs')
        if not os.path.isdir(self.logpath):
            os.makedirs(self.logpath)
        pidfile = os.path.join(self.logpath, 'queue.pid')
        self.filelock = lockfile.FileLock(pidfile)
        if self.filelock.is_locked():
            if pargs.break_lock:
                self.filelock.break_lock()
            else:
                logger = self.getLogger()
                logger.error("pid lock file '%s' exists, can't start "
                             "sm_queue; exiting..." % (pidfile))
                sys.exit(-1)
Example #8
def get_logging_config():
    """Extract logging configuration from logging.conf.

    See this URL for an example of the config:
    https://gist.github.com/st4lk/6287746

    See https://docs.python.org/3.5/library/logging.config.html

    Returns:
        dict: Dictionary suitable for use with logging.config.dictConfig().
    """

    install_path, _ = get_config_paths()
    conf_file = os.path.join(install_path, 'config', 'logging.conf')
    spec_file = get_configspec(config='logging')
    log_config = ConfigObj(conf_file,
                           configspec=spec_file,
                           interpolation='template')

    val = Validator()
    results = log_config.validate(val)
    if not isinstance(results, bool) or not results:
        config_error(log_config, results)

    _clean_log_dict(log_config)

    # Here follows a bit of trickery...
    # To have a logger point to the root logger using the dictConfig() method,
    # you need to have the logger have a name equal to the empty string ''.
    # Our logging dictionary is originally specified using ConfigObj, which
    # does not allow for empty section headers.  So, we need to get all of the
    # information from the logger we specify, copy it into a logger dictionary
    # with an empty key, and then delete the original logger from the config
    # dictionary. Whew.
    log_name = log_config['loggers'].keys()[0]
    log_config['loggers'][''] = log_config['loggers'][log_name]
    del log_config['loggers'][log_name]
    return log_config
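
The docstring notes that the returned dictionary is suitable for logging.config.dictConfig(), so applying it might look like this minimal sketch (assuming a profile with a valid logging.conf is installed):

import logging
import logging.config

log_config = get_logging_config()   # the function defined above
logging.config.dictConfig(log_config)

# The root logger (the '' entry created above) now carries the configured
# handlers and formatters.
logging.getLogger(__name__).info('logging configured from logging.conf')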
Example #9
def get_aqms_config(cname=None):
    """
    Returns the ConfigObj object resulting from parsing aqms.conf.

    Args:
        cname (str): Base name of the config file to read; defaults to
            'aqms' (i.e., aqms.conf) when None.

    Returns:
        ConfigObj: The ConfigObj object representing aqms.conf.

    Raises:
        FileNotFoundError: if aqms.conf or aqmsspec.conf is not found.
        RuntimeError: if there is an error parsing aqms.conf
    """
    if cname is None:
        cname = 'aqms'

    install_path, _ = get_config_paths()
    conf_file = os.path.join(install_path, 'config', cname + '.conf')
    if not os.path.isfile(conf_file):
        raise FileNotFoundError('No file "%s" exists.' % conf_file)
    spec_path = pkg_resources.resource_filename('shakemap_aqms', 'config')
    spec_file = os.path.join(spec_path, cname + 'spec.conf')
    if not os.path.isfile(spec_file):
        raise FileNotFoundError('No file "%s" exists.' % spec_file)
    config = ConfigObj(conf_file, configspec=spec_file)

    val = Validator()
    results = config.validate(val)
    if not isinstance(results, bool) or not results:
        try:
            config_error(config, results)
        except RuntimeError as err:
            logging.error('Error in {0}.conf: {1}'.format(cname, err))
            raise

    return config
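
A hedged calling sketch for get_aqms_config(): it raises FileNotFoundError when aqms.conf or its spec file is missing and re-raises the RuntimeError from config_error() on validation problems, so callers may want to trap both:

import logging

try:
    aqms_config = get_aqms_config()   # parses <install>/config/aqms.conf
except (FileNotFoundError, RuntimeError) as err:
    logging.error('Could not load AQMS configuration: %s', err)
    raise

# On success, aqms_config is a validated ConfigObj and supports normal
# section/key access.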
Example #10
    def execute(self):
        """
        Augment a ShakeMap input data file with local configs, data, rupture,
        etc. The version history will only be incremented if the originator
        differs from the originator in the previous line of the history.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')
        if not os.path.isfile(hdf_file):
            raise FileNotFoundError('%s does not exist. Use assemble.' %
                                    hdf_file)
        shake_data = ShakeMapInputContainer.load(hdf_file)

        # Prompt for a comment string if none is provided on the command line
        if self.comment is None:
            if sys.stdout.isatty():
                self.comment = input(
                    'Please enter a comment for this version.\n'
                    '(Start with "+" if you wish to append to the\n'
                    'existing comment; "+" by itself will preserve\n'
                    'existing comments.)\n'
                    'comment: ')
            else:
                self.comment = ''

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Get the config from the HDF file and merge in the local configs
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        shake_config = shake_data.getConfig()
        shake_config = ConfigObj(shake_config, configspec=spec_file)
        #
        # This is a weird hack to get around a bug/feature of ConfigObj
        # that results in the validation failing if max_workers is already
        # an integer.
        #
        if 'max_workers' in shake_config['system']:
            shake_config['system']['max_workers'] = \
                    str(shake_config['system']['max_workers'])

        modules_file = os.path.join(install_path, 'config', 'modules.conf')
        if os.path.isfile(modules_file):
            self.logger.debug('Found a modules file.')
            modules = ConfigObj(modules_file, configspec=spec_file)
            shake_config.merge(modules)
        gmpe_file = os.path.join(install_path, 'config', 'gmpe_sets.conf')
        if os.path.isfile(gmpe_file):
            self.logger.debug('Found a gmpe file.')
            gmpe_sets = ConfigObj(gmpe_file, configspec=spec_file)
            shake_config.merge(gmpe_sets)
        config_file = os.path.join(install_path, 'config', 'model.conf')
        if os.path.isfile(config_file):
            self.logger.debug('Found a global config file.')
            global_config = ConfigObj(config_file, configspec=spec_file)
            shake_config.merge(global_config)

        # extent conf (may not be present)
        extent_config = os.path.join(install_path, 'config', 'extent.conf')
        if os.path.isfile(extent_config):
            extent_config = ConfigObj(extent_config, configspec=spec_file)
        else:
            extent_config = ConfigObj()
        shake_config.merge(extent_config)
        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            self.logger.debug('Found an event specific model.conf file.')
            event_config = ConfigObj(event_config_file, configspec=spec_file)
            shake_config.merge(event_config)
        elif os.path.isfile(event_config_zc_file):
            self.logger.debug('Found an event specific model_zc file.')
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        #
        # Validate the resulting config
        #
        results = shake_config.validate(validator)
        if not results or isinstance(results, dict):
            config_error(shake_config, results)
        check_config(shake_config, self.logger)
        #
        # The vs30 file may have macros in it
        #
        vs30file = shake_config['data']['vs30file']
        global_data_path = os.path.join(os.path.expanduser('~'),
                                        'shakemap_data')
        if vs30file:
            vs30file = path_macro_sub(vs30file,
                                      ip=install_path,
                                      dp=data_path,
                                      gp=global_data_path,
                                      ei=self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError('vs30 file "%s" is not a '
                                        'valid file' % vs30file)
            shake_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in shake_config['interp']['prediction_location']:
            loc_file = shake_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':  # 'None' is a string here
                loc_file = path_macro_sub(loc_file,
                                          ip=install_path,
                                          dp=data_path,
                                          gp=global_data_path,
                                          ei=self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError('prediction file "%s" is not a '
                                            'valid file' % loc_file)
                shake_config['interp']['prediction_location']['file'] = \
                    loc_file
        #
        # Put the updated config back into shake_data.hdf
        #
        config = shake_config.dict()
        shake_data.setConfig(config)
        #
        # Look for additional data files and update the stationlist if found
        #
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        if datafiles:
            self.logger.debug('Found additional data files...')
            shake_data.addStationData(datafiles)
        #
        # Look for a rupture file and replace the existing one if found
        #
        rupturefile = os.path.join(datadir, 'rupture.json')
        eventxml = os.path.join(datadir, 'event.xml')
        if not os.path.isfile(eventxml):
            eventxml = None
        if not os.path.isfile(rupturefile):
            faultfiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
            if len(faultfiles):
                rupturefile = faultfiles[0]
            else:
                rupturefile = None
        if (rupturefile and os.path.isfile(rupturefile)) \
                or eventxml is not None:
            self.logger.debug('Updating rupture/origin information.')
            shake_data.updateRupture(eventxml=eventxml,
                                     rupturefile=rupturefile)

        #
        # Sort out the version history. We're working with an existing
        # HDF file, so: if we are the originator, just update the timestamp,
        # otherwise add a new line.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']

        history = shake_data.getVersionHistory()
        if history['history'][-1][1] == originator:
            history['history'][-1][0] = timestamp
            if self.comment.startswith('+'):
                if self.comment.replace('+', '') != '':
                    history['history'][-1][3] += self.comment.replace('+', ' ')
            else:
                history['history'][-1][3] = self.comment
        else:
            version = int(history['history'][-1][2]) + 1
            if self.comment.startswith('+'):
                new_line = [
                    timestamp, originator, version,
                    self.comment.replace('+', '')
                ]
            else:
                new_line = [timestamp, originator, version, self.comment]
            history['history'].append(new_line)
        shake_data.setVersionHistory(history)

        shake_data.close()
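
The version-history handling above treats history['history'] as a list of [timestamp, originator, version, comment] rows. A toy illustration of the two branches, with made-up values, purely to show the data shape:

# Made-up example rows; real ones come from ShakeMapInputContainer.
history = {'history': [['2021-01-01T00:00:00Z', 'us', 1, 'initial run']]}

# Same originator: the last row is updated in place (timestamp, comment).
history['history'][-1][0] = '2021-01-02T00:00:00Z'
history['history'][-1][3] = 'rerun with new station data'

# Different originator: the version is bumped and a new row is appended.
new_version = int(history['history'][-1][2]) + 1
history['history'].append(
    ['2021-01-03T00:00:00Z', 'ci', new_version, 'regional update'])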
Example #11
def test_config():

    #
    # get_logger()
    #
    logger = config.get_logger('nc72282711', log_option='debug')

    #
    # Some stuff we just call and see if it bombs out
    #
    mydatapath = config.get_data_path()
    myinstall, mydata = config.get_config_paths()

    myspec = config.get_configspec()
    myvalid = config.get_custom_validator()

    c1 = ConfigObj(os.path.join(mydatapath, "model.conf"),
                   configspec=myspec)
    c2 = ConfigObj(os.path.join(mydatapath, "modules.conf"),
                   configspec=myspec)
    c3 = ConfigObj(os.path.join(mydatapath, "gmpe_sets.conf"),
                   configspec=myspec)
    c4 = ConfigObj(os.path.join(mydatapath, "northridge_model.conf"),
                   configspec=myspec)
    c5 = ConfigObj(os.path.join(mydatapath, "products.conf"),
                   configspec=myspec)
    c1.merge(c2)
    c1.merge(c3)
    c1.merge(c4)
    c1.merge(c5)

    results = c1.validate(myvalid, preserve_errors=True)

    assert isinstance(results, bool) and results

    config.check_config(c1, logger)
    #
    # Break the config
    #
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ccf'] = 'NotACCF'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ipe'] = 'NotAnIPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmice'] = 'NotAGMICE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 'NotAGMPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)

    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 47
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    ctest = copy.deepcopy(c1)
    del ctest['interp']
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    #
    # annotatedfloat_type()
    #
    res = config.annotatedfloat_type('4.0')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0d')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0m')
    assert isinstance(res, float)
    assert res == 4.0 / 60.0
    res = config.annotatedfloat_type('4.0c')
    assert isinstance(res, float)
    assert res == 4.0 / 3600.0
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('4.0caweoifaw')
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('')
    #
    # weight_list()
    #
    res = config.weight_list(['0.2', '0.3', '0.5'], min=0)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    res = config.weight_list('None', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list(['0.2', '0.3', '0.5'], min=3)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    with pytest.raises(ValidateError):
        res = config.weight_list([], min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[None]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('None', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.2', '0.3', '0.5'], min=4)
    with pytest.raises(ValidateError):
        res = config.weight_list(['-0.2', '0.3', '0.5'], min=3)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.1', '0.3', '0.5'], min=3)
    #
    # gmpe_list()
    #
    res = config.gmpe_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.gmpe_list('[]', min=1)
    res = config.gmpe_list('thing1', min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1', 'thing2'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1', 'thing2']
    with pytest.raises(ValidateError):
        res = config.gmpe_list(['thing1', 'thing2'], min=3)
    with pytest.raises(ValidateError):
        res = config.gmpe_list(7, min=0)
    with pytest.raises(ValidateError):
        res = config.gmpe_list([7], min=0)
    #
    # extent_list()
    #
    res = config.extent_list('[]')
    assert isinstance(res, list)
    assert res == []
    res = config.extent_list([])
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.extent_list(7)
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', 'thing'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', '1000.0'])
    res = config.extent_list(['-20.0', '-10.0', '20.0', '10.0'])
    assert isinstance(res, list)
    assert res == [-20.0, -10.0, 20.0, 10.0]
    #
    # file_type()
    #
    res = config.file_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.file_type('/home/xxxyyyzzz/awefawe')
    res = config.file_type(os.path.abspath(__file__))
    assert isinstance(res, str)
    assert res == os.path.abspath(__file__)
    #
    # directory_type()
    #
    res = config.directory_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.directory_type('/home/xxxyyyzzz/awefawe')
    res = config.directory_type(os.path.dirname(os.path.abspath(__file__)))
    assert isinstance(res, str)
    assert res == os.path.dirname(os.path.abspath(__file__))
    #
    # status_string()
    #
    res = config.status_string('', min=1)
    assert res == 'automatic'
    res = config.status_string('automatic', min=1)
    assert res == 'automatic'
    with pytest.raises(ValidateError):
        res = config.status_string('thing', min=1)
    #
    # cfg_float_list()
    #
    res = config.cfg_float_list(['2.0', '3.0', '4.0'])
    assert res == [2.0, 3.0, 4.0]
    res = config.cfg_float_list('2.0')
    assert res == [2.0]
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('')
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list([])
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('thing')
    #
    # cfg_float()
    #
    res = config.cfg_float('2.0')
    assert res == 2.0
    with pytest.raises(ValidateError):
        res = config.cfg_float(['2.0'])
    with pytest.raises(ValidateError):
        res = config.cfg_float('')
    with pytest.raises(ValidateError):
        res = config.cfg_float('None')
    with pytest.raises(ValidateError):
        res = config.cfg_float('thing')
Example #12
def test_scr_rlme():
    old_gmpe = set_gmpe('stable_continental_nshmp2014_rlme')
    spec_file = pkg_resources.resource_filename(
        'scenarios', os.path.join('data', 'configspec.conf'))
    validator = get_custom_validator()
    config = ConfigObj(os.path.join(os.path.expanduser('~'), 'scenarios.conf'),
                       configspec=spec_file)
    tmp = pkg_resources.resource_filename(
        'scenarios', os.path.join('..', 'data', 'gmpe_sets.conf'))
    config.merge(ConfigObj(tmp, configspec=spec_file))
    tmp = pkg_resources.resource_filename(
        'scenarios', os.path.join('..', 'data', 'modules.conf'))
    config.merge(ConfigObj(tmp, configspec=spec_file))
    results = config.validate(validator)
    if results is not True:
        config_error(config, results)

    # MultiGMPE from config
    config = config.dict()
    gmpe = MultiGMPE.from_config(config)

    # Input stuff
    IMT = imt.SA(1.0)
    rctx = RuptureContext()
    dctx = DistancesContext()
    sctx = SitesContext()

    rctx.rake = 0.0
    rctx.dip = 90.0
    rctx.ztor = 0.0
    rctx.mag = 8.0
    rctx.width = 10.0
    rctx.hypo_depth = 8.0

    dctx.rjb = np.logspace(1, np.log10(800), 100)
    dctx.rrup = dctx.rjb
    dctx.rhypo = dctx.rjb
    dctx.rx = dctx.rjb
    dctx.ry0 = dctx.rjb

    sctx.vs30 = np.ones_like(dctx.rjb) * 275.0
    sctx.vs30measured = np.full_like(dctx.rjb, False, dtype='bool')
    sctx = MultiGMPE.set_sites_depth_parameters(sctx, gmpe)

    # Evaluate
    conf_lmean, dummy = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, IMT,
                                                  [const.StdDev.TOTAL])

    target_lmean = np.array([
        0.10556736, 0.0839267, 0.06189444, 0.03945984, 0.01661264, -0.006657,
        -0.03035844, -0.05450058, -0.07909179, -0.10413995, -0.1296524,
        -0.15563655, -0.1821091, -0.20909381, -0.23661405, -0.26469259,
        -0.29335086, -0.32257956, -0.35232905, -0.38254639, -0.41317807,
        -0.44417017, -0.47549552, -0.5071888, -0.53929293, -0.57185042,
        -0.60490345, -0.63848027, -0.67255251, -0.70707712, -0.74201096,
        -0.77731091, -0.81293906, -0.84889737, -0.88520644, -0.92188724,
        -0.95899471, -0.99699613, -1.03583184, -1.07530664, -1.11531737,
        -1.15576129, -1.19653696, -1.23757689, -1.2772327, -1.2915098,
        -1.30576498, -1.32001713, -1.33429606, -1.3486727, -1.36322545,
        -1.37803346, -1.39317668, -1.40677752, -1.42081409, -1.43538898,
        -1.45056417, -1.46640223, -1.48327111, -1.50656497, -1.53368548,
        -1.56645985, -1.59991327, -1.63399401, -1.66867278, -1.7039438,
        -1.73980246, -1.77624473, -1.81326727, -1.85087166, -1.889066,
        -1.92784814, -1.96721442, -2.0071855, -2.04779304, -2.08909259,
        -2.13114448, -2.17401045, -2.21775376, -2.26243406, -2.30808979,
        -2.35475487, -2.40246494, -2.4512575, -2.50117075, -2.55223495,
        -2.60447754, -2.65792811, -2.71261851, -2.61732716, -2.67007323,
        -2.72399057, -2.77918054, -2.83574666, -2.89379416, -2.95340501,
        -3.01462691, -3.07750731, -3.14209631, -3.20844679
    ])

    np.testing.assert_allclose(conf_lmean, target_lmean, atol=1e-6)

    # Redo for 3 sec so some GMPEs are filtered out
    IMT = imt.SA(3.0)
    gmpe = MultiGMPE.from_config(config, filter_imt=IMT)
    conf_lmean, dummy = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, IMT,
                                                  [const.StdDev.TOTAL])

    target_lmean = np.array([
        -1.26636973, -1.289514, -1.31300386, -1.33683936, -1.36102084,
        -1.38554902, -1.41042497, -1.43565015, -1.46122642, -1.48715602,
        -1.51344154, -1.54008586, -1.56709215, -1.59446375, -1.62220409,
        -1.65031664, -1.6788048, -1.70767178, -1.7369205, -1.76655351,
        -1.79657287, -1.82698005, -1.85777587, -1.88896039, -1.92053288,
        -1.95249175, -1.98483453, -2.01755788, -2.05065755, -2.08412844,
        -2.11796463, -2.15215943, -2.18670547, -2.22159473, -2.25681869,
        -2.29236835, -2.32823441, -2.36453464, -2.40140834, -2.43883442,
        -2.47679132, -2.51525752, -2.55421156, -2.59363211, -2.63112832,
        -2.63336521, -2.63582817, -2.6385319, -2.64147962, -2.64466761,
        -2.64809268, -2.65175214, -2.6556438, -2.65976592, -2.66411721,
        -2.66869673, -2.67350386, -2.67853821, -2.68413311, -2.69604497,
        -2.7124745, -2.73590549, -2.75964098, -2.78367044, -2.80798539,
        -2.8325853, -2.85746998, -2.88263948, -2.90809408, -2.93383429,
        -2.95986073, -2.98617306, -3.01275705, -3.03961495, -3.06675608,
        -3.09419043, -3.12192861, -3.14998191, -3.17836228, -3.20708239,
        -3.23615561, -3.26559604, -3.29541858, -3.32563888, -3.35627343,
        -3.38733956, -3.41885548, -3.4508403, -3.48331409, -3.56476842,
        -3.59987076, -3.63573296, -3.67238872, -3.70987332, -3.74822369,
        -3.78747847, -3.82767809, -3.86886488, -3.91108308, -3.95437899
    ])

    np.testing.assert_allclose(conf_lmean, target_lmean, atol=1e-6)

    # Clean up
    set_gmpe(old_gmpe)
Example #13
def test_config():

    install_dir, data_dir = config.get_config_paths()
    #
    # get_logger()
    #
    log_file = os.path.join(data_dir, 'nc72282711', 'shake.log')
    if os.path.isfile(log_file):
        os.remove(log_file)
    logger = config.get_logger('nc72282711', log_file=True,
                               log_option='debug')
    logger.debug('xyxyxyzz')
    with open(log_file, 'r') as log_fd:
        line = log_fd.readline()
        assert 'xyxyxyzz' in line
    os.remove(log_file)

    logger = config.get_logger('nc72282711', log_option='quiet')
    logger = config.get_logger('nc72282711')
    logger = config.get_logger('nc72282711', log_option='debug')

    #
    # Some stuff we just call and see if it bombs out
    #
    mydatapath = config.get_data_path()
    myinstall, mydata = config.get_config_paths()

    myspec = config.get_configspec()
    myvalid = config.get_custom_validator()

    c1 = ConfigObj(os.path.join(mydatapath, "model.conf"),
                   configspec=myspec)
    c2 = ConfigObj(os.path.join(mydatapath, "modules.conf"),
                   configspec=myspec)
    c3 = ConfigObj(os.path.join(mydatapath, "gmpe_sets.conf"),
                   configspec=myspec)
    c4 = ConfigObj(os.path.join(mydatapath, "northridge_model.conf"),
                   configspec=myspec)
    c5 = ConfigObj(os.path.join(mydatapath, "products.conf"),
                   configspec=myspec)
    c1.merge(c2)
    c1.merge(c3)
    c1.merge(c4)
    c1.merge(c5)

    results = c1.validate(myvalid, preserve_errors=True)

    assert isinstance(results, bool) and results

    config.check_config(c1, logger)
    #
    # Break the config
    #
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ccf'] = 'NotACCF'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ipe'] = 'NotAnIPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmice'] = 'NotAGMICE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 'NotAGMPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)

    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 47
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    ctest = copy.deepcopy(c1)
    del ctest['interp']
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    #
    # Test the profile checker
    #
    ctest = ConfigObj()
    ctest['profiles'] = {'prof1': {'data_path': '/xyz/zzsx/zz',
                                   'install_path': '/xyz/zzsx/zz'},
                         'prof2': {'data_path': data_dir,
                                   'install_path': install_dir}}
    ct1 = config.check_profile_config(ctest)
    assert 'prof1' not in list(ct1['profiles'].keys())
    # os.remove(config_file)
    #
    # annotatedfloat_type()
    #
    res = config.annotatedfloat_type('4.0')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0d')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0m')
    assert isinstance(res, float)
    assert res == 4.0 / 60.0
    res = config.annotatedfloat_type('4.0c')
    assert isinstance(res, float)
    assert res == 4.0 / 3600.0
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('4.0caweoifaw')
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('')
    #
    # weight_list()
    #
    res = config.weight_list(['0.2', '0.3', '0.5'], min=0)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    res = config.weight_list('None', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list(['0.2', '0.3', '0.5'], min=3)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    with pytest.raises(ValidateError):
        res = config.weight_list([], min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[None]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('None', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.2', '0.3', '0.5'], min=4)
    with pytest.raises(ValidateError):
        res = config.weight_list(['-0.2', '0.3', '0.5'], min=3)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.1', '0.3', '0.5'], min=3)
    #
    # gmpe_list()
    #
    res = config.gmpe_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.gmpe_list('[]', min=1)
    res = config.gmpe_list('thing1', min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1', 'thing2'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1', 'thing2']
    with pytest.raises(ValidateError):
        res = config.gmpe_list(['thing1', 'thing2'], min=3)
    with pytest.raises(ValidateError):
        res = config.gmpe_list(7, min=0)
    with pytest.raises(ValidateError):
        res = config.gmpe_list([7], min=0)
    #
    # extent_list()
    #
    res = config.extent_list('[]')
    assert isinstance(res, list)
    assert res == []
    res = config.extent_list([])
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.extent_list(7)
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', 'thing'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', '1000.0'])
    res = config.extent_list(['-20.0', '-10.0', '20.0', '10.0'])
    assert isinstance(res, list)
    assert res == [-20.0, -10.0, 20.0, 10.0]
    #
    # file_type()
    #
    res = config.file_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.file_type('/home/xxxyyyzzz/awefawe')
    res = config.file_type(os.path.abspath(__file__))
    assert isinstance(res, str)
    assert res == os.path.abspath(__file__)
    #
    # directory_type()
    #
    res = config.directory_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.directory_type('/home/xxxyyyzzz/awefawe')
    res = config.directory_type(os.path.dirname(os.path.abspath(__file__)))
    assert isinstance(res, str)
    assert res == os.path.dirname(os.path.abspath(__file__))
    #
    # status_string()
    #
    res = config.status_string('', min=1)
    assert res == 'automatic'
    res = config.status_string('automatic', min=1)
    assert res == 'automatic'
    with pytest.raises(ValidateError):
        res = config.status_string('thing', min=1)
    #
    # cfg_float_list()
    #
    res = config.cfg_float_list(['2.0', '3.0', '4.0'])
    assert res == [2.0, 3.0, 4.0]
    res = config.cfg_float_list('2.0')
    assert res == [2.0]
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('')
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({'a': 'b'})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list([])
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('thing')
    #
    # cfg_float()
    #
    res = config.cfg_float('2.0')
    assert res == 2.0
    with pytest.raises(ValidateError):
        res = config.cfg_float(['2.0'])
    with pytest.raises(ValidateError):
        res = config.cfg_float('')
    with pytest.raises(ValidateError):
        res = config.cfg_float('None')
    with pytest.raises(ValidateError):
        res = config.cfg_float('thing')
Example #14
    def execute(self):
        """
        Augment a ShakeMap input data file with local configs, data, rupture,
        etc. The version history will only be incremented if the originator
        differs from the originator in the previous line of the history.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')
        if not os.path.isfile(hdf_file):
            raise FileNotFoundError('%s does not exist. Use assemble.' %
                                    hdf_file)
        shake_data = ShakeMapInputContainer.load(hdf_file)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Get the config from the HDF file and merge in the local configs
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        shake_config = shake_data.getConfig()
        shake_config = ConfigObj(shake_config, configspec=spec_file)

        modules_file = os.path.join(install_path, 'config', 'modules.conf')
        if os.path.isfile(modules_file):
            self.logger.info('Found a modules file.')
            modules = ConfigObj(modules_file, configspec=spec_file)
            shake_config.merge(modules)
        gmpe_file = os.path.join(install_path, 'config', 'gmpe_sets.conf')
        if os.path.isfile(gmpe_file):
            self.logger.info('Found a gmpe file.')
            gmpe_sets = ConfigObj(gmpe_file, configspec=spec_file)
            shake_config.merge(gmpe_sets)
        config_file = os.path.join(install_path, 'config', 'model.conf')
        if os.path.isfile(config_file):
            self.logger.info('Found a global config file.')
            global_config = ConfigObj(config_file, configspec=spec_file)
            shake_config.merge(global_config)
        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            self.logger.info('Found an event specific model.conf file.')
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        elif os.path.isfile(event_config_zc_file):
            self.logger.info('Found an event specific model_zc file.')
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
            shake_config.merge(event_config)
        #
        # Validate the resulting config
        #
        results = shake_config.validate(validator)
        if not results or isinstance(results, dict):
            config_error(shake_config, results)
        check_config(shake_config, self.logger)
        #
        # The vs30 file may have macros in it
        #
        vs30file = shake_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError('vs30 file "%s" is not a '
                                        'valid file' % vs30file)
            shake_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in shake_config['interp']['prediction_location']:
            loc_file = shake_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError('prediction file "%s" is not a '
                                            'valid file' % loc_file)
                shake_config['interp']['prediction_location']['file'] = loc_file
        #
        # Put the updated config back into shake_data.hdf
        #
        config = shake_config.dict()
        shake_data.setConfig(config)
        #
        # Look for additional data files and update the stationlist if found
        #
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        if datafiles:
            self.logger.info('Found additional data files...')
            shake_data.addStationData(datafiles)
        #
        # Look for a rupture file and replace the existing one if found
        #
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        eventxml = os.path.join(datadir, 'event.xml')
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        if not os.path.isfile(eventxml):
            eventxml = None
        if rupturefile is not None or eventxml is not None:
            self.logger.info('Updating rupture/origin information.')
            shake_data.updateRupture(
                eventxml=eventxml, rupturefile=rupturefile)

        #
        # Sort out the version history. We're working with an existing
        # HDF file, so: if we are the originator, just update the timestamp,
        # otherwise add a new line.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']

        history = shake_data.getVersionHistory()
        if history['history'][-1][1] == originator:
            history['history'][-1][0] = timestamp
        else:
            version = int(history['history'][-1][2]) + 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        shake_data.setVersionHistory(history)

        shake_data.close()
Example #15
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('uncertaintymaps module can only '
                                      'operate on gridded data, not sets of '
                                      'points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Uncertainty mapping...')

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the uncertainty mapping functions
        layers = config['products']['mapping']['layers']
        if 'countries' in layers and layers['countries'] != '':
            countries_file = layers['countries']
        else:
            countries_file = None
        if 'states_provs' in layers and layers['states_provs'] != '':
            states_provs_file = layers['states_provs']
        else:
            states_provs_file = None
        if 'oceans' in layers and layers['oceans'] != '':
            oceans_file = layers['oceans']
        else:
            oceans_file = None
        if 'lakes' in layers and layers['lakes'] != '':
            lakes_file = layers['lakes']
        else:
            lakes_file = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
        topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        faults = None
        roads = None
        if states_provs_file is not None:
            states_provs = ShapelyFeature(
                Reader(states_provs_file).geometries(),
                ccrs.PlateCarree(),
                facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            # The feature constructor doesn't necessarily download the
            # data, but we want it to so that multiple threads don't
            # try to do it at once when they actually access the data.
            # So below we just call the geometries() method to trigger
            # the download if necessary.
            _ = states_provs.geometries()

        if countries_file is not None:
            countries = ShapelyFeature(Reader(countries_file).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')
            _ = countries.geometries()

        if oceans_file is not None:
            oceans = ShapelyFeature(Reader(oceans_file).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)
            _ = oceans.geometries()

        if lakes_file is not None:
            lakes = ShapelyFeature(Reader(lakes_file).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)
            _ = lakes.geometries()

        alist = []
        llogo = config['products']['mapping'].get('license_logo') or None
        ltext = config['products']['mapping'].get('license_text') or None
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'roadcolor': layers['roadcolor'],
                'roadwidth': layers['roadwidth'],
                'faults': faults,
                'faultcolor': layers['faultcolor'],
                'faultwidth': layers['faultwidth'],
                'datadir': datadir,
                'operator': operator,
                'filter_size': 0,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict,
                'display_magnitude': self.display_magnitude,
                'pdf_dpi': config['products']['mapping']['pdf_dpi'],
                'img_dpi': config['products']['mapping']['img_dpi'],
                'license_logo': llogo,
                'license_text': ltext,
            }
            alist.append(d)

            #
            # Populate the contents.xml
            #
            for key in ('std', 'phi', 'tau'):
                if key not in d['imtdict'] or d['imtdict'][key] is None:
                    continue

                if key == 'std':
                    ext = '_sigma'
                    utype = ' Total'
                elif key == 'phi':
                    ext = '_phi'
                    utype = ' Within-event'
                else:
                    ext = '_tau'
                    utype = ' Between-event'

                if imtype == 'MMI':
                    fileimt = 'intensity'
                else:
                    fileimt = oq_to_file(imtype)

                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.jpg', 'image/jpeg')
                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
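
A note on the topography placeholder above: the module pads the map bounds by
five grid cells and substitutes a flat, zero-valued grid for the real
topography so the slow HDF5 read can be skipped entirely for uncertainty maps.
A minimal sketch of that padding and placeholder construction, assuming
MapIO's GeoDict and Grid2D as used above and with made-up example bounds:

import numpy as np
from mapio.geodict import GeoDict
from mapio.grid2d import Grid2D

# Hypothetical bounds and spacing; in the module these come from
# info['output']['map_information'] in the output container.
xmin, xmax, ymin, ymax = -120.0, -118.0, 33.0, 35.0
dx = dy = 0.0083

# Pad the box by five grid cells on every side, as the module does.
padx, pady = 5 * dx, 5 * dy
sampledict = GeoDict.createDictFromBox(xmin - padx, xmax + padx,
                                       ymin - pady, ymax + pady, dx, dy)

# A flat, zero-valued stand-in for the topography grid.
topogrid = Grid2D(data=np.zeros((sampledict.ny, sampledict.nx)),
                  geodict=sampledict)
print(sampledict.nx, sampledict.ny)
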
Example #16
0
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.debug('Looking for configuration files...')
        modules = ConfigObj(
            os.path.join(install_path, 'config', 'modules.conf'),
            configspec=spec_file)
        gmpe_sets = ConfigObj(
            os.path.join(install_path, 'config', 'gmpe_sets.conf'),
            configspec=spec_file)
        global_config = ConfigObj(
            os.path.join(install_path, 'config', 'model.conf'),
            configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        #
        # The vs30 file may have macros in it
        #
        vs30file = global_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError("vs30 file '%s' is not a valid file" %
                                        vs30file)
            global_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location']['file'] = loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))

        self.logger.debug('Looking for rupture files...')
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(
            glob.glob(os.path.join(datadir, '..', '.backup*')),
            reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(
                backup_dirs[0].replace(
                    os.path.join(datadir, '..', '.backup'), ''))
            version += 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            new_line = [timestamp, originator, history['history'][-1][2]]
            history['history'][-1] = new_line
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1]
            history['history'].append(new_line)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
                hdf_file,
                config,
                eventxml,
                history,
                rupturefile=rupturefile,
                datafiles=datafiles)
        self.logger.debug('Created HDF5 input container in %s' %
                         shake_data.getFileName())
        shake_data.close()
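
The version bookkeeping above derives the next version number from any
'.backup<N>' sibling directories of the event's 'current' directory, falling
back to the history stored in an existing shake_data.hdf, and finally to
version 1. A standalone sketch of that logic with hypothetical paths (the real
module also carries the prior history lines out of the backup container):

import datetime
import glob
import os

def next_history_line(datadir, originator='us'):
    # Derive the next version from '.backup<N>' siblings, as above.
    timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
    prefix = os.path.join(datadir, '..', '.backup')
    backup_dirs = sorted(glob.glob(prefix + '*'), reverse=True)
    if backup_dirs:
        version = int(backup_dirs[0].replace(prefix, '')) + 1
    elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
        version = None  # the module reuses the version from the existing file
    else:
        version = 1
    return [timestamp, originator, version]
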
Example #17
0
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Mapping...')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the mapping functions
        layers = config['products']['mapping']['layers']
        if 'topography' in layers and layers['topography'] != '':
            topofile = layers['topography']
        else:
            topofile = None
        if 'roads' in layers and layers['roads'] != '':
            roadfile = layers['roads']
        else:
            roadfile = None
        if 'faults' in layers and layers['faults'] != '':
            faultfile = layers['faults']
        else:
            faultfile = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        if topofile:
            topogrid = read(topofile, samplegeodict=sampledict, resample=False)
        else:
            tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
            topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        extent = (float(xmin), float(ymin), float(xmax), float(ymax))
        if 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            states_provs = list(states_provs.intersecting_geometries(extent))
            if len(states_provs) > 300:
                states_provs = None
            else:
                states_provs = cfeature.NaturalEarthFeature(
                    category='cultural',
                    name='admin_1_states_provinces_lines',
                    scale='10m',
                    facecolor='none')

            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')

            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)

            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)

        if faultfile is not None:
            faults = ShapelyFeature(Reader(faultfile).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor='none')
        else:
            faults = None

        if roadfile is not None:
            roads = ShapelyFeature(Reader(roadfile).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor='none')
            if len(list(roads.intersecting_geometries(extent))) > 200:
                roads = None
            else:
                roads = ShapelyFeature(Reader(roadfile).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        else:
            roads = None

        alist = []
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'faults': faults,
                'datadir': datadir,
                'operator': operator,
                'filter_size': filter_size,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict
            }
            alist.append(d)
            if imtype == 'MMI':
                g = copy.deepcopy(d)
                g['imtype'] = 'thumbnail'
                alist.append(g)
                h = copy.deepcopy(d)
                h['imtype'] = 'overlay'
                alist.append(h)
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.jpg', 'image/jpeg')
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.pdf', 'application/pdf')
                self.contents.addFile('intensityThumbnail',
                                      'Intensity Thumbnail',
                                      'Thumbnail of intensity map.',
                                      'pin-thumbnail.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.pngw', 'text/plain')
            else:
                fileimt = oq_to_file(imtype)
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.jpg', 'image/jpeg')
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
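
Both mapping modules hand the per-IMT argument dictionaries to a process pool
when max_workers > 0 and fall back to a plain serial loop otherwise. A minimal
sketch of that dispatch pattern, with a hypothetical make_one_map standing in
for shakemap's make_map:

import concurrent.futures as cf

def make_one_map(adict):
    # Stand-in for the real map renderer; it just reports the IMT here.
    return 'rendered %s' % adict['imtype']

alist = [{'imtype': imt} for imt in ('MMI', 'PGA', 'PGV')]
max_workers = 2

if __name__ == '__main__':
    if max_workers > 0:
        with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
            results = list(ex.map(make_one_map, alist))
    else:
        results = [make_one_map(adict) for adict in alist]
    print(results)
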
Example #18
0
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        oc = ShakeMapOutputContainer.load(datafile)
        if oc.getDataType() != 'grid':
            raise NotImplementedError('plotregr module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # If mapping runs in parallel, then we want this module to run in
        # parallel as well. Otherwise we get weird errors from matplotlib
        max_workers = config['products']['mapping']['max_workers']

        #
        # Cheating here a bit by assuming that the IMTs are the same
        # as the regression IMTs
        #
        rockgrid = {}
        soilgrid = {}
        rocksd = {}
        soilsd = {}
        imtlist = oc.getIMTs('GREATER_OF_TWO_HORIZONTAL')
        for myimt in imtlist:
            rockgrid[myimt], _ = oc.getArray(['attenuation', 'rock', myimt],
                                             'mean')
            soilgrid[myimt], _ = oc.getArray(['attenuation', 'soil', myimt],
                                             'mean')
            rocksd[myimt], _ = oc.getArray(['attenuation', 'rock', myimt],
                                           'std')
            soilsd[myimt], _ = oc.getArray(['attenuation', 'soil', myimt],
                                           'std')
        distances, _ = oc.getArray(['attenuation', 'distances'], 'rrup')

        stations = oc.getStationDict()

        #
        # Make plots
        #
        alist = []
        for myimt in imtlist:
            a = {
                'myimt': myimt,
                'rockgrid': rockgrid,
                'soilgrid': soilgrid,
                'rocksd': rocksd,
                'soilsd': soilsd,
                'stations': stations,
                'distances': distances,
                'eventid': self._eventid,
                'datadir': datadir
            }
            alist.append(a)
            if myimt == 'MMI':
                self.contents.addFile(
                    'miRegr', 'Intensity Regression',
                    'Regression plot of macroseismic '
                    'intensity.', 'mmi_regr.png', 'image/png')
            elif myimt == 'PGA':
                self.contents.addFile(
                    'pgaRegr', 'PGA Regression', 'Regression plot of peak '
                    'ground acceleration (%g).', 'pga_regr.png', 'image/png')
            elif myimt == 'PGV':
                self.contents.addFile(
                    'pgvRegr', 'PGV Regression',
                    'Regression plot of peak ground '
                    'velocity (cm/s).', 'pgv_regr.png', 'image/png')
            else:
                oqimt = imt.from_string(myimt)
                period = str(oqimt.period)
                filebase = oq_to_file(myimt)
                psacap = 'Regression plot of ' + period + ' sec 5% damped ' \
                         'pseudo-spectral acceleration (%g).'
                self.contents.addFile(filebase + 'Regr',
                                      'PSA ' + period + ' sec Regression',
                                      psacap, filebase + '_regr.png',
                                      'image/png')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_plots, alist)
                list(results)
        else:
            for adict in alist:
                make_plots(adict)

        #
        # Make attenuation_curves.json
        #
        jdict = {'eventid': self._eventid}
        jdict['gmpe'] = {}
        for site in ['soil', 'rock']:
            jdict['gmpe'][site] = {}
            for myimt in imtlist:
                jdict['gmpe'][site][myimt] = {}
                jdict['gmpe'][site][myimt]['mean'] = oc.getArray(
                    ['attenuation', site, myimt],
                    'mean')[0].round(decimals=5).tolist()
                jdict['gmpe'][site][myimt]['stddev'] = oc.getArray(
                    ['attenuation', site, myimt],
                    'std')[0].round(decimals=5).tolist()
        jdict['distances'] = {}
        for dtype in ['repi', 'rhypo', 'rjb', 'rrup']:
            jdict['distances'][dtype] = oc.getArray(
                ['attenuation', 'distances'],
                dtype)[0].round(decimals=5).tolist()
        jdict['mean_bias'] = {}
        info = oc.getMetadata()
        for myimt in imtlist:
            jdict['mean_bias'][myimt] = info['output']['ground_motions'][
                myimt]['bias']
        jstring = json.dumps(jdict, allow_nan=False)
        jfile = os.path.join(datadir, 'attenuation_curves.json')
        with open(jfile, 'wt') as f:
            f.write(jstring)
        oc.close()
        cap = "Nominal attenuation curves"
        self.contents.addFile('attenuationCurves', 'Attenuation Curves', cap,
                              'attenuation_curves.json', 'application/json')
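
The JSON assembly at the end of this example rounds each numpy array to five
decimals, converts it to a plain list, and serializes with allow_nan=False so
that a stray NaN or Inf raises instead of producing invalid JSON. A small
sketch of that serialization step with hypothetical curve values:

import json
import numpy as np

# Hypothetical curves; in the module these come from oc.getArray(...).
distances = np.logspace(0, 2, 5)
mean_rock = np.exp(-0.002 * distances)

jdict = {
    'eventid': 'ci12345678',   # hypothetical event ID
    'distances': {'rrup': distances.round(decimals=5).tolist()},
    'gmpe': {'rock': {'PGA': {'mean': mean_rock.round(decimals=5).tolist()}}},
}
with open('attenuation_curves.json', 'wt') as f:
    f.write(json.dumps(jdict, allow_nan=False))
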
Example #19
0
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        # Prompt for a comment string if none is provided on the command line
        if self.comment is None:
            if sys.stdout is not None and sys.stdout.isatty():
                self.comment = input(
                    'Please enter a comment for this version.\n'
                    'comment: ')
            else:
                self.comment = ''

        # find any source.txt or moment.xml files
        momentfile = os.path.join(datadir, 'moment.xml')
        sourcefile = os.path.join(datadir, 'source.txt')
        if not os.path.isfile(sourcefile):
            sourcefile = None
        if not os.path.isfile(momentfile):
            momentfile = None

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)
        pdl_path = os.path.join(datadir, 'pdl')
        if os.path.isdir(pdl_path):
            shutil.rmtree(pdl_path, ignore_errors=True)

        # Look for any .transferred file and delete it
        save_file = os.path.join(datadir, SAVE_FILE)
        if os.path.isfile(save_file):
            os.remove(save_file)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.debug('Looking for configuration files...')
        modules = ConfigObj(
            os.path.join(install_path, 'config', 'modules.conf'),
            configspec=spec_file)
        gmpe_sets = ConfigObj(
            os.path.join(install_path, 'config', 'gmpe_sets.conf'),
            configspec=spec_file)
        global_config = ConfigObj(
            os.path.join(install_path, 'config', 'model.conf'),
            configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_select.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_select.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file,
                                     configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        global_data_path = os.path.join(os.path.expanduser('~'),
                                        'shakemap_data')
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros; this could have the event ID, so we
        # can't just use the file_type handler in the configspec
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':      # 'None' is a string here
                loc_file = path_macro_sub(loc_file, ip=install_path,
                                          dp=data_path, gp=global_data_path,
                                          ei=self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location']['file'] = \
                    loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))
        datafiles += glob.glob(os.path.join(datadir, '*_dat.json'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.json')):
            datafiles.append(os.path.join(datadir, 'stationlist.json'))

        self.logger.debug('Looking for rupture files...')
        # look for geojson versions of rupture files
        rupturefile = os.path.join(datadir, 'rupture.json')
        if not os.path.isfile(rupturefile):
            # failing any of those, look for text file versions
            rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
            rupturefile = None
            if len(rupturefiles):
                rupturefile = rupturefiles[0]

        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(
            glob.glob(os.path.join(datadir, '..', 'backup*')),
            reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(
                backup_dirs[0].replace(
                    os.path.join(datadir, '..', 'backup'), ''))
            version += 1
            new_line = [timestamp, originator, version, self.comment]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            # If there is no history, just start a new one with version 1
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            if 'history' in history:
                new_line = [timestamp, originator, history['history'][-1][2],
                            self.comment]
                history['history'][-1] = new_line
            else:
                history = {'history': []}
                new_line = [timestamp, originator, 1, self.comment]
                history['history'].append(new_line)
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1, self.comment]
            history['history'].append(new_line)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
            hdf_file,
            config,
            eventxml,
            history,
            rupturefile=rupturefile,
            sourcefile=sourcefile,
            momentfile=momentfile,
            datafiles=datafiles)
        self.logger.debug('Created HDF5 input container in %s' %
                          shake_data.getFileName())
        ah = AmplitudeHandler(install_path, data_path)
        event = ah.getEvent(self._eventid)
        if event is None:
            origin = shake_data.getRuptureObject().getOrigin()
            event = {'id': self._eventid,
                     'netid': origin.netid,
                     'network': origin.network,
                     'time': origin.time.strftime(constants.TIMEFMT),
                     'lat': origin.lat,
                     'lon': origin.lon,
                     'depth': origin.depth,
                     'mag': origin.mag,
                     'locstring': origin.locstring}
            ah.insertEvent(event)
        shake_data.close()
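
This variant delegates macro expansion to path_macro_sub (which also knows the
user's global shakemap_data directory), whereas the other assemble examples
inline the replacements. A sketch of what that expansion amounts to for the
macros shown elsewhere in this document; an illustration only, not the actual
path_macro_sub implementation:

def expand_path_macros(path, install_path, data_path, eventid):
    # Replace the <INSTALL_DIR>, <DATA_DIR> and <EVENT_ID> macros, as the
    # inline versions above do.
    path = path.replace('<INSTALL_DIR>', install_path)
    path = path.replace('<DATA_DIR>', data_path)
    path = path.replace('<EVENT_ID>', eventid)
    return path

# Hypothetical values:
print(expand_path_macros('<DATA_DIR>/<EVENT_ID>/current/grid.xml',
                         '/home/shake/install', '/home/shake/data',
                         'us1000abcd'))
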
Example #20
0
    def execute(self):
        """
        Assemble ShakeMap input data and write a ShakeMapInputContainer named
        shake_data.hdf in the event's 'current' directory.

        Raises:
            NotADirectoryError: When the event data directory does not
                exist.
            FileNotFoundError: When the event's event.xml file does
                not exist.
            RuntimeError: When there are problems parsing the configuration.
            ValidateError: When there are configuration items missing or mis-
                configured.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        eventxml = os.path.join(datadir, 'event.xml')
        self.logger.debug('Looking for event.xml file...')
        if not os.path.isfile(eventxml):
            raise FileNotFoundError('%s does not exist.' % eventxml)

        #
        # Clear away results from previous runs
        #
        products_path = os.path.join(datadir, 'products')
        if os.path.isdir(products_path):
            shutil.rmtree(products_path, ignore_errors=True)

        #
        # Look for global configs in install_path/config
        #
        spec_file = get_configspec()
        validator = get_custom_validator()
        self.logger.info('Looking for configuration files...')
        modules = ConfigObj(os.path.join(install_path, 'config',
                                         'modules.conf'),
                            configspec=spec_file)
        gmpe_sets = ConfigObj(os.path.join(install_path, 'config',
                                           'gmpe_sets.conf'),
                              configspec=spec_file)
        global_config = ConfigObj(os.path.join(install_path, 'config',
                                               'model.conf'),
                                  configspec=spec_file)

        #
        # this is the event specific model.conf (may not be present)
        # prefer model.conf to model_zc.conf
        #
        event_config_file = os.path.join(datadir, 'model.conf')
        event_config_zc_file = os.path.join(datadir, 'model_zc.conf')
        if os.path.isfile(event_config_file):
            event_config = ConfigObj(event_config_file, configspec=spec_file)
        elif os.path.isfile(event_config_zc_file):
            event_config = ConfigObj(event_config_zc_file,
                                     configspec=spec_file)
        else:
            event_config = ConfigObj()

        #
        # start merging event_config
        #
        global_config.merge(event_config)
        global_config.merge(modules)
        global_config.merge(gmpe_sets)

        results = global_config.validate(validator)
        if not isinstance(results, bool) or not results:
            config_error(global_config, results)

        check_config(global_config, self.logger)

        #
        # The vs30 file may have macros in it
        #
        vs30file = global_config['data']['vs30file']
        if vs30file:
            vs30file = vs30file.replace('<INSTALL_DIR>', install_path)
            vs30file = vs30file.replace('<DATA_DIR>', data_path)
            vs30file = vs30file.replace('<EVENT_ID>', self._eventid)
            if not os.path.isfile(vs30file):
                raise FileNotFoundError("vs30 file '%s' is not a valid file" %
                                        vs30file)
            global_config['data']['vs30file'] = vs30file
        #
        # If there is a prediction_location->file file, then we need
        # to expand any macros
        #
        if 'file' in global_config['interp']['prediction_location']:
            loc_file = global_config['interp']['prediction_location']['file']
            if loc_file and loc_file != 'None':  # 'None' is a string here
                loc_file = loc_file.replace('<INSTALL_DIR>', install_path)
                loc_file = loc_file.replace('<DATA_DIR>', data_path)
                loc_file = loc_file.replace('<EVENT_ID>', self._eventid)
                if not os.path.isfile(loc_file):
                    raise FileNotFoundError("prediction file '%s' is not "
                                            "a valid file" % loc_file)
                global_config['interp']['prediction_location'][
                    'file'] = loc_file

        config = global_config.dict()

        self.logger.debug('Looking for data files...')
        datafiles = glob.glob(os.path.join(datadir, '*_dat.xml'))
        if os.path.isfile(os.path.join(datadir, 'stationlist.xml')):
            datafiles.append(os.path.join(datadir, 'stationlist.xml'))

        self.logger.debug('Looking for rupture files...')
        rupturefiles = glob.glob(os.path.join(datadir, '*_fault.txt'))
        rupturefile = None
        if len(rupturefiles):
            rupturefile = rupturefiles[0]
        #
        # Sort out the version history. Get the most recent backup file and
        # extract the existing history. Then add a new line for this run.
        #
        timestamp = datetime.datetime.utcnow().strftime('%FT%TZ')
        originator = config['system']['source_network']
        backup_dirs = sorted(glob.glob(os.path.join(datadir, '..',
                                                    '.backup*')),
                             reverse=True)
        if len(backup_dirs):
            #
            # Backup files exist so find the latest one and extract its
            # history, then add a new line that increments the version
            #
            bu_file = os.path.join(backup_dirs[0], 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            version = int(backup_dirs[0].replace(
                os.path.join(datadir, '..', '.backup'), ''))
            version += 1
            new_line = [timestamp, originator, version]
            history['history'].append(new_line)
        elif os.path.isfile(os.path.join(datadir, 'shake_data.hdf')):
            #
            # No backups are available, but there is an existing shake_data
            # file. Extract its history and update the timestamp and
            # source network (but leave the version alone).
            #
            bu_file = os.path.join(datadir, 'shake_data.hdf')
            bu_ic = ShakeMapInputContainer.load(bu_file)
            history = bu_ic.getVersionHistory()
            bu_ic.close()
            new_line = [timestamp, originator, history['history'][-1][2]]
            history['history'][-1] = new_line
        else:
            #
            # No backup and no existing file. Make this version 1
            #
            history = {'history': []}
            new_line = [timestamp, originator, 1]
            history['history'].append(new_line)

        hdf_file = os.path.join(datadir, 'shake_data.hdf')

        self.logger.debug('Creating input container...')
        shake_data = ShakeMapInputContainer.createFromInput(
            hdf_file,
            config,
            eventxml,
            history,
            rupturefile=rupturefile,
            datafiles=datafiles)
        self.logger.info('Created HDF5 input container in %s' %
                         shake_data.getFileName())
        shake_data.close()
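
All of the assemble variants above follow the same ConfigObj pattern: load the
global configuration files against a configspec, merge the event-specific
config on top so its values win, then validate the result. A minimal,
self-contained sketch of that merge-and-validate flow using plain
configobj/validate objects (the real code uses shakemap's get_custom_validator
and config_error helpers):

from configobj import ConfigObj
from validate import Validator

# Hypothetical spec and config contents; the real modules read model.conf,
# modules.conf and gmpe_sets.conf against shakemap's configspec.
spec_lines = ['[system]', 'source_network = string(default=us)']
global_config = ConfigObj(['[system]', 'source_network = ci'],
                          configspec=spec_lines)
event_config = ConfigObj(['[system]', 'source_network = nc'])

# Event-level settings override the global ones, exactly as in the
# global_config.merge(event_config) calls above.
global_config.merge(event_config)
results = global_config.validate(Validator())
if not isinstance(results, bool) or not results:
    raise RuntimeError('configuration failed validation: %s' % results)
print(global_config['system']['source_network'])   # -> 'nc'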