Example #1
def test_mapmaker_intensity():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile, samplegeodict=sampledict, resample=False)

    outpath = mkdtemp()

    model_config = container.getConfig()
    comp = container.getComponents('MMI')[0]
    textfile = os.path.join(get_data_path(), 'mapping', 'map_strings.en')
    text_dict = get_text_strings(textfile)

    cities = Cities.fromDefault()
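    # Assemble the argument dictionary consumed by draw_map(); the layer
    # entries (states/provinces, countries, oceans, lakes, roads, faults)
    # are None because this test draws no reference layers.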
    d = {
        'imtype': 'MMI',
        'topogrid': topogrid,
        'allcities': cities,
        'states_provinces': None,
        'countries': None,
        'oceans': None,
        'lakes': None,
        'roads': None,
        'faults': None,
        'datadir': outpath,
        'operator': 'NEIC',
        'filter_size': 10,
        'info': info,
        'component': comp,
        'imtdict': container.getIMTGrids('MMI', comp),
        'ruptdict': copy.deepcopy(container.getRuptureDict()),
        'stationdict': container.getStationDict(),
        'config': model_config,
        'tdict': text_dict
    }

    try:
        fig1, fig2 = draw_map(d)
    except Exception as e:
        raise AssertionError('draw_map() raised an exception: %s' % e)
    finally:
        shutil.rmtree(outpath)
Example #2
    def execute(self):
        """
        Cancel ShakeMap products using methods configured in transfer.conf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        products_dir = os.path.join(datadir, 'products')
        if not os.path.isdir(products_dir):
            raise NotADirectoryError('%s does not exist.' % products_dir)

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(
                install_path, 'config', 'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        config = ConfigObj(transfer_conf, configspec=configspec)

        # get the output container with all the things in it
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # call the transfer method
        self.logger.info('Sending cancel message...')
        _transfer(config, container, products_dir, cancel=True)

        # Create a file called CANCEL in the data directory. The
        # shake program will look for this and not run if present.
        self.logger.info('Creating cancel file...')
        cancelfile = os.path.join(datadir, 'CANCEL')
        with open(cancelfile, 'wt') as cfile:
            cfile.write('Event cancelled at %s\n' %
                        datetime.utcnow().strftime(TIMEFMT))

        # delete the event from the database
        handler = AmplitudeHandler(install_path, data_path)
        handler.deleteEvent(self._eventid)
        container.close()
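
The CANCEL file written above is a sentinel: as the comment notes, the shake program looks for it and will not run while it is present. A minimal sketch of such a check (hypothetical helper; the directory layout is assumed to match the one used above):

import os
import sys

def exit_if_cancelled(data_path, eventid):
    # mirror of the sentinel written by execute() above
    cancelfile = os.path.join(data_path, eventid, 'current', 'CANCEL')
    if os.path.isfile(cancelfile):
        sys.exit('Event %s is cancelled; not running.' % eventid)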
Example #3
    def execute(self):
        """
        Transfer ShakeMap products using methods configured in transfer.conf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)

        # look for the presence of a NO_TRANSFER file in the datadir.
        notransfer = os.path.join(datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info(
                'Event has a %s file blocking transfer.' % NO_TRANSFER)
            return

        products_dir = os.path.join(datadir, 'products')
        if not os.path.isdir(products_dir):
            raise NotADirectoryError('%s does not exist.' % products_dir)

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(
                install_path, 'config', 'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        config = ConfigObj(transfer_conf, configspec=configspec)

        # get the output container with all the things in it
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # call the transfer method
        _transfer(config, container, products_dir)

        # copy the current folder to a new backup directory
        self._make_backup(data_path)

        container.close()
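
The NO_TRANSFER sentinel works the other way: its presence in the event directory blocks transfer. A minimal sketch of setting it (hypothetical helper; NO_TRANSFER is assumed to be the literal filename carried by the constant of the same name):

import os

NO_TRANSFER = 'NO_TRANSFER'  # assumed value of the imported constant

def block_transfer(datadir):
    # touch the sentinel; execute() above logs and returns early
    open(os.path.join(datadir, NO_TRANSFER), 'wt').close()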
Example #4
    def execute(self):
        install_path, data_path = get_config_paths()
        self.datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(self.datadir):
            raise NotADirectoryError('%s is not a valid directory.' %
                                     self.datadir)

        # look for the presence of a NO_TRANSFER file in the datadir.
        notransfer = os.path.join(self.datadir, NO_TRANSFER)
        if os.path.isfile(notransfer):
            self.logger.info('Event has a %s file blocking transfer.' %
                             NO_TRANSFER)
            return

        # get the path to the transfer.conf spec file
        configspec = os.path.join(get_data_path(), 'transferspec.conf')

        # look for an event specific transfer.conf file
        transfer_conf = os.path.join(self.datadir, 'transfer.conf')
        if not os.path.isfile(transfer_conf):
            # if not there, use the system one
            transfer_conf = os.path.join(install_path, 'config',
                                         'transfer.conf')
            if not os.path.isfile(transfer_conf):
                raise FileNotFoundError('%s does not exist.' % transfer_conf)

        # get the config information for transfer
        self.config = ConfigObj(transfer_conf, configspec=configspec)
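        # validate() returns True on success or a dict of per-option
        # results on failure; config_error() reports the problems and
        # raises (the test_config example below checks that it raises
        # RuntimeError).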
        results = self.config.validate(Validator())
        if not isinstance(results, bool) or not results:
            config_error(self.config, results)

        # get the output container with all the things in it
        products_dir = os.path.join(self.datadir, 'products')
        datafile = os.path.join(products_dir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        # extract the info.json object from the container
        self.info = container.getMetadata()
        container.close()

        # check for the presence of a .saved file. If found, do nothing.
        # Otherwise, create the backup directory.
        save_file = os.path.join(self.datadir, SAVE_FILE)
        if not os.path.isfile(save_file):
            logging.info('Making backup directory...')
            self._make_backup(data_path)
            with open(save_file, 'wt') as f:
                tnow = datetime.utcnow().strftime(constants.TIMEFMT)
                f.write('Saved %s by %s\n' % (tnow, self.command_name))
Example #5
def test_migrate():
    with pytest.raises(OSError):
        new_gmpe, ref = migrate_gmpe('CY08')

    data_path = get_data_path()
    cfile = os.path.join(data_path, 'migrate.conf')
    config = ConfigObj(cfile)

    with pytest.raises(KeyError):
        new_gmpe, ref = migrate_gmpe('NotAGMPE', config)

    new_gmpe, ref = migrate_gmpe('CY08', config)
    assert new_gmpe == 'CY14'

    new_gmpe, ref = migrate_gmpe('Kanno2006', config)
    assert new_gmpe == 'Kea06s'

    new_gmpe, ref = migrate_gmpe('MA2005', config)
    assert new_gmpe == 'ASK14'
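
The assertions above imply that migrate.conf maps retired GMPE names to their replacements. A plausible sketch of the parsed structure, inferred only from this test (the section and key names are assumptions, and the second return value, ref, presumably comes from reference data omitted here):

migrate_map = {'gmpe': {'CY08': 'CY14',          # hypothetical shape
                        'Kanno2006': 'Kea06s',
                        'MA2005': 'ASK14'}}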
Example #6
def test_layers():
    data_path = get_data_path()
    layer_path = os.path.join(data_path, 'layers')

    elon = -117.0
    elat = 33.0
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {'induced': 1578.3879076203307,
                 'japan': 7972.1138613743387,
                 'taiwan': 11022.339157753582,
                 'california': 0.0}
    layers_equal(layer_distances, reference)

    elon = -97.5
    elat = 36.5
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {'induced': 0.0,
                 'japan': 8935.9779110700729,
                 'taiwan': 11997.837464370788,
                 'california': 1508.2155746648657}
    layers_equal(layer_distances, reference)

    elon = 121.0
    elat = 22.5
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {'induced': 12041.424518656486,
                 'japan': 1231.8954391427453,
                 'taiwan': 0.0,
                 'california': 10085.281293655946}
    layers_equal(layer_distances, reference)

    #
    # Test for geometry type exception in dist_to_layer by
    # handing it a Point rather than a Polygon or MultiPolygon
    #
    p = Point()
    with pytest.raises(TypeError):
        dist_to_layer(0.0, 0.0, p)
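
layers_equal() is a helper not shown in these excerpts; a minimal sketch of what it presumably asserts (hypothetical implementation, comparing each distance with a floating-point tolerance):

import numpy as np

def layers_equal(layer_distances, reference):
    # same layer names, and each distance numerically close to the
    # reference value
    assert set(layer_distances) == set(reference)
    for name, value in reference.items():
        assert np.allclose(layer_distances[name], value)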
Example #7
def test_config():

    install_dir, data_dir = config.get_config_paths()
    #
    # get_logger()
    #
    log_file = os.path.join(data_dir, 'nc72282711', 'shake.log')
    if os.path.isfile(log_file):
        os.remove(log_file)
    logger = config.get_logger('nc72282711', log_file=True,
                               log_option='debug')
    logger.debug('xyxyxyzz')
    with open(log_file, 'r') as log_fd:
        line = log_fd.readline()
        assert 'xyxyxyzz' in line
    os.remove(log_file)

    logger = config.get_logger('nc72282711', log_option='quiet')
    logger = config.get_logger('nc72282711')
    logger = config.get_logger('nc72282711', log_option='debug')

    #
    # Some stuff we just call and see if it bombs out
    #
    mydatapath = config.get_data_path()
    myinstall, mydata = config.get_config_paths()

    myspec = config.get_configspec()
    myvalid = config.get_custom_validator()

    c1 = ConfigObj(os.path.join(mydatapath, "model.conf"),
                   configspec=myspec)
    c2 = ConfigObj(os.path.join(mydatapath, "modules.conf"),
                   configspec=myspec)
    c3 = ConfigObj(os.path.join(mydatapath, "gmpe_sets.conf"),
                   configspec=myspec)
    c4 = ConfigObj(os.path.join(mydatapath, "northridge_model.conf"),
                   configspec=myspec)
    c5 = ConfigObj(os.path.join(mydatapath, "products.conf"),
                   configspec=myspec)
    c1.merge(c2)
    c1.merge(c3)
    c1.merge(c4)
    c1.merge(c5)

    results = c1.validate(myvalid, preserve_errors=True)

    assert isinstance(results, bool) and results

    config.check_config(c1, logger)
    #
    # Break the config
    #
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ccf'] = 'NotACCF'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ipe'] = 'NotAnIPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmice'] = 'NotAGMICE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 'NotAGMPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)

    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 47
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    ctest = copy.deepcopy(c1)
    del ctest['interp']
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    #
    # Test the profile checker
    #
    ctest = ConfigObj()
    ctest['profiles'] = {'prof1': {'data_path': '/xyz/zzsx/zz',
                                   'install_path': '/xyz/zzsx/zz'},
                         'prof2': {'data_path': data_dir,
                                   'install_path': install_dir}}
    ct1 = config.check_profile_config(ctest)
    assert 'prof1' not in list(ct1['profiles'].keys())
    # os.remove(config_file)
    #
    # annotatedfloat_type()
    #
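    # Suffix conventions, inferred from the assertions below: no suffix or
    # 'd' means decimal degrees, 'm' arc-minutes (/60), 'c' arc-seconds
    # (/3600); anything else, or an empty string, raises ValidateError.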
    res = config.annotatedfloat_type('4.0')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0d')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0m')
    assert isinstance(res, float)
    assert res == 4.0 / 60.0
    res = config.annotatedfloat_type('4.0c')
    assert isinstance(res, float)
    assert res == 4.0 / 3600.0
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('4.0caweoifaw')
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('')
    #
    # weight_list()
    #
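    # weight_list() requires at least `min` non-negative weights; the
    # 0.1/0.3/0.5 failure below suggests the weights must also sum to 1.0.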
    res = config.weight_list(['0.2', '0.3', '0.5'], min=0)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    res = config.weight_list('None', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list(['0.2', '0.3', '0.5'], min=3)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    with pytest.raises(ValidateError):
        res = config.weight_list([], min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[None]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('None', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.2', '0.3', '0.5'], min=4)
    with pytest.raises(ValidateError):
        res = config.weight_list(['-0.2', '0.3', '0.5'], min=3)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.1', '0.3', '0.5'], min=3)
    #
    # gmpe_list()
    #
    res = config.gmpe_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.gmpe_list('[]', min=1)
    res = config.gmpe_list('thing1', min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1', 'thing2'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1', 'thing2']
    with pytest.raises(ValidateError):
        res = config.gmpe_list(['thing1', 'thing2'], min=3)
    with pytest.raises(ValidateError):
        res = config.gmpe_list(7, min=0)
    with pytest.raises(ValidateError):
        res = config.gmpe_list([7], min=0)
    #
    # extent_list()
    #
    res = config.extent_list('[]')
    assert isinstance(res, list)
    assert res == []
    res = config.extent_list([])
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.extent_list(7)
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', 'thing'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', '1000.0'])
    res = config.extent_list(['-20.0', '-10.0', '20.0', '10.0'])
    assert isinstance(res, list)
    assert res == [-20.0, -10.0, 20.0, 10.0]
    #
    # file_type()
    #
    res = config.file_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.file_type('/home/xxxyyyzzz/awefawe')
    res = config.file_type(os.path.abspath(__file__))
    assert isinstance(res, str)
    assert res == os.path.abspath(__file__)
    #
    # directory_type()
    #
    res = config.directory_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.directory_type('/home/xxxyyyzzz/awefawe')
    res = config.directory_type(os.path.dirname(os.path.abspath(__file__)))
    assert isinstance(res, str)
    assert res == os.path.dirname(os.path.abspath(__file__))
    #
    # status_string()
    #
    res = config.status_string('', min=1)
    assert res == 'automatic'
    res = config.status_string('automatic', min=1)
    assert res == 'automatic'
    with pytest.raises(ValidateError):
        res = config.status_string('thing', min=1)
    #
    # cfg_float_list()
    #
    res = config.cfg_float_list(['2.0', '3.0', '4.0'])
    assert res == [2.0, 3.0, 4.0]
    res = config.cfg_float_list('2.0')
    assert res == [2.0]
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('')
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({'a': 'b'})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list([])
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('thing')
    #
    # cfg_float()
    #
    res = config.cfg_float('2.0')
    assert res == 2.0
    with pytest.raises(ValidateError):
        res = config.cfg_float(['2.0'])
    with pytest.raises(ValidateError):
        res = config.cfg_float('')
    with pytest.raises(ValidateError):
        res = config.cfg_float('None')
    with pytest.raises(ValidateError):
        res = config.cfg_float('thing')
Example #8
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Mapping...')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the mapping functions
        layers = config['products']['mapping']['layers']
        if 'topography' in layers and layers['topography'] != '':
            topofile = layers['topography']
        else:
            topofile = None
        if 'roads' in layers and layers['roads'] != '':
            roadfile = layers['roads']
        else:
            roadfile = None
        if 'faults' in layers and layers['faults'] != '':
            faultfile = layers['faults']
        else:
            faultfile = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
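        # Subset the topography grid, or synthesize a flat zero-elevation
        # grid when no topography file is configured.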
        if topofile:
            topogrid = read(topofile, samplegeodict=sampledict, resample=False)
        else:
            tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
            topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        extent = (float(xmin), float(ymin), float(xmax), float(ymax))
        if 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
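            # Count the state/province lines inside the map extent; if
            # there are too many, skip drawing them, otherwise rebuild the
            # feature object (the counting pass consumed it).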
            states_provs = list(states_provs.intersecting_geometries(extent))
            if len(states_provs) > 300:
                states_provs = None
            else:
                states_provs = cfeature.NaturalEarthFeature(
                    category='cultural',
                    name='admin_1_states_provinces_lines',
                    scale='10m',
                    facecolor='none')

            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')

            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)

            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)

        if faultfile is not None:
            faults = ShapelyFeature(Reader(faultfile).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor='none')
        else:
            faults = None

        if roadfile is not None:
            roads = ShapelyFeature(Reader(roadfile).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor='none')
            if len(list(roads.intersecting_geometries(extent))) > 200:
                roads = None
            else:
                roads = ShapelyFeature(Reader(roadfile).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        else:
            roads = None

        alist = []
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'faults': faults,
                'datadir': datadir,
                'operator': operator,
                'filter_size': filter_size,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict
            }
            alist.append(d)
            if imtype == 'MMI':
                g = copy.deepcopy(d)
                g['imtype'] = 'thumbnail'
                alist.append(g)
                h = copy.deepcopy(d)
                h['imtype'] = 'overlay'
                alist.append(h)
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.jpg', 'image/jpeg')
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.pdf', 'application/pdf')
                self.contents.addFile('intensityThumbnail',
                                      'Intensity Thumbnail',
                                      'Thumbnail of intensity map.',
                                      'pin-thumbnail.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.pngw', 'text/plain')
            else:
                fileimt = oq_to_file(imtype)
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.jpg', 'image/jpeg')
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
Example #9
def test_layers():
    data_path = get_data_path()
    layer_path = os.path.join(data_path, 'layers')

    elon = -117.0
    elat = 33.0
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {
        'induced': 1578.3879076203307,
        'japan': 7972.1138613743387,
        'taiwan': 11022.339157753582,
        'california': 0.0
    }
    layers_equal(layer_distances, reference)

    elon = -97.5
    elat = 36.5
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {
        'induced': 0.0,
        'japan': 8935.9779110700729,
        'taiwan': 11997.837464370788,
        'california': 1508.2155746648657
    }
    layers_equal(layer_distances, reference)

    elon = 121.0
    elat = 22.5
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {
        'induced': 12041.424518656486,
        'japan': 1231.8954391427453,
        'taiwan': 0.0,
        'california': 10085.281293655946
    }
    layers_equal(layer_distances, reference)

    #
    # Test for geometry type exception in dist_to_layer by
    # handing it a Point rather than a Polygon or MultiPolygon
    #
    p = Point()
    with pytest.raises(TypeError):
        dist_to_layer(0.0, 0.0, p)

    #
    # Test the updates to the config based on being in a layer (or not)
    #
    install_path, data_path = get_config_paths()
    config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
    validate_config(config, install_path)
    # Taiwan
    elon = 121.0
    elat = 22.5

    config = update_config_regions(elat, elon, config)
    assert config['tectonic_regions']['acr']['gmpe'] == \
        ['active_crustal_taiwan', 'active_crustal_taiwan_deep']

    config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
    validate_config(config, install_path)
    # Induced
    elon = -97.5
    elat = 36.5

    config = update_config_regions(elat, elon, config)
    assert config['tectonic_regions']['scr']['gmpe'] == \
        ['stable_continental_induced', 'stable_continental_nshmp2014_rlme',
         'stable_continental_deep']

    config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
    validate_config(config, install_path)
    # Not in a layer
    elon = -77.5
    elat = 36.5

    config = update_config_regions(elat, elon, config)
    assert config['tectonic_regions']['acr']['gmpe'] == \
        ['active_crustal_nshmp2014', 'active_crustal_deep']
    assert config['tectonic_regions']['scr']['gmpe'] == \
        ['stable_continental_nshmp2014_rlme', 'stable_continental_deep']
Example #10
def test_layers():
    data_path = get_data_path()
    layer_path = os.path.join(data_path, 'layers')

    elon = -117.0
    elat = 33.0
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {
        'italy': 8710.04538291321,
        'hawaii': 4009.810418951339,
        'chile': 4803.501119602294,
        'japan': 7462.090628325871,
        'europe_share': 6337.760076297577,
        'induced': 1581.7186730857293,
        'australia': 9822.519908573386,
        'turkey': 9595.056080385448,
        'greece': 9513.190936814879,
        'china': 8749.15702828136,
        'california': 0.0,
        'new_zealand': 8859.317797859405,
        'taiwan': 9698.206101592557
    }
    layers_equal(layer_distances, reference)

    elon = -97.5
    elat = 36.5
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {
        'italy': 7600.631037485594,
        'hawaii': 5619.745326956643,
        'chile': 3585.560465124193,
        'japan': 8221.294305532281,
        'europe_share': 5178.578550706942,
        'induced': 0.0,
        'australia': 10877.954598395638,
        'turkey': 8706.788113517472,
        'greece': 8551.021519240006,
        'china': 9045.63060410134,
        'california': 1511.4662456781018,
        'new_zealand': 9950.334279593713,
        'taiwan': 10301.898766277141
    }
    layers_equal(layer_distances, reference)

    elon = 121.0
    elat = 22.5
    layer_distances = get_layer_distances(elon, elat, layer_path)
    reference = {
        'italy': 8555.0797075834,
        'hawaii': 7467.418933530909,
        'chile': 12071.522116104166,
        'japan': 1230.4098709858458,
        'europe_share': 6828.880422152432,
        'induced': 10327.489296208103,
        'australia': 3857.7430367906,
        'turkey': 6892.820456944929,
        'greece': 8039.666277993378,
        'china': 284.61969009227863,
        'california': 9064.755577907308,
        'new_zealand': 7707.863992372902,
        'taiwan': 0.0
    }
    layers_equal(layer_distances, reference)

    #
    # Test for geometry type exception in dist_to_layer by
    # handing it a Point rather than a Polygon or MultiPolygon
    #
    p = Point()
    with pytest.raises(TypeError):
        dist_to_layer(0.0, 0.0, p)

    #
    # Test the updates to the config based on being in a layer (or not)
    #
    install_path, data_path = get_config_paths()
    global_data_path = os.path.join(os.path.expanduser('~'), 'shakemap_data')
    config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
    validate_config(config, install_path, data_path, global_data_path)
    # Taiwan
    elon = 121.0
    elat = 22.5

    config = update_config_regions(elat, elon, config)
    assert config['tectonic_regions']['acr']['gmpe'] == \
        ['active_crustal_taiwan', 'active_crustal_taiwan_deep']

    config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
    validate_config(config, install_path, data_path, global_data_path)
    # Induced
    elon = -97.5
    elat = 36.5

    config = update_config_regions(elat, elon, config)
    assert config['tectonic_regions']['scr']['gmpe'] == \
        ['stable_continental_nshmp2014_rlme', 'stable_continental_deep']

    config = ConfigObj(os.path.join(install_path, 'config', 'select.conf'))
    validate_config(config, install_path, data_path, global_data_path)
    # Not in a layer
    elon = -77.5
    elat = 36.5

    config = update_config_regions(elat, elon, config)
    assert config['tectonic_regions']['acr']['gmpe'] == \
        ['active_crustal_nshmp2014', 'active_crustal_deep']
    assert config['tectonic_regions']['scr']['gmpe'] == \
        ['stable_continental_nshmp2014_rlme', 'stable_continental_deep']
Example #11
def test_config():

    #
    # get_logger()
    #
    logger = config.get_logger('nc72282711', log_option='debug')

    #
    # Some stuff we just call and see if it bombs out
    #
    mydatapath = config.get_data_path()
    myinstall, mydata = config.get_config_paths()

    myspec = config.get_configspec()
    myvalid = config.get_custom_validator()

    c1 = ConfigObj(os.path.join(mydatapath, "model.conf"),
                   configspec=myspec)
    c2 = ConfigObj(os.path.join(mydatapath, "modules.conf"),
                   configspec=myspec)
    c3 = ConfigObj(os.path.join(mydatapath, "gmpe_sets.conf"),
                   configspec=myspec)
    c4 = ConfigObj(os.path.join(mydatapath, "northridge_model.conf"),
                   configspec=myspec)
    c5 = ConfigObj(os.path.join(mydatapath, "products.conf"),
                   configspec=myspec)
    c1.merge(c2)
    c1.merge(c3)
    c1.merge(c4)
    c1.merge(c5)

    results = c1.validate(myvalid, preserve_errors=True)

    assert isinstance(results, bool) and results

    config.check_config(c1, logger)
    #
    # Break the config
    #
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ccf'] = 'NotACCF'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['ipe'] = 'NotAnIPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmice'] = 'NotAGMICE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)
    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 'NotAGMPE'
    with pytest.raises(ValidateError):
        config.check_config(ctest, logger)

    ctest = copy.deepcopy(c1)
    ctest['modeling']['gmpe'] = 47
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    ctest = copy.deepcopy(c1)
    del ctest['interp']
    results = ctest.validate(myvalid, preserve_errors=True)
    assert isinstance(results, dict)
    with pytest.raises(RuntimeError):
        config.config_error(ctest, results)

    #
    # annotatedfloat_type()
    #
    res = config.annotatedfloat_type('4.0')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0d')
    assert isinstance(res, float)
    assert res == 4.0
    res = config.annotatedfloat_type('4.0m')
    assert isinstance(res, float)
    assert res == 4.0 / 60.0
    res = config.annotatedfloat_type('4.0c')
    assert isinstance(res, float)
    assert res == 4.0 / 3600.0
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('4.0caweoifaw')
    with pytest.raises(ValidateError):
        res = config.annotatedfloat_type('')
    #
    # weight_list()
    #
    res = config.weight_list(['0.2', '0.3', '0.5'], min=0)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    res = config.weight_list('None', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    res = config.weight_list(['0.2', '0.3', '0.5'], min=3)
    assert isinstance(res, list)
    assert res == [0.2, 0.3, 0.5]
    with pytest.raises(ValidateError):
        res = config.weight_list([], min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('[None]', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list('None', min=1)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.2', '0.3', '0.5'], min=4)
    with pytest.raises(ValidateError):
        res = config.weight_list(['-0.2', '0.3', '0.5'], min=3)
    with pytest.raises(ValidateError):
        res = config.weight_list(['0.1', '0.3', '0.5'], min=3)
    #
    # gmpe_list()
    #
    res = config.gmpe_list('[]', min=0)
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.gmpe_list('[]', min=1)
    res = config.gmpe_list('thing1', min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1']
    res = config.gmpe_list(['thing1', 'thing2'], min=0)
    assert isinstance(res, list)
    assert res == ['thing1', 'thing2']
    with pytest.raises(ValidateError):
        res = config.gmpe_list(['thing1', 'thing2'], min=3)
    with pytest.raises(ValidateError):
        res = config.gmpe_list(7, min=0)
    with pytest.raises(ValidateError):
        res = config.gmpe_list([7], min=0)
    #
    # extent_list()
    #
    res = config.extent_list('[]')
    assert isinstance(res, list)
    assert res == []
    res = config.extent_list([])
    assert isinstance(res, list)
    assert res == []
    with pytest.raises(ValidateError):
        res = config.extent_list(7)
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', 'thing'])
    with pytest.raises(ValidateError):
        res = config.extent_list(['-20.0', '-10.0', '20.0', '1000.0'])
    res = config.extent_list(['-20.0', '-10.0', '20.0', '10.0'])
    assert isinstance(res, list)
    assert res == [-20.0, -10.0, 20.0, 10.0]
    #
    # file_type()
    #
    res = config.file_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.file_type('/home/xxxyyyzzz/awefawe')
    res = config.file_type(os.path.abspath(__file__))
    assert isinstance(res, str)
    assert res == os.path.abspath(__file__)
    #
    # directory_type()
    #
    res = config.directory_type('None')
    assert isinstance(res, str)
    assert not res
    with pytest.raises(ValidateError):
        res = config.directory_type('/home/xxxyyyzzz/awefawe')
    res = config.directory_type(os.path.dirname(os.path.abspath(__file__)))
    assert isinstance(res, str)
    assert res == os.path.dirname(os.path.abspath(__file__))
    #
    # status_string()
    #
    res = config.status_string('', min=1)
    assert res == 'automatic'
    res = config.status_string('automatic', min=1)
    assert res == 'automatic'
    with pytest.raises(ValidateError):
        res = config.status_string('thing', min=1)
    #
    # cfg_float_list()
    #
    res = config.cfg_float_list(['2.0', '3.0', '4.0'])
    assert res == [2.0, 3.0, 4.0]
    res = config.cfg_float_list('2.0')
    assert res == [2.0]
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('')
    with pytest.raises(ValidateError):
        res = config.cfg_float_list({})
    with pytest.raises(ValidateError):
        res = config.cfg_float_list([])
    with pytest.raises(ValidateError):
        res = config.cfg_float_list('thing')
    #
    # cfg_float()
    #
    res = config.cfg_float('2.0')
    assert res == 2.0
    with pytest.raises(ValidateError):
        res = config.cfg_float(['2.0'])
    with pytest.raises(ValidateError):
        res = config.cfg_float('')
    with pytest.raises(ValidateError):
        res = config.cfg_float('None')
    with pytest.raises(ValidateError):
        res = config.cfg_float('thing')
Example #12
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('uncertaintymaps module can only '
                                      'operate on gridded data, not sets of '
                                      'points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Uncertainty mapping...')

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the uncertainty mapping functions
        layers = config['products']['mapping']['layers']
        if 'countries' in layers and layers['countries'] != '':
            countries_file = layers['countries']
        else:
            countries_file = None
        if 'states_provs' in layers and layers['states_provs'] != '':
            states_provs_file = layers['states_provs']
        else:
            states_provs_file = None
        if 'oceans' in layers and layers['oceans'] != '':
            oceans_file = layers['oceans']
        else:
            oceans_file = None
        if 'lakes' in layers and layers['lakes'] != '':
            lakes_file = layers['lakes']
        else:
            lakes_file = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
        topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        faults = None
        roads = None
        if states_provs_file is not None:
            states_provs = ShapelyFeature(
                Reader(states_provs_file).geometries(),
                ccrs.PlateCarree(),
                facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            # The feature constructor doesn't necessarily download the
            # data, but we want it to so that multiple threads don't
            # try to do it at once when they actually access the data.
            # So below we just call the geometries() method to trigger
            # the download if necessary.
            _ = states_provs.geometries()

        if countries_file is not None:
            countries = ShapelyFeature(Reader(countries_file).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')
            _ = countries.geometries()

        if oceans_file is not None:
            oceans = ShapelyFeature(Reader(oceans_file).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)
            _ = oceans.geometries()

        if lakes_file is not None:
            lakes = ShapelyFeature(Reader(lakes_file).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)
            _ = lakes.geometries()

        alist = []
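        # .get() plus 'or None' normalizes missing or empty-string
        # license settings to None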
        llogo = config['products']['mapping'].get('license_logo') or None
        ltext = config['products']['mapping'].get('license_text') or None
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'roadcolor': layers['roadcolor'],
                'roadwidth': layers['roadwidth'],
                'faults': faults,
                'faultcolor': layers['faultcolor'],
                'faultwidth': layers['faultwidth'],
                'datadir': datadir,
                'operator': operator,
                'filter_size': 0,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict,
                'display_magnitude': self.display_magnitude,
                'pdf_dpi': config['products']['mapping']['pdf_dpi'],
                'img_dpi': config['products']['mapping']['img_dpi'],
                'license_logo': llogo,
                'license_text': ltext,
            }
            alist.append(d)

            #
            # Populate the contents.xml
            #
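            # 'std' is the total uncertainty, 'phi' the within-event part,
            # and 'tau' the between-event part; skip any component the
            # container lacks.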
            for key in ('std', 'phi', 'tau'):
                if key not in d['imtdict'] or d['imtdict'][key] is None:
                    continue

                if key == 'std':
                    ext = '_sigma'
                    utype = ' Total'
                elif key == 'phi':
                    ext = '_phi'
                    utype = ' Within-event'
                else:
                    ext = '_tau'
                    utype = ' Between-event'

                if imtype == 'MMI':
                    fileimt = 'intensity'
                else:
                    fileimt = oq_to_file(imtype)

                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.jpg', 'image/jpeg')
                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
Example #13
def create_polygons(container,
                    datadir,
                    logger,
                    max_workers,
                    method='pcontour'):
    """ Generates a set of closed polygons (with or without holes) using the
    specified method (either pcontour or skimage), and uses fiona to convert
    the resulting GeoJSON objects into ESRI-style shape files which are then
    zipped into an archive along with .prj, .lyr, and metadata .xml files. A
    warning will be emitted if .lyr or .xml files cannot be found for the
    ground motion parameter in question.

    Args:
        container (ShakeMapOutputContainer): An open ShakeMap output
            container object.
        datadir (str): The products directory for the event in question.
        logger (logger): This module's logger object.
        max_workers (int): The maximum number of parallel processes with
            which to generate the polygons (0 means run serially).
        method (str): Contouring implementation to use (either 'pcontour'
            or 'skimage').

    Returns:
        (nothing): Nothing.
    """

    # gmice info for shakelib.plotting.contour
    config = container.getConfig()
    gmice = get_object_from_config('gmice', 'modeling', config)
    gmice_imts = gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES
    gmice_pers = gmice.DEFINED_FOR_SA_PERIODS

    component = list(container.getComponents())[0]
    imts = container.getIMTs(component)

    if method == 'pcontour':
        schema = {
            'properties':
            OrderedDict([('AREA', 'float:13.3'), ('PERIMETER', 'float:14.3'),
                         ('PGAPOL_', 'int:12'), ('PGAPOL_ID', 'int:12'),
                         ('GRID_CODE', 'int:12'),
                         ('PARAMVALUE', 'float:14.4')]),
            'geometry':
            'Polygon'
        }
    elif method == 'skimage':
        schema = {
            'properties':
            OrderedDict([('value', 'float:2.1'), ('units', 'str'),
                         ('color', 'str'), ('weight', 'float:13.3')]),
            'geometry':
            'MultiLineString'
        }
    else:
        raise ValueError('Unknown contouring method {}'.format(method))
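    # pcontour emits filled Polygon features with legacy ArcGIS-style
    # attribute names; skimage emits MultiLineString contour lines tagged
    # with value/units/color/weight for styling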

    smdata = os.path.join(get_data_path(), 'gis')
    # Make a directory for the files to live in prior to being zipped
    alist = []
    with tempfile.TemporaryDirectory(dir=datadir) as tdir:
        for imt in imts:
            gdict = container.getIMTGrids(imt, component)
            fgrid = gdict['mean']
            if imt == 'MMI':
                fname = 'mi'
            elif imt == 'PGV':
                fname = 'pgv'
            else:
                fname = oq_to_file(imt)

            if method == 'pcontour':
                my_gmice = None
                if imt == 'MMI':
                    contour_levels = np.arange(0.1, 10.2, 0.2)
                elif imt == 'PGV':
                    fgrid = np.exp(fgrid)
                    cont_max = np.ceil(np.max(fgrid)) + 2.0
                    contour_levels = np.arange(1.0, cont_max, 2.0)
                    if contour_levels.size == 0:
                        contour_levels = np.array([1.0])
                else:
                    fgrid = np.exp(fgrid)
                    cont_max = (np.ceil(100 * np.max(fgrid)) + 2.0) / 100.0
                    contour_levels = np.arange(0.01, cont_max, 0.02)
                    if contour_levels.size == 0:
                        contour_levels = np.array([0.01])
            else:
                # skimage method chooses its own levels
                contour_levels = None
                # but wants gmice info
                oqimt = OQIMT.from_string(imt)
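                # only hand the GMICE to the contouring code when it is
                # actually defined for this IMT (and, for SA, this period)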
                if imt == 'MMI' or not isinstance(oqimt, tuple(gmice_imts)) or \
                   (isinstance(oqimt, OQIMT.SA) and oqimt.period not in gmice_pers):
                    my_gmice = None
                else:
                    my_gmice = gmice
            a = {
                'fgrid': fgrid,
                'dx': gdict['mean_metadata']['dx'],
                'dy': gdict['mean_metadata']['dy'],
                'xmin': gdict['mean_metadata']['xmin'],
                'ymax': gdict['mean_metadata']['ymax'],
                'contour_levels': contour_levels,
                'tdir': tdir,
                'fname': fname,
                'schema': schema,
                'imt': imt,
                'gmice': my_gmice,
                'gdict': gdict
            }
            alist.append(a)
            copyfile(os.path.join(smdata, 'WGS1984.prj'),
                     os.path.join(tdir, fname + '.prj'))
            lyrfile = os.path.join(smdata, fname + '.lyr')
            if not os.path.isfile(lyrfile):
                logger.warning("No " + fname + ".lyr file in " + smdata)
            else:
                copyfile(lyrfile, os.path.join(tdir, fname + '.lyr'))
            xmlfile = os.path.join(smdata, fname + '.shp.xml')
            if not os.path.isfile(xmlfile):
                logger.warning("No " + fname + ".shp.xml file in " + smdata)
            else:
                copyfile(xmlfile, os.path.join(tdir, fname + '.shp.xml'))

        worker = partial(make_shape_files, method=method)

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(worker, alist)
                list(results)
        else:
            for adict in alist:
                worker(adict)

        zfilename = os.path.join(datadir, 'shape.zip')
        with zipfile.ZipFile(zfilename, mode='w',
                             compression=zipfile.ZIP_DEFLATED) as zfile:
            _, _, filenames = next(os.walk(tdir))
            for sfile in filenames:
                zfile.write(os.path.join(tdir, sfile), sfile)
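For reference, here is the pcontour level arithmetic above applied to a toy PGA-like grid; this assumes (based on the np.exp calls) that the container stores grids as natural-log amplitudes:

import numpy as np

log_grid = np.array([[-4.6, -3.0], [-2.3, -1.6]])  # toy ln(PGA in g) values
fgrid = np.exp(log_grid)                 # back to linear units, max ~0.202 g
cont_max = (np.ceil(100 * np.max(fgrid)) + 2.0) / 100.0  # 0.23
contour_levels = np.arange(0.01, cont_max, 0.02)
print(contour_levels)  # [0.01 0.03 ... 0.21] -- a contour every 0.02 g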
Example #15
def create_polygons(container, datadir, logger, max_workers):
    """ Generates a set of closed polygons (with or without holes) using
    the pcontour function, and uses fiona to convert the resulting GeoJSON
    objects into ESRI-style shape files which are then zipped into an
    archive along with .prj, .lyr, and metadata .xml files. A warning will
    be emitted if .lyr, or .xml files cannot be found for the ground motion
    parameter in question.

    Args:
        container (ShakeMapOutputContainer): An open ShakeMap output
            container object.
        datadir (str): The products directory for the event in question.
        logger (logger): This module's logger object.

    Returns:
        (nothing): Nothing.
    """

    component = list(container.getComponents())[0]
    imts = container.getIMTs(component)

    schema = {
        'properties':
        OrderedDict([('AREA', 'float:13.3'), ('PERIMETER', 'float:14.3'),
                     ('PGAPOL_', 'int:12'), ('PGAPOL_ID', 'int:12'),
                     ('GRID_CODE', 'int:12'), ('PARAMVALUE', 'float:14.4')]),
        'geometry':
        'Polygon'
    }

    smdata = os.path.join(get_data_path(), 'gis')
    # Make a directory for the files to live in prior to being zipped
    alist = []
    with tempfile.TemporaryDirectory(dir=datadir) as tdir:
        for imt in imts:
            gdict = container.getIMTGrids(imt, component)
            fgrid = gdict['mean']
            if imt == 'MMI':
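                # MMI levels 0.1, 0.3, ..., 10.1 (arange excludes the 10.2
                # endpoint), i.e. a contour every 0.2 intensity units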
                contour_levels = np.arange(0.1, 10.2, 0.2)
                fname = 'mi'
            elif imt == 'PGV':
                fgrid = np.exp(fgrid)
                cont_max = np.ceil(np.max(fgrid)) + 2.0
                contour_levels = np.arange(1.0, cont_max, 2.0)
                if contour_levels.size == 0:
                    contour_levels = np.array([1.0])
                fname = 'pgv'
            else:
                fgrid = np.exp(fgrid)
                cont_max = (np.ceil(100 * np.max(fgrid)) + 2.0) / 100.0
                contour_levels = np.arange(0.01, cont_max, 0.02)
                if contour_levels.size == 0:
                    contour_levels = np.array([0.01])
                fname = oq_to_file(imt)
            a = {
                'fgrid': fgrid,
                'dx': gdict['mean_metadata']['dx'],
                'dy': gdict['mean_metadata']['dy'],
                'xmin': gdict['mean_metadata']['xmin'],
                'ymax': gdict['mean_metadata']['ymax'],
                'contour_levels': contour_levels,
                'tdir': tdir,
                'fname': fname,
                'schema': schema
            }
            alist.append(a)
            copyfile(os.path.join(smdata, 'WGS1984.prj'),
                     os.path.join(tdir, fname + '.prj'))
            lyrfile = os.path.join(smdata, fname + '.lyr')
            if not os.path.isfile(lyrfile):
                logger.warning("No " + fname + ".lyr file in " + smdata)
            else:
                copyfile(lyrfile, os.path.join(tdir, fname + '.lyr'))
            xmlfile = os.path.join(smdata, fname + '.shp.xml')
            if not os.path.isfile(xmlfile):
                logger.warning("No " + fname + ".shp.xml file in " + smdata)
            else:
                copyfile(xmlfile, os.path.join(tdir, fname + '.shp.xml'))

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_shape_files, alist)
                list(results)
        else:
            for adict in alist:
                make_shape_files(adict)

        zfilename = os.path.join(datadir, 'shape.zip')
        with zipfile.ZipFile(zfilename, mode='w',
                             compression=zipfile.ZIP_DEFLATED) as zfile:
            _, _, filenames = next(os.walk(tdir))
            for sfile in filenames:
                zfile.write(os.path.join(tdir, sfile), sfile)
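make_shape_files itself is not shown in these excerpts; from its two call sites (a bare make_shape_files(adict) here, and partial(make_shape_files, method=method) in Example #14) its contract can be inferred as roughly the following stub (name and body are illustrative, not the actual implementation):

def make_shape_files(adict, method='pcontour'):
    # hypothetical stub inferred from the call sites above: consumes one
    # per-IMT dict (fgrid, dx/dy, xmin/ymax, contour_levels, tdir, fname,
    # schema, ...) and writes the shapefile parts into adict['tdir']
    raise NotImplementedError('illustrative stub only')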