def test():
    print('Test loading geonames cities file...')
    cities = Cities.fromDefault()  # load from a file contained in repo
    assert len(cities) == 145315
    print('Passed loading geonames cities file.')

    print('Test getting city names and coordinates...')
    lat, lon, names = cities.getCities()
    assert len(lat) == len(cities)
    print('Passed getting city names and coordinates.')

    print('Test limiting cities using California bounds...')
    ymin, ymax = 32.394, 42.062
    xmin, xmax = -125.032, -114.002
    bcities = cities.limitByBounds((xmin, xmax, ymin, ymax))
    bounds = bcities.getBounds()
    assert bounds[0] > xmin and bounds[1] < xmax and \
        bounds[2] > ymin and bounds[3] < ymax
    print('Passed limiting cities using California bounds.')

    print('Test limiting cities using a 2 by 4 grid...')
    gcities = bcities.limitByGrid(nx=2, ny=4, cities_per_grid=10)
    assert len(gcities) <= 2 * 4 * 10
    print('Passed limiting cities using a 2 by 4 grid.')

    print('Test getting cities by name (Los Angeles)...')
    cityofangels = bcities.limitByName('Los Angeles').limitByPopulation(
        1000000)
    assert len(cityofangels) == 1
    print('Passed getting cities by name (Los Angeles).')

    print('Test limiting cities to a 50 km radius around LA...')
    clat, clon = 34.048019, -118.244133
    rcities = bcities.limitByRadius(clat, clon, 50)
    print('Passed limiting cities to a 50 km radius around LA.')

    print('Test limiting cities with population above 50,000...')
    popthresh = 50000
    bigcities = rcities.limitByPopulation(popthresh)
    df = bigcities.getDataFrame()
    assert df['pop'].max() >= popthresh
    print('Passed limiting cities with population above 50,000.')

    print('Test saving cities and reading them back in...')
    foo, tmpfile = tempfile.mkstemp()
    os.close(foo)
    bigcities.save(tmpfile)
    newcities = Cities.fromCSV(tmpfile)
    assert len(bigcities) == len(newcities)
    print('Passed saving cities and reading them back in.')
def __init__(self, cities, mmigrid):
    """Create a PagerCities object with a MapIO Cities instance and a
    Grid2 object containing MMI data.

    :param cities: BasemapCities instance.
    :param mmigrid: Grid2 object containing MMI data from a ShakeMap.
    """
    xmin, xmax, ymin, ymax = mmigrid.getBounds()
    dataframe = cities.limitByBounds((xmin, xmax, ymin, ymax)).getDataFrame()
    lat = dataframe['lat'].as_matrix()
    lon = dataframe['lon'].as_matrix()
    mmi = mmigrid.getValue(lat, lon)
    dataframe['mmi'] = mmi
    self._cities = Cities(dataframe)
def test_mapmaker_intensity():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile,
                            samplegeodict=sampledict,
                            resample=False)

    outpath = mkdtemp()

    model_config = container.getConfig()
    comp = container.getComponents('MMI')[0]
    textfile = os.path.join(get_data_path(), 'mapping', 'map_strings.en')
    text_dict = get_text_strings(textfile)

    cities = Cities.fromDefault()
    d = {
        'imtype': 'MMI',
        'topogrid': topogrid,
        'allcities': cities,
        'states_provinces': None,
        'countries': None,
        'oceans': None,
        'lakes': None,
        'roads': None,
        'faults': None,
        'datadir': outpath,
        'operator': 'NEIC',
        'filter_size': 10,
        'info': info,
        'component': comp,
        'imtdict': container.getIMTGrids('MMI', comp),
        'ruptdict': copy.deepcopy(container.getRuptureDict()),
        'stationdict': container.getStationDict(),
        'config': model_config,
        'tdict': text_dict
    }

    try:
        fig1, fig2 = draw_map(d)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
def draw_stations_map(pstreams, event, event_dir):
    # draw map of stations and cities and stuff
    lats = np.array(
        [stream[0].stats.coordinates['latitude'] for stream in pstreams])
    lons = np.array(
        [stream[0].stats.coordinates['longitude'] for stream in pstreams])
    map_width = event.magnitude
    cy = event.latitude
    cx = event.longitude
    xmin = lons.min()
    xmax = lons.max()
    ymin = lats.min()
    ymax = lats.max()

    if xmax - xmin < map_width:
        xmin = cx - map_width / 2
        xmax = cx + map_width / 2
    if ymax - ymin < map_width:
        ymin = cy - map_width / 2
        ymax = cy + map_width / 2

    bounds = (xmin, xmax, ymin, ymax)
    figsize = (10, 10)
    cities = Cities.fromDefault()
    mmap = MercatorMap(bounds, figsize, cities)
    mmap.drawCities(draw_dots=True)
    ax = mmap.axes
    draw_scale(ax)
    ax.plot(cx, cy, 'r*', markersize=16, transform=mmap.geoproj, zorder=8)
    status = [
        FAILED_COLOR if np.any([trace.hasParameter("failure")
                                for trace in stream]) else PASSED_COLOR
        for stream in pstreams
    ]
    ax.scatter(lons, lats, c=status, marker='^', edgecolors='k',
               transform=mmap.geoproj, zorder=100, s=48)

    scale = '50m'
    land = cfeature.NaturalEarthFeature(category='physical',
                                        name='land',
                                        scale=scale,
                                        facecolor=LAND_COLOR)
    ocean = cfeature.NaturalEarthFeature(category='physical',
                                         name='ocean',
                                         scale=scale,
                                         facecolor=OCEAN_COLOR)
    ax.add_feature(land)
    ax.add_feature(ocean)
    ax.coastlines(resolution=scale, zorder=10, linewidth=1)
    mapfile = os.path.join(event_dir, 'stations_map.png')
    plt.savefig(mapfile)
    return mapfile
def test_mapcity():
    cities = Cities.fromDefault()  # load from a file contained in repo
    df = cities._dataframe
    mapcities = MapCities(df)
    fontlist = mapcities.getFontList()
    assert len(fontlist) > 0
    try:
        mapcities.limitByMapCollision()
    except NotImplementedError as nie:
        assert 1 == 1
def __init__(self):
    dbfile = os.path.join(get_data_dir(), DBFILE)
    cfgfile = os.path.join(get_data_dir(), CFGFILE)
    self._config = yaml.load(open(cfgfile, 'rt'))
    self._cities = Cities.fromDefault()
    create = False
    if not os.path.isfile(dbfile):
        create = True
    self._seqdb = SequenceDatabase(dbfile, self._config, create=create)
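# Note: newer PyYAML (>= 5.1) warns when yaml.load() is called without an
# explicit Loader, and PyYAML 6 requires one. If the config file uses no
# custom YAML tags, a safer equivalent for the load above would be:
#     self._config = yaml.safe_load(open(cfgfile, 'rt'))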
def test_mmap(outfile=None, bounds=None):
    if bounds is None:
        bounds = xmin, ymin, xmax, ymax = \
            -121.046000, -116.046000, 32.143500, 36.278500
    else:
        xmin, ymin, xmax, ymax = bounds
    figsize = (7, 7)
    cities = Cities.fromDefault()
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    fig = mmap.figure
    ax = mmap.axes
def mapSequence(self, sequence):
    sequence_frame = self.getSequenceEvents(sequence['name'])
    fig = plt.figure(figsize=[7, 7])
    clon = sequence['center_lon']
    clat = sequence['center_lat']
    xmin = sequence['xmin'] - 1
    xmax = sequence['xmax'] + 1
    ymin = sequence['ymin'] - 1
    ymax = sequence['ymax'] + 1
    bounds = [xmin, xmax, ymin, ymax]
    figsize = (7, 7)
    cities = Cities.fromDefault()
    dims = [0.1, 0.1, 0.8, 0.8]
    mmap = MercatorMap(bounds, figsize, cities, dimensions=dims)
    fig = mmap.figure
    ax = mmap.axes
    proj = mmap.proj
    markersizes = list(MSIZES.values())
    for idx, row in sequence_frame.iterrows():
        elat = row['latitude']
        elon = row['longitude']
        emag = row['magnitude']
        mdiff = np.abs(emag - np.array(list(MSIZES.keys())))
        imin = mdiff.argmin()
        markersize = markersizes[imin]
        zorder = 1 / markersize
        ax.plot([elon], [elat], 'g', marker='o', mec='k',
                markersize=markersize, zorder=zorder,
                transform=ccrs.PlateCarree())
    mmap.drawCities(draw_dots=True)
    _draw_graticules(ax, xmin, xmax, ymin, ymax)
    corner = 'll'
    ax.coastlines(resolution='50m')
    draw_scale(ax, corner, pady=0.05, padx=0.05, zorder=SCALE_ZORDER)
    states_provinces = cfeature.NaturalEarthFeature(
        category='cultural',
        name='admin_1_states_provinces_lines',
        scale='50m',
        facecolor='none')
    plt.title('%s (N=%i)' % (sequence['name'], sequence['n_earthquakes']))
    return ax, fig
def __init__(self):
    self._meangrid, self._stdgrid = get_rates()
    self._dbfile = os.path.join(get_data_dir(), DBFILE)
    self._cities = Cities.fromDefault()
    if not os.path.isfile(self._dbfile):
        self._db = sqlite3.connect(self._dbfile)
        self._db.row_factory = sqlite3.Row
        self._cursor = self._db.cursor()
        create_tables(self._db, self._cursor)
    else:
        self._db = sqlite3.connect(self._dbfile)
        self._db.row_factory = sqlite3.Row
        self._cursor = self._db.cursor()
    self._debug_plot_counter = 1
def test_mmap(outfile=None, bounds=None):
    if bounds is None:
        bounds = xmin, ymin, xmax, ymax = \
            -121.046000, -116.046000, 32.143500, 36.278500
    else:
        xmin, ymin, xmax, ymax = bounds
    figsize = (7, 7)
    cities = Cities.fromDefault()
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    ax = mmap.axes
    # TODO -- Travis hangs here so commenting out stuff so it doesn't hang.
    # Should sort out issue to fully test this module.
    # fig.canvas.draw()
    ax.coastlines(resolution="10m", zorder=10)
    # plt.show()
    mmap.drawCities(shadow=True)
    if outfile:
        plt.savefig(outfile)
        print(f"Figure saved to {outfile}")
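# A hypothetical manual invocation (not part of the original snippet), useful
# when checking the rendered map by eye; assumes this test file is run as a
# script and that 'mercator_map.png' is a writable output path.
if __name__ == '__main__':
    test_mmap(outfile='mercator_map.png')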
def execute(self):
    """
    Raises:
        NotADirectoryError: When the event data directory does not exist.
        FileNotFoundError: When the shake_result HDF file does not exist.
    """
    install_path, data_path = get_config_paths()
    datadir = os.path.join(data_path, self._eventid, 'current', 'products')
    if not os.path.isdir(datadir):
        raise NotADirectoryError('%s is not a valid directory.' % datadir)
    datafile = os.path.join(datadir, 'shake_result.hdf')
    if not os.path.isfile(datafile):
        raise FileNotFoundError('%s does not exist.' % datafile)

    # Open the ShakeMapOutputContainer and extract the data
    container = ShakeMapOutputContainer.load(datafile)
    if container.getDataType() != 'grid':
        raise NotImplementedError('mapping module can only operate on '
                                  'gridded data, not sets of points')

    # get the path to the products.conf file, load the config
    config_file = os.path.join(install_path, 'config', 'products.conf')
    spec_file = get_configspec('products')
    validator = get_custom_validator()
    config = ConfigObj(config_file, configspec=spec_file)
    results = config.validate(validator)
    check_extra_values(config, self.logger)
    if not isinstance(results, bool) or not results:
        config_error(config, results)

    # create contour files
    self.logger.debug('Mapping...')

    # get the filter size from the products.conf
    filter_size = config['products']['contour']['filter_size']

    # get the operator setting from config
    operator = config['products']['mapping']['operator']

    # get all of the pieces needed for the mapping functions
    layers = config['products']['mapping']['layers']
    if 'topography' in layers and layers['topography'] != '':
        topofile = layers['topography']
    else:
        topofile = None
    if 'roads' in layers and layers['roads'] != '':
        roadfile = layers['roads']
    else:
        roadfile = None
    if 'faults' in layers and layers['faults'] != '':
        faultfile = layers['faults']
    else:
        faultfile = None

    # Get the number of parallel workers
    max_workers = config['products']['mapping']['max_workers']

    # Reading HDF5 files currently takes a long time, due to poor
    # programming in MapIO. To save us some time until that issue is
    # resolved, we'll coarsely subset the topo grid once here and pass
    # it into both mapping functions

    # get the bounds of the map
    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    dy = float(
        info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(
        info['output']['map_information']['grid_spacing']['longitude'])
    padx = 5 * dx
    pady = 5 * dy
    sxmin = float(xmin) - padx
    sxmax = float(xmax) + padx
    symin = float(ymin) - pady
    symax = float(ymax) + pady

    sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax,
                                           dx, dy)
    if topofile:
        topogrid = read(topofile, samplegeodict=sampledict, resample=False)
    else:
        tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
        topogrid = Grid2D(data=tdata, geodict=sampledict)

    model_config = container.getConfig()

    imtlist = container.getIMTs()

    textfile = os.path.join(
        get_data_path(), 'mapping',
        'map_strings.' + config['products']['mapping']['language'])
    text_dict = get_text_strings(textfile)
    if config['products']['mapping']['fontfamily'] != '':
        matplotlib.rcParams['font.family'] = \
            config['products']['mapping']['fontfamily']
        matplotlib.rcParams['axes.unicode_minus'] = False

    allcities = Cities.fromDefault()
    states_provs = None
    countries = None
    oceans = None
    lakes = None
    extent = (float(xmin), float(ymin), float(xmax), float(ymax))
    if 'CALLED_FROM_PYTEST' not in os.environ:
        states_provs = cfeature.NaturalEarthFeature(
            category='cultural',
            name='admin_1_states_provinces_lines',
            scale='10m',
            facecolor='none')
        states_provs = list(states_provs.intersecting_geometries(extent))
        if len(states_provs) > 300:
            states_provs = None
        else:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')

        countries = cfeature.NaturalEarthFeature(category='cultural',
                                                 name='admin_0_countries',
                                                 scale='10m',
                                                 facecolor='none')

        oceans = cfeature.NaturalEarthFeature(category='physical',
                                              name='ocean',
                                              scale='10m',
                                              facecolor=WATERCOLOR)

        lakes = cfeature.NaturalEarthFeature(category='physical',
                                             name='lakes',
                                             scale='10m',
                                             facecolor=WATERCOLOR)

    if faultfile is not None:
        faults = ShapelyFeature(Reader(faultfile).geometries(),
                                ccrs.PlateCarree(), facecolor='none')
    else:
        faults = None

    if roadfile is not None:
        roads = ShapelyFeature(Reader(roadfile).geometries(),
                               ccrs.PlateCarree(), facecolor='none')
        if len(list(roads.intersecting_geometries(extent))) > 200:
            roads = None
        else:
            roads = ShapelyFeature(Reader(roadfile).geometries(),
                                   ccrs.PlateCarree(), facecolor='none')
    else:
        roads = None

    alist = []
    for imtype in imtlist:
        component, imtype = imtype.split('/')
        comp = container.getComponents(imtype)[0]
        d = {
            'imtype': imtype,
            'topogrid': topogrid,
            'allcities': allcities,
            'states_provinces': states_provs,
            'countries': countries,
            'oceans': oceans,
            'lakes': lakes,
            'roads': roads,
            'faults': faults,
            'datadir': datadir,
            'operator': operator,
            'filter_size': filter_size,
            'info': info,
            'component': comp,
            'imtdict': container.getIMTGrids(imtype, comp),
            'ruptdict': copy.deepcopy(container.getRuptureDict()),
            'stationdict': container.getStationDict(),
            'config': model_config,
            'tdict': text_dict
        }
        alist.append(d)
        if imtype == 'MMI':
            g = copy.deepcopy(d)
            g['imtype'] = 'thumbnail'
            alist.append(g)
            h = copy.deepcopy(d)
            h['imtype'] = 'overlay'
            alist.append(h)
            self.contents.addFile('intensityMap', 'Intensity Map',
                                  'Map of macroseismic intensity.',
                                  'intensity.jpg', 'image/jpeg')
            self.contents.addFile('intensityMap', 'Intensity Map',
                                  'Map of macroseismic intensity.',
                                  'intensity.pdf', 'application/pdf')
            self.contents.addFile('intensityThumbnail',
                                  'Intensity Thumbnail',
                                  'Thumbnail of intensity map.',
                                  'pin-thumbnail.png', 'image/png')
            self.contents.addFile(
                'intensityOverlay', 'Intensity Overlay and World File',
                'Macroseismic intensity rendered as a '
                'PNG overlay and associated world file',
                'intensity_overlay.png', 'image/png')
            self.contents.addFile(
                'intensityOverlay', 'Intensity Overlay and World File',
                'Macroseismic intensity rendered as a '
                'PNG overlay and associated world file',
                'intensity_overlay.pngw', 'text/plain')
        else:
            fileimt = oq_to_file(imtype)
            self.contents.addFile(fileimt + 'Map',
                                  fileimt.upper() + ' Map',
                                  'Map of ' + imtype + '.',
                                  fileimt + '.jpg', 'image/jpeg')
            self.contents.addFile(fileimt + 'Map',
                                  fileimt.upper() + ' Map',
                                  'Map of ' + imtype + '.',
                                  fileimt + '.pdf', 'application/pdf')

    if max_workers > 0:
        with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
            results = ex.map(make_map, alist)
            list(results)
    else:
        for adict in alist:
            make_map(adict)

    container.close()
def draw_stations_map(pstreams, event, event_dir):
    # draw map of stations and cities and stuff
    lats = np.array(
        [stream[0].stats.coordinates['latitude'] for stream in pstreams])
    lons = np.array(
        [stream[0].stats.coordinates['longitude'] for stream in pstreams])
    cy = event.latitude
    cx = event.longitude
    xmin = lons.min()
    xmax = lons.max()
    ymin = lats.min()
    ymax = lats.max()

    diff_x = max(abs(cx - xmin), abs(cx - xmax))
    diff_y = max(abs(cy - ymin), abs(cy - ymax))

    xmax = cx + MAP_PADDING * diff_x
    xmin = cx - MAP_PADDING * diff_x
    ymax = cy + MAP_PADDING * diff_y
    ymin = cy - MAP_PADDING * diff_y

    bounds = (xmin, xmax, ymin, ymax)
    figsize = (10, 10)
    cities = Cities.fromDefault()
    mmap = MercatorMap(bounds, figsize, cities)
    mmap.drawCities(draw_dots=True)
    ax = mmap.axes
    draw_scale(ax)
    ax.plot(cx, cy, 'r*', markersize=16, transform=mmap.geoproj, zorder=8)
    status = [
        FAILED_COLOR if np.any([trace.hasParameter("failure")
                                for trace in stream]) else PASSED_COLOR
        for stream in pstreams
    ]
    ax.scatter(lons, lats, c=status, marker='^', edgecolors='k',
               transform=mmap.geoproj, zorder=100, s=48)

    passed_marker = mlines.Line2D([], [], color=PASSED_COLOR,
                                  marker='^', markeredgecolor='k',
                                  markersize=12, label='Passed station',
                                  linestyle='None')
    failed_marker = mlines.Line2D([], [], color=FAILED_COLOR,
                                  marker='^', markeredgecolor='k',
                                  markersize=12, label='Failed station',
                                  linestyle='None')
    earthquake_marker = mlines.Line2D([], [], color='red', marker='*',
                                      markersize=12,
                                      label='Earthquake Epicenter',
                                      linestyle='None')
    ax.legend(handles=[passed_marker, failed_marker, earthquake_marker],
              fontsize=12)

    scale = '50m'
    land = cfeature.NaturalEarthFeature(category='physical',
                                        name='land',
                                        scale=scale,
                                        facecolor=LAND_COLOR)
    ocean = cfeature.NaturalEarthFeature(category='physical',
                                         name='ocean',
                                         scale=scale,
                                         facecolor=OCEAN_COLOR)
    ax.add_feature(land)
    ax.add_feature(ocean)
    ax.coastlines(resolution=scale, zorder=10, linewidth=1)
    mapfile = os.path.join(event_dir, 'stations_map.png')
    plt.savefig(mapfile)
    return mapfile
def draw_stations_map(pstreams, event, event_dir):

    # interactive html map is created first
    lats = np.array(
        [stream[0].stats.coordinates["latitude"] for stream in pstreams])
    lons = np.array(
        [stream[0].stats.coordinates["longitude"] for stream in pstreams])
    stnames = np.array([stream[0].stats.station for stream in pstreams])
    networks = np.array([stream[0].stats.network for stream in pstreams])

    failed = np.array([
        np.any([trace.hasParameter("failure") for trace in stream])
        for stream in pstreams
    ])

    failure_reasons = list(
        pd.Series(
            [
                next(tr for tr in st
                     if tr.hasParameter("failure")).getParameter(
                         "failure")["reason"]
                for st in pstreams if not st.passed
            ],
            dtype=str,
        ))

    station_map = folium.Map(location=[event.latitude, event.longitude],
                             zoom_start=7,
                             control_scale=True)

    failed_coords = zip(lats[failed], lons[failed])
    failed_stations = stnames[failed]
    failed_networks = networks[failed]
    failed_station_df = pd.DataFrame({
        "stnames": failed_stations,
        "network": failed_networks,
        "coords": failed_coords,
        "reason": failure_reasons,
    })

    passed_coords = zip(lats[~failed], lons[~failed])
    passed_stations = stnames[~failed]
    passed_networks = networks[~failed]
    passed_station_df = pd.DataFrame({
        "stnames": passed_stations,
        "network": passed_networks,
        "coords": passed_coords,
    })

    # Plot the failed first
    for i, r in failed_station_df.iterrows():
        station_info = "NET: {} LAT: {:.2f} LON: {:.2f} REASON: {}".format(
            r["network"], r["coords"][0], r["coords"][1], r["reason"])
        folium.CircleMarker(
            location=r["coords"],
            tooltip=r["stnames"],
            popup=station_info,
            color=FAILED_COLOR,
            fill=True,
            radius=6,
        ).add_to(station_map)

    for i, r in passed_station_df.iterrows():
        station_info = "NET: {}\n LAT: {:.2f} LON: {:.2f}".format(
            r["network"], r["coords"][0], r["coords"][1])
        folium.CircleMarker(
            location=r["coords"],
            tooltip=r["stnames"],
            popup=station_info,
            color=PASSED_COLOR,
            fill=True,
            radius=10,
        ).add_to(station_map)

    event_info = "MAG: {} LAT: {:.2f} LON: {:.2f} DEPTH: {:.2f}".format(
        event.magnitude, event.latitude, event.longitude, event.depth)
    folium.CircleMarker(
        [event.latitude, event.longitude],
        popup=event_info,
        color="yellow",
        fill=True,
        radius=15,
    ).add_to(station_map)

    html_mapfile = os.path.join(event_dir, "stations_map.html")
    station_map.save(html_mapfile)

    # now the static map for the report is created
    # draw map of stations and cities and stuff
    cy = event.latitude
    cx = event.longitude
    xmin = lons.min()
    xmax = lons.max()
    ymin = lats.min()
    ymax = lats.max()

    diff_x = max(abs(cx - xmin), abs(cx - xmax), 1)
    diff_y = max(abs(cy - ymin), abs(cy - ymax), 1)

    xmax = cx + MAP_PADDING * diff_x
    xmin = cx - MAP_PADDING * diff_x
    ymax = cy + MAP_PADDING * diff_y
    ymin = cy - MAP_PADDING * diff_y

    bounds = (xmin, xmax, ymin, ymax)
    figsize = (10, 10)
    cities = Cities.fromDefault()
    mmap = MercatorMap(bounds, figsize, cities)
    mmap.drawCities(draw_dots=True)
    ax = mmap.axes
    draw_scale(ax)
    ax.plot(cx, cy, "r*", markersize=16, transform=mmap.geoproj, zorder=8)

    failed = np.array([
        np.any([trace.hasParameter("failure") for trace in stream])
        for stream in pstreams
    ])

    # Plot the failed first
    ax.scatter(
        lons[failed],
        lats[failed],
        c=FAILED_COLOR,
        marker="v",
        edgecolors="k",
        transform=mmap.geoproj,
        zorder=100,
        s=48,
    )

    # Plot the successes above the failures
    ax.scatter(
        lons[~failed],
        lats[~failed],
        c=PASSED_COLOR,
        marker="^",
        edgecolors="k",
        transform=mmap.geoproj,
        zorder=101,
        s=48,
    )

    passed_marker = mlines.Line2D(
        [], [],
        color=PASSED_COLOR,
        marker="^",
        markeredgecolor="k",
        markersize=12,
        label="Passed station",
        linestyle="None",
    )
    failed_marker = mlines.Line2D(
        [], [],
        color=FAILED_COLOR,
        marker="v",
        markeredgecolor="k",
        markersize=12,
        label="Failed station",
        linestyle="None",
    )
    earthquake_marker = mlines.Line2D(
        [], [],
        color="red",
        marker="*",
        markersize=12,
        label="Earthquake Epicenter",
        linestyle="None",
    )
    ax.legend(handles=[passed_marker, failed_marker, earthquake_marker],
              fontsize=12)

    scale = "50m"
    land = cfeature.NaturalEarthFeature(category="physical",
                                        name="land",
                                        scale=scale,
                                        facecolor=LAND_COLOR)
    ocean = cfeature.NaturalEarthFeature(category="physical",
                                         name="ocean",
                                         scale=scale,
                                         facecolor=OCEAN_COLOR)
    ax.add_feature(land)
    ax.add_feature(ocean)
    ax.coastlines(resolution=scale, zorder=10, linewidth=1)

    png_mapfile = os.path.join(event_dir, "stations_map.png")
    plt.savefig(png_mapfile)
    return (png_mapfile, html_mapfile)
def drawHazusMap(self, shakegrid, filename, model_config):
    gd = shakegrid.getGeoDict()

    # Retrieve the epicenter - this will get used on the map (??)
    center_lat = shakegrid.getEventDict()['lat']
    center_lon = shakegrid.getEventDict()['lon']

    # define the map
    # first cope with stupid 180 meridian
    height = (gd.ymax - gd.ymin) * 111.191
    if gd.xmin < gd.xmax:
        width = (gd.xmax - gd.xmin) * \
            np.cos(np.radians(center_lat)) * 111.191
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
    else:
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
        xmax += 360
        width = ((gd.xmax + 360) - gd.xmin) * \
            np.cos(np.radians(center_lat)) * 111.191

    aspect = width / height

    # if the aspect is not 1, then trim bounds in
    # x or y direction as appropriate
    if width > height:
        dw = (width - height) / 2.0  # this is width in km
        xmin = xmin + dw / (np.cos(np.radians(center_lat)) * 111.191)
        xmax = xmax - dw / (np.cos(np.radians(center_lat)) * 111.191)
        width = (xmax - xmin) * np.cos(np.radians(center_lat)) * 111.191
    if height > width:
        dh = (height - width) / 2.0  # this is width in km
        ymin = ymin + dh / 111.191
        ymax = ymax - dh / 111.191
        height = (ymax - ymin) * 111.191

    aspect = width / height
    figheight = FIGWIDTH / aspect
    bounds = (xmin, xmax, ymin, ymax)
    figsize = (FIGWIDTH, figheight)

    # load the counties here so we can grab the county names to
    # draw on the map
    counties_file = model_config['counties']
    counties_shapes = fiona.open(counties_file, 'r')
    counties = counties_shapes.items(bbox=(xmin, ymin, xmax, ymax))
    county_shapes = []
    county_columns = {
        'name': [],
        'lat': [],
        'lon': [],
        'pop': [],
    }
    for cid, county in counties:  # county is a dictionary
        county_shape = sShape(county['geometry'])
        state_fips = county['properties']['STATEFP10']
        county_fips = county['properties']['COUNTYFP10']
        fips = int(state_fips + county_fips)
        df = self._dataframe
        weight = 1
        if (df['CountyFips'] == fips).any():
            loss_row = df[df['CountyFips'] == fips].iloc[0]
            weight = loss_row['EconLoss']
        center_point = county_shape.centroid
        county_name = county['properties']['NAMELSAD10'].replace(
            'County', '').strip()
        # feature = ShapelyFeature([county_shape], ccrs.PlateCarree(),
        #                          zorder=COUNTY_ZORDER)
        county_shapes.append(county_shape)
        county_columns['name'].append(county_name)
        county_columns['pop'].append(county_shape.area * weight)
        county_columns['lat'].append(center_point.y)
        county_columns['lon'].append(center_point.x)
        # ax.add_feature(feature, facecolor=GREY,
        #                edgecolor='grey', linewidth=0.5)
        # tx, ty = mmap.proj.transform_point(
        #     center_point.x, center_point.y, ccrs.PlateCarree())
        # plt.text(tx, ty, county_name,
        #          zorder=NAME_ZORDER,
        #          horizontalalignment='center',
        #          verticalalignment='center')

    # Create the MercatorMap object, which holds a separate but identical
    # axes object used to determine collisions between city labels.
    # here we're pretending that county names are city names.
    county_df = pd.DataFrame(county_columns)
    cities = Cities(county_df)
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    fig = mmap.figure
    ax = mmap.axes
    geoproj = mmap.geoproj
    proj = mmap.proj

    # this is a workaround to an occasional problem where some vector
    # layers are not rendered. See
    # https://github.com/SciTools/cartopy/issues/1155#issuecomment-432941088
    proj._threshold /= 6

    # this needs to be done here so that city label collision
    # detection will work
    fig.canvas.draw()

    # draw county names
    mmap.drawCities(zorder=NAME_ZORDER)

    # now draw the counties in grey
    for county_shape in county_shapes:
        feature = ShapelyFeature([county_shape], ccrs.PlateCarree(),
                                 zorder=COUNTY_ZORDER)
        ax.add_feature(feature, facecolor=GREY,
                       edgecolor='grey', linewidth=0.5,
                       zorder=COUNTY_ZORDER)

    # now draw the county boundaries only so that we can see
    # them on top of the colored tracts.
    for county_shape in county_shapes:
        feature = ShapelyFeature([county_shape], ccrs.PlateCarree(),
                                 zorder=COUNTY_ZORDER)
        ax.add_feature(feature, facecolor=(0, 0, 0, 0),
                       edgecolor='grey', linewidth=0.5,
                       zorder=NAME_ZORDER)

    # define bounding box we'll use to clip vector data
    bbox = (xmin, ymin, xmax, ymax)

    # load and clip ocean vectors to match map boundaries
    oceanfile = model_config['ocean_vectors']
    oceanshapes = _clip_bounds(bbox, oceanfile)
    ax.add_feature(ShapelyFeature(oceanshapes, crs=geoproj),
                   facecolor=WATERCOLOR, zorder=OCEAN_ZORDER)

    # draw states with black border - TODO: Look into
    states_file = model_config['states']
    transparent = '#00000000'
    states = _clip_bounds(bbox, states_file)
    ax.add_feature(ShapelyFeature(states, crs=geoproj),
                   facecolor=transparent, edgecolor='k',
                   zorder=STATE_ZORDER)

    # draw census tracts, colored by loss level
    tracts_file = model_config['tracts']
    tract_shapes = fiona.open(tracts_file, 'r')
    tracts = tract_shapes.items(bbox=(xmin, ymin, xmax, ymax))
    ntracts = 0
    for tid, tract in tracts:  # tract is a dictionary
        ntracts += 1
        tract_shape = sShape(tract['geometry'])
        state_fips = str(int(tract['properties']['STATEFP10']))
        county_fips = state_fips + tract['properties']['COUNTYFP10']
        fips_column = self._dataframe['CountyFips']
        if not fips_column.isin([county_fips]).any():
            continue
        tract_fips = int(county_fips + tract['properties']['TRACTCE10'])
        econloss = 0.0
        if tract_fips in self._tract_loss:
            econloss = self._tract_loss[tract_fips]
            # print('Tract %i: Economic loss: %.3f' % (tract_fips, econloss))
        else:
            x = 1

        if econloss < 1e3:
            color = GREEN
        elif econloss >= 1e3 and econloss < 1e5:
            color = YELLOW
        elif econloss >= 1e5 and econloss < 1e6:
            color = ORANGE
        else:
            color = RED
        feature = ShapelyFeature([tract_shape], ccrs.PlateCarree(),
                                 zorder=TRACT_ZORDER)
        ax.add_feature(feature, facecolor=color)

    # # Draw the epicenter as a black star
    # plt.plot(center_lon, center_lat, 'k*', markersize=16,
    #          zorder=EPICENTER_ZORDER, transform=geoproj)

    # save our map out to a file
    logging.info('Saving to %s' % filename)
    t0 = time.time()
    plt.savefig(filename, dpi=300)
    t1 = time.time()
    logging.info('Done saving map - %.2f seconds' % (t1 - t0))
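# The `_clip_bounds` helper used above is not shown in this snippet. A minimal
# sketch of what such a helper could look like is given below, assuming it
# reads a fiona-compatible vector file and clips each geometry to the bounding
# box (xmin, ymin, xmax, ymax); the real helper may differ in detail.
def _clip_bounds_sketch(bbox, filename):
    from shapely.geometry import box, shape
    clip_box = box(*bbox)  # (xmin, ymin, xmax, ymax)
    shapes = []
    with fiona.open(filename, 'r') as vectors:
        # items(bbox=...) restricts iteration to features intersecting bbox
        for _, rec in vectors.items(bbox=bbox):
            clipped = shape(rec['geometry']).intersection(clip_box)
            if not clipped.is_empty:
                shapes.append(clipped)
    return shapes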
def execute(self):
    """
    Raises:
        NotADirectoryError: When the event data directory does not exist.
        FileNotFoundError: When the shake_result HDF file does not exist.
    """
    install_path, data_path = get_config_paths()
    datadir = os.path.join(data_path, self._eventid, 'current', 'products')
    if not os.path.isdir(datadir):
        raise NotADirectoryError('%s is not a valid directory.' % datadir)
    datafile = os.path.join(datadir, 'shake_result.hdf')
    if not os.path.isfile(datafile):
        raise FileNotFoundError('%s does not exist.' % datafile)

    # Open the ShakeMapOutputContainer and extract the data
    container = ShakeMapOutputContainer.load(datafile)
    if container.getDataType() != 'grid':
        raise NotImplementedError('uncertaintymaps module can only '
                                  'operate on gridded data, not sets of '
                                  'points')

    # get the path to the products.conf file, load the config
    config_file = os.path.join(install_path, 'config', 'products.conf')
    spec_file = get_configspec('products')
    validator = get_custom_validator()
    config = ConfigObj(config_file, configspec=spec_file)
    results = config.validate(validator)
    check_extra_values(config, self.logger)
    if not isinstance(results, bool) or not results:
        config_error(config, results)

    # create contour files
    self.logger.debug('Uncertainty mapping...')

    # get the operator setting from config
    operator = config['products']['mapping']['operator']

    # get all of the pieces needed for the uncertainty mapping functions
    layers = config['products']['mapping']['layers']
    if 'countries' in layers and layers['countries'] != '':
        countries_file = layers['countries']
    else:
        countries_file = None
    if 'states_provs' in layers and layers['states_provs'] != '':
        states_provs_file = layers['states_provs']
    else:
        states_provs_file = None
    if 'oceans' in layers and layers['oceans'] != '':
        oceans_file = layers['oceans']
    else:
        oceans_file = None
    if 'lakes' in layers and layers['lakes'] != '':
        lakes_file = layers['lakes']
    else:
        lakes_file = None

    # Get the number of parallel workers
    max_workers = config['products']['mapping']['max_workers']

    # Reading HDF5 files currently takes a long time, due to poor
    # programming in MapIO. To save us some time until that issue is
    # resolved, we'll coarsely subset the topo grid once here and pass
    # it into both mapping functions

    # get the bounds of the map
    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    dy = float(
        info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(
        info['output']['map_information']['grid_spacing']['longitude'])
    padx = 5 * dx
    pady = 5 * dy
    sxmin = float(xmin) - padx
    sxmax = float(xmax) + padx
    symin = float(ymin) - pady
    symax = float(ymax) + pady

    sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax,
                                           dx, dy)
    tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
    topogrid = Grid2D(data=tdata, geodict=sampledict)

    model_config = container.getConfig()

    imtlist = container.getIMTs()

    textfile = os.path.join(
        get_data_path(), 'mapping',
        'map_strings.' + config['products']['mapping']['language'])
    text_dict = get_text_strings(textfile)
    if config['products']['mapping']['fontfamily'] != '':
        matplotlib.rcParams['font.family'] = \
            config['products']['mapping']['fontfamily']
        matplotlib.rcParams['axes.unicode_minus'] = False

    allcities = Cities.fromDefault()
    states_provs = None
    countries = None
    oceans = None
    lakes = None
    faults = None
    roads = None

    if states_provs_file is not None:
        states_provs = ShapelyFeature(
            Reader(states_provs_file).geometries(),
            ccrs.PlateCarree(), facecolor='none')
    elif 'CALLED_FROM_PYTEST' not in os.environ:
        states_provs = cfeature.NaturalEarthFeature(
            category='cultural',
            name='admin_1_states_provinces_lines',
            scale='10m',
            facecolor='none')
        # The feature constructor doesn't necessarily download the
        # data, but we want it to so that multiple threads don't
        # try to do it at once when they actually access the data.
        # So below we just call the geometries() method to trigger
        # the download if necessary.
        _ = states_provs.geometries()

    if countries_file is not None:
        countries = ShapelyFeature(Reader(countries_file).geometries(),
                                   ccrs.PlateCarree(), facecolor='none')
    elif 'CALLED_FROM_PYTEST' not in os.environ:
        countries = cfeature.NaturalEarthFeature(category='cultural',
                                                 name='admin_0_countries',
                                                 scale='10m',
                                                 facecolor='none')
        _ = countries.geometries()

    if oceans_file is not None:
        oceans = ShapelyFeature(Reader(oceans_file).geometries(),
                                ccrs.PlateCarree(), facecolor=WATERCOLOR)
    elif 'CALLED_FROM_PYTEST' not in os.environ:
        oceans = cfeature.NaturalEarthFeature(category='physical',
                                              name='ocean',
                                              scale='10m',
                                              facecolor=WATERCOLOR)
        _ = oceans.geometries()

    if lakes_file is not None:
        lakes = ShapelyFeature(Reader(lakes_file).geometries(),
                               ccrs.PlateCarree(), facecolor=WATERCOLOR)
    elif 'CALLED_FROM_PYTEST' not in os.environ:
        lakes = cfeature.NaturalEarthFeature(category='physical',
                                             name='lakes',
                                             scale='10m',
                                             facecolor=WATERCOLOR)
        _ = lakes.geometries()

    alist = []
    llogo = config['products']['mapping'].get('license_logo') or None
    ltext = config['products']['mapping'].get('license_text') or None
    for imtype in imtlist:
        component, imtype = imtype.split('/')
        comp = container.getComponents(imtype)[0]
        d = {
            'imtype': imtype,
            'topogrid': topogrid,
            'allcities': allcities,
            'states_provinces': states_provs,
            'countries': countries,
            'oceans': oceans,
            'lakes': lakes,
            'roads': roads,
            'roadcolor': layers['roadcolor'],
            'roadwidth': layers['roadwidth'],
            'faults': faults,
            'faultcolor': layers['faultcolor'],
            'faultwidth': layers['faultwidth'],
            'datadir': datadir,
            'operator': operator,
            'filter_size': 0,
            'info': info,
            'component': comp,
            'imtdict': container.getIMTGrids(imtype, comp),
            'ruptdict': copy.deepcopy(container.getRuptureDict()),
            'stationdict': container.getStationDict(),
            'config': model_config,
            'tdict': text_dict,
            'display_magnitude': self.display_magnitude,
            'pdf_dpi': config['products']['mapping']['pdf_dpi'],
            'img_dpi': config['products']['mapping']['img_dpi'],
            'license_logo': llogo,
            'license_text': ltext,
        }
        alist.append(d)

        #
        # Populate the contents.xml
        #
        for key in ('std', 'phi', 'tau'):
            if key not in d['imtdict'] or d['imtdict'][key] is None:
                continue

            if key == 'std':
                ext = '_sigma'
                utype = ' Total'
            elif key == 'phi':
                ext = '_phi'
                utype = ' Within-event'
            else:
                ext = '_tau'
                utype = ' Between-event'

            if imtype == 'MMI':
                fileimt = 'intensity'
            else:
                fileimt = oq_to_file(imtype)

            self.contents.addFile(
                fileimt + ext + 'UncertaintyMap',
                fileimt.upper() + utype + ' Uncertainty Map',
                'Map of ' + imtype + utype + ' uncertainty.',
                fileimt + ext + '.jpg', 'image/jpeg')
            self.contents.addFile(
                fileimt + ext + 'UncertaintyMap',
                fileimt.upper() + utype + ' Uncertainty Map',
                'Map of ' + imtype + utype + ' uncertainty.',
                fileimt + ext + '.pdf', 'application/pdf')

    if max_workers > 0:
        with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
            results = ex.map(make_map, alist)
            list(results)
    else:
        for adict in alist:
            make_map(adict)

    container.close()
def draw_contour(shakegrid, popgrid, oceanfile, oceangridfile, cityfile,
                 basename, borderfile=None, is_scenario=False):
    """Create a contour map showing MMI contours over greyscale population.

    :param shakegrid: ShakeGrid object.
    :param popgrid: Grid2D object containing population data.
    :param oceanfile: String path to file containing ocean vector data in a
        format compatible with fiona.
    :param oceangridfile: String path to file containing ocean grid data.
    :param cityfile: String path to file containing GeoNames cities data.
    :param basename: String path containing desired output PDF base name,
        i.e., /home/pager/exposure. ".pdf" and ".png" files will be made.
    :param borderfile: String path to file containing country border vector
        data, or None to skip drawing country borders.
    :param is_scenario: Boolean indicating whether a SCENARIO watermark
        should be drawn over the map.
    :returns: Tuple containing:
        - Name of PDF file created.
        - Name of PNG file created.
        - Cities object containing the cities that were rendered on the
          contour map.
    """
    gd = shakegrid.getGeoDict()

    # Retrieve the epicenter - this will get used on the map
    center_lat = shakegrid.getEventDict()['lat']
    center_lon = shakegrid.getEventDict()['lon']

    # load the ocean grid file (has 1s in ocean, 0s over land)
    # having this file saves us almost 30 seconds!
    oceangrid = read(oceangridfile,
                     samplegeodict=gd,
                     resample=True,
                     doPadding=True)

    # load the cities data, limit to cities within shakemap bounds
    allcities = Cities.fromDefault()
    cities = allcities.limitByBounds((gd.xmin, gd.xmax, gd.ymin, gd.ymax))

    # define the map
    # first cope with stupid 180 meridian
    height = (gd.ymax - gd.ymin) * DEG2KM
    if gd.xmin < gd.xmax:
        width = (gd.xmax - gd.xmin) * np.cos(np.radians(center_lat)) * DEG2KM
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
    else:
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
        xmax += 360
        width = ((gd.xmax + 360) - gd.xmin) * \
            np.cos(np.radians(center_lat)) * DEG2KM

    aspect = width / height

    # if the aspect is not 1, then trim bounds in x or y direction
    # as appropriate
    if width > height:
        dw = (width - height) / 2.0  # this is width in km
        xmin = xmin + dw / (np.cos(np.radians(center_lat)) * DEG2KM)
        xmax = xmax - dw / (np.cos(np.radians(center_lat)) * DEG2KM)
        width = (xmax - xmin) * np.cos(np.radians(center_lat)) * DEG2KM
    if height > width:
        dh = (height - width) / 2.0  # this is width in km
        ymin = ymin + dh / DEG2KM
        ymax = ymax - dh / DEG2KM
        height = (ymax - ymin) * DEG2KM

    aspect = width / height
    figheight = FIGWIDTH / aspect
    bbox = (xmin, ymin, xmax, ymax)
    bounds = (xmin, xmax, ymin, ymax)
    figsize = (FIGWIDTH, figheight)

    # Create the MercatorMap object, which holds a separate but identical
    # axes object used to determine collisions between city labels.
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    fig = mmap.figure
    ax = mmap.axes
    # this needs to be done here so that city label collision
    # detection will work
    fig.canvas.draw()

    geoproj = mmap.geoproj
    proj = mmap.proj

    # project our population grid to the map projection
    projstr = proj.proj4_init
    popgrid_proj = popgrid.project(projstr)
    popdata = popgrid_proj.getData()
    newgd = popgrid_proj.getGeoDict()

    # Use our GMT-inspired palette class to create population
    # and MMI colormaps
    popmap = ColorPalette.fromPreset('pop')
    mmimap = ColorPalette.fromPreset('mmi')

    # set the image extent to that of the data
    img_extent = (newgd.xmin, newgd.xmax, newgd.ymin, newgd.ymax)
    plt.imshow(popdata,
               origin='upper',
               extent=img_extent,
               cmap=popmap.cmap,
               vmin=popmap.vmin,
               vmax=popmap.vmax,
               zorder=POP_ZORDER,
               interpolation='nearest')

    # draw 10m res coastlines
    ax.coastlines(resolution="10m", zorder=COAST_ZORDER)

    states_provinces = cfeature.NaturalEarthFeature(
        category='cultural',
        name='admin_1_states_provinces_lines',
        scale='50m',
        facecolor='none')
    ax.add_feature(states_provinces, edgecolor='black', zorder=COAST_ZORDER)

    # draw country borders using natural earth data set
    if borderfile is not None:
        borders = ShapelyFeature(
            Reader(borderfile).geometries(), ccrs.PlateCarree())
        ax.add_feature(borders, zorder=COAST_ZORDER,
                       edgecolor='black', linewidth=2, facecolor='none')

    # clip the ocean data to the shakemap
    bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
    oceanshapes = _clip_bounds(bbox, oceanfile)

    ax.add_feature(ShapelyFeature(oceanshapes, crs=geoproj),
                   facecolor=WATERCOLOR, zorder=OCEAN_ZORDER)

    # So here we're going to project the MMI data to
    # our mercator map, then smooth and contour that
    # projected grid.

    # smooth the MMI data for contouring, then project
    mmi = shakegrid.getLayer('mmi').getData()
    smoothed_mmi = gaussian_filter(mmi, FILTER_SMOOTH)
    newgd = shakegrid.getGeoDict().copy()
    smooth_grid = Grid2D(data=smoothed_mmi, geodict=newgd)
    smooth_grid_merc = smooth_grid.project(projstr)
    newgd2 = smooth_grid_merc.getGeoDict()

    # project the ocean grid
    oceangrid_merc = oceangrid.project(projstr)

    # create masked arrays using the ocean grid
    data_xmin, data_xmax = newgd2.xmin, newgd2.xmax
    data_ymin, data_ymax = newgd2.ymin, newgd2.ymax
    smooth_data = smooth_grid_merc.getData()
    landmask = np.ma.masked_where(oceangrid_merc._data == 0.0, smooth_data)
    oceanmask = np.ma.masked_where(oceangrid_merc._data == 1.0, smooth_data)

    # contour the data
    contourx = np.linspace(data_xmin, data_xmax, newgd2.nx)
    contoury = np.linspace(data_ymin, data_ymax, newgd2.ny)
    ax.contour(
        contourx,
        contoury,
        np.flipud(oceanmask),
        linewidths=3.0,
        linestyles='solid',
        zorder=1000,
        cmap=mmimap.cmap,
        vmin=mmimap.vmin,
        vmax=mmimap.vmax,
        levels=np.arange(0.5, 10.5, 1.0),
    )

    ax.contour(
        contourx,
        contoury,
        np.flipud(landmask),
        linewidths=2.0,
        linestyles='dashed',
        zorder=OCEANC_ZORDER,
        cmap=mmimap.cmap,
        vmin=mmimap.vmin,
        vmax=mmimap.vmax,
        levels=np.arange(0.5, 10.5, 1.0),
    )

    # the idea here is to plot invisible MMI contours at integer levels
    # and then label them. The clabel method won't allow the text to appear,
    # which in this case is kind of ok, because it allows us an
    # easy way to draw MMI labels as roman numerals.
    cs_land = plt.contour(
        contourx,
        contoury,
        np.flipud(oceanmask),
        linewidths=0.0,
        levels=np.arange(0, 11),
        alpha=0.0,
        zorder=CLABEL_ZORDER,
    )

    clabel_text = ax.clabel(cs_land,
                            cs_land.cvalues,
                            colors='k',
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x, y, roman_label,
                      zorder=CLABEL_ZORDER,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    cs_ocean = plt.contour(
        contourx,
        contoury,
        np.flipud(landmask),
        linewidths=0.0,
        levels=np.arange(0, 11),
        zorder=CLABEL_ZORDER,
    )

    clabel_text = ax.clabel(cs_ocean,
                            cs_ocean.cvalues,
                            colors='k',
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x, y, roman_label,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    # draw meridians and parallels using Cartopy's functions for that
    gl = ax.gridlines(draw_labels=True,
                      linewidth=2,
                      color=(0.9, 0.9, 0.9),
                      alpha=0.5,
                      linestyle='-',
                      zorder=GRID_ZORDER)
    gl.xlabels_top = False
    gl.xlabels_bottom = False
    gl.ylabels_left = False
    gl.ylabels_right = False
    gl.xlines = True

    # let's floor/ceil the edges to nearest half a degree
    gxmin = np.floor(xmin * 2) / 2
    gxmax = np.ceil(xmax * 2) / 2
    gymin = np.floor(ymin * 2) / 2
    gymax = np.ceil(ymax * 2) / 2

    xlocs = np.linspace(gxmin, gxmax + 0.5, num=5)
    ylocs = np.linspace(gymin, gymax + 0.5, num=5)

    gl.xlocator = mticker.FixedLocator(xlocs)
    gl.ylocator = mticker.FixedLocator(ylocs)
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 15, 'color': 'black'}
    gl.ylabel_style = {'size': 15, 'color': 'black'}

    # TODO - figure out x/y axes data coordinates
    # corresponding to 10% from left and 10% from top
    # use geoproj and proj
    dleft = 0.01
    dtop = 0.97
    proj_str = proj.proj4_init
    merc_to_dd = pyproj.Proj(proj_str)

    # use built-in transforms to get from axes units to data units
    display_to_data = ax.transData.inverted()
    axes_to_display = ax.transAxes

    # these are x,y coordinates in projected space
    yleft, t1 = display_to_data.transform(
        axes_to_display.transform((dleft, 0.5)))
    t2, xtop = display_to_data.transform(
        axes_to_display.transform((0.5, dtop)))

    # these are coordinates in lon,lat space
    yleft_dd, t1_dd = merc_to_dd(yleft, t1, inverse=True)
    t2_dd, xtop_dd = merc_to_dd(t2, xtop, inverse=True)

    # drawing our own tick labels INSIDE the plot, as
    # Cartopy doesn't seem to support this.
    yrange = ymax - ymin
    xrange = xmax - xmin
    ddlabelsize = 12
    for xloc in gl.xlocator.locs:
        outside = xloc < xmin or xloc > xmax
        # don't draw labels when we're too close to either edge
        near_edge = (xloc - xmin) < (xrange * 0.1) or \
            (xmax - xloc) < (xrange * 0.1)
        if outside or near_edge:
            continue
        xtext = r'$%.1f^\circ$W' % (abs(xloc))
        ax.text(xloc, xtop_dd, xtext,
                fontsize=ddlabelsize,
                zorder=GRID_ZORDER,
                ha='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    for yloc in gl.ylocator.locs:
        outside = yloc < gd.ymin or yloc > gd.ymax
        # don't draw labels when we're too close to either edge
        near_edge = (yloc - gd.ymin) < (yrange * 0.1) or \
            (gd.ymax - yloc) < (yrange * 0.1)
        if outside or near_edge:
            continue
        if yloc < 0:
            ytext = r'$%.1f^\circ$S' % (abs(yloc))
        else:
            ytext = r'$%.1f^\circ$N' % (abs(yloc))
        ax.text(yleft_dd, yloc, ytext,
                fontsize=ddlabelsize,
                zorder=GRID_ZORDER,
                va='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    # draw cities
    mapcities = mmap.drawCities(shadow=True, zorder=CITIES_ZORDER)

    # draw the figure border thickly
    # TODO - figure out how to draw map border
    # bwidth = 3
    # ax.spines['top'].set_visible(True)
    # ax.spines['left'].set_visible(True)
    # ax.spines['bottom'].set_visible(True)
    # ax.spines['right'].set_visible(True)
    # ax.spines['top'].set_linewidth(bwidth)
    # ax.spines['right'].set_linewidth(bwidth)
    # ax.spines['bottom'].set_linewidth(bwidth)
    # ax.spines['left'].set_linewidth(bwidth)

    # Get the corner of the map with the lowest population
    corner_rect, filled_corner = _get_open_corner(popgrid, ax)
    clat2 = round_to_nearest(center_lat, 1.0)
    clon2 = round_to_nearest(center_lon, 1.0)

    # draw a little globe in the corner showing in small-scale
    # where the earthquake is located.
    proj = ccrs.Orthographic(central_latitude=clat2,
                             central_longitude=clon2)
    ax2 = fig.add_axes(corner_rect, projection=proj)
    ax2.add_feature(cfeature.OCEAN,
                    zorder=0,
                    facecolor=WATERCOLOR,
                    edgecolor=WATERCOLOR)
    ax2.add_feature(cfeature.LAND, zorder=0, edgecolor='black')
    ax2.plot([clon2], [clat2],
             'w*',
             linewidth=1,
             markersize=16,
             markeredgecolor='k',
             markerfacecolor='r')
    ax2.gridlines()
    ax2.set_global()
    ax2.outline_patch.set_edgecolor('black')
    ax2.outline_patch.set_linewidth(2)

    # Draw the map scale in the unoccupied lower corner.
    corner = 'lr'
    if filled_corner == 'lr':
        corner = 'll'
    draw_scale(ax, corner, pady=0.05, padx=0.05)

    # Draw the epicenter as a black star
    plt.sca(ax)
    plt.plot(center_lon, center_lat, 'k*',
             markersize=16,
             zorder=EPICENTER_ZORDER,
             transform=geoproj)

    if is_scenario:
        plt.text(center_lon, center_lat, 'SCENARIO',
                 fontsize=64,
                 zorder=WATERMARK_ZORDER,
                 transform=geoproj,
                 alpha=0.2,
                 color='red',
                 horizontalalignment='center')

    # create pdf and png output file names
    pdf_file = basename + '.pdf'
    png_file = basename + '.png'

    # save to pdf
    plt.savefig(pdf_file)
    plt.savefig(png_file)

    return (pdf_file, png_file, mapcities)
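# `round_to_nearest` is imported from elsewhere (impactutils provides a helper
# by this name). A minimal stand-in is sketched below, assuming it simply
# rounds a value to the nearest multiple of `nearest`; the real helper may
# differ in its argument names and defaults.
def round_to_nearest_sketch(value, nearest=1.0):
    # e.g. round_to_nearest_sketch(34.048, 1.0) -> 34.0
    return np.round(value / nearest) * nearest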
class PagerCities(object):
    def __init__(self, cities, mmigrid):
        """Create a PagerCities object with a MapIO Cities instance and a
        Grid2 object containing MMI data.

        :param cities: BasemapCities instance.
        :param mmigrid: Grid2 object containing MMI data from a ShakeMap.
        """
        xmin, xmax, ymin, ymax = mmigrid.getBounds()
        dataframe = cities.limitByBounds(
            (xmin, xmax, ymin, ymax)).getDataFrame()
        lat = dataframe['lat'].as_matrix()
        lon = dataframe['lon'].as_matrix()
        mmi = mmigrid.getValue(lat, lon)
        dataframe['mmi'] = mmi
        self._cities = Cities(dataframe)

    def getCityTable(self, mapcities):
        """Return a list of cities suitable for the onePAGER table of cities.

        The PAGER city sorting algorithm can be defined as follows:

        1. Sort cities by inverse intensity. Select N (up to 6) from the
           beginning of the list. If N < 6, return.
        2. Sort cities by capital status and population, and select M
           (up to 5) from the beginning of the list that are not in the
           first list. If N + M == 11, sort the selected cities by MMI and
           return the list.
        3. If N + M < 11, sort cities by inverse population, then select
           (up to) P = 11 - (M + N) cities that are not already in the
           list. Combine the list of P cities with the lists of N and M.
        4. Sort the combined list of cities by inverse MMI and return.

        :param mapcities: MapIO Cities instance which contains the list of
            cities that have been rendered on the PAGER exposure map.
        :returns: DataFrame of up to 11 cities, sorted by the algorithm
            described above. The 'on_map' column indicates whether the city
            was also found in the input mapcities.
        """
        # pandas changed how dataframes get sorted, so we have a
        # convenience function here to hide the ugliness

        # 1. Sort cities by inverse intensity. Select N (up to 6) from the
        #    beginning of the list. If N < 6, return.
        df = self._cities.getDataFrame()
        df = sort_data_frame(df, 'mmi', ascending=False)
        if len(df) >= 6:
            rows = df.iloc[0:6]
            df = df.iloc[6:]
        else:
            df = sort_data_frame(df, 'pop', ascending=True)
            df = _flag_map_cities(df, mapcities)
            return df

        # 2. Sort cities by capital status and population, and select M
        #    (up to 5) from the beginning of the list that are not in the
        #    first list. If N + M == 11, sort the selected cities by MMI
        #    and return the list.
        N = len(rows)
        df = sort_data_frame(df, ['iscap', 'pop'], ascending=False)
        if len(df) >= 5:
            rows = pd.concat([rows, df.iloc[0:5]])
            df = df.iloc[5:]
            if len(rows) == 11:
                rows = sort_data_frame(rows, 'mmi', ascending=False)
                rows = _flag_map_cities(rows, mapcities)
                return rows
        else:
            rows = pd.concat([rows, df])
            rows = sort_data_frame(rows, 'mmi', ascending=False)
            rows = _flag_map_cities(rows, mapcities)
            return rows

        # 3. If N + M < 11, sort cities by inverse population, then select
        #    (up to) P = 11 - (M + N) cities that are not already in the
        #    list. Combine the list of P cities with the lists of N and M.
        df = sort_data_frame(df, 'pop', ascending=False)
        MN = len(df)
        P = 11 - MN
        rows = pd.concat([rows, df[0:P]])

        # 4. Sort the combined list of cities by inverse MMI and return.
        rows = sort_data_frame(rows, 'mmi', ascending=False)

        # Add a column indicating whether the city was rendered on the map
        rows = _flag_map_cities(rows, mapcities)
        return rows
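# `sort_data_frame` is referenced above but not defined in this snippet; the
# comment in getCityTable says it only hides the pandas sorting API change.
# A minimal sketch of such a compatibility helper, under that assumption,
# could look like this (the real helper may differ):
def sort_data_frame_sketch(df, columns, ascending=True):
    # newer pandas exposes sort_values(); very old versions used sort()
    if hasattr(df, 'sort_values'):
        return df.sort_values(by=columns, ascending=ascending)
    return df.sort(columns=columns, ascending=ascending)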