def __init__(self, vertices, color):
    """Build a figure from its vertex list.

    Stores the outer edges (consecutive vertex pairs, wrapping back to the
    first vertex), the inner edges (every pair of vertices), and a
    matplotlib polygon patch for drawing.
    """
    self.vertices = vertices
    self.color = color
    # consecutive vertices, wrapping around to close the ring
    ring = list(vertices[1:]) + [vertices[0]]
    self.edges = [
        Edge(a, b, figure=self) for a, b in zip(vertices, ring)
    ]
    # every vertex pair, including non-adjacent ones
    self.inner_edges = [
        Edge(a, b, figure=self) for a, b in combinations(vertices, 2)
    ]
    # patch is drawn in red regardless of ``color``
    self.mpl_poly = MplPolygon(vertices, True, color=[1, 0, 0])
def plot(self, ax: GeoAxesSubplot, **kwargs) -> Artist:
    """Draw this shape on *ax*; all keyword arguments are forwarded to
    the geometry or patch."""
    kwargs.setdefault("facecolor", "None")
    # only consume a colour from the axes' cycle when the caller gave none
    if "edgecolor" not in kwargs:
        kwargs["edgecolor"] = ax._get_lines.get_next_color()
    if "projection" not in ax.__dict__:
        # plain matplotlib axes: add the exterior ring as a patch
        exterior = list(self.shape.exterior.coords)
        return ax.add_patch(MplPolygon(exterior, **kwargs))
    # cartopy axes: add as a lon/lat geometry
    return ax.add_geometries([self.shape], crs=PlateCarree(), **kwargs)
def plot(self):
    """Project this figure onto the parent Earth map and draw its outline."""
    # coordinates in the Earth projection
    xs, ys = self.projected(self._earthmap)
    # unfilled outline patch styled from the instance attributes
    outline = MplPolygon(
        xy=np.array([xs, ys]).T,
        closed=True,
        linestyle=self._linestyle,
        linewidth=self._linewidth,
        fill=False,
        color=self._color,
    )
    self._patch = outline
    # attach to the parent axes and refresh the canvas
    self._parent.get_axes().add_patch(outline)
    self._parent.draw()
def plot(self, ax: "GeoAxesSubplot", **kwargs: Any) -> "Artist":
    """Draw this shape on *ax*; all keyword arguments are forwarded to
    the geometry or patch."""
    # imported lazily so cartopy/matplotlib are only needed when plotting
    from cartopy.crs import PlateCarree
    from matplotlib.patches import Polygon as MplPolygon

    kwargs.setdefault("facecolor", "None")
    # only consume a colour from the axes' cycle when the caller gave none
    if "edgecolor" not in kwargs:
        kwargs["edgecolor"] = ax._get_lines.get_next_color()

    if "projection" in ax.__dict__:
        # cartopy axes: add as a lon/lat geometry
        return ax.add_geometries([self.shape], crs=PlateCarree(), **kwargs)
    exterior = list(self.shape.exterior.coords)
    return ax.add_patch(MplPolygon(exterior, **kwargs))
def plot(self, ax: GeoAxesSubplot, **kwargs) -> None:  # coverage: ignore
    """Draw the flattened airspace; multipart geometries are drawn
    part by part via temporary single-polygon airspaces."""
    geometry = self.flatten()
    if isinstance(geometry, base.BaseMultipartGeometry):
        # quick and dirty: recurse over each member polygon
        for part in geometry:
            Airspace("", [ExtrudedPolygon(part, 0, 0)]).plot(ax, **kwargs)
        return
    kwargs.setdefault("facecolor", "None")
    # only consume a colour from the axes' cycle when the caller gave none
    if "edgecolor" not in kwargs:
        kwargs["edgecolor"] = ax._get_lines.get_next_color()
    if "projection" in ax.__dict__:
        ax.add_geometries([geometry], crs=PlateCarree(), **kwargs)
    else:
        ax.add_patch(MplPolygon(list(geometry.exterior.coords), **kwargs))
def union_polygons(list_of_polygons):
    """Union the California and Washington state polygons, plot the convex
    hull of that union for visual inspection, and return it.

    Returns:
        The shapely convex-hull polygon of the union.  (Fixes the original,
        which documented "return their union" but always returned ``None``.)

    NOTE(review): despite its name and signature, this function ignores
    ``list_of_polygons`` and always reads California + Washington from the
    shapefile — confirm intended behaviour with callers before changing it.
    """
    #accept few polygons and return their union, visualize them to make sure it's correct
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_subplot(111, axisbg='w', frame_on=False)
    ax.xaxis.set_visible(False)
    ax.yaxis.set_visible(False)
    # .values() works on both Python 2 and 3 (itervalues() is 2-only)
    for spine in ax.spines.values():
        spine.set_visible(False)
    # NOTE(review): lllat/urlat/lllon/urlon are read from module globals
    m = Basemap(llcrnrlat=lllat, urcrnrlat=urlat,
                llcrnrlon=lllon, urcrnrlon=urlon,
                resolution='i', projection='cyl')
    m.drawmapboundary(fill_color='white')
    #m.drawcoastlines(linewidth=0.2)
    m.drawcountries(linewidth=0.2)
    shp_info = m.readshapefile('../localism/data/us_states_st99/st99_d00',
                               'states', drawbounds=False, zorder=0)
    # collect the point lists of the two states of interest
    regions = [state for shapedict, state in zip(m.states_info, m.states)
               if shapedict['NAME'] in set(['California', 'Washington'])]
    polies = [Polygon(r) for r in regions]
    # convex hull of the union of all collected polygons
    union_hull = unary_union(MultiPolygon(polies)).convex_hull
    lons, lats = union_hull.exterior.coords.xy
    # list(zip(...)) keeps this working on Python 3, where zip is lazy
    coords = np.array(list(zip(lons, lats)))
    poly = MplPolygon(coords, facecolor='gray', edgecolor='gray')
    ax.add_patch(poly)
    plt.show()
    return union_hull
def contour(coordinates, scores, world=False, filename="contour", do_contour=False, **kwargs):
    """Plot per-coordinate error scores on a US (or world) Basemap.

    Draws either an interpolated filled contour (``do_contour=True``) or a
    scatter of the raw scores, overlays state/country outlines and state
    abbreviations, and saves the figure to ``./maps/<filename>.pdf``.

    NOTE(review): the ``coordinates``/``scores`` arguments are immediately
    overwritten by the pickle load below — looks like leftover debug code;
    confirm before relying on the parameters.
    """
    #with open('./data/coordinate_socres.pkl', 'wb') as fout:
    #    pickle.dump((coordinates, scores), fout)
    with open('./data/coor_score_239.pkl', 'rb') as fin:
        coordinates, scores = pickle.load(fin)
    # LaTeX-rendered sans-serif font for all figure text
    from matplotlib import rc
    rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']})
    ## for Palatino and other serif fonts use:
    #rc('font',**{'family':'serif','serif':['Palatino']})
    rc('text', usetex=True)
    scores = np.array(scores)
    # contiguous-US bounding box (lower-left / upper-right lat/lon corners)
    lllat = 24.396308
    lllon = -124.848974
    urlat = 49.384358
    urlon = -66.885444
    if world:
        # whole-globe extent instead
        lllat = -90
        lllon = -180
        urlat = 90
        urlon = 180
    fig = plt.figure(figsize=(2.5, 2))
    grid_transform = kwargs.get('grid', False)  # NOTE(review): read but never used
    ax = fig.add_subplot(111, axisbg='w', frame_on=False)
    grid_interpolation_method = 'nearest'
    #scores = np.log(scores)
    # cylindrical-projection map clipped to the bounding box above
    m = Basemap(llcrnrlat=lllat, urcrnrlat=urlat, llcrnrlon=lllon, urcrnrlon=urlon, resolution='i', projection='cyl')
    m.drawmapboundary(fill_color='white')
    #m.drawcoastlines(linewidth=0.2)
    m.drawcountries(linewidth=0.2)
    if world:
        m.drawstates(linewidth=0.2, color='lightgray')
    #m.fillcontinents(color='white', lake_color='#0000ff', zorder=2)
    #m.drawrivers(color='#0000ff')
    #m.drawlsmask(land_color='gray',ocean_color="#b0c4de", lakes=True)
    #m.drawcounties()
    # US state boundaries; polygons also provide anchor points for labels
    shp_info = m.readshapefile('./data/us_states_st99/st99_d00', 'states', drawbounds=False, zorder=0)
    printed_names = []
    ax = plt.gca()
    ax.xaxis.set_visible(False)
    ax.yaxis.set_visible(False)
    for spine in ax.spines.itervalues():  # Python 2 dict API
        spine.set_visible(False)
    state_names_set = set(short_state_names.values())
    # MI and WI are multi-polygon states: count parts so the label is drawn
    # on one specific part only
    mi_index = 0
    wi_index = 0
    for shapedict, state in zip(m.states_info, m.states):
        if world:
            break  # no state labels on the world map
        draw_state_name = True
        if shapedict['NAME'] not in state_names_set:
            continue
        # reverse-lookup the two-letter abbreviation from the full name
        # (Python-2-only: dict.keys()/values() as indexable lists)
        short_name = short_state_names.keys()[short_state_names.values().index(
            shapedict['NAME'])]
        if short_name in printed_names and short_name not in ['MI', 'WI']:
            continue
        if short_name == 'MI':
            if mi_index != 3:
                draw_state_name = False
            mi_index += 1
        if short_name == 'WI':
            if wi_index != 2:
                draw_state_name = False
            wi_index += 1
        # center of polygon
        x, y = np.array(state).mean(axis=0)
        # overwrite with the convex-hull centroid (better for concave states)
        hull = ConvexHull(state)
        hull_points = np.array(state)[hull.vertices]
        x, y = hull_points.mean(axis=0)
        # manual label nudges for small or awkwardly-shaped states
        if short_name == 'MD':
            y = y - 0.5
            x = x + 0.5
        elif short_name == 'DC':
            y = y + 0.1
        elif short_name == 'MI':
            x = x - 1
        elif short_name == 'RI':
            x = x + 1
            y = y - 1
        #poly = MplPolygon(state,facecolor='lightgray',edgecolor='black')
        #x, y = np.median(np.array(state), axis=0)
        # You have to align x,y manually to avoid overlapping for little states
        if draw_state_name:
            plt.text(x + .1, y, short_name, ha="center", fontsize=4)
        #ax.add_patch(poly)
        #pdb.set_trace()
        printed_names += [
            short_name,
        ]
    # project lon/lat data points into map coordinates
    mlon, mlat = m(*(coordinates[:, 1], coordinates[:, 0]))
    # grid data
    if do_contour:
        numcols, numrows = 2000, 2000
        xi = np.linspace(mlon.min(), mlon.max(), numcols)
        yi = np.linspace(mlat.min(), mlat.max(), numrows)
        xi, yi = np.meshgrid(xi, yi)
        # interpolate
        x, y, z = mlon, mlat, scores
        #pdb.set_trace()
        #zi = griddata(x, y, z, xi, yi)
        zi = gd((mlon, mlat),
                scores, (xi, yi),
                method=grid_interpolation_method,
                rescale=False)
        #Remove the lakes and oceans
        data = maskoceans(xi, yi, zi)
        con = m.contourf(xi, yi, data, cmap=plt.get_cmap('YlOrRd'))
    else:
        # no interpolation: plain scatter coloured by score
        cmap = plt.get_cmap('YlOrRd')
        con = m.scatter(mlon, mlat, c=scores, s=3, cmap=cmap)
    #con = m.contour(xi, yi, data, 3, cmap=plt.get_cmap('YlOrRd'), linewidths=1)
    #con = m.contour(x, y, z, 3, cmap=plt.get_cmap('YlOrRd'), tri=True, linewidths=1)
    #conf = m.contourf(x, y, z, 3, cmap=plt.get_cmap('coolwarm'), tri=True)
    cbar = m.colorbar(con, location='right', pad="3%")
    #plt.setp(cbar.ax.get_yticklabels(), visible=False)
    #cbar.ax.tick_params(axis=u'both', which=u'both',length=0)
    #cbar.ax.set_yticklabels(['low', 'high'])
    #tick_locator = ticker.MaxNLocator(nbins=9)
    #cbar.locator = tick_locator
    #cbar.update_ticks()
    cbar.ax.tick_params(labelsize=6)
    cbar.ax.xaxis.set_tick_params(pad=0)
    cbar.ax.yaxis.set_tick_params(pad=0)
    cbar.set_label('error in km', size=8, labelpad=1)
    for line in cbar.lines:
        line.set_linewidth(20)
    #read countries for world dataset with more than 100 number of users
    with open('./data/country_count.json', 'r') as fin:
        top_countries = set(json.load(fin))
    world_shp_info = m.readshapefile(
        './data/CNTR_2014_10M_SH/Data/CNTR_RG_10M_2014',
        'world',
        drawbounds=False,
        zorder=100)
    # grey out neighbours (US map) or low-data countries (world map)
    for shapedict, state in zip(m.world_info, m.world):
        if not world:
            if shapedict['CNTR_ID'] not in ['CA', 'MX']:
                continue
        else:
            if shapedict['CNTR_ID'] in top_countries:
                continue
        poly = MplPolygon(state, facecolor='gray', edgecolor='gray')
        ax.add_patch(poly)
    #plt.title('term: ' + word )
    plt.tight_layout()
    plt.savefig('./maps/' + filename + '.pdf', bbox_inches='tight')
    plt.close()
    del m
def map_words(coords, preds, vocab, map_dir, dataset_name):
    """
    given the coords distributed over the map and the unigram distribution over vocabulary pred,
    contourf the logprob of a word over the map with interpolation.

    For each selected word (regional dialect words, DARE words, a few custom
    words, and optionally data-driven local words), a per-word contour map of
    log p(word | location) is interpolated and saved as a PDF in ``map_dir``.
    """
    # contiguous-US bounding box; widened to the whole globe for the world set
    lllat = 24.396308
    lllon = -124.848974
    urlat = 49.384358
    urlon = -66.885444
    if dataset_name == 'world-final':
        lllat = -90
        lllon = -180
        urlat = 90
        urlon = 180
    grid_interpolation_method = 'cubic'
    logging.info('interpolation: ' + grid_interpolation_method)
    # known dialect words grouped by US region — candidate words to map
    region_words = {
        "the north": ['braht', 'breezeway', 'bubbler', 'clout', 'davenport', 'euchre', 'fridge', 'hotdish', 'paczki', 'pop', 'sack', 'soda', 'toboggan', 'Yooper'],
        "northeast": ['brook', 'cellar', 'sneaker', 'soda'],
        "New England": ['grinder', 'packie', 'rotary', 'wicked'],
        "Eastern New England": ['bulkhead', 'Cabinet', 'frappe', 'hosey', 'intervale', 'jimmies', 'johnnycake', 'quahog', 'tonic'],
        "Northern New England": ['ayuh', 'creemee', 'dooryard', 'logan', 'muckle'],
        "The Mid-Atlantic": ['breezeway', 'hoagie', 'jawn', 'jimmies', 'parlor', 'pavement', 'shoobie', 'youze'],
        "New York City Area": ['bodega', 'dungarees', 'potsy', 'punchball', 'scallion', 'stoop', 'wedge'],
        "The Midland": ['hoosier'],
        "The South": ['banquette', 'billfold', 'chuck', 'commode', 'lagniappe', 'yankee', 'yonder'],
        "The West": ['davenport', 'Hella', 'snowmachine']
    }
    # word -> dialect region, read from the cleansed DARE dump (one JSON object per line)
    word_dialect = {}
    with open('./data/geodare.cleansed.filtered.json', 'r') as fin:
        for line in fin:
            line = line.strip()
            dialect_word = json.loads(line)
            word_dialect[dialect_word['word']] = dialect_word['dialect']
    #if os.path.exists(map_dir):
    #    shutil.rmtree(map_dir)
    try:
        os.mkdir(map_dir)
    except:
        # NOTE(review): bare except; the log string also has a %s placeholder
        # but no argument, so it is printed literally
        logging.info('map_dir %s exists or can not be created.')
    #pick some words to map including some known dialect words
    #some DARE words and some words that are not evenly distributed
    topk_words = []
    for words in region_words.values():
        topk_words.extend(words)
    topk_words.extend(word_dialect.keys())
    dialect_words = ['hella', 'yall', 'jawn', 'paczki', 'euchre', 'brat', 'toboggan', 'brook', 'grinder', 'yinz', 'youze', 'yeen']
    topk_words.extend(dialect_words)
    custom_words = ['springfield', 'columbia', 'n***a', 'niqqa', 'bamma', 'cooter', 'britches', 'yapper', 'younguns', 'hotdish', 'schnookered', 'bubbler', 'betcha', 'dontcha']
    topk_words.extend(custom_words)
    vocabset = set(vocab)
    dare_in_vocab = set(word_dialect.keys()) & vocabset
    logging.info('%d DARE words, %d in vocab' % (len(word_dialect), len(dare_in_vocab)))
    add_local_words = True
    if add_local_words:
        # named entities for this dataset, used when scoring local words
        ne_file = './dumps/ne_' + dataset_name + '.json'
        with codecs.open(ne_file, 'r', encoding='utf-8') as fout:
            NEs = json.load(fout)
        NEs = NEs['nes']
        local_words = get_local_words(preds, vocab, NEs=NEs, k=500)
        logging.info(local_words)
        topk_words.extend(local_words[0:20])
    add_cities = False
    if add_cities:
        # optionally also map the 100 most frequent city names
        with open('./data/cities.json', 'r') as fin:
            cities = json.load(fin)
        cities = cities[0:100]
        for city in cities:
            name = city['city'].lower()
            topk_words.append(name)
    wi = 0
    for word in topk_words:
        if word in vocabset:
            fig = plt.figure(figsize=(5, 4))
            ax = fig.add_subplot(111, axisbg='w', frame_on=False)
            logging.info('%d mapping %s' % (wi, word))
            wi += 1
            index = vocab.index(word)
            # log-probability of this word at every training coordinate
            scores = np.log(preds[:, index])
            m = Basemap(llcrnrlat=lllat, urcrnrlat=urlat, llcrnrlon=lllon, urcrnrlon=urlon, resolution='i', projection='cyl')
            '''
            m = Basemap(llcrnrlon=-119,llcrnrlat=22,urcrnrlon=-64,urcrnrlat=49,
                projection='lcc',lat_1=33,lat_2=45,lon_0=-95, resolution='i')
            '''
            m.drawmapboundary(fill_color = 'white')
            #m.drawcoastlines(linewidth=0.2)
            m.drawcountries(linewidth=0.2)
            # NOTE(review): 'world-fianl' is likely a typo for 'world-final'
            # (the spelling used elsewhere in this function), so state lines
            # are drawn even for the world dataset — confirm
            if dataset_name != 'world-fianl':
                m.drawstates(linewidth=0.2, color='lightgray')
            #m.fillcontinents(color='white', lake_color='#0000ff', zorder=2)
            #m.drawrivers(color='#0000ff')
            #m.drawlsmask(land_color='gray',ocean_color="#b0c4de", lakes=True)
            #m.drawcounties()
            # US state boundaries; polygons also anchor the state labels
            shp_info = m.readshapefile('./data/us_states_st99/st99_d00','states',drawbounds=True, zorder=0)
            printed_names = []
            ax = plt.gca()
            ax.xaxis.set_visible(False)
            ax.yaxis.set_visible(False)
            for spine in ax.spines.itervalues():  # Python 2 dict API
                spine.set_visible(False)
            state_names_set = set(short_state_names.values())
            # MI and WI are multi-polygon states: count parts so the label
            # is drawn on one specific part only
            mi_index = 0
            wi_index = 0
            for shapedict,state in zip(m.states_info, m.states):
                if dataset_name == 'world-final':
                    break  # no state labels on the world map
                draw_state_name = True
                if shapedict['NAME'] not in state_names_set:
                    continue
                # reverse-lookup the two-letter abbreviation from the full
                # name (Python-2-only: keys()/values() as indexable lists)
                short_name = short_state_names.keys()[short_state_names.values().index(shapedict['NAME'])]
                if short_name in printed_names and short_name not in ['MI', 'WI']:
                    continue
                if short_name == 'MI':
                    if mi_index != 3:
                        draw_state_name = False
                    mi_index += 1
                if short_name == 'WI':
                    if wi_index != 2:
                        draw_state_name = False
                    wi_index += 1
                # center of polygon
                x, y = np.array(state).mean(axis=0)
                # overwrite with the convex-hull centroid (more robust)
                hull = ConvexHull(state)
                hull_points = np.array(state)[hull.vertices]
                x, y = hull_points.mean(axis=0)
                # manual label nudges for small or awkwardly-shaped states
                if short_name == 'MD':
                    y = y - 0.5
                    x = x + 0.5
                elif short_name == 'DC':
                    y = y + 0.1
                elif short_name == 'MI':
                    x = x - 1
                elif short_name == 'RI':
                    x = x + 1
                    y = y - 1
                #poly = MplPolygon(state,facecolor='lightgray',edgecolor='black')
                #x, y = np.median(np.array(state), axis=0)
                # You have to align x,y manually to avoid overlapping for little states
                if draw_state_name:
                    plt.text(x+.1, y, short_name, ha="center", fontsize=8)
                #ax.add_patch(poly)
                #pdb.set_trace()
                printed_names += [short_name,]
            # project lon/lat data points into map coordinates
            mlon, mlat = m(*(coords[:,1], coords[:,0]))
            # grid data
            numcols, numrows = 1000, 1000
            xi = np.linspace(mlon.min(), mlon.max(), numcols)
            yi = np.linspace(mlat.min(), mlat.max(), numrows)
            xi, yi = np.meshgrid(xi, yi)
            # interpolate
            x, y, z = mlon, mlat, scores
            #pdb.set_trace()
            #zi = griddata(x, y, z, xi, yi)
            zi = gd(
                (mlon, mlat),
                scores,
                (xi, yi),
                method=grid_interpolation_method, rescale=False)
            #Remove the lakes and oceans
            data = maskoceans(xi, yi, zi)
            con = m.contourf(xi, yi, data, cmap=plt.get_cmap('YlOrRd'))
            #con = m.contour(xi, yi, data, 3, cmap=plt.get_cmap('YlOrRd'), linewidths=1)
            #con = m.contour(x, y, z, 3, cmap=plt.get_cmap('YlOrRd'), tri=True, linewidths=1)
            #conf = m.contourf(x, y, z, 3, cmap=plt.get_cmap('coolwarm'), tri=True)
            cbar = m.colorbar(con,location='right',pad="2%")
            #plt.setp(cbar.ax.get_yticklabels(), visible=False)
            #cbar.ax.tick_params(axis=u'both', which=u'both',length=0)
            #cbar.ax.set_yticklabels(['low', 'high'])
            tick_locator = ticker.MaxNLocator(nbins=9)
            cbar.locator = tick_locator
            cbar.update_ticks()
            cbar.ax.tick_params(labelsize=11)
            cbar.ax.yaxis.set_tick_params(pad=2)
            cbar.set_label('logprob', size=11)
            for line in cbar.lines:
                line.set_linewidth(10)
            #read countries for world dataset with more than 100 number of users
            with open('./data/country_count.json', 'r') as fin:
                top_countries = set(json.load(fin))
            world_shp_info = m.readshapefile('./data/CNTR_2014_10M_SH/Data/CNTR_RG_10M_2014','world',drawbounds=False, zorder=100)
            # grey out neighbours (US map) or low-data countries (world map)
            for shapedict,state in zip(m.world_info, m.world):
                if dataset_name != 'world-final':
                    if shapedict['CNTR_ID'] not in ['CA', 'MX']:
                        continue
                else:
                    if shapedict['CNTR_ID'] in top_countries:
                        continue
                poly = MplPolygon(state,facecolor='gray',edgecolor='gray')
                ax.add_patch(poly)
            #plt.title('term: ' + word )
            plt.tight_layout()
            # NOTE(review): word.encode('utf-8') yields bytes; on Python 3
            # this would embed "b'...'" in the filename — Python 2 assumed
            filename = '{}{}_{}.pdf'.format(map_dir, word.encode('utf-8'), grid_interpolation_method)
            plt.savefig(filename, bbox_inches='tight')
            plt.close()
            del m