def create_raster_worldfile(self, path, xy_range=None):
    from globalmaptiles import GlobalMercator

    x_y = xy_range or self.xy_range
    im = Image.open(path)
    gw_path = ''.join(os.path.split(path)[-1].split('.')[:-1])
    world_file_path = os.path.join(
        os.path.curdir,
        os.path.join(self.output_dir, "%s.jgw" % gw_path))

    with open(world_file_path, 'w') as world:
        min_y, min_x = num2deg(x_y['xMin'], x_y['yMax'] + 1, self.zoom)
        max_y, max_x = num2deg(x_y['xMax'] + 1, x_y['yMin'], self.zoom)

        gm = GlobalMercator()
        min_x, min_y = gm.LatLonToMeters(min_y, min_x)
        max_x, max_y = gm.LatLonToMeters(max_y, max_x)

        x_pixel_size = (max_x - min_x) / im.size[0]
        y_pixel_size = (max_y - min_y) / im.size[1]

        # pixel size in the x-direction in map units/pixel
        world.write("%f\n" % x_pixel_size)
        # rotation about the y-axis
        world.write("%f\n" % 0)
        # rotation about the x-axis
        world.write("%f\n" % 0)
        # pixel size in the y-direction in map units, always negative
        world.write("%f\n" % -abs(y_pixel_size))
        # x-coordinate of the center of the upper left pixel
        world.write("%f\n" % min_x)
        # y-coordinate of the center of the upper left pixel
        world.write("%f\n" % max_y)
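# A minimal sketch of what the six world-file lines encode, using a
# hypothetical 512x512 image covering a single zoom-10 TMS tile. Note that
# num2deg above uses XYZ/slippy rows counted from the top, while
# GlobalMercator's TileBounds counts TMS rows from the bottom.
from globalmaptiles import GlobalMercator

gm = GlobalMercator()
minx, miny, maxx, maxy = gm.TileBounds(550, 650, 10)
width, height = 512, 512            # hypothetical image size in pixels
print (maxx - minx) / width         # line 1: x pixel size (map units/pixel)
print 0.0                           # line 2: rotation about the y-axis
print 0.0                           # line 3: rotation about the x-axis
print -abs((maxy - miny) / height)  # line 4: y pixel size, always negative
print minx                          # line 5: x of the upper-left pixel
print maxy                          # line 6: y of the upper-left pixel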
def GetGridID(Coord):
    # Coord holds lat/lon scaled by 1000
    tz = 8
    mercator = GlobalMercator()
    mx, my = mercator.LatLonToMeters(Coord[0] / 1000.0, Coord[1] / 1000.0)
    tx, ty = mercator.MetersToTile(mx, my, tz)
    gx, gy = mercator.GoogleTile(tx, ty, tz)
    return ("%03d" % gx) + ("%03d" % gy)
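# A quick usage sketch, assuming coordinates stored as integers scaled by
# 1000 (so 32045 means 32.045 degrees); the pair below is hypothetical.
print GetGridID((32045, 34851))  # prints the zero-padded Google tile x+y id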
def main():
    merc = GlobalMercator()
    infile = open('pts1990.csv', 'rb')
    reader = csv.DictReader(infile, delimiter=',')
    print "x,y,quad,category"
    for row in reader:
        lat = float(row['lat'])
        lon = float(row['long'])
        x, y = merc.LatLonToMeters(lat, lon)
        tx, ty = merc.MetersToTile(x, y, 21)

        # Create a unique quadkey for each point object
        quadkey = merc.QuadTree(tx, ty, 21)

        # Export the point and its group category as CSV
        print "{},{},{},{}".format(x, y, quadkey, row['group'])
def main(shapes_file_list, db_file, groups):

    field_ids = {}

    # Create a GlobalMercator object for later conversions
    merc = GlobalMercator()

    # Set up the output db
    conn = sqlite3.connect(db_file)
    c = conn.cursor()
    c.execute(
        "create table if not exists people_by_group "
        "(x real, y real, quadkey text, rand real, group_type text)")
    c.execute("drop index if exists i_quadkey")

    # Open the shapefiles
    for input_filename in shapes_file_list:

        print "Processing file {0}".format(input_filename)
        ds = ogr.Open(input_filename)
        if ds is None:
            print "Open failed.\n"
            sys.exit(1)

        # Obtain the first (and only) layer in the shapefile
        lyr = ds.GetLayerByIndex(0)
        lyr.ResetReading()

        # Obtain the field definitions in the shapefile layer
        feat_defn = lyr.GetLayerDefn()
        field_defns = [
            feat_defn.GetFieldDefn(i)
            for i in range(feat_defn.GetFieldCount())
        ]

        # Set up a coordinate transformation to lat/lon
        wgs84 = osr.SpatialReference()
        wgs84.SetWellKnownGeogCS("WGS84")
        sr = lyr.GetSpatialRef()
        xformer = osr.CoordinateTransformation(sr, wgs84)

        # Obtain the indexes of the group fields
        for i, defn in enumerate(field_defns):
            if defn.GetName() in groups:
                field_ids[defn.GetName()] = i

        # Obtain the number of features (Census Blocks) in the layer
        n_features = len(lyr)

        # Iterate through every feature (Census Block polygon) in the
        # layer, obtain the population counts, and create a point for each
        # person within that feature.
        start_time = time.time()
        for j, feat in enumerate(lyr):

            # Print a progress read-out for every 1000 features and commit
            # pending rows to disk
            if j % 1000 == 0:
                conn.commit()
                perc_complete = (j + 1) / float(n_features)
                time_left = (1 - perc_complete) * (
                    (time.time() - start_time) / perc_complete)
                print "%s/%s (%0.2f%%) est. time remaining %0.2f mins" % (
                    j + 1, n_features, 100 * perc_complete, time_left / 60)

            # Obtain the per-group population counts of the individual
            # census block
            counts = {}
            for f in field_ids:
                val = feat.GetField(field_ids[f])
                if val:
                    counts[f] = int(val)
                else:
                    counts[f] = 0

            # Obtain the OGR polygon object from the feature
            geom = feat.GetGeometryRef()
            if geom is None:
                continue

            # Convert the OGR polygon into a Shapely polygon
            poly = loads(geom.ExportToWkb())
            if poly is None:
                continue

            # Obtain the bounding box of extreme points of the polygon
            bbox = poly.bounds
            if not bbox:
                continue
            leftmost, bottommost, rightmost, topmost = bbox

            # Generate a point object within the census block for every
            # person in every group
            for f in field_ids:
                for i in range(counts[f]):

                    # Choose a random longitude and latitude within the
                    # bounding box and within the original polygon of the
                    # census block
                    while True:
                        samplepoint = Point(uniform(leftmost, rightmost),
                                            uniform(bottommost, topmost))
                        if poly.contains(samplepoint):
                            break

                    # In general we don't know the coordinate system of the
                    # input data, so transform to lat/lon, then convert the
                    # coordinates to meters and a tile reference
                    try:
                        lon, lat, z = xformer.TransformPoint(
                            samplepoint.x, samplepoint.y)
                        x, y = merc.LatLonToMeters(lat, lon)
                    except Exception:
                        print "Failed to convert ", samplepoint.x, samplepoint.y
                        sys.exit(-1)
                    tx, ty = merc.MetersToTile(x, y, 21)

                    # Create a unique quadkey for each point object
                    quadkey = merc.QuadTree(tx, ty, 21)

                    # Create categorical variable for the group
                    group_type = f

                    # Export data to the database file
                    try:
                        c.execute(
                            "insert into people_by_group values "
                            "(?,?,?,random(),?)",
                            (x, y, quadkey, group_type))
                    except Exception:
                        print "Failed to insert ", x, y, tx, ty, group_type
                        sys.exit(-1)

    c.execute(
        "create index if not exists i_quadkey on "
        "people_by_group(x, y, quadkey, rand, group_type)")
    conn.commit()
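# A hypothetical invocation sketch: the shapefile name and group field
# names below are placeholders for whatever census fields are of interest.
if __name__ == '__main__':
    main(['tabblock2010_17_pophu.shp'], 'people.db',
         ['nh_white_n', 'nh_black_n', 'nh_asian_n', 'hispanic_n'])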
def main(input_filename, output_filename):

    # Create a GlobalMercator object for later conversions
    merc = GlobalMercator()

    # Open the shapefile
    ds = ogr.Open(input_filename)
    if ds is None:
        print "Open failed.\n"
        sys.exit(1)

    # Obtain the first (and only) layer in the shapefile
    lyr = ds.GetLayerByIndex(0)
    lyr.ResetReading()

    # Obtain the field definitions in the shapefile layer
    feat_defn = lyr.GetLayerDefn()
    field_defns = [
        feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
    ]

    # Obtain the indexes of the fields holding the counts for whites,
    # blacks, Asians, Hispanics, and others, plus the state FIPS code.
    for i, defn in enumerate(field_defns):
        if defn.GetName() == "POP10":
            pop_field = i
        if defn.GetName() == "nh_white_n":
            white_field = i
        if defn.GetName() == "nh_black_n":
            black_field = i
        if defn.GetName() == "nh_asian_n":
            asian_field = i
        if defn.GetName() == "hispanic_n":
            hispanic_field = i
        if defn.GetName() == "NH_Other_n":
            other_field = i
        if defn.GetName() == "STATEFP10":
            statefips_field = i

    # Set up the output file
    conn = sqlite3.connect(output_filename)
    c = conn.cursor()
    c.execute(
        "create table if not exists people_by_race "
        "(statefips text, x text, y text, quadkey text, race_type text)")

    # Obtain the number of features (Census Blocks) in the layer
    n_features = len(lyr)

    # Iterate through every feature (Census Block polygon) in the layer,
    # obtain the population counts, and create a point for each person
    # within that feature.
    for j, feat in enumerate(lyr):

        # Print a progress read-out for every 1000 features and commit
        # pending rows to disk
        if j % 1000 == 0:
            conn.commit()
            print "%s/%s (%0.2f%%)" % (j + 1, n_features,
                                       100 * ((j + 1) / float(n_features)))

        # Obtain total population, racial counts, and state FIPS code of
        # the individual census block
        pop = int(feat.GetField(pop_field))
        white = int(feat.GetField(white_field))
        black = int(feat.GetField(black_field))
        asian = int(feat.GetField(asian_field))
        hispanic = int(feat.GetField(hispanic_field))
        other = int(feat.GetField(other_field))
        statefips = feat.GetField(statefips_field)

        # Obtain the OGR polygon object from the feature
        geom = feat.GetGeometryRef()
        if geom is None:
            continue

        # Convert the OGR polygon into a Shapely polygon
        poly = loads(geom.ExportToWkb())
        if poly is None:
            continue

        # Obtain the bounding box of extreme points of the polygon
        bbox = poly.bounds
        if not bbox:
            continue
        leftmost, bottommost, rightmost, topmost = bbox

        # Generate a point object within the census block for every person,
        # by race; each race gets the same sampling loop, so iterate over
        # (count, category code) pairs
        for count, race_type in [(white, 'w'), (black, 'b'), (asian, 'a'),
                                 (hispanic, 'h'), (other, 'o')]:
            for i in range(count):

                # Choose a random longitude and latitude within the
                # bounding box and within the original polygon of the
                # census block
                while True:
                    samplepoint = Point(uniform(leftmost, rightmost),
                                        uniform(bottommost, topmost))
                    if poly.contains(samplepoint):
                        break

                # Convert the longitude and latitude coordinates to meters
                # and a tile reference
                x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
                tx, ty = merc.MetersToTile(x, y, 21)

                # Create a unique quadkey for each point object
                quadkey = merc.QuadTree(tx, ty, 21)

                # Export data to the database file
                c.execute("insert into people_by_race values (?,?,?,?,?)",
                          (statefips, x, y, quadkey, race_type))

    conn.commit()
if zoomlevel is None or lat is None or lon is None:
    Usage("ERROR: Specify at least 'zoomlevel', 'lat' and 'lon'.")
if latmax is not None and lonmax is None:
    Usage("ERROR: Both 'latmax' and 'lonmax' must be given.")

boundingbox = None
if latmax is not None and lonmax is not None:
    if latmax < lat:
        Usage("ERROR: 'latmax' must be bigger than 'lat'")
    if lonmax < lon:
        Usage("ERROR: 'lonmax' must be bigger than 'lon'")
    boundingbox = (lon, lat, lonmax, latmax)

tz = zoomlevel
mercator = GlobalMercator()

mx, my = mercator.LatLonToMeters(lat, lon)
print "Spherical Mercator (EPSG:900913) coordinates for lat/lon: "
print(mx, my)
tminx, tminy = mercator.MetersToTile(mx, my, tz)

if boundingbox:
    mx, my = mercator.LatLonToMeters(latmax, lonmax)
    print "Spherical Mercator (EPSG:900913) coordinates for maxlat/maxlon: "
    print(mx, my)
    tmaxx, tmaxy = mercator.MetersToTile(mx, my, tz)
else:
    tmaxx, tmaxy = tminx, tminy

for ty in range(tminy, tmaxy + 1):
    for tx in range(tminx, tmaxx + 1):
        tilefilename = "%s/%s/%s" % (tz, tx, ty)
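# The tilefilename above uses raw TMS tile coordinates. A short sketch of
# the TMS-to-Google row flip (Google counts tile rows from the top, TMS
# from the bottom):
gx, gy = mercator.GoogleTile(tminx, tminy, tz)
print "TMS %s/%s/%s -> Google %s/%s/%s" % (tz, tminx, tminy, tz, gx, gy)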
def main(input_filename, wac_filename, output_filename):

    wac = pd.io.parsers.read_csv(wac_filename)
    wac.set_index(wac['w_geocode'], inplace=True)

    # Create columns for four megasectors
    wac['makers'] = (wac['CNS01'] + wac['CNS02'] + wac['CNS03'] +
                     wac['CNS04'] + wac['CNS05'] + wac['CNS06'] +
                     wac['CNS08'])
    wac['services'] = (wac['CNS07'] + wac['CNS14'] + wac['CNS17'] +
                       wac['CNS18'])
    wac['professions'] = (wac['CNS09'] + wac['CNS10'] + wac['CNS11'] +
                          wac['CNS12'] + wac['CNS13'])
    wac['support'] = (wac['CNS15'] + wac['CNS16'] + wac['CNS19'] +
                      wac['CNS20'])

    # Entries greater than 100k appear to be rounded, which makes this
    # assertion fail in NY: one block in Brooklyn (the Brooklyn Post Office
    # + Brooklyn Law School + Borough Hall) reports 177000 jobs. So maybe
    # weirdness around the post office? (`rw` is presumably defined by the
    # surrounding script.)
    assert sum(wac['C000'] - (wac['makers'] + wac['services'] +
                              wac['professions'] + wac['support'])) == 0 \
        or rw[1]['abbrev'] == 'ny'

    # Set up outfile as csv
    outf = open(output_filename, 'w')
    outf.write('x,y,sect,inctype,quadkey\n')

    # Create a GlobalMercator object for later conversions
    merc = GlobalMercator()

    # Open the shapefile
    ds = ogr.Open(input_filename)
    if ds is None:
        print "Open failed.\n"
        sys.exit(1)

    # Obtain the first (and only) layer in the shapefile
    lyr = ds.GetLayerByIndex(0)
    lyr.ResetReading()

    # Obtain the field definitions in the shapefile layer
    feat_defn = lyr.GetLayerDefn()
    field_defns = [
        feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
    ]

    # Obtain the index of the GEOID field, which is what we merge on
    for i, defn in enumerate(field_defns):
        print defn.GetName()
        if defn.GetName() == "GEOID10":
            fips = i

    # Obtain the number of features (Census Blocks) in the layer
    n_features = len(lyr)

    # Iterate through every feature (Census Block polygon) in the layer,
    # obtain the job counts, and create a point for each job within that
    # feature.
    for j, feat in enumerate(lyr):

        # Print a progress read-out for every 1000 features
        if j % 1000 == 0:
            print "%s/%s (%0.2f%%)" % (j + 1, n_features,
                                       100 * ((j + 1) / float(n_features)))

        # Obtain the job counts of the individual census block
        blkfips = int(feat.GetField(fips))
        try:
            jobs = {'m': wac.loc[blkfips, 'makers'],
                    's': wac.loc[blkfips, 'services'],
                    'p': wac.loc[blkfips, 'professions'],
                    't': wac.loc[blkfips, 'support']}
        except KeyError:
            # Missing just means no jobs there. Lots of blocks have this.
            continue

        income = {'l': wac.loc[blkfips, 'CE01'],
                  'm': wac.loc[blkfips, 'CE02'],
                  'h': wac.loc[blkfips, 'CE03']}

        # Obtain the OGR polygon object from the feature
        geom = feat.GetGeometryRef()
        if geom is None:
            continue

        # Convert the OGR polygon into a Shapely polygon
        poly = loads(geom.ExportToWkb())
        if poly is None:
            continue

        # Obtain the bounding box of extreme points of the polygon
        bbox = poly.bounds
        if not bbox:
            continue
        leftmost, bottommost, rightmost, topmost = bbox

        # Generate a point object within the census block for every job,
        # by sector
        inccnt = 0
        incord = ['l', 'm', 'h']
        shuffle(incord)
        for sect in ['m', 's', 'p', 't']:
            for i in range(int(jobs[sect])):

                # Choose a random longitude and latitude within the
                # bounding box and within the original polygon of the
                # census block
                while True:
                    samplepoint = Point(uniform(leftmost, rightmost),
                                        uniform(bottommost, topmost))
                    if poly.contains(samplepoint):
                        break

                # Convert the longitude and latitude coordinates to meters
                # and a tile reference
                x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
                tx, ty = merc.MetersToTile(x, y, 21)

                # Determine the right income band by drawing down the
                # shuffled income counts
                inccnt += 1
                assert inccnt <= (income[incord[0]] + income[incord[1]] +
                                  income[incord[2]]) \
                    or rw[1]['abbrev'] == 'ny'
                if inccnt <= income[incord[0]]:
                    inctype = incord[0]
                elif inccnt <= income[incord[0]] + income[incord[1]]:
                    inctype = incord[1]
                else:
                    inctype = incord[2]

                # Create a unique quadkey for each point object
                quadkey = merc.QuadTree(tx, ty, 21)
                outf.write("%s,%s,%s,%s,%s\n" % (x, y, sect, inctype,
                                                 quadkey))

    outf.close()
sd_lat = lat.std()
sd_lon = lon.std()
print sd_lat, sd_lon

# In[184]:

from globalmaptiles import GlobalMercator

# In[185]:

merc = GlobalMercator()

# In[186]:

meanX, meanY = merc.LatLonToMeters(mean_lat, mean_lon)
print meanX, meanY

# In[187]:

lat_sdMet = merc.LatLonToMeters(mean_lat + sd_lat, mean_lon)
lon_sdMet = merc.LatLonToMeters(mean_lat, mean_lon + sd_lon)
print lat_sdMet, lon_sdMet

# In[188]:

import scipy.spatial as sp

# In[191]:

lat_dist = sp.distance.euclidean([meanX, meanY], lat_sdMet) / 1000.0
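# In[ ]:

# The longitudinal analogue of the cell above, following the same
# euclidean-distance pattern and converting meters to kilometers.
lon_dist = sp.distance.euclidean([meanX, meanY], lon_sdMet) / 1000.0
print lat_dist, lon_dist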
class Downloader(object):
    '''
    Based on
    http://www.wellho.net/solutions/python-python-threads-a-first-example.html
    '''

    def __init__(self, mapdir, minzoom, maxzoom):
        self.mercator = GlobalMercator(256)
        self.minzoom = minzoom
        self.maxzoom = maxzoom
        self.TopRightLat = None
        self.TopRightLon = None
        self.BottomLeftLat = None
        self.BottomLeftLon = None
        self.mminx = None
        self.mminy = None
        self.mmaxx = None
        self.mmaxy = None
        self.mapdir = mapdir
        self.jobs = Queue.Queue()

    def download(self, toprightlat, toprightlon, bottomleftlat,
                 bottomleftlon):
        self.TopRightLat = toprightlat
        self.TopRightLon = toprightlon
        self.BottomLeftLat = bottomleftlat
        self.BottomLeftLon = bottomleftlon
        self.mminx, self.mminy = self.mercator.LatLonToMeters(
            toprightlat, toprightlon)
        self.mmaxx, self.mmaxy = self.mercator.LatLonToMeters(
            bottomleftlat, bottomleftlon)
        map(self.addJobForZoom, range(self.minzoom, self.maxzoom + 1))
        self.runJobs()

    def addJobForZoom(self, zoom):
        tminx, tminy = self.mercator.MetersToTile(self.mminx, self.mminy,
                                                  zoom)
        tmaxx, tmaxy = self.mercator.MetersToTile(self.mmaxx, self.mmaxy,
                                                  zoom)
        if tminx > tmaxx:
            tminx, tmaxx = tmaxx, tminx
        if tminy > tmaxy:
            tminy, tmaxy = tmaxy, tminy
        for tx in range(tminx, tmaxx + 1):
            for ty in range(tminy, tmaxy + 1):
                gx, gy = self.mercator.GoogleTile(tx, ty, zoom)
                self.jobs.put({'x': gx, 'y': gy, 'z': zoom})

    def runJobs(self):
        workers = []
        for threadNum in range(0, MAX_THREADS):
            subdownloader = self.SubDownloader(self)
            workers.append(subdownloader)
            workers[-1].start()
        for worker in workers:
            worker.join(20)
        print "Finished!"

    class SubDownloader(Thread):

        def __init__(self, parent):
            Thread.__init__(self)
            self.parent = parent

        def run(self):
            while 1:
                try:
                    job = self.parent.jobs.get(0)
                except Queue.Empty:
                    return
                mt = random.randrange(0, 4)
                filename = '%i/gm_%i_%i_%i.png' % (job['z'], job['x'],
                                                   job['y'], job['z'])
                if os.path.isfile('%s%s' % (self.parent.mapdir, filename)):
                    # Skip tiles we already have
                    continue
                if not os.path.isdir('%s%s' % (self.parent.mapdir,
                                               job['z'])):
                    os.mkdir('%s%s' % (self.parent.mapdir, job['z']))
                # e.g. http://mt1.google.com/vt/lyrs=m@115&hl=en&x=39141&y=26445&z=16&s=
                url = ('http://mt%i.google.com/vt/lyrs=m@115&hl=en'
                       '&x=%i&y=%i&z=%i&s=' % (mt, job['x'], job['y'],
                                               job['z']))
                try:
                    tile = urllib2.urlopen(url=url, timeout=20).read()
                except Exception:
                    continue
                # Write into the map directory, matching the existence
                # check above
                fh = open('%s%s' % (self.parent.mapdir, filename), 'wb')
                fh.write(tile)
                fh.close()
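# A usage sketch with placeholder bounds and zoom range; assumes the
# surrounding script defines MAX_THREADS and that the map directory exists.
if __name__ == '__main__':
    dl = Downloader('tiles/', 10, 12)
    dl.download(32.1, 35.1, 32.0, 35.0)  # top-right lat/lon, bottom-left lat/lon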
def pdfer(data, page_size=PAGE_SIZES['letter'], output='pdf'):

    shape_overlays = data.get('shape_overlays')
    point_overlays = data.get('point_overlays')

    grid = {'zoom': data.get('zoom')}
    center_lon, center_lat = data['center']
    center_tile_x, center_tile_y = tileXY(float(center_lat),
                                          float(center_lon),
                                          int(data['zoom']))

    dim_across, dim_up = data['dimensions']
    if dim_across > dim_up:
        page_height, page_width, tiles_up, tiles_across = page_size
    else:
        page_width, page_height, tiles_across, tiles_up = page_size

    min_tile_x = center_tile_x - (tiles_across / 2)
    min_tile_y = center_tile_y - (tiles_up / 2)
    max_tile_x = min_tile_x + tiles_across
    max_tile_y = min_tile_y + tiles_up

    # Get base layer tiles
    base_pattern = 'http://d.tile.stamen.com/toner/{z}/{x}/{y}.png'
    if data.get('base_tiles'):
        base_pattern = data['base_tiles']
    base_links = generateLinks(base_pattern, grid['zoom'], min_tile_x,
                               min_tile_y, max_tile_x, max_tile_y)
    base_names = dl_write_all(base_links, 'base')

    # Get overlay tiles
    overlay_pattern = None
    if data.get('overlay_tiles'):
        overlay_pattern = data['overlay_tiles']
        overlay_links = generateLinks(overlay_pattern, grid['zoom'],
                                      min_tile_x, min_tile_y, max_tile_x,
                                      max_tile_y)
        overlay_names = dl_write_all(overlay_links, 'overlay')

    now = datetime.now()
    date_string = datetime.strftime(now, '%Y-%m-%d_%H-%M-%S')
    outp_name = os.path.join('/tmp', '{0}.png'.format(date_string))

    base_image_names = ['-'.join(l.split('/')[-3:]) for l in base_names]
    base_image_names = sorted([i.split('-')[-3:] for i in base_image_names],
                              key=itemgetter(1))

    for parts in base_image_names:
        z, x, y = parts
        y = y.rstrip('.png').rstrip('.jpg')
        z = z.rsplit('_', 1)[1]
        key = '-'.join([z, x, y])
        grid[key] = {'bbox': tileEdges(float(x), float(y), int(z))}

    keys = sorted(grid.keys())

    mercator = GlobalMercator()
    bb_poly = None
    bmin_rx = None
    bmin_ry = None

    if shape_overlays or point_overlays:
        polys = []
        for k, v in grid.items():
            try:
                one, two, three, four = grid[k]['bbox']
                polys.append(box(two, one, four, three))
            except TypeError:
                pass
        mpoly = MultiPolygon(polys)
        bb_poly = box(*mpoly.bounds)

        min_key = keys[0]
        max_key = keys[-2]
        bminx, bminy = grid[min_key]['bbox'][0], grid[min_key]['bbox'][1]
        bmaxx, bmaxy = grid[max_key]['bbox'][2], grid[max_key]['bbox'][3]
        bmin_mx, bmin_my = mercator.LatLonToMeters(bminx, bminy)
        bmax_mx, bmax_my = mercator.LatLonToMeters(bmaxx, bmaxy)
        bmin_px, bmin_py = mercator.MetersToPixels(bmin_mx, bmin_my,
                                                   float(grid['zoom']))
        bmax_px, bmax_py = mercator.MetersToPixels(bmax_mx, bmax_my,
                                                   float(grid['zoom']))
        bmin_rx, bmin_ry = mercator.PixelsToRaster(bmin_px, bmin_py,
                                                   int(grid['zoom']))

        if shape_overlays:
            all_polys = []
            for shape_overlay in shape_overlays:
                shape_overlay = json.loads(shape_overlay)
                if shape_overlay.get('geometry'):
                    shape_overlay = shape_overlay['geometry']
                coords = shape_overlay['coordinates'][0]
                all_polys.append(Polygon(coords))
            mpoly = MultiPolygon(all_polys)

            one, two, three, four, five = list(
                box(*mpoly.bounds).exterior.coords)
            left, right = LineString([one, two]), LineString([three, four])
            top, bottom = LineString([two, three]), LineString([four, five])
            left_to_right = left.distance(right)
            top_to_bottom = top.distance(bottom)
            if left_to_right > top_to_bottom:
                page_height, page_width, _, _ = page_size
            else:
                page_width, page_height, _, _ = page_size
            center_lon, center_lat = list(mpoly.centroid.coords)[0]

        if point_overlays:
            all_points = []
            for point_overlay in point_overlays:
                point_overlay = json.loads(point_overlay)
                for p in point_overlay['points']:
                    if p[0] and p[1]:
                        all_points.append(p)
            mpoint = MultiPoint(all_points)

            one, two, three, four, five = list(
                box(*mpoint.bounds).exterior.coords)
            left, right = LineString([one, two]), LineString([three, four])
            top, bottom = LineString([two, three]), LineString([four, five])
            left_to_right = left.distance(right)
            top_to_bottom = top.distance(bottom)
            if left_to_right > top_to_bottom:
                page_height, page_width, _, _ = page_size
            else:
                page_width, page_height, _, _ = page_size
            center_lon, center_lat = list(mpoint.centroid.coords)[0]

    print(center_lon, center_lat)

    # Stitch the base tiles into a single image, column by column
    arrays = []
    for k, g in groupby(base_image_names, key=itemgetter(1)):
        images = list(g)
        fnames = ['/tmp/%s' % ('-'.join(f)) for f in images]
        array = []
        for img in fnames:
            i = cv2.imread(img, -1)
            if isinstance(i, type(None)):
                i = np.zeros((256, 256, 4), np.uint8)
            elif i.shape[2] != 4:
                i = cv2.cvtColor(cv2.imread(img), cv2.COLOR_BGR2BGRA)
            array.append(i)
        arrays.append(np.vstack(array))
    outp = np.hstack(arrays)
    cv2.imwrite(outp_name, outp)

    if overlay_pattern:
        overlay_outp_name = os.path.join(
            '/tmp', 'overlay_{0}.png'.format(date_string))
        overlay_image_names = [
            '-'.join(l.split('/')[-3:]) for l in overlay_names
        ]
        overlay_image_names = sorted(
            [i.split('-')[-3:] for i in overlay_image_names],
            key=itemgetter(1))

        arrays = []
        for k, g in groupby(overlay_image_names, key=itemgetter(1)):
            images = list(g)
            fnames = ['/tmp/%s' % ('-'.join(f)) for f in images]
            array = []
            for img in fnames:
                i = cv2.imread(img, -1)
                if isinstance(i, type(None)):
                    i = np.zeros((256, 256, 4), np.uint8)
                elif i.shape[2] != 4:
                    i = cv2.cvtColor(cv2.imread(img), cv2.COLOR_BGR2BGRA)
                array.append(i)
            arrays.append(np.vstack(array))
            nuked = [os.remove(f) for f in fnames]
        outp = np.hstack(arrays)
        cv2.imwrite(overlay_outp_name, outp)

        # Alpha-blend the overlay onto the base image
        base = cv2.imread(outp_name, -1)
        overlay = cv2.imread(overlay_outp_name, -1)
        overlay_g = cv2.cvtColor(overlay, cv2.COLOR_BGR2GRAY)
        ret, mask = cv2.threshold(overlay_g, 10, 255, cv2.THRESH_BINARY)
        inverted = cv2.bitwise_not(mask)
        overlay = cv2.bitwise_not(overlay, overlay, mask=inverted)

        base_alpha = 0.55
        overlay_alpha = 1
        for channel in range(3):
            x, y, d = overlay.shape
            base[:, :, channel] = (
                base[:, :, channel] * base_alpha +
                overlay[:, :, channel] * overlay_alpha * (1 - base_alpha)
            ) / (base_alpha + overlay_alpha * (1 - base_alpha))
        cv2.imwrite(outp_name, base)

    ###########################################################################
    # Code below here is for drawing vector layers within the PDF             #
    # Leaving it in just because it was a pain to come up with the first time #
    ###########################################################################
    if shape_overlays or point_overlays:
        im = cairo.ImageSurface.create_from_png(outp_name)
        ctx = cairo.Context(im)

        if shape_overlays:
            for shape_overlay in shape_overlays:
                shape_overlay = json.loads(shape_overlay)
                if shape_overlay.get('geometry'):
                    shape_overlay = shape_overlay['geometry']
                color = hex_to_rgb('#f06eaa')
                coords = shape_overlay['coordinates'][0]
                x, y = get_pixel_coords(coords[0], grid['zoom'], bmin_rx,
                                        bmin_ry)
                ctx.move_to(x, y)
                ctx.set_line_width(4.0)
                red, green, blue = [float(c) for c in color]
                ctx.set_source_rgba(red / 255, green / 255, blue / 255, 0.3)
                for p in coords[1:]:
                    x, y = get_pixel_coords(p, grid['zoom'], bmin_rx,
                                            bmin_ry)
                    ctx.line_to(x, y)
                ctx.close_path()
                ctx.fill()
                ctx.set_source_rgba(red / 255, green / 255, blue / 255, 0.5)
                for p in coords[1:]:
                    x, y = get_pixel_coords(p, grid['zoom'], bmin_rx,
                                            bmin_ry)
                    ctx.line_to(x, y)
                ctx.close_path()
                ctx.stroke()
            ctx.set_line_width(2.0)

        if point_overlays:
            for point_overlay in point_overlays:
                point_overlay = json.loads(point_overlay)
                color = hex_to_rgb(point_overlay['color'])
                for p in point_overlay['points']:
                    if p[0] and p[1]:
                        pt = Point((float(p[0]), float(p[1])))
                        if bb_poly.contains(pt):
                            nx, ny = get_pixel_coords(p, grid['zoom'],
                                                      bmin_rx, bmin_ry)
                            red, green, blue = [float(c) for c in color]
                            ctx.set_source_rgba(red / 255, green / 255,
                                                blue / 255, 0.6)
                            # args: center x, center y, radius, start and
                            # end angle in radians (0 to 50 rad sweeps well
                            # past a full circle, i.e. a filled dot)
                            ctx.arc(nx, ny, 5.0, 0, 50)
                            ctx.fill()
                            ctx.arc(nx, ny, 5.0, 0, 50)
                            ctx.stroke()
        im.write_to_png(outp_name)

    scale = 1

    # Crop image from center
    center_point_x, center_point_y = latlon2xy(float(center_lat),
                                               float(center_lon),
                                               float(data['zoom']))
    offset_x = (center_point_x - float(center_tile_x)) + 50
    offset_y = (center_point_y - float(center_tile_y)) - 50
    outp_image = cv2.imread(outp_name, -1)
    pixels_up, pixels_across, channels = outp_image.shape
    center_x = (pixels_across / 2) + offset_x
    center_y = (pixels_up / 2) + offset_y
    start_y, end_y = center_y - (page_height / 2), center_y + (page_height / 2)
    start_x, end_x = center_x - (page_width / 2), center_x + (page_width / 2)
    cv2.imwrite(outp_name, outp_image[start_y:end_y, start_x:end_x])

    if output == 'pdf':
        outp_file_name = outp_name.rstrip('.png') + '.pdf'
        pdf = cairo.PDFSurface(outp_file_name, page_width, page_height)
        ctx = cairo.Context(pdf)
        image = cairo.ImageSurface.create_from_png(outp_name)
        ctx.set_source_surface(image)
        ctx.paint()
        pdf.finish()
    elif output == 'jpeg':
        outp_file_name = outp_name.rstrip('.png') + '.jpg'
        jpeg = cv2.cvtColor(cv2.imread(outp_name, -1), cv2.COLOR_RGBA2RGB)
        cv2.imwrite(outp_file_name, jpeg)
    return outp_file_name
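# A hypothetical call sketch: pdfer expects a dict carrying a (lon, lat)
# center, a zoom, and page dimensions; every value below is a placeholder.
data = {
    'center': (-87.6298, 41.8781),  # lon, lat
    'zoom': 15,
    'dimensions': (2, 1),           # across > up selects landscape
}
print pdfer(data, page_size=PAGE_SIZES['letter'], output='jpeg')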
zoom = 7
geographic = {'lat': 52.31, 'lon': 13.24}
meters = {'mx': 1473870.058102942, 'my': 6856372.69101939}
pixels = {'px': 17589.134222222223, 'py': 21990.22649522623}
tile = {'tx': 68, 'ty': 85}
googleTile = {'tx': 68, 'ty': 42}
tileBounds = {
    'minx': 1252344.271424327,
    'miny': 6574807.42497772,
    'maxx': 1565430.3392804079,
    'maxy': 6887893.492833804
}
quadKey = "1202120"

result = gm.LatLonToMeters(geographic['lat'], geographic['lon'])
print(result)
result = gm.MetersToLatLon(meters['mx'], meters['my'])
print(result)
result = gm.MetersToPixels(meters['mx'], meters['my'], zoom)
print(result)
result = gm.PixelsToTile(pixels['px'], pixels['py'])
print(result)
result = gm.PixelsToMeters(pixels['px'], pixels['py'], zoom)
print(result)
result = gm.TileBounds(tile['tx'], tile['ty'], zoom)
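# A sketch of how these fixture values might be checked against the calls
# above; floats are compared with a tolerance rather than exact equality.
assert abs(result[0] - tileBounds['minx']) < 1e-6
assert abs(result[3] - tileBounds['maxy']) < 1e-6
assert gm.GoogleTile(tile['tx'], tile['ty'], zoom) == (googleTile['tx'],
                                                       googleTile['ty'])
assert gm.QuadTree(tile['tx'], tile['ty'], zoom) == quadKey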
class TileGrid(object):

    def __init__(self, bearing=0.0, zoomlevel=16,
                 lat=decimal.Decimal('32.018300'),
                 lon=decimal.Decimal('34.898161'), parent=None):
        # set initial values
        self.parent = parent
        self.bearingSensitivity = decimal.Decimal('0.00001')
        self.bearing = bearing
        self.zoomlevel = zoomlevel
        self.lat = lat
        self.lon = lon
        self.gx, self.gy = None, None
        self.velocity = 0.0
        self.sysPath = os.path.join(sys.path[0], "")
        self.mapPath = self.sysPath
        self.maxZoomLevel = 16
        self.destlat = decimal.Decimal('32.776250')
        self.destlon = decimal.Decimal('35.028946')
        self.distance = 0
        self.setBounds(parent.geometry().width(),
                       parent.geometry().height())
        self.halfboundx = math.ceil(self.boundx / 2)
        self.halfboundy = math.ceil(self.boundy / 2)
        # make GlobalMercator instance
        self.mercator = GlobalMercator()
        # create pathways
        self.refresh()

    def setMapPath(self, path):
        if path != "":
            self.mapPath = path + "/"
            print self.mapPath

    def setBounds(self, newx, newy):
        self.boundx, self.boundy = newx, newy
        # pad half-bounds height and width so icons near the edge stay
        # visible
        self.halfboundx = int(math.ceil(self.boundx / 2)) + 31
        self.halfboundy = int(math.ceil(self.boundy / 2)) + 31

    def getOffset(self):
        'get pixel offset of coordinates from 0,0 of tile'
        offpx = self.px - self.tx * self.mercator.tileSize
        # the y pixel coordinate system begins from the top
        offpy = self.mercator.tileSize - (
            self.py - self.ty * self.mercator.tileSize)
        return offpx, offpy

    def moveTo(self, lat, lon, calculateBearing=True):
        'move position to lat, lon and update all properties'
        # update bearing from previous position
        if calculateBearing:
            if abs(lon - self.lon) > self.bearingSensitivity \
                    or abs(lat - self.lat) > self.bearingSensitivity:
                self.bearing = math.degrees(
                    math.atan2(lon - self.lon, lat - self.lat))
                if self.bearing < 0:
                    self.bearing += 360
        self.lat = lat
        self.lon = lon

        # get meters from lat/lon
        mx, my = self.mercator.LatLonToMeters(float(self.lat),
                                              float(self.lon))
        # get pixels from meters
        self.px, self.py = self.mercator.MetersToPixels(mx, my,
                                                        self.zoomlevel)
        # get tile from pixels
        self.tx, self.ty = self.mercator.PixelsToTile(int(self.px),
                                                      int(self.py))
        # get google tile
        self.gx, self.gy = self.mercator.GoogleTile(self.tx, self.ty,
                                                    self.zoomlevel)
        # update offset of tile
        self.offpx, self.offpy = self.getOffset()

        # TODO: calculate loadRect bounds for peripheral tiles
        self.sizex = int(math.ceil(self.boundx / self.mercator.tileSize)) + 1
        self.sizey = int(math.ceil(self.boundy / self.mercator.tileSize)) + 1
        halfdeltax = int(math.ceil(self.sizex / 2)) + 1
        halfdeltay = int(math.ceil(self.sizey / 2)) + 1

        self.images = set()
        offtilex = halfdeltax * self.mercator.tileSize + self.offpx
        for x in range(self.gx - halfdeltax, self.gx + halfdeltax + 1):
            offtiley = halfdeltay * self.mercator.tileSize + self.offpy
            for y in range(self.gy - halfdeltay, self.gy + halfdeltay + 1):
                fname = "%i/gm_%i_%i_%i.png" % (self.zoomlevel, x, y,
                                                self.zoomlevel)
                if not QtCore.QFile.exists(self.mapPath + fname):
                    fname = "404.png"
                self.images.add((QtCore.QPointF(-offtilex, -offtiley),
                                 fname))
                offtiley -= self.mercator.tileSize
            offtilex -= self.mercator.tileSize

        self.bounds = {
            'TL': (self.px - self.halfboundx, self.py - self.halfboundy),
            'TR': (self.px + self.halfboundx, self.py - self.halfboundy),
            'BR': (self.px + self.halfboundx, self.py + self.halfboundy),
            'BL': (self.px - self.halfboundx, self.py + self.halfboundy)
        }

        self.pathways = QtGui.QPolygonF()
        # create waypoint icons
        self.visible_waypoints = set()
        for wp in self.waypoints_pixels:
            x = wp[0] - self.px
            y = self.py - wp[1]
            self.pathways.append(QtCore.QPointF(x, y))
            if self.isVisible(wp):
                self.visible_waypoints.add(QtCore.QPointF(x - 15, y - 32))

    def isVisible(self, wp):
        return wp[0] > self.bounds['TL'][0] and \
               wp[0] < self.bounds['TR'][0] and \
               wp[1] > self.bounds['TL'][1] and \
               wp[1] < self.bounds['BL'][1]

    def refresh(self):
        self.waypoints_pixels = set()
        for wp in self.parent.waypoint:
            wpm = self.mercator.LatLonToMeters(wp[0], wp[1])
            wppx = self.mercator.MetersToPixels(wpm[0], wpm[1],
                                                self.zoomlevel)
            self.waypoints_pixels.add(wppx)
        self.moveTo(self.lat, self.lon)

    def setZoom(self, zoom):
        self.zoomlevel = zoom
        self.refresh()

    def zoomIn(self):
        if self.zoomlevel < self.maxZoomLevel:
            self.zoomlevel += 1
            self.refresh()

    def zoomOut(self):
        if self.zoomlevel > 0:
            self.zoomlevel -= 1
            self.refresh()

    def setBearing(self, bear):
        self.bearing = bear

    def setVelocity(self, vel):
        self.velocity = vel
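# A self-contained sketch of the getOffset arithmetic above, assuming only
# GlobalMercator (no Qt): the pixel offset of a position within its tile,
# with the y offset flipped because screen y grows downward.
merc = GlobalMercator()
mx, my = merc.LatLonToMeters(32.018300, 34.898161)
px, py = merc.MetersToPixels(mx, my, 16)
tx, ty = merc.PixelsToTile(int(px), int(py))
offpx = px - tx * merc.tileSize
offpy = merc.tileSize - (py - ty * merc.tileSize)
print offpx, offpy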
class OsmHandler(ContentHandler):
    """Base class for parsing OSM XML data"""

    def __init__(self, client):
        self.proj = GlobalMercator()
        self.nodeRecords = []
        self.wayRecords = []
        self.relationRecords = []
        self.record = {}
        self.nodeLocations = {}
        self.client = client
        self.stats = {'nodes': 0, 'ways': 0, 'relations': 0}
        self.lastStatString = ""
        self.statsCount = 0

    def writeStatsToScreen(self):
        for char in self.lastStatString:
            sys.stdout.write('\b')
        self.lastStatString = "%d nodes, %d ways, %d relations" % (
            self.stats['nodes'], self.stats['ways'],
            self.stats['relations'])
        sys.stdout.write(self.lastStatString)

    def fillDefault(self, attrs):
        """Fill in default record values"""
        self.record['_id'] = int(attrs['id'])
        self.record['ts'] = self.isoToTimestamp(attrs['timestamp'])
        self.record['tg'] = []
        if attrs.has_key('user'):
            self.record['u'] = attrs['user']
        if attrs.has_key('uid'):
            self.record['uid'] = int(attrs['uid'])
        if attrs.has_key('version'):
            self.record['v'] = int(attrs['version'])
        if attrs.has_key('changeset'):
            self.record['c'] = int(attrs['changeset'])

    def isoToTimestamp(self, isotime):
        """Parse a date and return a time tuple"""
        t = datetime.strptime(isotime, "%Y-%m-%dT%H:%M:%SZ")
        return time.mktime(t.timetuple())

    def quadKey(self, lat, lon, zoom):
        (mx, my) = self.proj.LatLonToMeters(lat, lon)
        (tx, ty) = self.proj.MetersToTile(mx, my, zoom)
        return self.proj.QuadTree(tx, ty, zoom)

    def startElement(self, name, attrs):
        """Parse the XML element at the start"""
        if name == 'node':
            self.fillDefault(attrs)
            self.record['loc'] = {
                'lat': float(attrs['lat']),
                'lon': float(attrs['lon'])
            }
            self.record['qk'] = self.quadKey(float(attrs['lat']),
                                             float(attrs['lon']), 17)
            self.nodeLocations[self.record['_id']] = self.record['qk']
        elif name == 'changeset':
            self.fillDefault(attrs)
        elif name == 'tag':
            k = attrs['k']
            v = attrs['v']
            # MongoDB doesn't let us have dots in the key names.
            #k = k.replace('.', ',,')
            self.record['tg'].append((k, v))
        elif name == 'way':
            self.fillDefault(attrs)
            self.record['n'] = []
            self.record['loc'] = []
        elif name == 'relation':
            self.fillDefault(attrs)
            self.record['m'] = []
        elif name == 'nd':
            ref = int(attrs['ref'])
            self.record['n'].append(ref)
            refLoc = self.nodeLocations[ref]
            if refLoc not in self.record['loc']:
                self.record['loc'].append(refLoc)
        elif name == 'member':
            ref = int(attrs['ref'])
            member = {'type': attrs['type'], 'ref': ref,
                      'role': attrs['role']}
            self.record['m'].append(member)
            if attrs['type'] == 'way':
                ways2relations = self.client.osm.ways.find_one({'_id': ref})
                if ways2relations:
                    if 'relations' not in ways2relations:
                        ways2relations['relations'] = []
                    ways2relations['relations'].append(self.record['_id'])
                    self.client.osm.ways.save(ways2relations)
            elif attrs['type'] == 'node':
                nodes2relations = self.client.osm.nodes.find_one(
                    {'_id': ref})
                if nodes2relations:
                    if 'relations' not in nodes2relations:
                        nodes2relations['relations'] = []
                    nodes2relations['relations'].append(self.record['_id'])
                    self.client.osm.nodes.save(nodes2relations)

    def endElement(self, name):
        """Finish parsing an element
        (only really used with nodes, ways and relations)"""
        if name == 'node':
            self.nodeRecords.append(self.record)
            if len(self.nodeRecords) > 1500:
                self.client.osm.nodes.insert(self.nodeRecords)
                self.nodeRecords = []
                self.writeStatsToScreen()
            self.record = {}
            self.stats['nodes'] = self.stats['nodes'] + 1
        elif name == 'way':
            # Clean up any existing nodes
            if len(self.nodeRecords) > 0:
                self.client.osm.nodes.insert(self.nodeRecords)
                self.nodeRecords = []
            self.wayRecords.append(self.record)
            if len(self.wayRecords) > 100:
                self.client.osm.ways.insert(self.wayRecords)
                self.wayRecords = []
                self.writeStatsToScreen()
            self.record = {}
            self.stats['ways'] = self.stats['ways'] + 1
        elif name == 'relation':
            self.client.osm.relations.save(self.record)
            self.record = {}
            self.statsCount = self.statsCount + 1
            if self.statsCount > 10:
                self.writeStatsToScreen()
                self.statsCount = 0
            self.stats['relations'] = self.stats['relations'] + 1
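# A usage sketch, assuming a local MongoDB and an OSM XML extract on disk;
# both names are placeholders, and the insert/save collection calls above
# assume an older pymongo API.
import xml.sax
from pymongo import MongoClient

client = MongoClient()
handler = OsmHandler(client)
xml.sax.parse(open('map.osm'), handler)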