def __init__(self, bearing=0.0, zoomlevel=16,
             lat=decimal.Decimal('32.018300'),
             lon=decimal.Decimal('34.898161'), parent=None):
    # set initial values
    self.parent = parent
    self.bearingSensitivity = decimal.Decimal('0.00001')
    self.bearing = bearing
    self.zoomlevel = zoomlevel
    self.lat = lat
    self.lon = lon
    self.gx, self.gy = None, None
    self.velocity = 0.0
    self.sysPath = os.path.join(sys.path[0], "")
    self.mapPath = self.sysPath
    self.maxZoomLevel = 16
    self.destlat = decimal.Decimal('32.776250')
    self.destlon = decimal.Decimal('35.028946')
    self.distance = 0
    self.setBounds(parent.geometry().width(), parent.geometry().height())
    self.halfboundx = math.ceil(self.boundx / 2)
    self.halfboundy = math.ceil(self.boundy / 2)
    # make GlobalMercator instance
    self.mercator = GlobalMercator()
    # create pathways
    self.refresh()
class OsmApi:
    def __init__(self):
        self.client = Connection(host="mongomaster")
        self.proj = GlobalMercator()

    def getTile(self, zoom, x, y):
        (x, y) = self.proj.GoogleTile(x, y, zoom)
        quadkey = self.proj.QuadTree(x, y, zoom)
        print "Querying for %s." % (quadkey, )
        (minlat, minlon, maxlat, maxlon) = self.proj.TileLatLonBounds(x, y, zoom)

        # Nodes in the tile
        nodes = {}
        cursor = self.client.osm.nodes.find(
            {'qk': {'$regex': "^%s" % (quadkey, )}})
        for row in cursor:
            nodes[row['_id']] = row

        # Ways with nodes in the tile
        ways = {}
        cursor = self.client.osm.ways.find(
            {'loc': {'$regex': "^%s" % (quadkey, )}})
        for row in cursor:
            ways[row['_id']] = row

        # Nodes on ways that extend beyond the bounding box
        otherNids = set()
        for way in ways.values():
            for nid in way['nodes']:
                otherNids.add(nid)
        cursor = self.client.osm.nodes.find({'_id': {'$in': list(otherNids)}})
        for row in cursor:
            nodes[row['_id']] = row

        # Relations that contain any of the above as members
        relations = {}

        # Sort the results by id
        nodes = sorted(nodes.iteritems())
        ways = sorted(ways.iteritems())
        relations = sorted(relations.iteritems())

        doc = {
            'bounds': {
                'minlat': minlat,
                'minlon': minlon,
                'maxlat': maxlat,
                'maxlon': maxlon
            },
            'nodes': nodes,
            'ways': ways,
            'relations': relations
        }
        return doc
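A minimal usage sketch of the class above. The tile address here is hypothetical, and it assumes the MongoDB instance at "mongomaster" with the osm.nodes and osm.ways collections (quadkey-indexed) is actually reachable:

# Hedged usage sketch: zoom/x/y are illustrative Google tile coordinates,
# and a running MongoDB at "mongomaster" is assumed.
api = OsmApi()
doc = api.getTile(15, 19294, 13108)
print "tile holds %d nodes and %d ways" % (len(doc['nodes']), len(doc['ways']))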
def create_raster_worldfile(self, path, xy_range=None):
    from globalmaptiles import GlobalMercator
    x_y = xy_range or self.xy_range
    im = Image.open(path)
    gw_path = ''.join(os.path.split(path)[-1].split('.')[:-1])
    world_file_path = os.path.join(
        os.path.curdir, os.path.join(self.output_dir, "%s.jgw" % gw_path))
    with open(world_file_path, 'w') as world:
        min_y, min_x = num2deg(x_y['xMin'], x_y['yMax'] + 1, self.zoom)
        max_y, max_x = num2deg(x_y['xMax'] + 1, x_y['yMin'], self.zoom)
        gm = GlobalMercator()
        min_x, min_y = gm.LatLonToMeters(min_y, min_x)
        max_x, max_y = gm.LatLonToMeters(max_y, max_x)
        x_pixel_size = (max_x - min_x) / im.size[0]
        y_pixel_size = (max_y - min_y) / im.size[1]
        # The file is opened in text mode, so write plain strings
        # (the original passed bytes literals, which raises TypeError).
        world.write("%f\n" % x_pixel_size)  # pixel size in the x-direction in map units/pixel
        world.write("%f\n" % 0)             # rotation about y-axis
        world.write("%f\n" % 0)             # rotation about x-axis
        world.write("%f\n" % -abs(y_pixel_size))  # pixel size in the y-direction in map units; always negative
        world.write("%f\n" % min_x)         # x-coordinate of the center of the upper left pixel
        world.write("%f\n" % max_y)         # y-coordinate of the center of the upper left pixel
def main(tiles_path, db_file, groups, zoom_levels):
    merc = GlobalMercator()

    # Set up the output db
    conn = sqlite3.connect(db_file)
    c = conn.cursor()

    for zoom in [zoom_levels]:  # TODO: zoom levels
        results_set = c.execute(
            "select x, y, quadkey, group_type from people_by_group "
            "order by quadkey asc, rand asc")
        use_ellipse, radius_rel, gamma, os_scale = STYLE[zoom]
        radius = os_scale * radius_rel / 4 / 2
        quadkey = None
        img = None
        for i, r in enumerate(results_set):
            if i % 1000 == 0:
                print i
            x = float(r[0])
            y = float(r[1])
            next_quadkey = r[2][:zoom]
            group = r[3]
            if next_quadkey != quadkey:
                # finish last tile
                if img:
                    save_tile(img, tiles_path, zoom, gtx, gty)
                quadkey = next_quadkey
                tx, ty = merc.MetersToTile(x, y, zoom)
                gtx, gty = merc.GoogleTile(tx, ty, zoom)
                img = Image.new("RGB", (TILE_X * os_scale, TILE_Y * os_scale), "white")
                draw = ImageDraw.Draw(img)
                minx, miny, maxx, maxy = (c / A for c in merc.TileBounds(tx, ty, zoom))
                xscale = (TILE_X * os_scale) / (maxx - minx)
                yscale = (TILE_Y * os_scale) / (maxy - miny)
                #print 'minx', minx, 'miny', miny, 'maxx', maxx, 'maxy', maxy
                #print 'xscale', xscale, 'yscale', yscale
                #print 'x', x, 'y', y, 'tx', tx, 'ty', ty

            # Translate coordinates to tile-relative, google-ready coordinates
            rx = (x / A - minx) * xscale
            ry = (maxy - y / A) * yscale
            fill = ImageColor.getrgb(groups[group]['color'])
            if use_ellipse:
                draw.ellipse((rx - radius, ry - radius, rx + radius, ry + radius), fill=fill)
            else:
                draw.point((rx, ry), fill=fill)
            #print "Draw at ", (rx - radius, ry - radius, rx + radius, ry + radius), ImageColor.getrgb(groups[group]['color'])

        save_tile(img, tiles_path, zoom, gtx, gty)
    save_defined_tiles(tiles_path)
def __init__(self, min_lat, min_lon, max_lat, max_lon, width, max_zoom=18):
    self.tiles = []
    self.min_lat = min_lat
    self.min_lon = min_lon
    self.max_lat = max_lat
    self.max_lon = max_lon
    self.mercator = GlobalMercator()
    self.downloader = Downloader()
    # count how many horizontal tiles we need
    self.x_tiles_needed = math.ceil(width / self.TILE_WIDTH)
    self.max_zoom = max_zoom
def __init__(self, client):
    self.proj = GlobalMercator()
    self.nodeRecords = []
    self.wayRecords = []
    self.relationRecords = []
    self.record = {}
    self.nodeLocations = {}
    self.client = client
    self.stats = {'nodes': 0, 'ways': 0, 'relations': 0}
    self.lastStatString = ""
    self.statsCount = 0
def process_vectors_in_dir(self, rootdir):
    self.gm = GlobalMercator()
    num_images = self.count_rasters_in_dir(rootdir) * pow(
        self.tile_size / self.thumb_size, 2)
    print("num_images is {} in {}".format(num_images, rootdir))

    labels = None
    if self.train_vector_tiles_dir == rootdir:
        self.train_labels = numpy.zeros(num_images * 2, dtype=numpy.float32)
        self.train_labels = self.train_labels.reshape(num_images, 2)
        labels = self.train_labels
    else:
        self.test_labels = numpy.zeros(num_images * 2, dtype=numpy.float32)
        self.test_labels = self.test_labels.reshape(num_images, 2)
        labels = self.test_labels

    index = 0
    for folder, subs, files in os.walk(rootdir):
        for filename in files:
            if not filename.endswith('.json'):
                continue
            has_ways = False
            with open(os.path.join(folder, filename), 'r') as src:
                linestrings = self.linestrings_for_vector_tile(src)
            tile_matrix = self.empty_tile_matrix()
            tile = self.tile_for_folder_and_filename(folder, filename, rootdir)
            for linestring in linestrings:
                # check if the tile has any linestrings to set its one-hot
                tile_matrix = self.add_linestring_to_matrix(
                    linestring, tile, tile_matrix)
            # self.print_matrix(tile_matrix)
            # print '\n\n\n'

            # Now set the one_hot value for this label
            for y in range(int(self.tile_size / self.thumb_size)):
                for x in range(int(self.tile_size / self.thumb_size)):
                    for tmy in range(self.thumb_size):
                        for tmx in range(self.thumb_size):
                            if tile_matrix[tmx][tmy] == 1:
                                has_ways = True
                    if has_ways:
                        labels[index][0] = 1
                    else:
                        labels[index][1] = 1
                    index += 1
def __init__(self, mapdir, minzoom, maxzoom):
    self.mercator = GlobalMercator(256)
    self.minzoom = minzoom
    self.maxzoom = maxzoom
    self.TopRightLat = None
    self.TopRightLon = None
    self.BottomLeftLat = None
    self.BottomLeftLon = None
    self.mminx = None
    self.mminy = None
    self.mmaxx = None
    self.mmaxy = None
    self.mapdir = mapdir
    self.jobs = Queue.Queue()
def GetGridID(Coord):
    lat = Coord[0] / 1000
    lon = Coord[1] / 1000
    tz = 8
    mercator = GlobalMercator()
    mx, my = mercator.LatLonToMeters(Coord[0] / 1000.0, Coord[1] / 1000.0)
    tx, ty = mercator.MetersToTile(mx, my, tz)
    gx, gy = mercator.GoogleTile(tx, ty, tz)
    # print "\tGoogle:", gx, gy
    # print tx, ty
    return ("%03d" % gx) + ("%03d" % gy)
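Note that GetGridID expects coordinates pre-multiplied by 1000 (milli-degrees). A minimal usage sketch with an illustrative coordinate, assuming globalmaptiles.py is on the path:

# Hedged sketch: the coordinate is illustrative, scaled as lat/lon * 1000.
coord = (32018.300, 34898.161)  # i.e. (32.018300, 34.898161)
print GetGridID(coord)          # zero-padded Google tile x and y at zoom 8, concatenated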
def __init__(self, renderer, cache_dir):
    super(CustomMapLayer, self).__init__(renderer)
    self.cache_dir = cache_dir
    self.mercator = GlobalMercator()
    self.tileloader = None
    if self.tiles is not None:
        map_envelope = self.m.envelope()
        # map_envelope is in mercator projection, convert it to
        # long/lat projection
        envelope = renderer.merc_to_lnglat(map_envelope)
        min_lon = envelope.minx
        min_lat = envelope.miny
        max_lon = envelope.maxx
        max_lat = envelope.maxy
        width = self.m.width
        indexing = self.tiles.get('indexing')
        max_zoom = self.tiles.get('maxZoom')
        if indexing == 'google':
            self.tileloader = GoogleTileLoader(min_lat, min_lon, max_lat,
                                               max_lon, width, max_zoom)
        elif indexing == 'tms':
            self.tileloader = TMSTileLoader(min_lat, min_lon, max_lat,
                                            max_lon, width, max_zoom)
        elif indexing == 'f':
            self.tileloader = FTileLoader(min_lat, min_lon, max_lat,
                                          max_lon, width, max_zoom)
def main():
    merc = GlobalMercator()
    file = open('pts1990.csv', 'rb')
    reader = csv.DictReader(file, delimiter=',')
    print "x,y,quad,category"
    for row in reader:
        lat = float(row['lat'])
        lon = float(row['long'])  # renamed from "long", which shadows the builtin
        x, y = merc.LatLonToMeters(lat, lon)
        tx, ty = merc.MetersToTile(x, y, 21)

        # Create a unique quadkey for each point object
        quadkey = merc.QuadTree(tx, ty, 21)

        # Create categorical variable for the race category
        # Export data to the database file
        print "{},{},{},{}".format(x, y, quadkey, row['group'])
def ImageryRequest(tileStr):
    tile = tileRequest(tileStr)
    z = tile.zoom
    x = tile.tx
    y = tile.ty
    downloadedTileList = os.listdir('DownloadedTiles/')
    tileFileName = str(z) + '.' + str(y) + '.' + str(x) + '.png'
    print(x, y, z)
    tilesize = 256
    tx = tile.tx
    ty = tile.ty
    zoom = tile.zoom
    px = tx * tilesize
    py = ty * tilesize
    gm = GlobalMercator()
    mx1, my1 = gm.PixelsToMeters(px, py, zoom)
    mx2, my2 = gm.PixelsToMeters(px + tilesize, py + tilesize, zoom)
    print(mx1, -my2, mx2, -my1)
    os.system('rm Subset.TIF')
    os.system('gdalwarp -q -t_srs epsg:3857 -te ' + str(mx1) + ' ' + str(-my2) +
              ' ' + str(mx2) + ' ' + str(-my1) +
              ' -r Lanczos -ts 256 256 Warped.TIF Subset.TIF')
    # Open the image
    tileImage = Image.open('Subset.TIF')
    # Turn the image into a string
    buffer_image = StringIO()
    tileImage.save(buffer_image, 'png')
    buffer_image.seek(0)
    # Send the string
    return send_file(buffer_image, mimetype='image/png')
parser.add_option(
    '-f', '--format',
    action='store',
    dest='format',
    default='',
    help='tile image format',
)
(options, args) = parser.parse_args()

# parse the bounds
boundsarr = options.bounds.split(';')
lonarr = sorted([float(boundsarr[0]), float(boundsarr[2])])
latarr = sorted([float(boundsarr[1]), float(boundsarr[3])])
z = int(options.zoom)

gm = GlobalMercator()

# Convert bounds to meters
mx0, my0 = gm.LatLonToMeters(latarr[0], lonarr[0])
mx1, my1 = gm.LatLonToMeters(latarr[1], lonarr[1])

# get TMS tile address range
tx0, ty0 = gm.MetersToTile(mx0, my0, z)
tx1, ty1 = gm.MetersToTile(mx1, my1, z)

# sort the tile addresses low to high
xarr = sorted([tx0, tx1])
yarr = sorted([ty0, ty1])

# figure out relevant extensions
extension = "." + options.format  # getExtension(options.template)
wf = getWorldFileExtension(extension)

# create the destination location using the z value
root = options.destination + '/' + str(z)
try:
class OSMDataNormalizer:

    def __init__(self, mapzen_key):
        self.mapzen_key = mapzen_key
        self.tile_size = 256

        # the square size to chop the imagery up into for analysis
        self.thumb_size = 8

        # select a random half of tiles for training
        self.train_vector_tiles_dir = self.make_directory(
            "data/train/vector-tiles", full_path=True)
        self.train_raster_tiles_dir = self.make_directory(
            "data/train/raster-tiles", full_path=True)

        # select a random half of tiles for testing
        self.test_vector_tiles_dir = self.make_directory(
            "data/test/vector-tiles", full_path=True)
        self.test_raster_tiles_dir = self.make_directory(
            "data/test/raster-tiles", full_path=True)

        # put even tiles in train, odd tiles in test
        self.download_count = 0

    def make_directory(self, new_dir, full_path=False):
        '''try to make a new directory'''
        if full_path:
            path = ''
            for token in new_dir.split('/'):
                path += token + '/'
                try:
                    os.mkdir(path)
                except:
                    pass
            return path

        try:
            os.mkdir(new_dir)
        except:
            pass
        return new_dir

    def default_bounds_to_analyze(self):
        '''analyze a small chunk around Yosemite Village, by default'''
        yosemite_village_bb = BoundingBox()
        yosemite_village_bb.northeast.lat = 37.81385
        yosemite_village_bb.northeast.lon = -119.48559
        yosemite_village_bb.southwest.lat = 37.66724
        yosemite_village_bb.southwest.lon = -119.72454
        return yosemite_village_bb

    def default_zoom(self):
        '''analyze tiles at TMS zoom level 15, by default'''
        return 15

    def default_vector_tile_base_url(self):
        '''the default server to get vector data to train on'''
        return 'http://vector.mapzen.com/osm/'

    def default_raster_tile_base_url(self):
        '''the default server to get satellite imagery to analyze'''
        return 'http://otile2.mqcdn.com/tiles/1.0.0/sat/'

    def download_tiles(self):
        '''download raster satellite and geojson tiles for the region to be analyzed'''
        bounding_box = self.default_bounds_to_analyze()
        zoom = self.default_zoom()
        tile_download_count = 0
        for tile in self.tiles_for_bounding_box(bounding_box, zoom):
            tile_download_count += 1

            vector_tiles_dir = self.train_vector_tiles_dir
            if tile_download_count % 2 == 0:
                vector_tiles_dir = self.test_vector_tiles_dir
            self.download_tile(self.default_vector_tile_base_url(),
                               'json',
                               vector_tiles_dir,
                               tile,
                               suffix='?api_key={}'.format(self.mapzen_key),
                               layers='roads')

            raster_tiles_dir = self.train_raster_tiles_dir
            if tile_download_count % 2 == 0:
                raster_tiles_dir = self.test_raster_tiles_dir
            self.download_tile(self.default_raster_tile_base_url(),
                               'jpg', raster_tiles_dir, tile)

    def tiles_for_bounding_box(self, bounding_box, zoom):
        '''returns a list of MercatorTiles that intersect the bounding_box
           at the given zoom'''
        tile_array = []
        ne_tile = self.tile_with_coordinates_and_zoom(bounding_box.northeast, zoom)
        sw_tile = self.tile_with_coordinates_and_zoom(bounding_box.southwest, zoom)

        min_x = min(ne_tile.x, sw_tile.x)
        min_y = min(ne_tile.y, sw_tile.y)
        max_y = max(ne_tile.y, sw_tile.y)
        max_x = max(ne_tile.x, sw_tile.x)
        for y in range(min_y, max_y):
            for x in range(min_x, max_x):
                new_tile = MercatorTile()
                new_tile.x = x
                new_tile.y = y
                new_tile.z = zoom
                tile_array.append(new_tile)
        return tile_array

    def tile_with_coordinates_and_zoom(self, coordinates, zoom):
        '''returns a MercatorTile for the given coordinates and zoom'''
        scale = (1 << zoom)
        normalized_point = self.normalize_pixel_coords(coordinates)
        return MercatorTile(int(normalized_point.lat * scale),
                            int(normalized_point.lon * scale),
                            int(zoom))

    def normalize_pixel_coords(self, coord):
        '''convert lat/lon to normalized tile-space coordinates in [0, 1]'''
        if coord.lon > 180:
            coord.lon -= 360
        coord.lon /= 360.0
        coord.lon += 0.5
        coord.lat = 0.5 - ((math.log(
            math.tan((math.pi / 4) +
                     ((0.5 * math.pi * coord.lat) / 180.0))) / math.pi) / 2.0)
        return coord

    def download_tile(self, base_url, format, directory, tile,
                      suffix='', layers=None):
        '''download a map tile from a TMS server'''
        url = self.url_for_tile(base_url, format, tile, suffix, layers)
        print('DOWNLOADING: ' + url)
        z_dir = directory + str(tile.z)
        y_dir = z_dir + "/" + str(tile.y)
        self.make_directory(z_dir)
        self.make_directory(y_dir)
        filename = '{}.{}'.format(tile.x, format)
        download_path = y_dir + "/"
        urllib.request.urlretrieve(url, download_path + filename)
        if format == 'jpg':
            self.chop_tile(download_path, filename)

    def chop_tile(self, path, filename):
        subdir = path + filename.split('.')[0]
        try:
            os.mkdir(subdir)
        except:
            pass
        height = self.thumb_size
        width = self.thumb_size
        input = path + filename
        im = Image.open(input)
        imgwidth, imgheight = im.size

        img_count = 0
        for y in range(int(self.tile_size / self.thumb_size)):
            for x in range(int(self.tile_size / self.thumb_size)):
                box = (x * self.thumb_size, y * self.thumb_size,
                       x * self.thumb_size + self.thumb_size,
                       y * self.thumb_size + self.thumb_size)
                a = im.crop(box)
                chunk_path = subdir + '/' + str(img_count) + '.jpg'
                if img_count < 10:
                    chunk_path = subdir + '/' + '0000' + str(img_count) + '.jpg'
                elif img_count < 100:
                    chunk_path = subdir + '/' + '000' + str(img_count) + '.jpg'
                elif img_count < 1000:
                    chunk_path = subdir + '/' + '00' + str(img_count) + '.jpg'
                elif img_count < 10000:
                    chunk_path = subdir + '/' + '0' + str(img_count) + '.jpg'
                a.save(chunk_path)
                img_count += 1
        os.remove(path + filename)

    def url_for_tile(self, base_url, format, tile, suffix='', layers=None):
        '''compose a URL for a TMS server'''
        filename = '{}.{}'.format(tile.x, format)
        url = base_url
        if layers:
            url += '{}/'.format(layers)
        url = url + '{}/{}/{}{}'.format(tile.z, tile.y, filename, suffix)
        return url

    def process_geojson(self):
        '''convert geojson vector tiles to 256 x 256 matrices;
           matrix is 1 if the pixel has road on it, 0 if not'''
        self.process_vectors_in_dir(self.train_vector_tiles_dir)
        self.process_vectors_in_dir(self.test_vector_tiles_dir)

    def process_vectors_in_dir(self, rootdir):
        self.gm = GlobalMercator()
        num_images = self.count_rasters_in_dir(rootdir) * pow(
            self.tile_size / self.thumb_size, 2)
        print("num_images is {} in {}".format(num_images, rootdir))

        labels = None
        if self.train_vector_tiles_dir == rootdir:
            self.train_labels = numpy.zeros(num_images * 2, dtype=numpy.float32)
            self.train_labels = self.train_labels.reshape(num_images, 2)
            labels = self.train_labels
        else:
            self.test_labels = numpy.zeros(num_images * 2, dtype=numpy.float32)
            self.test_labels = self.test_labels.reshape(num_images, 2)
            labels = self.test_labels

        index = 0
        for folder, subs, files in os.walk(rootdir):
            for filename in files:
                if not filename.endswith('.json'):
                    continue
                has_ways = False
                with open(os.path.join(folder, filename), 'r') as src:
                    linestrings = self.linestrings_for_vector_tile(src)
                tile_matrix = self.empty_tile_matrix()
                tile = self.tile_for_folder_and_filename(folder, filename, rootdir)
                for linestring in linestrings:
                    # check if the tile has any linestrings to set its one-hot
                    tile_matrix = self.add_linestring_to_matrix(
                        linestring, tile, tile_matrix)
                # self.print_matrix(tile_matrix)
                # print '\n\n\n'

                # Now set the one_hot value for this label
                for y in range(int(self.tile_size / self.thumb_size)):
                    for x in range(int(self.tile_size / self.thumb_size)):
                        for tmy in range(self.thumb_size):
                            for tmx in range(self.thumb_size):
                                if tile_matrix[tmx][tmy] == 1:
                                    has_ways = True
                        if has_ways:
                            labels[index][0] = 1
                        else:
                            labels[index][1] = 1
                        index += 1

    def process_rasters(self):
        '''convert raster satellite tiles to 256 x 256 matrices;
           floats represent some color info about each pixel, to help in a
           tensorflow data pipeline, from
           https://github.com/silberman/polygoggles/blob/master/datasets.py'''
        self.train_images = self.process_rasters_in_dir(self.train_raster_tiles_dir)
        self.test_images = self.process_rasters_in_dir(self.test_raster_tiles_dir)
        print("analyzing {} training images and {} test images".format(
            len(self.train_images), len(self.test_images)))

    def process_rasters_in_dir(self, rootdir):
        '''descends through a TMS tile structure and converts the images to a
           matrix of dimensions num_images * width * height, dtype=numpy.uint8'''
        height = self.thumb_size
        width = self.thumb_size
        num_images = self.count_rasters_in_dir(rootdir)
        images = numpy.zeros(num_images * width * height, dtype=numpy.uint8)
        images = images.reshape(num_images, height, width)
        index = 0
        for folder, subs, files in os.walk(rootdir):
            for filename in files:
                if not filename.endswith('.jpg'):
                    continue
                tile = self.tile_for_folder_and_filename(folder, filename, rootdir)
                image_filename = os.path.join(folder, filename)
                with open(image_filename, 'rb') as img_file:
                    with Image.open(img_file) as open_pil_img:
                        pil_image = open_pil_img.convert("L")
                        pil_image = ImageOps.invert(pil_image)
                image_matrix = numpy.asarray(pil_image, dtype=numpy.uint8)
                images[index] = image_matrix
                index += 1
        print("Packing {} images to a matrix of size num_images * width * height, dtype=numpy.uint8"
              .format(index))
        # Reshape to add a depth dimension
        return images.reshape(num_images, width, height, 1)

    def count_rasters_in_dir(self, rootdir):
        num_images = 0
        for folder, subs, files in os.walk(rootdir):
            for filename in files:
                num_images += 1
        return num_images

    def tile_for_folder_and_filename(self, folder, filename, directory):
        '''the MercatorTile for a given path to a file on disk'''
        dir_string = folder.split(directory)
        try:
            z, x = dir_string[1].split('/')
            y = filename.split('.')[0]
        except:
            # it's a tile cropping
            z, x, y = dir_string[1].split('/')
        return MercatorTile(int(x), int(y), int(z))

    def linestrings_for_vector_tile(self, file_data):
        '''flatten linestrings and multilinestrings in a geojson tile
           to a list of linestrings'''
        features = json.loads(file_data.read())['features']
        linestrings = []
        count = 0
        for f in features:
            if f['geometry']['type'] == 'LineString':
                linestring = f['geometry']['coordinates']
                linestrings.append(linestring)
            if f['geometry']['type'] == 'MultiLineString':
                for ls in f['geometry']['coordinates']:
                    linestrings.append(ls)
        return linestrings

    def add_linestring_to_matrix(self, linestring, tile, matrix):
        '''add a pixel linestring to the matrix for a given tile'''
        line_matrix = self.pixel_matrix_for_linestring(linestring, tile)
        for x in range(0, self.tile_size):
            for y in range(0, self.tile_size):
                if line_matrix[x][y]:
                    matrix[x][y] = line_matrix[x][y]
        return matrix

    def print_matrix(self, matrix):
        '''print an ascii matrix in the console'''
        for row in numpy.rot90(numpy.fliplr(matrix)):
            row_str = ''
            for cell in row:
                row_str += str(cell)
            print(row_str)

    def empty_tile_matrix(self):
        '''initialize the array to all zeroes'''
        tile_matrix = []
        for x in range(0, self.tile_size):
            tile_matrix.append([])
            for y in range(0, self.tile_size):
                tile_matrix[x].append(0)
        return tile_matrix

    def pixel_matrix_for_linestring(self, linestring, tile):
        '''set pixel_matrix to 1 for every point between all points
           on the linestring'''
        line_matrix = self.empty_tile_matrix()
        zoom = tile.z

        count = 0
        for current_point in linestring:
            if count == len(linestring) - 1:
                break
            next_point = linestring[count + 1]
            current_point_obj = Coordinate(current_point[1], current_point[0])
            next_point_obj = Coordinate(next_point[1], next_point[0])

            start_pixel = self.fromLatLngToPoint(current_point_obj.lat,
                                                 current_point_obj.lon, tile)
            end_pixel = self.fromLatLngToPoint(next_point_obj.lat,
                                               next_point_obj.lon, tile)
            pixels = self.pixels_between(start_pixel, end_pixel)
            for p in pixels:
                line_matrix[p.x][p.y] = 1
            count += 1
        return line_matrix

    def fromLatLngToPoint(self, lat, lng, current_tile):
        '''convert a lat/lng/zoom to a pixel on a self.tile_size sized tile'''
        zoom = current_tile.z
        tile_for_point = self.gm.GoogleTileFromLatLng(lat, lng, zoom)

        tile_x_offset = (tile_for_point[0] - current_tile.x) * self.tile_size
        tile_y_offset = (tile_for_point[1] - current_tile.y) * self.tile_size

        # http://stackoverflow.com/a/17419232/108512
        _pixelOrigin = Pixel()
        _pixelOrigin.x = self.tile_size / 2.0
        _pixelOrigin.y = self.tile_size / 2.0
        _pixelsPerLonDegree = self.tile_size / 360.0
        _pixelsPerLonRadian = self.tile_size / (2 * math.pi)

        point = Pixel()
        point.x = _pixelOrigin.x + lng * _pixelsPerLonDegree

        # Truncating to 0.9999 effectively limits latitude to 89.189. This is
        # about a third of a tile past the edge of the world tile.
        siny = self.bound(math.sin(self.degreesToRadians(lat)), -0.9999, 0.9999)
        point.y = _pixelOrigin.y + 0.5 * math.log(
            (1 + siny) / (1 - siny)) * -_pixelsPerLonRadian

        num_tiles = 1 << zoom
        point.x = int(point.x * num_tiles) + tile_x_offset \
            - current_tile.x * self.tile_size
        point.y = int(point.y * num_tiles) + tile_y_offset \
            - current_tile.y * self.tile_size
        return point

    def degreesToRadians(self, deg):
        '''return radians given degrees'''
        return deg * (math.pi / 180)

    def bound(self, val, valMin, valMax):
        '''used to cap the TMS bounding box to clip the poles'''
        # clamp val to [valMin, valMax]; the original assigned max(val, valMin)
        # and then overwrote it with min(val, valMax), discarding the lower bound
        return min(max(val, valMin), valMax)

    def pixels_between(self, start_pixel, end_pixel):
        '''return a list of pixels along the line from start_pixel to end_pixel'''
        pixels = []
        if end_pixel.x - start_pixel.x == 0:
            for y in range(min(end_pixel.y, start_pixel.y),
                           max(end_pixel.y, start_pixel.y)):
                p = Pixel()
                p.x = end_pixel.x
                p.y = y
                if self.pixel_is_on_tile(p):
                    pixels.append(p)
            return pixels

        slope = (end_pixel.y - start_pixel.y) / float(end_pixel.x - start_pixel.x)
        offset = end_pixel.y - slope * end_pixel.x

        num_points = self.tile_size
        i = 0
        while i < num_points:
            p = Pixel()
            floatx = start_pixel.x + (end_pixel.x - start_pixel.x) * i / float(num_points)
            p.x = int(floatx)
            p.y = int(offset + slope * floatx)
            i += 1
            if self.pixel_is_on_tile(p):
                pixels.append(p)
        return pixels

    def pixel_is_on_tile(self, p):
        '''return True if p.x and p.y are >= 0 and < self.tile_size'''
        if (p.x >= 0 and p.x < self.tile_size and
                p.y >= 0 and p.y < self.tile_size):
            return True
        return False
def main(shapes_file_list, db_file, groups):
    field_ids = {}

    # Create a GlobalMercator object for later conversions
    merc = GlobalMercator()

    # Set up the output db
    conn = sqlite3.connect(db_file)
    c = conn.cursor()
    #c.execute("drop table if exists people_by_group")
    c.execute(
        "create table if not exists people_by_group "
        "(x real, y real, quadkey text, rand real, group_type text)")
    c.execute("drop index if exists i_quadkey")

    # Open the shapefiles
    for input_filename in shapes_file_list:
        print "Processing file {0}".format(input_filename)
        ds = ogr.Open(input_filename)
        if ds is None:
            print "Open failed.\n"
            sys.exit(1)

        # Obtain the first (and only) layer in the shapefile
        lyr = ds.GetLayerByIndex(0)
        lyr.ResetReading()

        # Obtain the field definitions in the shapefile layer
        feat_defn = lyr.GetLayerDefn()
        field_defns = [
            feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
        ]

        # Set up a coordinate transformation to latlon
        wgs84 = osr.SpatialReference()
        wgs84.SetWellKnownGeogCS("WGS84")
        sr = lyr.GetSpatialRef()
        xformer = osr.CoordinateTransformation(sr, wgs84)

        # Obtain the index of the group fields
        for i, defn in enumerate(field_defns):
            if defn.GetName() in groups:
                field_ids[defn.GetName()] = i

        # Obtain the number of features (Census Blocks) in the layer
        n_features = len(lyr)

        # Iterate through every feature (Census Block Polygon) in the layer,
        # obtain the population counts, and create a point for each person
        # within that feature.
        start_time = time.time()
        for j, feat in enumerate(lyr):

            # Print a progress read-out for every 1000 features and export
            # to hard disk
            if j % 1000 == 0:
                conn.commit()
                perc_complete = (j + 1) / float(n_features)
                time_left = (1 - perc_complete) * (
                    (time.time() - start_time) / perc_complete)
                print "%s/%s (%0.2f%%) est. time remaining %0.2f mins" % (
                    j + 1, n_features, 100 * perc_complete, time_left / 60)

            # Obtain total population, racial counts, and state fips code of
            # the individual census block
            counts = {}
            for f in field_ids:
                val = feat.GetField(field_ids[f])
                if val:
                    counts[f] = int(val)
                else:
                    counts[f] = 0

            # Obtain the OGR polygon object from the feature
            geom = feat.GetGeometryRef()
            if geom is None:
                continue

            # Convert the OGR Polygon into a Shapely Polygon
            poly = loads(geom.ExportToWkb())
            if poly is None:
                continue

            # Obtain the "boundary box" of extreme points of the polygon
            bbox = poly.bounds
            if not bbox:
                continue
            leftmost, bottommost, rightmost, topmost = bbox

            # Generate a point object within the census block for every person by race
            for f in field_ids:
                for i in range(counts[f]):

                    # Choose a random longitude and latitude within the boundary
                    # box and within the original polygon of the census block
                    while True:
                        samplepoint = Point(uniform(leftmost, rightmost),
                                            uniform(bottommost, topmost))
                        if samplepoint is None:
                            break
                        if poly.contains(samplepoint):
                            break

                    # Convert the longitude and latitude coordinates to meters
                    # and a tile reference
                    try:
                        # In general we don't know the coordinate system of the
                        # input data, so transform it to latlon
                        lon, lat, z = xformer.TransformPoint(samplepoint.x,
                                                             samplepoint.y)
                        x, y = merc.LatLonToMeters(lat, lon)
                    except:
                        print "Failed to convert ", lat, lon
                        sys.exit(-1)
                    tx, ty = merc.MetersToTile(x, y, 21)

                    # Create a unique quadkey for each point object
                    quadkey = merc.QuadTree(tx, ty, 21)

                    # Create categorical variable for the race category
                    group_type = f

                    # Export data to the database file
                    try:
                        c.execute(
                            "insert into people_by_group values (?,?,?,random(),?)",
                            (x, y, quadkey, group_type))
                    except:
                        print "Failed to insert ", x, y, tx, ty, group_type
                        sys.exit(-1)

    c.execute(
        "create index if not exists i_quadkey on "
        "people_by_group(x, y, quadkey, rand, group_type)")
    conn.commit()
mean_lon = lon.mean()
mean_lon

# In[183]:

sd_lat = lat.std()
sd_lon = lon.std()
print sd_lat, sd_lon

# In[184]:

from globalmaptiles import GlobalMercator

# In[185]:

merc = GlobalMercator()

# In[186]:

meanX, meanY = merc.LatLonToMeters(mean_lat, mean_lon)
print meanX, meanY

# In[187]:

lat_sdMet = merc.LatLonToMeters(mean_lat + sd_lat, mean_lon)
lon_sdMet = merc.LatLonToMeters(mean_lat, mean_lon + sd_lon)
print lat_sdMet, lon_sdMet

# In[188]:

import scipy.spatial as sp
if (google_image_folder is None or output_jpeg_file is None or
        map_type is None or format is None or tz is None or
        lon is None or lat is None or radius is None or
        bottom_crop is None or KEY is None or image_size is None or
        scale is None or resume is None or debug is None or
        tif_output is None):
    print("invalid parameter exists!")
    exit()

actual_tile_size = image_size * scale
debug_print("actual tile size %d" % actual_tile_size)

if not resume:
    if os.path.exists(google_image_folder):
        shutil.rmtree(google_image_folder)
    if os.path.exists(output_jpeg_file):
        os.unlink(output_jpeg_file)

if not os.path.exists(google_image_folder):
    os.makedirs(google_image_folder)

mercator = GlobalMercator()

cx, cy = mercator.LatLonToMeters(lat, lon)
minx = cx - radius
maxx = cx + radius
miny = cy - radius
maxy = cy + radius
debug_print('minx = %f, miny = %f, maxx = %f, maxy = %f\n' %
            (minx, miny, maxx, maxy))

tminx, tminy = mercator.MetersToTile(minx, miny, tz)
tmaxx, tmaxy = mercator.MetersToTile(maxx, maxy, tz)

total_tiles = (tmaxx - tminx + 1) * (tmaxy - tminy + 1)
debug_print('count = %d' % total_tiles)

# progress bar
import requests
from globalmaptiles import GlobalMercator
from tilenames import tileXY, tileEdges
from operator import itemgetter
from itertools import groupby
import cv2
import numpy as np
import cairo
import os
from helpers import dl_write_all, hex_to_rgb, get_pixel_coords
from datetime import datetime
from shapely.geometry import box, Polygon, MultiPolygon, Point

mercator = GlobalMercator()

PAGE_SIZES = {
    'letter': (1275, 1650, 5, 7,),
    'tabloid': (2550, 3300, 10, 14,),
}
def __init__(self):
    self.client = Connection()
    self.proj = GlobalMercator()
def main(input_filename, wac_filename, output_filename):
    wac = pd.io.parsers.read_csv(wac_filename)
    wac.set_index(wac['w_geocode'], inplace=True)

    # Create columns for four megasectors
    wac['makers'] = (wac['CNS01'] + wac['CNS02'] + wac['CNS03'] +
                     wac['CNS04'] + wac['CNS05'] + wac['CNS06'] + wac['CNS08'])
    wac['services'] = wac['CNS07'] + wac['CNS14'] + wac['CNS17'] + wac['CNS18']
    wac['professions'] = (wac['CNS09'] + wac['CNS10'] + wac['CNS11'] +
                          wac['CNS12'] + wac['CNS13'])
    wac['support'] = wac['CNS15'] + wac['CNS16'] + wac['CNS19'] + wac['CNS20']

    assert sum(wac['C000'] - (wac['makers'] + wac['services'] +
                              wac['professions'] + wac['support'])) == 0 \
        or rw[1]['abbrev'] == 'ny'
    # In NY there's one block in Brooklyn with 177000 jobs. It appears to be
    # rounding entries > 100k, which is making the assertion fail.
    # This is the Brooklyn Post Office + Brooklyn Law School + Borough Hall.
    # So maybe weirdness around the post office?

    # Set up outfile as csv
    outf = open(output_filename, 'w')
    outf.write('x,y,sect,inctype,quadkey\n')

    # Create a GlobalMercator object for later conversions
    merc = GlobalMercator()

    # Open the shapefile
    ds = ogr.Open(input_filename)
    if ds is None:
        print "Open failed.\n"
        sys.exit(1)

    # Obtain the first (and only) layer in the shapefile
    lyr = ds.GetLayerByIndex(0)
    lyr.ResetReading()

    # Obtain the field definitions in the shapefile layer
    feat_defn = lyr.GetLayerDefn()
    field_defns = [feat_defn.GetFieldDefn(i)
                   for i in range(feat_defn.GetFieldCount())]

    # Obtain the index of the GEOID field, which is what we merge on
    for i, defn in enumerate(field_defns):
        print defn.GetName()
        if defn.GetName() == "GEOID10":
            fips = i

    # Set up the output file
    #conn = sqlite3.connect(output_filename)
    #c = conn.cursor()
    #c.execute("create table if not exists people_by_race (statefips text, x text, y text, quadkey text, race_type text)")

    # Obtain the number of features (Census Blocks) in the layer
    n_features = len(lyr)

    # Iterate through every feature (Census Block Polygon) in the layer,
    # obtain the job counts, and create a point for each job within
    # that feature.
    for j, feat in enumerate(lyr):

        # Print a progress read-out for every 1000 features and export to hard disk
        if j % 1000 == 0:
            #conn.commit()
            print "%s/%s (%0.2f%%)" % (j + 1, n_features,
                                       100 * ((j + 1) / float(n_features)))

        # Obtain the job counts of the individual census block
        blkfips = int(feat.GetField(fips))
        try:
            jobs = {'m': wac.loc[blkfips, 'makers'],
                    's': wac.loc[blkfips, 'services'],
                    'p': wac.loc[blkfips, 'professions'],
                    't': wac.loc[blkfips, 'support']}
        except KeyError:
            # Missing just means no jobs there. Lots of blocks have this.
            continue

        income = {'l': wac.loc[blkfips, 'CE01'],
                  'm': wac.loc[blkfips, 'CE02'],
                  'h': wac.loc[blkfips, 'CE03']}

        # Obtain the OGR polygon object from the feature
        geom = feat.GetGeometryRef()
        if geom is None:
            continue

        # Convert the OGR Polygon into a Shapely Polygon
        poly = loads(geom.ExportToWkb())
        if poly is None:
            continue

        # Obtain the "boundary box" of extreme points of the polygon
        bbox = poly.bounds
        if not bbox:
            continue
        leftmost, bottommost, rightmost, topmost = bbox

        # Generate a point object within the census block for every job by sector
        inccnt = 0
        incord = ['l', 'm', 'h']
        shuffle(incord)
        for sect in ['m', 's', 'p', 't']:
            for i in range(int(jobs[sect])):

                # Choose a random longitude and latitude within the boundary
                # box and within the original polygon of the census block
                while True:
                    samplepoint = Point(uniform(leftmost, rightmost),
                                        uniform(bottommost, topmost))
                    if samplepoint is None:
                        break
                    if poly.contains(samplepoint):
                        break

                # Convert the longitude and latitude coordinates to meters
                # and a tile reference
                x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
                tx, ty = merc.MetersToTile(x, y, 21)

                # Determine the right income
                inccnt += 1
                inctype = ''
                assert inccnt <= (income[incord[0]] + income[incord[1]] +
                                  income[incord[2]]) or rw[1]['abbrev'] == 'ny'
                if inccnt <= income[incord[0]]:
                    inctype = incord[0]
                elif inccnt <= income[incord[0]] + income[incord[1]]:
                    inctype = incord[1]
                elif inccnt <= (income[incord[0]] + income[incord[1]] +
                                income[incord[2]]):
                    inctype = incord[2]

                # Create a unique quadkey for each point object
                quadkey = merc.QuadTree(tx, ty, 21)
                outf.write("%s,%s,%s,%s,%s\n" % (x, y, sect, inctype, quadkey))

    outf.close()
class TileLoader(object):
    TILE_WIDTH = 256  # tile is square
    TILE_FORMAT = 'png'

    def __init__(self, min_lat, min_lon, max_lat, max_lon, width, max_zoom=18):
        self.tiles = []
        self.min_lat = min_lat
        self.min_lon = min_lon
        self.max_lat = max_lat
        self.max_lon = max_lon
        self.mercator = GlobalMercator()
        self.downloader = Downloader()
        # count how many horizontal tiles we need
        self.x_tiles_needed = math.ceil(width / self.TILE_WIDTH)
        self.max_zoom = max_zoom

    def download(self, cache_dir, url, http_headers):
        """Downloads tiles and returns list of downloaded tiles."""
        tile_files = {}
        tiles = self._get_tile_list()
        for (tx, ty, tz) in tiles:
            cx, cy, cz = self._convert_tile(tx, ty, tz)
            tile_url = url.replace('{x}', str(cx)).replace(
                '{y}', str(cy)).replace('{z}', str(cz))
            tile_file = self._gen_tile_file(tx, ty, tz, cache_dir)
            self.downloader.download(tile_file, tile_url, http_headers)
            tile_files[tile_url] = tile_file

        # wait for downloads to finish
        self.downloader.wait()

        # validate all tiles
        valid = True
        for tile_url, tile_file in tile_files.iteritems():
            if self.TILE_FORMAT == 'png' and imghdr.what(tile_file) != 'png':
                sys.stderr.write("%s is not a PNG image\n" % tile_url)
                valid = False
        if not valid:
            return None

        return tile_files.values()

    def _get_tile_list(self):
        """Returns list of tiles needed to cover the bounding box."""
        tiles = []
        tile_info = self._find_tiles()
        if tile_info is not None:
            (tminx, tminy, tmaxx, tmaxy, tz) = tile_info
            for ty in range(tminy, tmaxy + 1):
                for tx in range(tminx, tmaxx + 1):
                    tiles.append((tx, ty, tz))
        return tiles

    def _find_tiles(self):
        """Returns the tile range at the optimal zoom level for the given width."""
        for zoom_level in range(1, self.max_zoom + 1):
            tminx, tminy = self._lat_lon_to_tile(self.min_lat, self.min_lon,
                                                 zoom_level)
            tmaxx, tmaxy = self._lat_lon_to_tile(self.max_lat, self.max_lon,
                                                 zoom_level)
            x_tiles = tmaxx + 1 - tminx
            if x_tiles > self.x_tiles_needed or zoom_level == self.max_zoom:
                # optimal zoom level found
                return (tminx, tminy, tmaxx, tmaxy, zoom_level)
        return None

    def _lat_lon_to_tile(self, lat, lon, zoom_level):
        """Converts given lat/lon to tile XY."""
        mx, my = self.mercator.LatLonToMeters(lat, lon)
        tx, ty = self.mercator.MetersToTile(mx, my, zoom_level)
        return (tx, ty)

    def _gen_tile_file(self, tx, ty, tz, cache_dir):
        """Returns the filename the tile will be saved as."""
        filename = "%d_%d_%d.%s" % (tx, ty, tz, self.TILE_FORMAT)
        return os.path.join(cache_dir, filename)
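A hedged usage sketch of the loader above. TileLoader itself leaves _convert_tile to subclasses, so this assumes the GoogleTileLoader referenced in the CustomMapLayer snippet earlier supplies it; the bounds, cache directory, and URL template are illustrative:

# Hedged sketch: GoogleTileLoader, the bounds, and the URL template are assumptions.
loader = GoogleTileLoader(37.66724, -119.72454, 37.81385, -119.48559,
                          width=1024, max_zoom=18)
tile_files = loader.download('/tmp/tile-cache',
                             'http://tiles.example.com/{z}/{x}/{y}.png',
                             http_headers={'User-Agent': 'tile-fetcher'})
if tile_files is None:
    print "some tiles failed PNG validation"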
class Downloader(object):
    '''
    Based on http://www.wellho.net/solutions/python-python-threads-a-first-example.html
    '''

    def __init__(self, mapdir, minzoom, maxzoom):
        self.mercator = GlobalMercator(256)
        self.minzoom = minzoom
        self.maxzoom = maxzoom
        self.TopRightLat = None
        self.TopRightLon = None
        self.BottomLeftLat = None
        self.BottomLeftLon = None
        self.mminx = None
        self.mminy = None
        self.mmaxx = None
        self.mmaxy = None
        self.mapdir = mapdir
        self.jobs = Queue.Queue()

    def download(self, toprightlat, toprightlon, bottomleftlat, bottomleftlon):
        self.TopRightLat = toprightlat
        self.TopRightLon = toprightlon
        self.BottomLeftLat = bottomleftlat
        self.BottomLeftLon = bottomleftlon
        self.mminx, self.mminy = self.mercator.LatLonToMeters(
            toprightlat, toprightlon)
        self.mmaxx, self.mmaxy = self.mercator.LatLonToMeters(
            bottomleftlat, bottomleftlon)
        map(self.addJobForZoom, range(self.minzoom, self.maxzoom + 1))
        self.runJobs()

    def addJobForZoom(self, zoom):
        tminx, tminy = self.mercator.MetersToTile(self.mminx, self.mminy, zoom)
        tmaxx, tmaxy = self.mercator.MetersToTile(self.mmaxx, self.mmaxy, zoom)
        if tminx > tmaxx:
            tminx, tmaxx = tmaxx, tminx
        if tminy > tmaxy:
            tminy, tmaxy = tmaxy, tminy
        for tx in range(tminx, tmaxx + 1):
            for ty in range(tminy, tmaxy + 1):
                gx, gy = self.mercator.GoogleTile(tx, ty, zoom)
                self.jobs.put({'x': gx, 'y': gy, 'z': zoom})

    def runJobs(self):
        workers = []
        for threadNum in range(0, MAX_THREADS):
            subdownloader = self.SubDownloader(self)
            workers.append(subdownloader)
            workers[-1].start()
        for worker in workers:
            worker.join(20)
        print "Finished!"

    class SubDownloader(Thread):
        def __init__(self, parent):
            Thread.__init__(self)
            self.parent = parent

        def run(self):
            while 1:
                try:
                    job = self.parent.jobs.get(0)
                except Queue.Empty:
                    return
                mt = random.randrange(0, 4)
                filename = '%i/gm_%i_%i_%i.png' % (job['z'], job['x'],
                                                   job['y'], job['z'])
                if os.path.isfile('%s%s' % (self.parent.mapdir, filename)):
                    # print "skipping", filename, "left:", self.parent.jobs.qsize()
                    continue
                if not os.path.isdir('%s%s' % (self.parent.mapdir, job['z'])):
                    os.mkdir('%s%s' % (self.parent.mapdir, job['z']))
                # http://mt1.google.com/vt/lyrs=m@115&hl=en&x=39141&s=&y=26445&z=16&s=Gali
                url = 'http://mt%i.google.com/vt/lyrs=m@115&hl=en&x=%i&y=%i&z=%i&s=' % (
                    mt, job['x'], job['y'], job['z'])
                try:
                    tile = urllib2.urlopen(url=url, timeout=20).read()
                except:
                    # print "Can't open", url, "left:", self.parent.jobs.qsize()
                    continue
                # write under mapdir; the original opened the bare filename,
                # saving tiles relative to the working directory even though the
                # skip check above looks under mapdir
                fh = open('%s%s' % (self.parent.mapdir, filename), 'wb')
                fh.write(tile)
                fh.close()
    'hirds_rainfalldepth_duration96.0_ARI250.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI1.58.tif',
    'hirds_rainfalldepth_duration120.0_ARI2.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI5.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI10.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI20.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI30.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI40.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI50.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI60.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI80.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI100.0.tif',
    'hirds_rainfalldepth_duration120.0_ARI250.0.tif',
]

gm = GlobalMercator()
top_left_tile = gm.TileBounds(61, 25, 6)  # using TMS numbering system
bottom_right_tile = gm.TileBounds(63, 22, 6)

filename_regex = re.compile(
    r'hirds_rainfalldepth_duration(?P<duration>\d+.\d+)_ARI(?P<ari>\d+.\d+).tif'
)


def convert(filename, output_folder, xmin, ymin, xmax, ymax, resolution):
    # Convert tiff with 1 channel of 32-bit floats to a file with 3 channels
    # of 8-bit integers.
    # Start by reprojecting to EPSG:3857 *before* doing anything else, so that
    # GDAL interpolates values correctly.
    as_3857_filename = os.path.join(
def __init__(self):
    self.client = Connection(host="mongomaster")
    self.proj = GlobalMercator()
def getTile(self, zoomlevel):
    mercator = GlobalMercator()
    mx, my = mercator.LatLonToMeters(self.lat, self.lon)
    tminx, tminy = mercator.MetersToTile(mx, my, zoomlevel)
    gx, gy = mercator.GoogleTile(tminx, tminy, zoomlevel)  # +1?
    return gx, gy, zoomlevel
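For reference, the same lat/lon-to-Google-tile conversion can be run standalone with GlobalMercator. A minimal sketch using the default coordinates from the constructor at the top of this section (values illustrative):

# Hedged sketch: coordinates taken from the constructor defaults above.
from globalmaptiles import GlobalMercator

mercator = GlobalMercator()
mx, my = mercator.LatLonToMeters(32.018300, 34.898161)
tx, ty = mercator.MetersToTile(mx, my, 16)
gx, gy = mercator.GoogleTile(tx, ty, 16)
print "Google tile (%d, %d) at zoom 16" % (gx, gy)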
class AddressController(AbstractProxyController):
    url = 'http://flof.com.ar/feeds/xml/address/'


class DistanceController(AbstractProxyController):
    url = 'http://flof.com.ar/feeds/xml/distance/'


class SpotLookupController(AbstractProxyController):
    url = 'http://flof.com.ar/bin/spot/lookup/'


from globalmaptiles import GlobalMercator
gm = GlobalMercator()


class FlofTile(object):
    __slots__ = ("layer", "id", "x", "y", "z", "data", "width", 'height')

    def __init__(self, layer, id):
        self.layer = layer
        self.id = id
        spot = flof.geoinfo(self.id)
        self.x, self.y = gm.LatLonToMeters(float(spot['lat']),
                                           float(spot['lon']))
        self.z = 1000.0
        self.data = 0
        self.data = None
        self.width = 320
from itertools import chain
import math

max_blocks = float(350e3)

# Read the FIPS codes from a file
with open('states.txt', 'r') as states_file:
    FIPS = [line.strip() for line in states_file]

# Override the full fips code list for shorter processing
FIPS = [6]
FIPS = [str(x).zfill(2) for x in FIPS]

comm = MPI.COMM_WORLD

merc = GlobalMercator()

for state_fips in FIPS:
    #%% Phase 1: Generate People

    # timing
    start_time = time.time()

    # specify zoom level limits
    lowerzoom = 3
    upperzoom = 13

    # specify shapefile
    shapefile = os.path.join("Shapefiles",
                             "tabblock2010_{}_pophu.shp".format(state_fips))

    # open the shapefile
Usage("ERROR: Sorry, given profile is not implemented yet.") if zoomlevel == None or lat == None or lon == None: Usage("ERROR: Specify at least 'zoomlevel', 'lat' and 'lon'.") if latmax is not None and lonmax is None: Usage("ERROR: Both 'latmax' and 'lonmax' must be given.") if latmax != None and lonmax != None: if latmax < lat: Usage("ERROR: 'latmax' must be bigger then 'lat'") if lonmax < lon: Usage("ERROR: 'lonmax' must be bigger then 'lon'") boundingbox = (lon, lat, lonmax, latmax) tz = zoomlevel mercator = GlobalMercator() mx, my = mercator.LatLonToMeters(lat, lon) print "Spherical Mercator (ESPG:900913) coordinates for lat/lon: " print(mx, my) tminx, tminy = mercator.MetersToTile(mx, my, tz) if boundingbox: mx, my = mercator.LatLonToMeters(latmax, lonmax) print "Spherical Mercator (ESPG:900913) cooridnate for maxlat/maxlon: " print(mx, my) tmaxx, tmaxy = mercator.MetersToTile(mx, my, tz) else: tmaxx, tmaxy = tminx, tminy for ty in range(tminy, tmaxy + 1):
def main(input_filename, output_filename): print "Processing: %s - Ctrl-Z to cancel"%input_filename merc = GlobalMercator() # open the shapefile ds = ogr.Open( input_filename ) if ds is None: print "Open failed.\n" sys.exit( 1 ) lyr = ds.GetLayerByIndex( 0 ) lyr.ResetReading() feat_defn = lyr.GetLayerDefn() field_defns = [feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())] # look up the index of the field we're interested in for i, defn in enumerate( field_defns ): if defn.GetName()=="POP10": pop_field = i # set up the output file # if it already exists, ask for confirmation to delete and remake it if os.path.isfile(output_filename): if not confirm(" Database %s exists, overwrite?"%output_filename, False): return False else: os.system("rm %s"%output_filename) # if file removal failed, the file may be locked: # ask for confirmation to unlock it if os.path.isfile(output_filename): if not confirm(" Attempt to unlock database %s?"%output_filename, False): return False else: unlock(output_filename) # if it's still there, there's a problem, bail if os.path.isfile(output_filename): print "Trouble - exiting." sys.exit() else: print "Success - continuing:" conn = sqlite3.connect( output_filename ) c = conn.cursor() c.execute( "create table if not exists people (x real, y real, quadkey text)" ) n_features = len(lyr) for j, feat in enumerate( lyr ): if j%1000==0: conn.commit() if j%10000==0: print " %s/%s (%0.2f%%)"%(j+1,n_features,100*((j+1)/float(n_features))) else: sys.stdout.write(".") sys.stdout.flush() pop = feat.GetField(pop_field) geom = feat.GetGeometryRef() if geom is None: continue bbox = get_bbox( geom ) if not bbox: continue ll,bb,rr,tt = bbox # generate a sample within the geometry for every person for i in range(pop): while True: samplepoint = make_ogr_point( uniform(ll,rr), uniform(bb,tt) ) if geom.Intersects( samplepoint ): break x, y = merc.LatLonToMeters( samplepoint.GetY(), samplepoint.GetX() ) tx,ty = merc.MetersToTile( x, y, 21) quadkey = merc.QuadTree( tx, ty, 21 ) c.execute( "insert into people values (?,?,?)", (x, y, quadkey) ) conn.commit() print "Finished processing %s"%output_filename
def main(input_filename, output_filename):

    # Create a GlobalMercator object for later conversions
    merc = GlobalMercator()

    # Open the shapefile
    ds = ogr.Open(input_filename)
    if ds is None:
        print "Open failed.\n"
        sys.exit(1)

    # Obtain the first (and only) layer in the shapefile
    lyr = ds.GetLayerByIndex(0)
    lyr.ResetReading()

    # Obtain the field definitions in the shapefile layer
    feat_defn = lyr.GetLayerDefn()
    field_defns = [
        feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
    ]

    # Obtain the index of the field for the count for whites, blacks, Asians,
    # Others, and Hispanics.
    for i, defn in enumerate(field_defns):
        if defn.GetName() == "POP10":
            pop_field = i
        if defn.GetName() == "nh_white_n":
            white_field = i
        if defn.GetName() == "nh_black_n":
            black_field = i
        if defn.GetName() == "nh_asian_n":
            asian_field = i
        if defn.GetName() == "hispanic_n":
            hispanic_field = i
        if defn.GetName() == "NH_Other_n":
            other_field = i
        if defn.GetName() == "STATEFP10":
            statefips_field = i

    # Set up the output file
    conn = sqlite3.connect(output_filename)
    c = conn.cursor()
    c.execute(
        "create table if not exists people_by_race "
        "(statefips text, x text, y text, quadkey text, race_type text)")

    # Obtain the number of features (Census Blocks) in the layer
    n_features = len(lyr)

    # Iterate through every feature (Census Block Polygon) in the layer,
    # obtain the population counts, and create a point for each person within
    # that feature.
    for j, feat in enumerate(lyr):

        # Print a progress read-out for every 1000 features and export to hard disk
        if j % 1000 == 0:
            conn.commit()
            print "%s/%s (%0.2f%%)" % (j + 1, n_features,
                                       100 * ((j + 1) / float(n_features)))

        # Obtain total population, racial counts, and state fips code of the
        # individual census block
        pop = int(feat.GetField(pop_field))
        white = int(feat.GetField(white_field))
        black = int(feat.GetField(black_field))
        asian = int(feat.GetField(asian_field))
        hispanic = int(feat.GetField(hispanic_field))
        other = int(feat.GetField(other_field))
        statefips = feat.GetField(statefips_field)

        # Obtain the OGR polygon object from the feature
        geom = feat.GetGeometryRef()
        if geom is None:
            continue

        # Convert the OGR Polygon into a Shapely Polygon
        poly = loads(geom.ExportToWkb())
        if poly is None:
            continue

        # Obtain the "boundary box" of extreme points of the polygon
        bbox = poly.bounds
        if not bbox:
            continue
        leftmost, bottommost, rightmost, topmost = bbox

        # Generate a point object within the census block for every person by race
        for i in range(white):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block
            while True:
                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))
                if samplepoint is None:
                    break
                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference
            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object
            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category
            race_type = 'w'

            # Export data to the database file
            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(black):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block
            while True:
                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))
                if samplepoint is None:
                    break
                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference
            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object
            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category
            race_type = 'b'

            # Export data to the database file
            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(asian):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block
            while True:
                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))
                if samplepoint is None:
                    break
                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference
            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object
            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category
            race_type = 'a'

            # Export data to the database file
            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(hispanic):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block
            while True:
                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))
                if samplepoint is None:
                    break
                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference
            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object
            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category
            race_type = 'h'

            # Export data to the database file
            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(other):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block
            while True:
                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))
                if samplepoint is None:
                    break
                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference
            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object
            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category
            race_type = 'o'

            # Export data to the database file
            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

    conn.commit()