Code example #1
File: dotmap.py Project: sdwfrost/DotMapper
def main(tiles_path, db_file, groups, zoom_levels):
    merc = GlobalMercator()

    # Set-up the output db
    
    conn = sqlite3.connect(db_file)
    c = conn.cursor()

    for zoom in [zoom_levels]: #TODO zoom levels
        results_set = c.execute("select x, y, quadkey, group_type from people_by_group order by quadkey asc, rand asc" )
        use_ellipse, radius_rel, gamma, os_scale = STYLE[zoom]
        radius = os_scale*radius_rel/4/2
        quadkey = None
        img = None

        for i,r in enumerate(results_set):
            if (i % 1000 == 0):
                print i
    
            x = float(r[0])
            y = float(r[1])
            next_quadkey = r[2][:zoom]
            group = r[3]
    
            if next_quadkey != quadkey:
                #finish last tile
                if img:
                    save_tile(img, tiles_path, zoom, gtx, gty)
                
                quadkey = next_quadkey
                tx, ty = merc.MetersToTile(x, y, zoom)
                gtx, gty = merc.GoogleTile(tx,ty,zoom)
        
                img = Image.new("RGB", (TILE_X*os_scale, TILE_Y*os_scale), "white")
                draw = ImageDraw.Draw(img)
                
            minx, miny, maxx, maxy = (c/A for c in merc.TileBounds(tx, ty, zoom))
            xscale = (TILE_X*os_scale)/(maxx - minx)
            yscale = (TILE_Y*os_scale)/(maxy - miny)


            #print 'minx', minx, 'miny', miny, 'maxx', maxx, 'maxy', maxy
            #print 'xscale',xscale,'yscale',yscale
            #print 'x',x,'y',y,'tx',tx,'ty',ty
        
            # Translate coordinates to tile-relative, google ready coordinates
            rx = (x/A - minx)*xscale
            ry = (maxy - y/A)*yscale
    
            fill=ImageColor.getrgb(groups[group]['color'])
            if use_ellipse:
                draw.ellipse((rx-radius,ry-radius,rx+radius,ry+radius), fill=fill)
            else:
                draw.point((rx, ry), fill=fill)
            #print "Draw at ", (rx-radius,ry-radius,rx+radius,ry+radius), ImageColor.getrgb(groups[group]['color'])

        save_tile(img, tiles_path, zoom, gtx, gty)
    
    save_defined_tiles(tiles_path)
Code example #2
def GetGridID(Coord):
  lat=Coord[0]/1000
  lon=Coord[1]/1000

  tz=8

  mercator = GlobalMercator()
  mx, my = mercator.LatLonToMeters( Coord[0]/1000.0, Coord[1]/1000.0 )
  tx, ty = mercator.MetersToTile( mx, my, tz )

  gx, gy = mercator.GoogleTile(tx, ty, tz)
  #print "\tGoogle:", gx, gy

  #print tx, ty

  return ("%03d" % gx)+("%03d" % gy)
Code example #3
def main():

    merc = GlobalMercator()

    file = open('pts1990.csv', 'rb')
    reader = csv.DictReader(file, delimiter=',')
    print "x,y,quad,category"

    for row in reader:
        lat = float(row['lat'])
        long = float(row['long'])
        x, y = merc.LatLonToMeters(lat, long)
        tx, ty = merc.MetersToTile(x, y, 21)

        # Create a unique quadkey for each point object

        quadkey = merc.QuadTree(tx, ty, 21)

        # Create categorical variable for the race category

        # Export data to the database file

        print "{},{},{},{}".format(x, y, quadkey, row['group'])
Code example #4
     leftmost,bottommost,rightmost,topmost = bbox
     # obtain population in each census block
     people_x = np.array([])
     people_y = np.array([])
     people_quadkey = np.array([])
     for i in range(pop):
         # choose a random longitude and latitude within the boundary box
         # and within the polygon of the census block            
         while True:                
             samplepoint = Point(uniform(leftmost, rightmost),uniform(bottommost, topmost))                       
             if poly.contains(samplepoint):
                 break
         # convert the longitude and latitude coordinates to meters and
         # a tile reference
         x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
         tx,ty = merc.MetersToTile(x, y, upperzoom)            
         # create a quadkey for each point object
         people_quadkey = np.append(people_quadkey,merc.QuadTree(tx, ty, upperzoom))
         people_x = np.append(people_x,x)
         people_y = np.append(people_y,y)
     lat = np.append(lat,people_y)
     lon = np.append(lon,people_x)
     quadkey = np.append(quadkey,people_quadkey)
     lat = np.round(lat,1)
     lon = np.round(lon,1)
     
 if comm.rank == 0:
     t1 = time.time()
     print("partition {} complete {:.1f}s".format(k,t1-t0))
     
 Y = comm.gather(lat, root=0)
Code example #5
 def getTile(self, zoomlevel):
     mercator = GlobalMercator()
     mx, my = mercator.LatLonToMeters(self.lat, self.lon)
     tminx, tminy = mercator.MetersToTile(mx, my, zoomlevel)
     gx, gy = mercator.GoogleTile(tminx, tminy, zoomlevel)  #+1?
     return gx, gy, zoomlevel
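
The only difference between the TMS address returned by MetersToTile and the value returned by GoogleTile is the row numbering: TMS counts tile rows from the bottom of the map, while Google/XYZ tiles count from the top. A short sketch of that flip with illustrative values (this mirrors what GlobalMercator.GoogleTile does in the reference globalmaptiles.py):

def tms_to_google(tx, ty, zoom):
    # flip the y axis; x and zoom are unchanged
    return tx, (2 ** zoom - 1) - ty

print(tms_to_google(654, 1582, 12))
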
Code example #6
class TileLoader(object):
    TILE_WIDTH = 256  # tile is square
    TILE_FORMAT = 'png'

    def __init__(self, min_lat, min_lon, max_lat, max_lon, width, max_zoom=18):
        self.tiles = []
        self.min_lat = min_lat
        self.min_lon = min_lon
        self.max_lat = max_lat
        self.max_lon = max_lon
        self.mercator = GlobalMercator()
        self.downloader = Downloader()
        # count how many horizontal tiles we need
        self.x_tiles_needed = math.ceil(width / self.TILE_WIDTH)
        self.max_zoom = max_zoom

    def download(self, cache_dir, url, http_headers):
        """Downloads tiles and returns list of downloaded tiles."""
        tile_files = {}
        tiles = self._get_tile_list()
        for (tx, ty, tz) in tiles:
            cx, cy, cz = self._convert_tile(tx, ty, tz)
            tile_url = url.replace('{x}', str(cx)).replace('{y}',
                                                           str(cy)).replace(
                                                               '{z}', str(cz))
            tile_file = self._gen_tile_file(tx, ty, tz, cache_dir)
            self.downloader.download(tile_file, tile_url, http_headers)
            tile_files[tile_url] = tile_file

        # wait for downloads to finish
        self.downloader.wait()

        # validate all tiles
        valid = True
        for tile_url, tile_file in tile_files.iteritems():
            if self.TILE_FORMAT == 'png' and imghdr.what(tile_file) != 'png':
                sys.stderr.write("%s is not PNG image\n" % tile_url)
                valid = False
        if not valid:
            return None

        return tile_files.values()

    def _get_tile_list(self):
        """Returns list of tiles needed to cover bounding box."""
        tiles = []
        tile_info = self._find_tiles()
        if tile_info is not None:
            (tminx, tminy, tmaxx, tmaxy, tz) = tile_info
            for ty in range(tminy, tmaxy + 1):
                for tx in range(tminx, tmaxx + 1):
                    tiles.append((tx, ty, tz))
        return tiles

    def _find_tiles(self):
        """Returns optimal zoom level based on given width."""
        for zoom_level in range(1, self.max_zoom + 1):
            tminx, tminy = self._lat_lon_to_tile(self.min_lat, self.min_lon,
                                                 zoom_level)
            tmaxx, tmaxy = self._lat_lon_to_tile(self.max_lat, self.max_lon,
                                                 zoom_level)
            x_tiles = tmaxx + 1 - tminx
            if x_tiles > self.x_tiles_needed or zoom_level == self.max_zoom:
                # optimal zoom level found
                return (tminx, tminy, tmaxx, tmaxy, zoom_level)
        return None

    def _lat_lon_to_tile(self, lat, lon, zoom_level):
        """Converts given latLon to tile XY"""
        mx, my = self.mercator.LatLonToMeters(lat, lon)
        tx, ty = self.mercator.MetersToTile(mx, my, zoom_level)
        return (tx, ty)

    def _gen_tile_file(self, tx, ty, tz, cache_dir):
        """Returns filename where tile will be saved as."""
        filename = "%d_%d_%d.%s" % (tx, ty, tz, self.TILE_FORMAT)
        return os.path.join(cache_dir, filename)
Code example #7
File: tile_grab2.py Project: geobabbler/tile-grab
    default='',
    help='tile image format',
)
(options, args) = parser.parse_args()
#parse the bounds
boundsarr = options.bounds.split(';')
lonarr = sorted([float(boundsarr[0]), float(boundsarr[2])])
latarr = sorted([float(boundsarr[1]), float(boundsarr[3])])
z = int(options.zoom)

gm = GlobalMercator()
#Convert bounds to meters
mx0, my0 = gm.LatLonToMeters(latarr[0], lonarr[0])
mx1, my1 = gm.LatLonToMeters(latarr[1], lonarr[1])
#get TMS tile address range
tx0, ty0 = gm.MetersToTile(mx0, my0, z)
tx1, ty1 = gm.MetersToTile(mx1, my1, z)
#sort the tile addresses low to high
xarr = sorted([tx0, tx1])
yarr = sorted([ty0, ty1])
#figure out relevant extensions
extension = "." + options.format  #getExtension(options.template)
wf = getWorldFileExtension(extension)
#create the destination location using the z value
root = options.destination + '/' + str(z)
try:
    if os.path.exists(root) == False:
        os.makedirs(root)
except:
    print "Could not create destination. It may already exist."
Code example #8
def main(shapes_file_list, db_file, groups):
    field_ids = {}
    # Create a GlobalMercator object for later conversions

    merc = GlobalMercator()

    # Set-up the output db

    conn = sqlite3.connect(db_file)
    c = conn.cursor()
    #c.execute("drop table if exists people_by_group")
    c.execute(
        "create table if not exists people_by_group (x real, y real, quadkey text, rand real, group_type text)"
    )
    c.execute("drop index if exists i_quadkey")

    # Open the shapefiles

    for input_filename in shapes_file_list:
        print "Processing file {0}".format(input_filename)
        ds = ogr.Open(input_filename)

        if ds is None:
            print "Open failed.\n"
            sys.exit(1)

        # Obtain the first (and only) layer in the shapefile

        lyr = ds.GetLayerByIndex(0)

        lyr.ResetReading()

        # Obtain the field definitions in the shapefile layer

        feat_defn = lyr.GetLayerDefn()
        field_defns = [
            feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
        ]

        # Set up a coordinate transformation to latlon
        wgs84 = osr.SpatialReference()
        wgs84.SetWellKnownGeogCS("WGS84")
        sr = lyr.GetSpatialRef()
        xformer = osr.CoordinateTransformation(sr, wgs84)

        # Obtain the index of the group fields
        for i, defn in enumerate(field_defns):
            if defn.GetName() in groups:
                field_ids[defn.GetName()] = i

        # Obtain the number of features (Census Blocks) in the layer
        n_features = len(lyr)

        # Iterate through every feature (Census Block Polygon) in the layer,
        # obtain the population counts, and create a point for each person within
        # that feature.
        start_time = time.time()
        for j, feat in enumerate(lyr):

            # Print a progress read-out for every 1000 features and export to hard disk
            if j % 1000 == 0:
                conn.commit()
                perc_complete = (j + 1) / float(n_features)
                time_left = (1 - perc_complete) * (
                    (time.time() - start_time) / perc_complete)
                print "%s/%s (%0.2f%%) est. time remaining %0.2f mins" % (
                    j + 1, n_features, 100 * perc_complete, time_left / 60)

            # Obtain total population, racial counts, and state fips code of the individual census block

            counts = {}
            for f in field_ids:
                val = feat.GetField(field_ids[f])
                if val:
                    counts[f] = int(val)
                else:
                    counts[f] = 0

            # Obtain the OGR polygon object from the feature
            geom = feat.GetGeometryRef()
            if geom is None:
                continue

            # Convert the OGR Polygon into a Shapely Polygon
            poly = loads(geom.ExportToWkb())

            if poly is None:
                continue

            # Obtain the "boundary box" of extreme points of the polygon
            bbox = poly.bounds

            if not bbox:
                continue

            leftmost, bottommost, rightmost, topmost = bbox

            # Generate a point object within the census block for every person by race

            for f in field_ids:
                for i in range(counts[f]):
                    # Choose a random longitude and latitude within the boundary box
                    # and within the original polygon of the census block
                    while True:
                        samplepoint = Point(uniform(leftmost, rightmost),
                                            uniform(bottommost, topmost))
                        if samplepoint is None:
                            break
                        if poly.contains(samplepoint):
                            break

                    # Convert the longitude and latitude coordinates to meters and
                    # a tile reference

                    try:
                        # In general we don't know the coordinate system of input data
                        # so transform it to latlon
                        lon, lat, z = xformer.TransformPoint(
                            samplepoint.x, samplepoint.y)
                        x, y = merc.LatLonToMeters(lat, lon)
                    except:
                        # lat/lon may be unbound if TransformPoint failed, so report the source point
                        print "Failed to convert ", samplepoint.x, samplepoint.y
                        sys.exit(-1)
                    tx, ty = merc.MetersToTile(x, y, 21)

                    # Create a unique quadkey for each point object
                    quadkey = merc.QuadTree(tx, ty, 21)

                    # Create categorical variable for the race category
                    group_type = f

                    # Export data to the database file
                    try:
                        c.execute(
                            "insert into people_by_group values (?,?,?,random(),?)",
                            (x, y, quadkey, group_type))
                    except:
                        print "Failed to insert ", x, y, tx, ty, group_type
                        sys.exit(-1)

        c.execute(
            "create index if not exists i_quadkey on people_by_group(x, y, quadkey, rand, group_type)"
        )
        conn.commit()
Code example #9
def main(input_filename, output_filename):

    # Create a GlobalMercator object for later conversions

    merc = GlobalMercator()

    # Open the shapefile

    ds = ogr.Open(input_filename)

    if ds is None:
        print "Open failed.\n"
        sys.exit(1)

    # Obtain the first (and only) layer in the shapefile

    lyr = ds.GetLayerByIndex(0)

    lyr.ResetReading()

    # Obtain the field definitions in the shapefile layer

    feat_defn = lyr.GetLayerDefn()
    field_defns = [
        feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())
    ]

    # Obtain the index of the field for the count for whites, blacks, Asians,
    # Others, and Hispanics.

    for i, defn in enumerate(field_defns):

        if defn.GetName() == "POP10":
            pop_field = i

        if defn.GetName() == "nh_white_n":
            white_field = i

        if defn.GetName() == "nh_black_n":
            black_field = i

        if defn.GetName() == "nh_asian_n":
            asian_field = i

        if defn.GetName() == "hispanic_n":
            hispanic_field = i

        if defn.GetName() == "NH_Other_n":
            other_field = i

        if defn.GetName() == "STATEFP10":
            statefips_field = i

    # Set-up the output file

    conn = sqlite3.connect(output_filename)
    c = conn.cursor()
    c.execute(
        "create table if not exists people_by_race (statefips text, x text, y text, quadkey text, race_type text)"
    )

    # Obtain the number of features (Census Blocks) in the layer

    n_features = len(lyr)

    # Iterate through every feature (Census Block Polygon) in the layer,
    # obtain the population counts, and create a point for each person within
    # that feature.

    for j, feat in enumerate(lyr):

        # Print a progress read-out for every 1000 features and export to hard disk

        if j % 1000 == 0:
            conn.commit()
            print "%s/%s (%0.2f%%)" % (j + 1, n_features, 100 *
                                       ((j + 1) / float(n_features)))

        # Obtain total population, racial counts, and state fips code of the individual census block

        pop = int(feat.GetField(pop_field))
        white = int(feat.GetField(white_field))
        black = int(feat.GetField(black_field))
        asian = int(feat.GetField(asian_field))
        hispanic = int(feat.GetField(hispanic_field))
        other = int(feat.GetField(other_field))
        statefips = feat.GetField(statefips_field)

        # Obtain the OGR polygon object from the feature

        geom = feat.GetGeometryRef()

        if geom is None:
            continue

        # Convert the OGR Polygon into a Shapely Polygon

        poly = loads(geom.ExportToWkb())

        if poly is None:
            continue

        # Obtain the "boundary box" of extreme points of the polygon

        bbox = poly.bounds

        if not bbox:
            continue

        leftmost, bottommost, rightmost, topmost = bbox

        # Generate a point object within the census block for every person by race

        for i in range(white):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block

            while True:

                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))

                if samplepoint is None:
                    break

                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference

            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object

            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category

            race_type = 'w'

            # Export data to the database file

            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(black):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block

            while True:

                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))

                if samplepoint is None:
                    break

                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference

            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object

            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category

            race_type = 'b'

            # Export data to the database file

            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(asian):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block

            while True:

                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))

                if samplepoint is None:
                    break

                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference

            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object

            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category

            race_type = 'a'

            # Export data to the database file

            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(hispanic):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block

            while True:

                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))

                if samplepoint is None:
                    break

                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference

            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object

            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category

            race_type = 'h'

            # Export data to the database file

            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

        for i in range(other):

            # Choose a random longitude and latitude within the boundary box
            # and within the original polygon of the census block

            while True:

                samplepoint = Point(uniform(leftmost, rightmost),
                                    uniform(bottommost, topmost))

                if samplepoint is None:
                    break

                if poly.contains(samplepoint):
                    break

            # Convert the longitude and latitude coordinates to meters and
            # a tile reference

            x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
            tx, ty = merc.MetersToTile(x, y, 21)

            # Create a unique quadkey for each point object

            quadkey = merc.QuadTree(tx, ty, 21)

            # Create categorical variable for the race category

            race_type = 'o'

            # Export data to the database file

            c.execute("insert into people_by_race values (?,?,?,?,?)",
                      (statefips, x, y, quadkey, race_type))

    conn.commit()
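
The five per-race loops above differ only in the count being iterated and the race_type letter that gets written, so the same logic can be expressed more compactly as one loop over (count, code) pairs. A hedged sketch of that equivalent form, which would replace the five loops inside the per-feature loop and uses only names already defined above:

for count, race_type in ((white, 'w'), (black, 'b'), (asian, 'a'),
                         (hispanic, 'h'), (other, 'o')):
    for i in range(count):
        # rejection-sample a point inside the census block polygon
        while True:
            samplepoint = Point(uniform(leftmost, rightmost),
                                uniform(bottommost, topmost))
            if poly.contains(samplepoint):
                break
        x, y = merc.LatLonToMeters(samplepoint.y, samplepoint.x)
        tx, ty = merc.MetersToTile(x, y, 21)
        quadkey = merc.QuadTree(tx, ty, 21)
        c.execute("insert into people_by_race values (?,?,?,?,?)",
                  (statefips, x, y, quadkey, race_type))
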
Code example #10
class OsmHandler(ContentHandler):
    """Base class for parsing OSM XML data"""
    def __init__(self, client):
        self.proj = GlobalMercator()
        self.nodeRecords = []
        self.wayRecords = []
        self.relationRecords = []
        self.record = {}
        self.nodeLocations = {}
        self.client = client

        self.stats = {'nodes': 0, 'ways': 0, 'relations': 0}
        self.lastStatString = ""
        self.statsCount = 0

    def writeStatsToScreen(self):
        for char in self.lastStatString:
            sys.stdout.write('\b')
        self.lastStatString = "%d nodes, %d ways, %d relations" % (
            self.stats['nodes'], self.stats['ways'], self.stats['relations'])
        sys.stdout.write(self.lastStatString)

    def fillDefault(self, attrs):
        """Fill in default record values"""
        self.record['_id'] = int(attrs['id'])
        self.record['ts'] = self.isoToTimestamp(attrs['timestamp'])
        self.record['tg'] = []
        if attrs.has_key('user'):
            self.record['u'] = attrs['user']
        if attrs.has_key('uid'):
            self.record['uid'] = int(attrs['uid'])
        if attrs.has_key('version'):
            self.record['v'] = int(attrs['version'])
        if attrs.has_key('changeset'):
            self.record['c'] = int(attrs['changeset'])

    def isoToTimestamp(self, isotime):
        """Parse a date and return a time tuple"""
        t = datetime.strptime(isotime, "%Y-%m-%dT%H:%M:%SZ")
        return time.mktime(t.timetuple())

    def quadKey(self, lat, lon, zoom):
        (mx, my) = self.proj.LatLonToMeters(lat, lon)
        (tx, ty) = self.proj.MetersToTile(mx, my, zoom)
        return self.proj.QuadTree(tx, ty, zoom)

    def startElement(self, name, attrs):
        """Parse the XML element at the start"""
        if name == 'node':
            self.fillDefault(attrs)
            self.record['loc'] = {
                'lat': float(attrs['lat']),
                'lon': float(attrs['lon'])
            }
            self.record['qk'] = self.quadKey(float(attrs['lat']),
                                             float(attrs['lon']), 17)
            self.nodeLocations[self.record['_id']] = self.record['qk']
        elif name == 'changeset':
            self.fillDefault(attrs)
        elif name == 'tag':
            k = attrs['k']
            v = attrs['v']
            # MongoDB doesn't let us have dots in the key names.
            #k = k.replace('.', ',,')
            self.record['tg'].append((k, v))
        elif name == 'way':
            self.fillDefault(attrs)
            self.record['n'] = []
            self.record['loc'] = []
        elif name == 'relation':
            self.fillDefault(attrs)
            self.record['m'] = []
        elif name == 'nd':
            ref = int(attrs['ref'])
            self.record['n'].append(ref)
            refLoc = self.nodeLocations[ref]
            if refLoc not in self.record['loc']:
                self.record['loc'].append(refLoc)
        elif name == 'member':
            ref = int(attrs['ref'])
            member = {'type': attrs['type'], 'ref': ref, 'role': attrs['role']}
            self.record['m'].append(member)

            if attrs['type'] == 'way':
                ways2relations = self.client.osm.ways.find_one({'_id': ref})
                if ways2relations:
                    if 'relations' not in ways2relations:
                        ways2relations['relations'] = []
                    ways2relations['relations'].append(self.record['_id'])
                    self.client.osm.ways.save(ways2relations)
            elif attrs['type'] == 'node':
                nodes2relations = self.client.osm.nodes.find_one({'_id': ref})
                if nodes2relations:
                    if 'relations' not in nodes2relations:
                        nodes2relations['relations'] = []
                    nodes2relations['relations'].append(self.record['_id'])
                    self.client.osm.nodes.save(nodes2relations)

    def endElement(self, name):
        """Finish parsing an element
        (only really used with nodes, ways and relations)"""
        if name == 'node':
            self.nodeRecords.append(self.record)
            if len(self.nodeRecords) > 1500:
                self.client.osm.nodes.insert(self.nodeRecords)
                self.nodeRecords = []
                self.writeStatsToScreen()
            self.record = {}
            self.stats['nodes'] = self.stats['nodes'] + 1
        elif name == 'way':
            # Clean up any existing nodes
            if len(self.nodeRecords) > 0:
                self.client.osm.nodes.insert(self.nodeRecords)
                self.nodeRecords = []

            self.wayRecords.append(self.record)
            if len(self.wayRecords) > 100:
                self.client.osm.ways.insert(self.wayRecords)
                self.wayRecords = []
                self.writeStatsToScreen()
            self.record = {}
            self.stats['ways'] = self.stats['ways'] + 1
        elif name == 'relation':
            self.client.osm.relations.save(self.record)
            self.record = {}
            self.statsCount = self.statsCount + 1
            if self.statsCount > 10:
                self.writeStatsToScreen()
                self.statsCount = 0
            self.stats['relations'] = self.stats['relations'] + 1
Code example #11
File: dotfile_wac.py Project: sloreti/jobmaps
def main(input_filename, wac_filename, output_filename):
    
    wac = pd.io.parsers.read_csv(wac_filename)
    wac.set_index(wac['w_geocode'],inplace = True)
    
    #Create columns for four megasectors
    
    wac['makers'] = wac['CNS01']+wac['CNS02']+wac['CNS03']+wac['CNS04']+wac['CNS05']+wac['CNS06']+wac['CNS08']
    wac['services'] = wac['CNS07']+wac['CNS14'] + wac['CNS17'] + wac['CNS18']
    wac['professions'] = wac['CNS09'] + wac['CNS10'] + wac['CNS11'] + wac['CNS12'] + wac['CNS13']
    wac['support'] = wac['CNS15'] + wac['CNS16'] + wac['CNS19'] + wac['CNS20']

    assert sum(wac['C000'] -(wac['makers']+wac['services']+wac['professions']+wac['support'])) == 0 or rw[1]['abbrev'] == 'ny'

    #In NY there's one block in Brooklyn with 177000 jobs. It appears to be rounding entries > 100k, which is making the assertion fail.
    #This is the Brooklyn Post Office + Brooklyn Law School + Borough Hall. So maybe weirdness around post office? 

    #Set up outfile as csv
    outf = open(output_filename,'w')
    outf.write('x,y,sect,inctype,quadkey\n')
    
    # Create a GlobalMercator object for later conversions
    
    merc = GlobalMercator()

    # Open the shapefile
    
    ds = ogr.Open(input_filename)
    
    if ds is None:
        print "Open failed.\n"
        sys.exit( 1 )

    # Obtain the first (and only) layer in the shapefile
    
    lyr = ds.GetLayerByIndex(0)

    lyr.ResetReading()

    # Obtain the field definitions in the shapefile layer

    feat_defn = lyr.GetLayerDefn()
    field_defns = [feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())]

    # Obtain the index of the field for the count for whites, blacks, Asians, 
    # Others, and Hispanics.
    
    for i, defn in enumerate(field_defns):
        print defn.GetName()
        #GEOID is what we want to merge on
        if defn.GetName() == "GEOID10":
            fips = i

    # Set-up the output file
    
    #conn = sqlite3.connect( output_filename )
    #c = conn.cursor()
    #c.execute( "create table if not exists people_by_race (statefips text, x text, y text, quadkey text, race_type text)" )

    # Obtain the number of features (Census Blocks) in the layer
    
    n_features = len(lyr)

    # Iterate through every feature (Census Block Polygon) in the layer,
    # obtain the population counts, and create a point for each person within
    # that feature.

    for j, feat in enumerate( lyr ):
        # Print a progress read-out for every 1000 features and export to hard disk
        
        if j % 1000 == 0:
            #conn.commit()
            print "%s/%s (%0.2f%%)"%(j+1,n_features,100*((j+1)/float(n_features)))
            
        # Obtain total population, racial counts, and state fips code of the individual census block
        blkfips = int(feat.GetField(fips))
        
        try:
            jobs = {'m':wac.loc[blkfips,'makers'],'s':wac.loc[blkfips,'services'],'p':wac.loc[blkfips,'professions'],'t':wac.loc[blkfips,'support']}
        except KeyError:
            #print "no"
#            missing.append(blkfips) #Missing just means no jobs there. Lots of blocks have this.
            continue            
        income = {'l':wac.loc[blkfips,'CE01'],'m':wac.loc[blkfips,'CE02'],'h':wac.loc[blkfips,'CE03']}
        # Obtain the OGR polygon object from the feature

        geom = feat.GetGeometryRef()
        
        if geom is None:
            continue
        
        # Convert the OGR Polygon into a Shapely Polygon
        
        poly = loads(geom.ExportToWkb())
        
        if poly is None:
            continue        
            
        # Obtain the "boundary box" of extreme points of the polygon

        bbox = poly.bounds
        
        if not bbox:
            continue
     
        leftmost,bottommost,rightmost,topmost = bbox
    
        # Generate a point object within the census block for every person by race
        inccnt = 0
        incord = ['l','m','h']
        shuffle(incord)
        
        for sect in ['m','s','p','t']:
            for i in range(int(jobs[sect])):

                # Choose a random longitude and latitude within the boundary box
                # and within the original polygon of the census block
                    
                while True:
                        
                    samplepoint = Point(uniform(leftmost, rightmost),uniform(bottommost, topmost))
                        
                    if samplepoint is None:
                        break
                    
                    if poly.contains(samplepoint):
                        break
        
                x, y = merc.LatLonToMeters(samplepoint.y,samplepoint.x)
                tx,ty = merc.MetersToTile(x, y, 21)
                    
                    
                #Determine the right income
                inccnt += 1
                inctype = ''
                assert inccnt <= income[incord[0]] + income[incord[1]] + income[incord[2]] or rw[1]['abbrev'] == 'ny'
                if inccnt <= income[incord[0]]:
                    inctype = incord[0]
                elif inccnt <= income[incord[0]] + income[incord[1]]:
                    inctype = incord[1]
                elif inccnt <= income[incord[0]] + income[incord[1]] + income[incord[2]]:
                    inctype = incord[2]
                        
                # Create a unique quadkey for each point object
                    
                quadkey = merc.QuadTree(tx, ty, 21)       
                 
                outf.write("%s,%s,%s,%s,%s\n" %(x,y,sect,inctype,quadkey))
                # Convert the longitude and latitude coordinates to meters and
                # a tile reference

    outf.close() 
Code example #12
class Downloader(object):
    '''
    Based on http://www.wellho.net/solutions/python-python-threads-a-first-example.html
    '''
    def __init__(self, mapdir, minzoom, maxzoom):
        self.mercator = GlobalMercator(256)
        self.minzoom = minzoom
        self.maxzoom = maxzoom
        self.TopRightLat = None
        self.TopRightLon = None
        self.BottomLeftLat = None
        self.BottomLeftLon = None
        self.mminx = None
        self.mminy = None
        self.mmaxx = None
        self.mmaxy = None
        self.mapdir = mapdir
        self.jobs = Queue.Queue()

    def download(self, toprightlat, toprightlon, bottomleftlat, bottomleftlon):
        self.TopRightLat = toprightlat
        self.TopRightLon = toprightlon
        self.BottomLeftLat = bottomleftlat
        self.BottomLeftLon = bottomleftlon
        self.mminx, self.mminy = self.mercator.LatLonToMeters(
            toprightlat, toprightlon)
        self.mmaxx, self.mmaxy = self.mercator.LatLonToMeters(
            bottomleftlat, bottomleftlon)

        map(self.addJobForZoom, range(self.minzoom, self.maxzoom + 1))

        self.runJobs()

    def addJobForZoom(self, zoom):
        tminx, tminy = self.mercator.MetersToTile(self.mminx, self.mminy, zoom)
        tmaxx, tmaxy = self.mercator.MetersToTile(self.mmaxx, self.mmaxy, zoom)

        if tminx > tmaxx:
            tminx, tmaxx = tmaxx, tminx
        if tminy > tmaxy:
            tminy, tmaxy = tmaxy, tminy

        for tx in range(tminx, tmaxx + 1):
            for ty in range(tminy, tmaxy + 1):
                gx, gy = self.mercator.GoogleTile(tx, ty, zoom)
                self.jobs.put({'x': gx, 'y': gy, 'z': zoom})

    def runJobs(self):
        workers = []
        for threadNum in range(0, MAX_THREADS):
            subdownloader = self.SubDownloader(self)
            workers.append(subdownloader)
            workers[-1].start()

        for worker in workers:
            worker.join(20)

        print "Finished!"

    class SubDownloader(Thread):
        def __init__(self, parent):
            Thread.__init__(self)
            self.parent = parent

        def run(self):
            while 1:
                try:
                    job = self.parent.jobs.get(0)
                except Queue.Empty:
                    return
                mt = random.randrange(0, 4)
                filename = '%i/gm_%i_%i_%i.png' % (job['z'], job['x'],
                                                   job['y'], job['z'])
                if os.path.isfile('%s%s' % (self.parent.mapdir, filename)):
                    #                    print "skippnig", filename, "left:", self.parent.jobs.qsize()
                    continue
                if not os.path.isdir('%s%s' % (self.parent.mapdir, job['z'])):
                    os.mkdir('%s%s' % (self.parent.mapdir, job['z']))
#                http://mt1.google.com/vt/lyrs=m@115&hl=en&x=39141&s=&y=26445&z=16&s=Gali
                url = 'http://mt%i.google.com/vt/lyrs=m@115&hl=en&x=%i&y=%i&z=%i&s=' % (
                    mt, job['x'], job['y'], job['z'])
                try:
                    tile = urllib2.urlopen(url=url, timeout=20).read()
                except:
                    #                    print "Can't open", url, "left:", self.parent.jobs.qsize()
                    continue
                # save under the map directory so the path matches the existence check above
                fh = open('%s%s' % (self.parent.mapdir, filename), 'wb')
                fh.write(tile)
                fh.close()
Code example #13
File: test.py Project: gboysking/globalmaptiles
print(result)

result = gm.MetersToLatLon(meters['mx'], meters['my'])
print(result)

result = gm.MetersToPixels(meters['mx'], meters['my'], zoom)
print(result)

result = gm.PixelsToTile(pixels['px'], pixels['py'])
print(result)

result = gm.PixelsToMeters(pixels['px'], pixels['py'], zoom)
print(result)

result = gm.TileBounds(tile['tx'], tile['ty'], zoom)
print(result)

result = gm.LatLonToTile(geographic['lat'], geographic['lon'], zoom)
print(result)

result = gm.MetersToTile(meters['mx'], meters['my'], zoom)
print(result)

result = gm.GoogleTile(tile['tx'], tile['ty'], zoom)
print(result)

result = gm.QuadTree(tile['tx'], tile['ty'], zoom)
print(result)

tx, ty, zoom = gm.QuadKeyToTile(quadKey)
print(tx, ty, zoom)
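
The setup that the truncated test above relies on (the gm object and the meters, pixels, tile, geographic and quadKey values) is not shown, so here is a self-contained sketch of the same round trip with illustrative inputs, assuming the GlobalMercator class from this project's globalmaptiles.py:

from globalmaptiles import GlobalMercator

gm = GlobalMercator()
zoom = 15
lat, lon = 51.5074, -0.1278                  # illustrative point (London)

mx, my = gm.LatLonToMeters(lat, lon)         # lat/lon -> spherical-mercator meters
px, py = gm.MetersToPixels(mx, my, zoom)     # meters -> pixel coordinates at this zoom
tx, ty = gm.PixelsToTile(px, py)             # pixels -> TMS tile address
quadkey = gm.QuadTree(tx, ty, zoom)          # tile -> quadkey string

print(gm.MetersToLatLon(mx, my))             # should be close to the original lat/lon
print(gm.QuadKeyToTile(quadkey))             # should recover the tile address and zoom
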
Code example #14
        if os.path.exists(output_jpeg_file):
            os.unlink(output_jpeg_file)

    if not os.path.exists(google_image_folder):
        os.makedirs(google_image_folder)

    mercator = GlobalMercator()
    cx, cy = mercator.LatLonToMeters(lat, lon)
    minx = cx - radius
    maxx = cx + radius
    miny = cy - radius
    maxy = cy + radius
    debug_print('minx = %f, miny = %f, maxx = %f, maxy = %f\n' %
                (minx, miny, maxx, maxy))

    tminx, tminy = mercator.MetersToTile(minx, miny, tz)
    tmaxx, tmaxy = mercator.MetersToTile(maxx, maxy, tz)

    total_tiles = (tmaxx - tminx + 1) * (tmaxy - tminy + 1)
    debug_print('count = %d' % total_tiles)

    # progress bar
    widgets = [Bar('>'), ' ', Percentage(), ' ', Timer(), ' ', ETA()]
    pbar = ProgressBar(widgets=widgets, maxval=total_tiles).start()

    tile_list = []
    for ty in range(tminy, tmaxy + 1):
        for tx in range(tminx, tmaxx + 1):
            tile_list.append([tx, ty])

    print("Downloading images ...")
Code example #15
        Usage("ERROR: Both 'latmax' and 'lonmax' must be given.")

    if latmax != None and lonmax != None:
        if latmax < lat:
            Usage("ERROR: 'latmax' must be bigger then 'lat'")
        if lonmax < lon:
            Usage("ERROR: 'lonmax' must be bigger then 'lon'")
        boundingbox = (lon, lat, lonmax, latmax)

    tz = zoomlevel
    mercator = GlobalMercator()

    mx, my = mercator.LatLonToMeters(lat, lon)
    print "Spherical Mercator (ESPG:900913) coordinates for lat/lon: "
    print(mx, my)
    tminx, tminy = mercator.MetersToTile(mx, my, tz)

    if boundingbox:
        mx, my = mercator.LatLonToMeters(latmax, lonmax)
        print "Spherical Mercator (ESPG:900913) cooridnate for maxlat/maxlon: "
        print(mx, my)
        tmaxx, tmaxy = mercator.MetersToTile(mx, my, tz)
    else:
        tmaxx, tmaxy = tminx, tminy

    for ty in range(tminy, tmaxy + 1):
        for tx in range(tminx, tmaxx + 1):
            tilefilename = "%s/%s/%s" % (tz, tx, ty)
            print tilefilename, "( TileMapService: z / x / y )"

            gx, gy = mercator.GoogleTile(tx, ty, tz)
Code example #16
File: makedotsshp.py Project: npr99/dotmap
def main(input_filename, output_filename):
        print "Processing: %s - Ctrl-Z to cancel"%input_filename
        merc = GlobalMercator()

        # open the shapefile
        ds = ogr.Open( input_filename )
        if ds is None:
                print "Open failed.\n"
                sys.exit( 1 )

        lyr = ds.GetLayerByIndex( 0 )

        lyr.ResetReading()

        feat_defn = lyr.GetLayerDefn()
        field_defns = [feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount())]

        # look up the index of the field we're interested in
        for i, defn in enumerate( field_defns ):
                if defn.GetName()=="POP10":
                        pop_field = i

        # set up the output file
        # if it already exists, ask for confirmation to delete and remake it
        if os.path.isfile(output_filename):
                if not confirm("  Database %s exists, overwrite?"%output_filename, False):
                        return False
                else:
                        os.system("rm %s"%output_filename)
        
        # if file removal failed, the file may be locked:
        # ask for confirmation to unlock it
        if os.path.isfile(output_filename):
                if not confirm("  Attempt to unlock database %s?"%output_filename, False):
                        return False
                else:
                        unlock(output_filename)
                # if it's still there, there's a problem, bail
                if os.path.isfile(output_filename):
                        print "Trouble - exiting."
                        sys.exit()
                else:
                        print "Success - continuing:"

        conn = sqlite3.connect( output_filename )
        c = conn.cursor()
        c.execute( "create table if not exists people (x real, y real, quadkey text)" )
        
        n_features = len(lyr)

        for j, feat in enumerate( lyr ):
                if j%1000==0:
                        conn.commit()
                        if j%10000==0:
                                print " %s/%s (%0.2f%%)"%(j+1,n_features,100*((j+1)/float(n_features)))
                        else:
                                sys.stdout.write(".")
                                sys.stdout.flush()

                pop = feat.GetField(pop_field)

                geom = feat.GetGeometryRef()
                if geom is None:
                        continue

                bbox = get_bbox( geom )
                if not bbox:
                        continue
                ll,bb,rr,tt = bbox

                # generate a sample within the geometry for every person
                for i in range(pop):
                        while True:
                                samplepoint = make_ogr_point( uniform(ll,rr), uniform(bb,tt) )
                                if geom.Intersects( samplepoint ):
                                        break

                        x, y = merc.LatLonToMeters( samplepoint.GetY(), samplepoint.GetX() )
                        tx,ty = merc.MetersToTile( x, y, 21)
                        quadkey = merc.QuadTree( tx, ty, 21 )

                        c.execute( "insert into people values (?,?,?)", (x, y, quadkey) )
        
        conn.commit()
        print "Finished processing %s"%output_filename
Code example #17
File: main.py Project: alastaircarson/test-reproject
def main():
    # Code to open an image, and apply a transform
    # open the image
    # image = Image.open("Ally.jpg")
    # w = image.width
    # h = image.height
    # print((w, h))
    # Create a transformation to apply to the image
    # shift = Shift(-w/2, -h/2)
    # rotate = Rotation(math.pi/2)
    # scale = Scale(2)
    # shift2 = Shift(h/2, w/2)
    # combined = PositionTransform()
    # combined = combined.combine(shift)
    # combined = combined.combine(rotate)
    # combined = combined.combine(scale)
    # combined = combined.combine(shift2)
    # inverse the transformation (to apply it)
    # t = combined.inverse()

    # Image.transform(size, method, data=None, resample=0, fill=1, fillcolor=None)
    # img2 = image.transform((h, w), Image.AFFINE, _get_image_transform(t))
    # img2.save("Test.jpg")

    # Code to create a mosaic 4x4 tile world map at level 2
    # tm = TileMosaic(OSMTileRequester(), 2, 0, 3, 0, 3)
    # tm.save("world2.png")

    # Sample coordinates to avoid caring about the transformation just now
    bng_coords = [(300000, 600000), (300000, 601000), (301000, 601000),
                  (301000, 600000)]
    gwm_coords = [(-398075.709110655, 7417169.44503078),
                  (-398115.346383602, 7418925.37709793),
                  (-396363.034574031, 7418964.91393662),
                  (-396323.792660911, 7417208.95976453)]

    bng_x = [bng[0] for bng in bng_coords]
    bng_y = [bng[1] for bng in bng_coords]
    bng_box = (min(bng_x), min(bng_y), max(bng_x), max(bng_y))

    gwm_x = [gwm[0] for gwm in gwm_coords]
    gwm_y = [gwm[1] for gwm in gwm_coords]
    gwm_box = (min(gwm_x), min(gwm_y), max(gwm_x), max(gwm_y))

    # Treat the coords above as covering a square map of bng_map_size pixels and calculate the resolution
    bng_map_size = 600
    bng_res = (bng_box[2] - bng_box[0]) / bng_map_size
    print(bng_res)

    # Use the GlobalMercator class to calculate the optimal zoom level to use
    gwm = GlobalMercator()
    gwm_zoom = gwm.ZoomForPixelSize(bng_res)
    print(gwm_zoom)

    # Calculate the min/max tile x and y for the given area at the calculates zoom level
    tiles_x = []
    tiles_y = []
    for coord in gwm_coords:
        tx, ty = gwm.MetersToTile(coord[0], coord[1], gwm_zoom)
        tiles_x.append(tx)
        tiles_y.append(ty)
        print(f"{gwm_zoom} {tx} {ty}")
        # print(OSMTileRequester().request_tile(gwm_zoom, tx, ty))

    # Create a mosaic image from these tiles
    start_x = min(tiles_x)
    end_x = max(tiles_x)
    start_y = min(tiles_y)
    end_y = max(tiles_y)
    gwm_mosaic = TileMosaic(OSMTileRequester(), gwm_zoom, start_x, end_x,
                            start_y, end_y)
    gwm_mosaic.save("mosaic.png")

    # Get the bbox for these tiles
    gwm_mosaic_box_tl = gwm.TileBounds(start_x, start_y, gwm_zoom)
    gwm_mosaic_box_tr = gwm.TileBounds(end_x, start_y, gwm_zoom)
    gwm_mosaic_box_bl = gwm.TileBounds(start_x, end_y, gwm_zoom)
    gwm_mosaic_box_br = gwm.TileBounds(end_x, end_y, gwm_zoom)
    gwm_mosaic_box = (min(gwm_mosaic_box_tl[0], gwm_mosaic_box_bl[0]),
                      min(gwm_mosaic_box_bl[3], gwm_mosaic_box_br[3]),
                      max(gwm_mosaic_box_tr[2], gwm_mosaic_box_br[2]),
                      max(gwm_mosaic_box_tl[1], gwm_mosaic_box_tr[1]))

    print(gwm_mosaic_box)

    test = [(0, 0), (400, 0), (400, 400), (0, 400)]

    # Create a transformation to convert pixels of the target BNG image to the GWM mosaic image
    bng_img_to_gwm_image = PositionTransform()
    # Translate/scale image px to BNG
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(HorizontalFlip())
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(Scale(bng_res))
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(
        Shift(bng_box[0], bng_box[3]))
    test_coords(test, bng_img_to_gwm_image)

    # Transform BNG to GWM coords
    bng_gwm_transform = SamplePointTransform(bng_coords[0], bng_coords[1],
                                             bng_coords[2], gwm_coords[0],
                                             gwm_coords[1], gwm_coords[2])
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(bng_gwm_transform)
    test_coords(test, bng_img_to_gwm_image)

    # Translate/scale GWM coords to GWM mosaic image coords
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(
        Shift(-gwm_mosaic_box[0], -gwm_mosaic_box[3]))
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(
        Scale(1 / gwm.Resolution(gwm_zoom)))
    bng_img_to_gwm_image = bng_img_to_gwm_image.combine(HorizontalFlip())

    test_coords(test, bng_img_to_gwm_image)

    bng_result = gwm_mosaic.image.transform(
        (bng_map_size, bng_map_size), Image.AFFINE,
        _get_image_transform(bng_img_to_gwm_image))

    bng_result.save("BNG.jpg")