def timings():
    print 'loading'
    data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\global_urban_extent_polygons_v1.01.shp",
                         encoding='latin')
    #data = list(pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\ne_10m_admin_0_countries.shp", encoding='latin')) * 3
    print len(data)

    print 'making shapely (no copy)'
    t = time()
    shapelys = [asShape(f.geometry) for f in data]
    print time() - t

    print 'making shapely (copy)'
    t = time()
    shapelys = [shape(f.geometry) for f in data]
    print time() - t

    print 'dump geoj (interface)'
    t = time()
    geojs = [s.__geo_interface__ for s in shapelys]
    print time() - t

    ##print 'dump geoj (mapping)'
    ##t = time()
    ##geojs = [mapping(s) for s in shapelys]
    ##print time()-t

    print 'load geoj asShape (no copy)'
    t = time()
    shapelys = [asShape(geoj) for geoj in geojs]
    print time() - t

    print 'load geoj shape (copy)'
    t = time()
    shapelys = [shape(geoj) for geoj in geojs]
    print time() - t

    print 'dump wkt'
    t = time()
    wkts = [s.wkt for s in shapelys]
    print time() - t

    print 'load wkt'
    t = time()
    shapelys = [wkt_loads(wkt) for wkt in wkts]
    print time() - t

    print 'dump wkb'
    t = time()
    wkbs = [s.wkb for s in shapelys]
    print time() - t

    print 'load wkb'
    t = time()
    shapelys = [wkb_loads(wkb) for wkb in wkbs]
    print time() - t
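# Presumed setup for the timings() benchmark above -- an assumption, since the
# original file's import block is not shown in this excerpt: pythongis, time(),
# and the shapely constructors/serializers that the function calls.
#
# import pythongis as pg
# from time import time
# from shapely.geometry import asShape, shape, mapping
# from shapely.wkt import loads as wkt_loads
# from shapely.wkb import loads as wkb_loads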
def viewresult(res):
    vz = pg.VectorData()
    for r in res:
        props = []  #r[:-1] if len(r) > 1 else []
        if r[-1]:
            geoj = json.loads(r[-1])  # assumes last value of row is returned as geojson string
            vz.add_feature(props, geoj)
    vz.view()
def get_data(path, iso):
    d = pg.VectorData(fields=['adm_level'])
    reader = shapefile.Reader(path)
    max_level = 0
    for rec in reader.iterRecords(fields=['iso_countr', 'adm_level']):
        if rec['iso_countr'] == iso:
            shape = reader.shape(rec.oid)
            #rec = reader.record()
            d.add_feature([rec['adm_level']], shape.__geo_interface__)
            max_level = max(max_level, rec['adm_level'])
    print('max level', max_level)
    return d
def load(self):
    # TODO: should prob be threaded
    filepath = self.filepath.get()
    data = None
    for typ in pg.vector.loader.file_extensions:
        if filepath.endswith(typ):
            data = pg.VectorData(filepath, encoding=self.encoding.get())
            break
    for typ in pg.raster.loader.file_extensions:
        if filepath.endswith(typ):
            data = pg.RasterData(filepath)
            break
    self.onsuccess(data)
def accept(self):
    # check geoj
    geoj = self.draw_geoj
    geoj['coordinates'].pop(-1)  # drop the latest one, either empty or unfinished
    if geoj['coordinates']:
        # make vectordata
        d = pg.VectorData()
        d.add_feature([], geoj)
        # add to renderer
        self.mapview.renderer.add_layer(d)
        for cntr in self.mapview.controls:
            if isinstance(cntr, StaticLayersControl):
                cntr.update_layers()
    # exit draw mode
    self.cancel()
def view(results, text=False):
    import pythongis as pg
    # setup map
    m = pg.renderer.Map()
    m.add_layer(r"C:\Users\kimok\Desktop\gazetteer data\raw\ne_10m_admin_0_countries.shp",
                fillcolor='gray')
    # options
    kwargs = {}
    if text:
        kwargs.update(text=lambda f: f[text][:20],
                      textoptions=dict(textsize=3))
    # add
    d = pg.VectorData(fields=[])
    for row in results:
        d.add_feature([], row[-1].__geo_interface__)
    m.add_layer(d, fillcolor='blue', **kwargs)
    # view
    m.view()
def sqlite_geoms():
    print 'load shapefile'
    t = time()
    #data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\global_urban_extent_polygons_v1.01.shp", encoding='latin')
    #data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\atlas_urban.geojson", encoding='latin')
    data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\global_settlement_points_v1.01.shp",
                         encoding='latin')
    #data = pg.VectorData(r"C:\Users\kimok\Desktop\gazetteer data\raw\ne_10m_admin_0_countries.shp", encoding='latin')
    print time() - t

    print 'making shapely'
    t = time()
    # CRUCIAL SPEEDUP, SHAPELY SHOULD BE FROM SHAPE, NOT ASSHAPE WHICH IS INDIRECT REFERENCING
    shapelys = [shape(f.geometry) for f in data]
    print time() - t

    print 'dump wkb'
    t = time()
    wkbs = [s.wkb for s in shapelys]
    print time() - t

    print 'convert to binary'
    from sqlite3 import Binary
    t = time()
    blobs = [Binary(wkb) for wkb in wkbs]
    print time() - t

    print 'insert wkb into db'
    fields = ['ID', 'geom']
    typs = ['int', 'BLOB']
    w = Writer('testgeodb::data', fields=zip(fields, typs), replace=True)
    t = time()
    for i, blb in enumerate(blobs):
        w.add([i, blb])
    print time() - t

    print 'load wkb from db'
    t = time()
    shapelys = [wkb_loads(bytes(blb)) for ID, blb in w.select('*')]
    print time() - t
def view_footprints(self, mapIDs=None):
    import pythongis as pg
    render = pg.renderer.Map()
    render.add_layer(r"C:\Users\kimok\Downloads\cshapes\cshapes.shp",
                     fillcolor=(222, 222, 222))
    d = pg.VectorData(fields=['mapID', 'link'])
    sql = 'select oid,link,xmin,ymin,xmax,ymax from maps'
    if mapIDs is not None:
        sql += ' where oid in ({})'.format(','.join(map(str, mapIDs)))
    for oid, link, x1, y1, x2, y2 in self.query(sql):
        if x1 is None:
            continue
        row = [oid, link]
        geoj = {'type': 'Polygon',
                'coordinates': [[(x1, y1), (x2, y1), (x2, y2), (x1, y2)]]}
        d.add_feature(row, geoj)
    if len(d):
        render.add_layer(d, fillcolor=(0, 200, 0, 100), outlinewidth=0.2)
        render.zoom_bbox(*d.bbox)
    render.view()
        self._index.cur.execute('INSERT INTO links VALUES (?, ?)',
                                (self.nodeid, itemid))


######################################
# TESTING

if __name__ == '__main__':
    import pythongis as pg

    DEBUG = False
    PROFILE = True

    print 'loading'
    d = pg.VectorData(r"C:\Users\kimok\Downloads\ne_10m_admin_1_states_provinces (1)\ne_10m_admin_1_states_provinces.shp")
    #d = pg.VectorData(r"C:\Users\kimok\Desktop\BIGDATA\gazetteer data\raw\global_settlement_points_v1.01.shp", encoding='latin')
    items = [(f.id, f.bbox) for f in d]
    # items = [(i+1, f.bbox) for i,f in enumerate(d)]
    print len(items)

    print 'building'
    spindex = QuadTree(-180, -90, 180, 90)
    if PROFILE:
        import cProfile
        prof = cProfile.Profile()
        prof.enable()
    spindex.build(items)
    if PROFILE:
        print prof.print_stats('cumtime')
import pythongis as pg
from time import time

# test distance
vect = pg.VectorData(r"C:\Users\kimo\Downloads\cshapes_0.6\cshapes.shp",
                     )  #select=lambda f: f["GWCODE"]==666)
hist = vect.histogram("GWCODE")
#hist.view()

t = time()
distrast = pg.raster.analyzer.distance(vect, bbox=[-180, 90, 180, -90],
                                       width=72 * 10, height=36 * 10)
#distrast = pg.RasterData("C:/Users/kimo/Desktop/world.jpg", bbox=[-180,90,180,-90], width=512, height=256)
print time() - t

hist = distrast.bands[0].histogram()
print hist
#hist.view()

#mapp = distrast.render()
mapp = pg.renderer.Map()
mapp.add_layer(distrast)
mapp.add_layer(vect, fillcolor=None)
#mapp.add_legend()
mapp.view()
import unittest

import pythongis as pg

# data
pointdata = pg.VectorData('data/ne_10m_populated_places_simple.shp', encoding='latin')
linedata = pg.VectorData('data/ne_10m_railroads.shp', encoding='latin')
polygondata = pg.VectorData('data/ne_10m_admin_0_countries.shp', encoding='latin')
rasterdata = pg.RasterData('data/land_shallow_topo_2048.png')
rasterdata.set_geotransform(width=2048, height=1024,
                            affine=[0.175781250, 0, -180, 0, -0.175781250, 90])

# base class
class BaseTestCases:

    class DrawShapes(unittest.TestCase):
        width = 600
        height = 300
        kwargs = {'fillcolor': 'yellow', 'outlinecolor': 'black'}
        output_prefix = 'render_projections'
        crs = None

        def create_map(self):
            self.map = pg.renderer.Map(self.width, self.height, background='gray',
import pythongis as pg

poly = pg.VectorData("data/ne_10m_admin_0_countries.shp")

def diagonal_left(width, height):
    import pyagg
    c = pyagg.Canvas(width, height)
    for frac in range(0, 100 + 1, 10):
        frac = frac / 100.0
        x = width * frac
        y = height * frac
        c.draw_line([(x, 0), (0, y)], fillcolor='black', outlinecolor=None)
        c.draw_line([(x, height), (width, y)], fillcolor='black', outlinecolor=None)
    return c

diagonal_left(1000, 1000).view()

mapp = pg.renderer.Map(1000, 500, background=None)  #,background=(255,0,0))
lyr = mapp.add_layer(poly, fillcolor=filleffect)  # NOTE: filleffect is not defined in this snippet
mapp.view()
def get_table(filepath, page):
    import os
    #os.chdir(os.path.abspath(''))
    #print os.getcwd()
    os.system('''cd "{wdir}" & "xpdf-tools-win-4.00\\bin64\pdftopng.exe" -f {page} -l {page} -r 300 -gray "{filepath}" "output\page{pagestr}"'''.format(
        filepath=filepath, page=page, pagestr=str(page).zfill(6), wdir=os.path.abspath('')))
    im = PIL.Image.open('output/page{pagestr}-{pagestr}.png'.format(pagestr=str(page).zfill(6))).convert('1').point(lambda v: v == 0)

    # grow to connect tiny gaps
    global oim
    oim = im
    #im = PIL.ImageMorph.MorphOp(op_name='dilation4').apply(im)[1]
    #im = PIL.ImageMorph.MorphOp(op_name='dilation4').apply(im)[1]
##    im = PIL.ImageMorph.MorphOp(patterns=['1(... ... ...)->0',
##                                          '4(000 111 000)->1',
##                                          '4(111 111 000)->1',
##                                          '4(000 110 010)->1',
##                                          '4(000 110 110)->1',]).apply(im)[1]

    # collect lines
    hlines, vlines = find_lines(im, 50, mingap=2, maxthick=None)

    # extend lines
##    for i,hl in enumerate(hlines):
##        (x1,y1),(x2,y2) = hl
##        extend = 10 #int((x2-x1)*0.05) # 5 percent
##        hlines[i] = (x1-extend,y1),(x2+extend,y2)
##    for i,vl in enumerate(vlines):
##        (x1,y1),(x2,y2) = vl
##        extend = 10 #int((y2-y1)*0.05) # 5 percent
##        vlines[i] = (x1,y1-extend),(x2,y2+extend)

    # draw lines
    lineimg = PIL.Image.new('1', im.size, 0)
    draw = PIL.ImageDraw.Draw(lineimg)
    for s, e in hlines:
        draw.line(s + e, fill=255)
    for s, e in vlines:
        draw.line(s + e, fill=255)
    if DEBUG:
        lineimg.show()
    lineimg.save('output/page%s-lines.png' % str(page).zfill(6))

##    # ALT1: thin lines to single pixel
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    #lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    #lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    #lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    #lineimg.show()
##    for _ in range(30):
##        print _
##        lineimg = PIL.ImageMorph.MorphOp(patterns=['4(000 .1. 111)->0','4(.00 110 .1.)->0',
##                                                   '1(111 111 000)->1','1(110 110 110)->1',
##                                                   ]).apply(lineimg)[1]
##    lineimg.show()

    # ALT2: collect new lines, only 1 per maxthick pixels
    # TODO: compare all neighbouring lines and get longest one
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg = PIL.ImageMorph.MorphOp(op_name='dilation8').apply(lineimg)[1]
##    lineimg.show()
    hlines, vlines = find_lines(lineimg, 150, mingap=50, maxthick=None, move=True)  #30, offset=0, move=1)

    # group lines
    ghlines, gvlines = group_lines(hlines, vlines)

    # draw grouped lines
    lineimg = PIL.Image.new('1', im.size, 0)
    draw = PIL.ImageDraw.Draw(lineimg)
    for s, e in ghlines:
        draw.line(s + e, fill=255)
    for s, e in gvlines:
        draw.line(s + e, fill=255)
    if DEBUG:
        lineimg.show()

    # find corners
    # ALT1: from image corners
##    cornimg = PIL.ImageMorph.MorphOp(patterns=['1(... ... ...)->0',
##                                               '4(010 110 000)->1', #right corner
##                                               '4(000 110 000)->1', #right tip
##                                               '4(010 111 000)->1', #3way junction
##                                               '1(010 111 010)->1', #4way junction
##                                               ]).apply(lineimg)[1]
##
##    PIL.ImageMorph.MorphOp(op_name="dilation8").apply(cornimg)[1].show()
##
##    # thin each corner to one pixel
##    corners = []
##    pixels = cornimg.load()
##    for y in range(cornimg.height):
##        for x in range(cornimg.width):
##            val = pixels[x,y]
##            if val:
##                similar = any((abs(x-cx)<10 and abs(y-cy)<10
##                               for cx,cy in corners))
##                # dont add if a similar one has already been added, < 10 pixels
##                if similar:
##                    continue
##                corners.append((x,y))
##
##    newcornimg = PIL.Image.new('1', cornimg.size, 0)
##    pixels = newcornimg.load()
##    for x,y in corners:
##        pixels[x,y] = 255
##
##    PIL.ImageMorph.MorphOp(op_name="dilation8").apply(newcornimg)[1].show()

    # ALT2: instead, if we already have all straight connected lines, then just get the list of all their geom intersections
    isecs = []
    for hline in ghlines:
        for vline in gvlines:
            if hline[0][0] - 10 <= vline[0][0] <= hline[1][0] + 10 and vline[0][1] - 10 <= hline[0][1] <= vline[1][1] + 10:
                x, y = vline[0][0], hline[0][1]
                isecs.append((x, y))
    corners = isecs

    # add corners of really long lines
    hlines = [l for l in ghlines if l[1][0] - l[0][0] > 1000]
    for l in hlines:
        corners.append(l[0])
        corners.append(l[1])

    # draw isecs
    cornimg = PIL.Image.new('1', im.size, 0)
    pixels = cornimg.load()
    for x, y in corners:
        pixels[x, y] = 255
    if DEBUG:
        PIL.ImageMorph.MorphOp(op_name="dilation8").apply(cornimg)[1].show()

    # add lines that contain all isecs
    xmin, ymin = min((c[0] for c in corners)), min((c[1] for c in corners))
    xmax, ymax = max((c[0] for c in corners)), max((c[1] for c in corners))
    ghlines.append([(xmin, ymin), (xmax, ymin)])
    ghlines.append([(xmin, ymax), (xmax, ymax)])
    gvlines.append([(xmin, ymin), (xmin, ymax)])
    gvlines.append([(xmax, ymin), (xmax, ymax)])

    # find boxes
    boxes = find_boxes(corners, ghlines, gvlines)

    # test
##    for box in boxes:
##        text = get_text(oim, box)
##        print box, text

    # find all lines that are really long
    hlines = [l for l in ghlines if l[1][0] - l[0][0] > 1000]
    print hlines

    # draw lines
    tablineimg = PIL.Image.new('1', im.size, 0)
    draw = PIL.ImageDraw.Draw(tablineimg)
    for s, e in hlines:
        draw.line(s + e, fill=255)
    if DEBUG:
        tablineimg.show()

    # two top ones define the fields region, bottom ones the column/data region
    tabfieldtop = hlines[0][0][1]
    tabfieldbottom = hlines[1][0][1]
    tabdattop = hlines[1][0][1]
    tabdatbottom = hlines[2][0][1]

    # detect top text
    toprows = detect_data(oim, bbox=[0, 0, oim.size[0], tabfieldtop])

    # read table title
    topmargin = 90 + 60
    chapter = get_text(toprows, [(0, 0), (oim.size[0], topmargin)])[0]
    titlerows = filter_data(toprows, [0, topmargin, oim.size[0], tabfieldtop])
    title = ' '.join((r['text'] for r in titlerows if r['text'].strip()))
    print chapter
    print title

    # detect all text
    drows = detect_data(oim, bbox=[0, 0, oim.size[0], tabdatbottom])

    # print all fields text for debugging
    for line in filter_data(drows, [0, tabfieldtop, oim.size[0], tabfieldbottom]):
        if 'text' in line and line['text']:
            print line

    # loop through all topleft corners of boxes inside the fields region, left to right, ignoring ca duplicate x corners
    fieldboxes = [bbox for bbox in boxes if bbox[1][1] <= tabfieldbottom + 20]
    fieldxs = sorted(set((b[0][0] for b in fieldboxes)))

    # for each corner, find ca duplicate x corners
    fields = []
    for x in fieldxs:
        print x
        g = [bbox for bbox in fieldboxes if bbox[0][0] <= x < bbox[1][0]]
        # loop bboxes of those duplicate corners, top to bottom
        g = sorted(g, key=lambda b: b[0][1])
        print g

        # define field name as their concatenated texts
        names = []
        for bbox in g:
            text, notes = get_text(drows, bbox)
            if not text:
                # try detecting again, on subset
                print 'no text in field', bbox[0] + bbox[1]
                x1, y1, x2, y2 = bbox[0] + bbox[1]
                trimbox = x1 + 5, y1 + 5, x2 - 5, y2 - 5
                fielddata = detect_data(oim, trimbox)
                if DEBUG:
                    oim.crop(trimbox).show()
                try:
                    text, notes = get_text(fielddata)
                    print 'second pass found', repr(text)
                except Exception as err:
                    print 'UNKNOWN FAIL', err
            names.append(text or '')
        print names
        name = '|'.join(reversed(names))

        # define bbox width as the bottom bbox width
        x1, x2 = g[-1][0][0], g[-1][1][0]
        # define column bbox as same width, but extending from lower fields lines to lower table line
        y1, y2 = tabdattop, tabdatbottom
        bbox = [(x1, y1), (x2, y2)]
        print bbox

        # add as field obj
        fields.append((name, bbox))

    # for first field, consider its data column bbox
    (x1, y1), (x2, y2) = fields[0][1]
    croprows = filter_data(drows, [x1, y1, x2, y2])

    # loop through column text lines via image_to_data, top to bottom
    lines = []
    line = dict(text='')
    for row in croprows:
        # process
        if row['word_num'] == '0' and line['text'].strip():
            lines.append(line)
            line = dict(text='')
        if line['text'] and 'text' in row and row['text'].strip():
            # update
            bbox = row['left'], row['top'], row['left'] + row['width'], row['top'] + row['height']
            prevbbox = line['bbox']
            line['bbox'] = min(bbox[0], prevbbox[0]), min(bbox[1], prevbbox[1]), max(bbox[2], prevbbox[2]), max(bbox[3], prevbbox[3])
            line['text'] += ' ' + row['text']
        elif not line['text'] and 'text' in row and row['text'].strip():
            # new line
            bbox = row['left'], row['top'], row['left'] + row['width'], row['top'] + row['height']
            line = dict(text=row['text'], bbox=bbox)
    lines.append(line)

    # for each text
    rows = []
    row = dict()
    lines = sorted(lines, key=lambda l: l['bbox'][1])
    for i in range(len(lines)):
        line = lines[i]
        print line

##        rowtop = line['bbox'][1]
##        try: rowbottom = lines[i+1]['bbox'][1]
##        except: rowbottom = line['bbox'][3] # last line
##
##        try: crop.crop([0,rowtop,crop.size[0]-1,rowbottom]).show()
##        except: pass

        # process
        if not row:
            # first new
            row['rowtop'] = line['bbox'][1] - 3
            row['text'] = line['text']
            # if firstval is indented, consider a subunit
            # ...
        else:
            # continued
            row['text'] += '\n' + line['text']

        # if ends with dot ".", add row as dict
        # (use > rather than >= so the last line does not index past the end of lines)
        if line['text'].endswith('.') or line['text'].endswith(u'\u2018') \
           or (len(lines) > i+1 and lines[i+1]['bbox'][1] - line['bbox'][3] > 30):
            # or if large gap until next line, colon, or is mostly upper caps, add this as header/grouping
            row['rowbottom'] = line['bbox'][3]

            # loop through all fields and check for text/data within the top/bottom range of the text
            vals = []
            for fname, fbox in fields:
                bbox = [(fbox[0][0], row['rowtop']), (fbox[1][0], row['rowbottom'])]
                text, notes = get_text(drows, bbox)
                vals.append(text)
            row['vals'] = vals

            # add and reset
            print row
            rows.append(row)
            row = dict()

##    for row in rows:
##        # testview
##        oim.crop([0,row['rowtop'],oim.size[0]-1,row['rowbottom']]).show()

    import pythongis as pg
    d = pg.VectorData(fields=[f[0] for f in fields])  #.split('|')[-1]
    for r in rows:
        d.add_feature(row=r['vals'])

    return chapter, title, d
import pythongis as pg
import gc

countries = pg.VectorData(r"C:\Users\kimok\Desktop\ch est\data\cshapes.shp")
countries = countries.select(lambda f: f["GWCODE"] != -1 and f["GWEYEAR"] == 2016)
print countries

# mapit
#rast = pg.RasterData(r'C:\Users\kimok\Downloads\F182013.v4c_web.stable_lights.avg_vis.tif')
rast = pg.RasterData(r'C:\Users\kimok\Downloads\SVDNB_npp_20170701-20170731_75N060W_vcmcfg_v10_c201708061230.avg_rade9.tif')
print rast

for iso in ['NGA', 'COD', 'YEM']:
    print iso
    c = countries.select(lambda f: f["ISO1AL3"] == iso)
    clip = rast.manage.clip(c, bbox=c.bbox)
    print clip
    clip.view(cutoff=(0.1, 99.9))

print 'finished!'
def test_example(self):
    import pythongis as pg

    DEBUG = False
    PROFILE = True

    print 'loading'
    d = pg.VectorData(r"C:\Users\kimok\Downloads\ne_10m_admin_1_states_provinces (1)\ne_10m_admin_1_states_provinces.shp")
    #d = pg.VectorData(r"C:\Users\kimok\Desktop\BIGDATA\gazetteer data\raw\global_settlement_points_v1.01.shp", encoding='latin')
    d = d.select(lambda f: f.id < 1000)
    items = [(f.id, f.bbox) for f in d]
    # items = [(i+1, f.bbox) for i,f in enumerate(d)]
    print len(items)

    #####################
    # build
    print 'building'
    spindex = QuadTree(xmin=-180, ymin=-90, xmax=180, ymax=90)
    spindex.save()
    if PROFILE:
        import cProfile
        prof = cProfile.Profile()
        prof.enable()
    spindex.build(items)
    if PROFILE:
        print prof.print_stats('cumtime')
        #fdsdfd

    print 'items', Item.objects.all().count()
    print 'nodes', Node.objects.all().count()
    print 'links', ItemNodeLink.objects.all().count()

    # visualize
    m = pg.renderer.Map()

    # quad structure
    print 'explore nodes and items'
    quads = pg.VectorData(fields=['nodeid', 'parent', 'depth', 'count'])
    for node in spindex.nodes.all():
        x1, y1, x2, y2 = node.xmin, node.ymin, node.xmax, node.ymax
        box = {'type': 'Polygon',
               'coordinates': [[(x1, y1), (x1, y2), (x2, y2), (x2, y1)]]}
        quads.add_feature([node.pk, node.parent, node.depth, node.item_count], box)
    items = pg.VectorData(fields=['item'])
    for item in Item.objects.all():
        x1, y1, x2, y2 = item.xmin, item.ymin, item.xmax, item.ymax
        box = {'type': 'Polygon',
               'coordinates': [[(x1, y1), (x1, y2), (x2, y2), (x2, y1)]]}
        items.add_feature([item], box)
    print(quads)
    m.add_layer(d, fillcolor='red')
    m.add_layer(items, fillcolor=None, outlinecolor='blue')
    m.add_layer(quads, fillcolor=None, outlinecolor='green',
                )  #text=lambda f: f['nodeid'], textoptions={'textcolor':'green','textsize':6})
    m.render_all()
    m.view()

    ###################
    # intersection
    print 'intersecting'
    testbox = (0, 0, 90, 45)
    #testbox = (100,1,120,20)
    #testbox = (100,15,105,20)
    if PROFILE:
        import cProfile
        prof = cProfile.Profile()
        prof.enable()
    matches = spindex.intersect(testbox)
    if PROFILE:
        print prof.print_stats('cumtime')
        #fdsdfds

    # visualize
    m = pg.renderer.Map()

    # result item boxes
    boxes = pg.VectorData()
    res = pg.VectorData()
    for match in matches:
        #print match
        #oid,itemid,x1,y1,x2,y2,depth,path = match
        oid, itemid, x1, y1, x2, y2 = [getattr(match, k) for k in 'pk,item_id,xmin,ymin,xmax,ymax'.split(',')]
        f = d[itemid]
        res.add_feature([], f.geometry)
        box = {'type': 'Polygon',
               'coordinates': [[(x1, y1), (x1, y2), (x2, y2), (x2, y1)]]}
        boxes.add_feature([], box)
    m.add_layer(res, fillcolor='red')
    m.add_layer(boxes, fillcolor=None, outlinecolor='green')

    # result node boxes
##    nodematches = spindex.intersect_nodes(testbox)
##    nodedata = pg.VectorData(fields=['path','count'])
##    for node in nodematches:
##        #print match
##        count = node[-3]
##        x1,y1,x2,y2 = node[1:5] #f.bbox
##        path = node[-1]
##        box = {'type':'Polygon', 'coordinates':[[(x1,y1),(x1,y2),(x2,y2),(x2,y1)]]}
##        nodedata.add_feature([path,count], box)
##    m.add_layer(nodedata, fillcolor=None, outlinecolor='blue')

    # testbox
    testboxdata = pg.VectorData()
    x1, y1, x2, y2 = testbox
    geoj = {'type': 'Polygon',
            'coordinates': [[(x1, y1), (x1, y2), (x2, y2), (x2, y1)]]}
    testboxdata.add_feature([], geoj)
    m.add_layer(testboxdata, fillcolor=None, outlinecolor='black', outlinewidth='3px')
    m.render_all()
    m.view()
import pythongis as pg

rw = 6
rh = 3
w = 360
h = 180
xscale = w / float(rw)
yscale = h / float(rh)
affine = [xscale, 0, -180, 0, -yscale, 90]

r = pg.RasterData(width=rw, height=rh, mode='float32', affine=affine)
rb = r.add_band()
for y in range(rh):
    for x in range(rw):
        rb.set(x, y, (x + 1) * (y + 1))

v = pg.VectorData()
bounds = [(-180, -90), (180, -90), (180, 90), (-180, 90)]
bounds.append(bounds[0])
v.add_feature([], {'type': 'Polygon', 'coordinates': [bounds]})

m = pg.renderer.Map(2000, 1000, 'white')
m.add_layer(r)
m.add_layer(v, fillcolor=None, outlinecolor='red')
m.zoom_auto()
m.view()
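# A small illustrative sketch (not part of the original script) of how the
# affine list built above maps a pixel (col, row) to world coordinates:
# with [a, b, c, d, e, f], x = a*col + b*row + c and y = d*col + e*row + f.
def pixel_to_geo(col, row, affine=affine):
    a, b, c, d, e, f = affine
    return a * col + b * row + c, d * col + e * row + f

# e.g. pixel_to_geo(0, 0) gives (-180.0, 90.0), the top-left corner of the grid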
import pythongis as pg
import pycrs

pg.vector.data.DEFAULT_SPATIAL_INDEX = 'quadtree'

data = pg.VectorData(r"P:\(Temp Backup)\priocountries\priocountries.shp")  #"C:\Users\kimok\Desktop\BIGDATA\priocountries\priocountries.shp"
rast = pg.RasterData(r"C:\Users\kimok\Documents\GitHub\AutoMap\tests\testmaps\burkina_pol96_georeferenced.tif")

#testcrs = '+proj=robin +datum=WGS84 +ellps=WGS84 +a=6378137.0 +rf=298.257223563 +pm=0 +lon_0=0 +x_0=0 +y_0=0 +units=m +axis=enu +no_defs'
#testcrs = pycrs.parse.from_sr_code(6980).to_proj4() # space
#testcrs = pycrs.parse.from_sr_code(7619).to_proj4() # goode?
#testcrs = next(pycrs.utils.search('van der grinten'))['proj4']
testcrs = next(pycrs.utils.search('eckert iv'))['proj4']

#### original crs
#data.view()

#### test on-the-fly crs
#data.view(crs=testcrs)

#### raster crs
rast.save('C:/Users/kimok/Desktop/testmap.png')
#rast.view() #testcrs)
#rast.manage.reproject(testcrs, resample='nearest').view()
#rast.manage.reproject(testcrs, resample='bilinear').view()
import pythongis as pg
import math

points = pg.VectorData("data/ne_10m_populated_places_simple.shp", encoding='latin')
#points.browse()

def radsize(f):
    val = f['pop_max']
    sz = round(val * 2 / 10000000.0, 2)  # weird relative dist error due to small e- nr...
    return sz

def areasize(f):
    val = f['pop_max']
    area = round(val * 2 / 10000000.0, 2)
    sz = math.sqrt(area / math.pi)  # radius such that the circle's area is proportional to the value
    return sz

# custom
#points.view(fillsize=radsize, fillcolor='yellow')
#points.view(fillsize=areasize, fillcolor='yellow')

# builtin
#points.view(fillsize=dict(breaks='proportional', key='pop_max', sizes=[0.1,1]), fillcolor='yellow')

mapp = pg.renderer.Map()
mapp.add_layer(points, fillsize=dict(breaks='proportional',
            node.update(itemid, parent=node.nodeid)

    def add_item(self, item, bbox):
        xmin, ymin, xmax, ymax = bbox
        parent = self.nodeid
        self._index.cur.execute('INSERT INTO items VALUES (?, ?, ?, ?, ?, ?)',
                                (parent, item, xmin, ymin, xmax, ymax))


if __name__ == '__main__':
    import pythongis as pg

    DEBUG = False

    print 'loading'
    d = pg.VectorData(r"C:\Users\kimok\Downloads\ne_10m_admin_1_states_provinces (1)\ne_10m_admin_1_states_provinces.shp")
    items = [(i, f.bbox) for i, f in enumerate(d)]

    print 'building'
    spindex = QuadTree(-180, -90, 180, 90)
    spindex.build(items)

    print 'intersecting'
    matches = spindex.intersect((1, 1, 20, 20))

##    print 'viewing'
##    boxes = pg.VectorData(fields=['path','parent'])
##    res = pg.VectorData()
##    for match in matches:
##        #print match
import pythongis as pg data = pg.VectorData("data/ne_10m_admin_0_countries.shp") print data # vector proj = data.manage.reproject("+proj=latlong +datum=WGS84", "+proj=robin +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs") print proj proj.view() grid = pg.VectorData() for x in range(-180,180+1,20): line = [[x,y] for y in range(90,-90-1,-10)] geoj = dict(type='LineString', coordinates=line) grid.add_feature(geometry=geoj) grid = grid.manage.reproject("+proj=latlong +datum=WGS84", "+proj=robin +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs") grid.view(outlinecolor=None) # raster rast = pg.raster.manager.rasterize(data, bbox=[-170,80,170,-80], width=720, height=360) #rast.view() rast.crs = "+proj=latlong +datum=WGS84"
import pythongis as pg data = pg.VectorData(r"C:\Users\kimo\Downloads\cshapes_0.6\cshapes.shp") pt = pg.VectorData() pt.add_feature([], dict(type="Point", coordinates=(10, 30))) pt.add_feature([], dict(type="Point", coordinates=(11, 31))) snap = pt.manage.snap(data, 0.5) for f in snap: print f.__geo_interface__ #snap.view() mapp = pg.renderer.Map() mapp.add_layer(data) mapp.add_layer(pt.manage.buffer(lambda f: 0.5)) mapp.add_layer(pt) mapp.add_layer(snap) mapp.view() # test speed (very slow...???) from random import randrange pt = pg.VectorData() for _ in range(4): print _ pt.add_feature([], dict(type="Point", coordinates=(randrange(180), randrange(90)))) snap = pt.manage.snap(data, 1)
import pythongis as pg

c = pg.VectorData(r"C:\Users\kimok\Desktop\ch est\data\cshapes.shp",
                  select=lambda f: f['GWEYEAR'] == 2016)

# normal colors
mapp = pg.renderer.Map(4000, 2000, title='World Map')
mapp.add_layer(c, fillcolor=dict(breaks='natural', key='GWCODE', colors=['green', 'red']))
mapp.add_legend(legendoptions=dict(title='Legend'))
mapp.save("normal color breaks.png")

# normal sizes
mapp = pg.renderer.Map(2000, 1000, title='World Map')
mapp.add_layer(c.convert.to_points(),
               fillsize=dict(breaks='natural', key='GWCODE', sizes=[0.2, 2.0]))
mapp.add_legend()
mapp.save("normal size breaks.png")

# normal multiple breaks
mapp = pg.renderer.Map(2000, 1000, title='World Map')
mapp.add_layer(c.convert.to_points(),
               fillsize=dict(breaks='natural', key='GWCODE', sizes=[0.2, 2.0]),
               fillcolor=dict(breaks='natural', key='GWCODE', colors=['green', 'red']))
import pythongis as pg
from time import time

if 0:
    # conflictsite
    confs = pg.VectorData("C:/Users/kimo/Downloads/Conflict Site Dataset 2.0/Conflict Site 4-2006.xls",
                          xfield="Longitude", yfield="Latitude")
    print confs, confs.fields
    confs = confs.manage.buffer(lambda f: f["Radius"], geodetic=True, resolution=20)
    #confs.view()

    countries = pg.VectorData("C:/Users/kimo/Downloads/cshapes_0.6/cshapes.shp")
    countries = countries.select(lambda f: f["GWCODE"] != -1)

    # clip to countries (hacky for now, what happened to the intersection method?)
    from shapely.ops import cascaded_union
    for f in confs:
        print ["vector intersecting", f, f.id, len(confs)]
        fg = f.get_shapely()
        intsecs = [fg.intersection(gw.get_shapely()) for gw in countries
                   if gw["GWSYEAR"] <= f["Year"] <= gw["GWEYEAR"]
                   and int(float(gw["GWCODE"])) in [int(float(cod)) for cod in str(f["Conflict site"]).split(",")]]
        if len(intsecs) == 1:
            newg = intsecs[0].__geo_interface__
        elif len(intsecs) > 1:
            newg = cascaded_union(intsecs).__geo_interface__
        else:
            continue  # no gw match
import pythongis as pg

W = 2000
H = 1000
SPACING = 330
RIGHTSPIN = 500
DOWNTILT = 700

c = pg.VectorData("ch est/data/cshapes.shp")

import PIL, PIL.Image

def tilt(img, oldplane, newplane):
    pb, pa = oldplane, newplane
    grid = []
    for p1, p2 in zip(pa, pb):
        grid.append([p1[0], p1[1], 1, 0, 0, 0, -p2[0]*p1[0], -p2[0]*p1[1]])
        grid.append([0, 0, 0, p1[0], p1[1], 1, -p2[1]*p1[0], -p2[1]*p1[1]])
    import advmatrix as mt
    A = mt.Matrix(grid)
    B = mt.Vec([xory for xy in pb for xory in xy])
    AT = A.tr()
    ATA = AT.mmul(A)
    gridinv = ATA.inverse()
    invAT = gridinv.mmul(AT)
    res = invAT.mmul(B)
    transcoeff = res.flatten()
    # then calculate new coords, thanks to http://math.stackexchange.com/questions/413860/is-perspective-transform-affine-if-it-is-why-its-impossible-to-perspective-a
    new = img.transform(img.size, PIL.Image.PERSPECTIVE, transcoeff, PIL.Image.BILINEAR)
    return new
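# A minimal numpy-based sketch of the same coefficient solve as tilt() above
# (an assumption: numpy is available here; the original relies on the custom
# advmatrix module instead). It solves for the 8 PIL PERSPECTIVE coefficients
# that map points in the new plane back to the corresponding old-plane points.
import numpy as np

def perspective_coeffs(oldplane, newplane):
    pb, pa = oldplane, newplane
    rows = []
    for p1, p2 in zip(pa, pb):
        rows.append([p1[0], p1[1], 1, 0, 0, 0, -p2[0]*p1[0], -p2[0]*p1[1]])
        rows.append([0, 0, 0, p1[0], p1[1], 1, -p2[1]*p1[0], -p2[1]*p1[1]])
    A = np.array(rows, dtype=float)
    B = np.array([coord for xy in pb for coord in xy], dtype=float)
    # least-squares solve of A * coeffs = B (exact for 4 point pairs)
    coeffs, _, _, _ = np.linalg.lstsq(A, B, rcond=None)
    return coeffs.tolist()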
           or (len(lines) > i+1 and lines[i+1]['bbox'][1] - line['bbox'][3] > 30):
            # or if large gap until next line, colon, or is mostly upper caps, add this as header/grouping
            row['rowbottom'] = line['bbox'][3]

            # loop through all fields and check for text/data within the top/bottom range of the text
            vals = []
            for fname, fbox in fields:
                bbox = [(fbox[0][0], row['rowtop']), (fbox[1][0], row['rowbottom'])]
                text, notes = get_text(drows, bbox)
                vals.append(text)
            row['vals'] = vals

            # add and reset
            print row
            rows.append(row)
            row = dict()

##    for row in rows:
##        # testview
##        oim.crop([0,row['rowtop'],oim.size[0]-1,row['rowbottom']]).show()

    import pythongis as pg
    d = pg.VectorData(fields=[f[0].split('|')[-1] for f in fields])
    for r in rows:
        d.add_feature(row=r['vals'])
    d.browse()
import pythongis as pg
from time import time

poly = pg.VectorData("data/ne_10m_admin_1_states_provinces.shp", encoding="latin")
points = pg.VectorData("data/ne_10m_populated_places_simple.shp", encoding="latin")
print points

##t=time()
##join = points.manage.spatial_join(points, "distance",
##                                  radius=10, n=3, # 3 nearest within 10k
##                                  key=lambda f1,f2: f1.geometry != f2.geometry) # not self
##print time()-t, join

t = time()
join = poly.manage.spatial_join(points, "intersects",
                                clip=lambda f1, f2: f2.geometry)  # poly contains points
print "fast join country to each point", time() - t, join

# HEAVY ONES

# Slow point-poly
##t=time()
##join = points.manage.spatial_join(poly, "intersects") # point in polys
##print time()-t, join

t = time()
join = points.manage.spatial_join(poly, "distance", radius=60, n=1)
import pythongis as pg

conf = pg.VectorData('rosling/data/ged171_excelcsv.csv', encoding='latin',
                     xfield='longitude', yfield='latitude',
                     select=lambda f: f['year'] == 2016)
conf.compute('dum', 1)

width = 720
height = 360

agg = pg.raster.manager.rasterize(conf, valuekey=lambda f: f['dum'], stat='sum',
                                  width=width, height=height, bbox=[-180, 90, 180, -90])
mapp = agg.render(cutoff=(0, 100))
mapp.add_legend()
mapp.save('agg.png')

smooth = pg.raster.analyzer.smooth(conf,
                                   rasterdef=dict(mode='float32', width=width, height=height,
                                                  bbox=[-180, 90, 180, -90]),
                                   algorithm='radial',
                                   radius=3,
                                   )
import pythongis as pg

cshapes = pg.VectorData("cshapes.shp", select=lambda f: f["GWCODE"] != -1)
cshapes = pg.VectorData("selfisec.geojson")
print cshapes

mapp = pg.renderer.Map(width=1000)
mapp.add_layer(cshapes, fillcolor="blue")

selfint = cshapes.intersections()
print selfint
selfint.view(1000, 1000, flipy=1, fillcolor=pg.renderer.Color("red", opacity=155))

selfint = selfint.duplicates(fieldmapping=[("count", lambda f: 1, "count")])
print selfint

mapp.add_layer(selfint, fillcolor=pg.renderer.Color("red", opacity=155))
mapp.render_all()
mapp.view()
import pythongis as pg

data = pg.VectorData('data/ne_10m_admin_0_countries.shp')

data.view(fillcolor='green',
          fillopacity={'key': 'POP_EST', 'opacities': [0.3, 1.0]})

##data.view(fillcolor={'key':'POP_EST', 'colors':['beige','red']},
##          fillopacity={'key':lambda f: f.get_shapely().area, 'opacities':[1.0,0.3]},
##          )
import unittest

import pythongis as pg

# data
pointdata = pg.VectorData('data/ne_10m_populated_places_simple.shp', encoding='latin')

# base class
class BaseTestCases:

    class TestInitSpindex(unittest.TestCase):

        def create_spindex(self, **kwargs):
            self.data.create_spatial_index(**kwargs)

        def test_default_rtree(self):
            # on local pc should fail and fallback to quadtree
            pg.vector.data.DEFAULT_SPATIAL_INDEX = 'rtree'
            self.create_spindex()
            self.assertTrue(hasattr(self.data, 'spindex'))
            self.assertTrue(not isinstance(self.data.spindex, pg.vector.spindex.Rtree))

        def test_default_quadtree(self):
            pg.vector.data.DEFAULT_SPATIAL_INDEX = 'quadtree'
            self.create_spindex()
            self.assertTrue(hasattr(self.data, 'spindex'))
            self.assertTrue(isinstance(self.data.spindex, pg.vector.spindex.QuadTree))