def test_dynamically_defining_headers3(**kwargs):
    """Headers passed via the ``headers`` option apply to a one-line CSV with no trailing newline."""
    path = os.path.join('../data/csv/fails', 'needs_headers_one_line_no_newline.csv')
    ds = mapnik.Datasource(type='csv', file=path, headers='x,y,name')
    eq_(len(ds.fields()), 3)
    eq_(ds.fields(), ['x', 'y', 'name'])
    eq_(ds.field_types(), ['int', 'int', 'str'])
    # the single data row should come through with the supplied header names
    feature = ds.featureset().next()
    eq_(feature['x'], 0)
    eq_(feature['y'], 0)
    eq_(feature['name'], 'data_name')
    eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
    eq_(len(ds.all_features()), 1)
def test_geojson_properties():
    """Properties of the escaped.json fixture decode to the expected unicode values."""
    ds = mapnik.Datasource(type='geojson', file='../data/json/escaped.json')
    feature = ds.all_features()[0]
    eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
    # (key, expected value) pairs; NOM_FR is checked in both escaped and literal form
    for key, expected in (
        ('name', u'Test'),
        ('int', 1),
        ('double', 1.1),
        ('boolean', True),
        ('NOM_FR', u'Qu\xe9bec'),
        ('NOM_FR', u'Québec'),
        ('spaces', u'this has spaces'),
        ('description', u'Test: \u005C'),
    ):
        eq_(feature[key], expected)
def test_geojson_properties():
    """Feature found at the datasource's center point carries the expected property values.

    Fixes two defects: ``s.envelope()`` referenced an undefined name ``s``
    (NameError at runtime) — the datasource is bound to ``ds``; and the
    'double' property was compared against the string u'Quebec', while the
    sibling tests on the same fixture expect the numeric 1.1.
    """
    ds = mapnik.Datasource(type='geojson', file='../data/json/escaped.geojson')
    # was: s.envelope() — 's' is undefined; query the datasource itself
    f = ds.features_at_point(ds.envelope().center()).features[0]
    desc = ds.describe()
    eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
    eq_(f['name'], u'test')
    eq_(f['description'], u'Test: \u005C')
    eq_(f['int'], 1)
    # was compared to u'Quebec' (copy/paste error); 'double' holds 1.1
    eq_(f['double'], 1.1)
    eq_(f['boolean'], True)
    eq_(f['NOM_FR'], u'Qu\xe9bec')
    eq_(f['NOM_FR'], u'Québec')
def test_broken_files(visual=False):
    """Every file in fails/ and warns/ (plus a missing path) should raise under strict=True.

    Converted the Python 2 ``print`` statements to the function form used by
    the sibling ``test_good_files`` (statement form is a SyntaxError on
    Python 3) and removed the unused local ``throws``.
    """
    broken = glob.glob("../data/csv/fails/*.*")
    broken.extend(glob.glob("../data/csv/warns/*.*"))
    # Add a filename that doesn't exist
    broken.append("../data/csv/fails/does_not_exist.csv")
    for csv in broken:
        if visual:
            try:
                ds = mapnik.Datasource(type='csv', file=csv, strict=True)
                # no exception: the datasource should have failed
                print('\x1b[33mfailed\x1b[0m', csv)
            except Exception:
                print('\x1b[1;32m✓ \x1b[0m', csv)
def test_good_files(visual=False):
    """Every CSV under data/csv (and warns/) should load without throwing.

    Fixes: the original removed ``.index`` entries from ``good_files`` while
    iterating over it, which skips the element following each removal; a
    filter builds the pruned list instead.  ``list.remove`` is also guarded,
    since it raises ValueError when the entry is absent.
    """
    good_files = glob.glob("../data/csv/*.*")
    good_files.extend(glob.glob("../data/csv/warns/*.*"))
    ignorable = os.path.join('..', 'data', 'csv', 'long_lat.vrt')
    # remove() raises ValueError when absent, so guard it
    if ignorable in good_files:
        good_files.remove(ignorable)
    # filter instead of remove-while-iterating (which skips elements)
    good_files = [f for f in good_files if not f.endswith('.index')]
    for csv in good_files:
        if visual:
            try:
                mapnik.Datasource(type='csv', file=csv)
                print('\x1b[1;32m✓ \x1b[0m', csv)
            except Exception as e:
                print('\x1b[33mfailed: should not have thrown\x1b[0m', csv, str(e))
def test_null_id_field():
    """A null key column still yields a feature with an auto-assigned id and a None attribute.

    Fixes: ``1L`` is a Python 2-only long literal (SyntaxError on Python 3);
    plain ``1`` compares equal to a long under Python 2, so this works on
    both, and matches how sibling tests compare feature ids.
    """
    opts = {
        'type': 'postgis',
        'dbname': MAPNIK_TEST_DBNAME,
        'geometry_field': 'geom',
        'table': "(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"
    }
    ds = mapnik.Datasource(**opts)
    fs = ds.featureset()
    feat = fs.next()
    # was 1L — py2-only literal; int/long compare equal on py2 anyway
    eq_(feat.id(), 1)
    eq_(feat['osm_id'], None)
def test_null_key_field():
    """Advancing a featureset whose key_field is null should raise StopIteration (no features)."""
    options = dict(
        type='postgis',
        key_field='osm_id',
        dbname=MAPNIK_TEST_DBNAME,
        geometry_field='geom',
        table="(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp",
    )
    ds = mapnik.Datasource(**options)
    featureset = ds.featureset()
    ## should throw since key_field is null: StopIteration: No more features.
    feat = featureset.next()
def test_geojson_from_in_memory_string():
    """Load a fixture from an in-memory string (note: the datasource type used is 'topojson')."""
    with open('../data/topojson/escaped.topojson', 'r') as datafile:
        ds = mapnik.Datasource(type='topojson', inline=datafile.read())
    feature = list(ds.all_features())[0]
    eq_(len(ds.fields()), 11)
    eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
    # NOM_FR is checked twice: escaped and literal spellings must agree
    for key, expected in (
        ('name', u'Test'),
        ('int', 1),
        ('description', u'Test: \u005C'),
        ('spaces', u'this has spaces'),
        ('double', 1.1),
        ('boolean', True),
        ('NOM_FR', u'Qu\xe9bec'),
        ('NOM_FR', u'Québec'),
    ):
        eq_(feature[key], expected)
def test_that_feature_id_only_incremented_for_valid_rows(**kwargs):
    """Feature ids skip invalid rows: the two valid rows come out with ids 1 and 2."""
    ds = mapnik.Datasource(type='csv', file=os.path.join('../data/csv/warns', 'feature_id_counting.csv'))
    eq_(len(ds.fields()), 3)
    eq_(ds.fields(), ['x', 'y', 'id'])
    eq_(ds.field_types(), ['int', 'int', 'int'])
    fs = ds.featureset()
    # second valid row follows a bogus one, which must not consume an id
    for expected_id in (1, 2):
        feature = fs.next()
        eq_(feature['x'], 0)
        eq_(feature['y'], 0)
        eq_(feature['id'], expected_id)
    eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
    eq_(len(ds.all_features()), 2)
def test_geometry_round_trip():
    """Round-trip a WKB point through a SQLite database and back out via mapnik.

    Writes a little-endian WKB point (built by mapnik) into a fresh SQLite
    table, verifies the bytes against a hand-packed WKB, then reads the
    feature back with the mapnik sqlite datasource and with raw sqlite3,
    asserting all three agree.
    """
    test_db = '/tmp/mapnik-sqlite-point.db'
    ogr_metadata = True
    # create test db
    conn = sqlite3.connect(test_db)
    cur = conn.cursor()
    cur.execute(''' CREATE TABLE IF NOT EXISTS point_table (id INTEGER PRIMARY KEY AUTOINCREMENT, geometry BLOB, name varchar) ''')
    # optional: but nice if we want to read with ogr
    if ogr_metadata:
        cur.execute('''CREATE TABLE IF NOT EXISTS geometry_columns ( f_table_name VARCHAR, f_geometry_column VARCHAR, geometry_type INTEGER, coord_dimension INTEGER, srid INTEGER, geometry_format VARCHAR )''')
        cur.execute('''INSERT INTO geometry_columns (f_table_name, f_geometry_column, geometry_format, geometry_type, coord_dimension, srid) VALUES ('point_table','geometry','WKB', 1, 1, 4326)''')
    conn.commit()
    cur.close()
    # add a point as wkb (using mapnik) to match how an ogr created db looks
    x = -122  # longitude
    y = 48  # latitude
    wkt = 'POINT(%s %s)' % (x, y)
    # little endian wkb (mapnik will auto-detect and ready either little or big endian (XDR))
    wkb = mapnik.Path.from_wkt(wkt).to_wkb(mapnik.wkbByteOrder.NDR)
    values = (None, sqlite3.Binary(wkb), "test point")
    cur = conn.cursor()
    cur.execute('''INSERT into "point_table" (id,geometry,name) values (?,?,?)''', values)
    conn.commit()
    cur.close()

    def make_wkb_point(x, y):
        # hand-pack a little-endian WKB point for byte-level comparison
        import struct
        byteorder = 1  # little endian
        endianess = ''
        if byteorder == 1:
            endianess = '<'
        else:
            endianess = '>'
        geom_type = 1  # for a point
        # NOTE(review): 'l' packs the geometry type as a C long — assumes a
        # platform where that matches the 4-byte WKB uint32 field; confirm
        return struct.pack('%sbldd' % endianess, byteorder, geom_type, x, y)

    # confirm the wkb matches a manually formed wkb
    wkb2 = make_wkb_point(x, y)
    eq_(wkb, wkb2)
    # ensure we can read this data back out properly with mapnik
    ds = mapnik.Datasource(**{ 'type': 'sqlite', 'file': test_db, 'table': 'point_table' })
    fs = ds.featureset()
    feat = fs.next()
    eq_(feat.id(), 1)
    eq_(feat['name'], 'test point')
    geoms = feat.geometries()
    eq_(len(geoms), 1)
    eq_(geoms.to_wkt(), 'Point(-122.0 48.0)')
    # ensure it matches data read with just sqlite
    cur = conn.cursor()
    cur.execute('''SELECT *
from point_table''')
    conn.commit()
    result = cur.fetchone()
    cur.close()
    feat_id = result[0]
    eq_(feat_id, 1)
    name = result[2]
    eq_(name, 'test point')
    geom_wkb_blob = result[1]
    # str() on the blob — Python 2 buffer-to-bytes conversion
    eq_(str(geom_wkb_blob), geoms.to_wkb(mapnik.wkbByteOrder.NDR))
    new_geom = mapnik.Path.from_wkb(str(geom_wkb_blob))
    eq_(new_geom.to_wkt(), geoms.to_wkt())
    # cleanup
    os.unlink(test_db)
    os.unlink(test_db + '.index')
def mapnik_output(self, min_x, min_y, max_x, max_y, epsg, multi_poly_proj, union_line, noConsultationDuTeleservice):
    """Render a cartographic output PNG for one consultation.

    Builds a mapnik map with up to three layers — an optional 'Atlas' grid
    shapefile, a 'Travaux' (works) polygon fed from an inline WKT CSV, and a
    'Reseau' (network) line layer fed the same way — zooms to the given
    bbox and writes output/plan_<noConsultationDuTeleservice>.png.

    :param min_x, min_y, max_x, max_y: bounding box in the EPSG projection
    :param epsg: EPSG code used for the map and all layers
    :param multi_poly_proj: WKT multipolygon of the works area
    :param union_line: WKT line geometry of the network
    :param noConsultationDuTeleservice: consultation id used in the file name
    :returns: whatever mapnik.render_to_file returns
    """
    # mapnik_config computes output size from the bbox; note (y, x) order below
    mapnik_x_y = self.mapnik_config(min_x, min_y, max_x, max_y)
    sizex = mapnik_x_y[0]
    sizey = mapnik_x_y[1]
    m = mapnik.Map(sizey, sizex, '+init=epsg:' + str(epsg))
    m.background = mapnik.Color('steelblue')
    # if the grid layer exists, use it
    if os.path.exists(os.path.join(os.path.abspath(os.path.dirname(__file__)), "temp" + os.sep + "grid.shp")):
        provpoly_lyr = mapnik.Layer('Atlas', '+init=epsg:' + str(epsg))
        path_temp = os.path.join(os.path.abspath(os.path.dirname(__file__)), "temp" + os.sep)
        provpoly_lyr.datasource = mapnik.Shapefile(file=path_temp + 'grid', encoding='latin1')
        provpoly_style = mapnik.Style()
        provpoly_rule_qc = mapnik.Rule()
        provpoly_rule_qc.symbols.append(mapnik.PolygonSymbolizer(mapnik.Color(217, 235, 203)))
        provpoly_style.rules.append(provpoly_rule_qc)
        m.append_style('atlas', provpoly_style)
        provpoly_lyr.styles.append('atlas')
        m.layers.append(provpoly_lyr)
    # style for the works polygon: red fill with a thin black outline
    s_wkt_poly = mapnik.Style()
    r_wkt_poly = mapnik.Rule()
    wkt_polygon_symbolizer = mapnik.PolygonSymbolizer(mapnik.Color('#FF3366'))
    r_wkt_poly.symbols.append(wkt_polygon_symbolizer)
    wkt_line_symbolizer = mapnik.LineSymbolizer(mapnik.Color('#000'), 1.0)
    r_wkt_poly.symbols.append(wkt_line_symbolizer)
    s_wkt_poly.rules.append(r_wkt_poly)
    m.append_style('Travaux', s_wkt_poly)
    # feed the WKT geometry to mapnik through an inline CSV datasource
    wkt_poly_geom = multi_poly_proj
    csv_string_wkt_poly = '''
wkt,Name
"%s","test"
''' % wkt_poly_geom
    ds_wkt_poly = mapnik.Datasource(**{"type": "csv", "inline": csv_string_wkt_poly})
    layer_wkt_poly = mapnik.Layer('Travaux', '+init=epsg:' + str(epsg))
    layer_wkt_poly.datasource = ds_wkt_poly
    layer_wkt_poly.styles.append('Travaux')
    m.layers.append(layer_wkt_poly)
    # same inline-CSV trick for the network line
    wkt_line_geom = union_line
    csv_string_wkt_line_geom = '''
wkt,Name
"%s","test"
''' % wkt_line_geom
    wkt_line_geom_ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string_wkt_line_geom})
    wkt_line_layer = mapnik.Layer('Reseau', '+init=epsg:' + str(epsg))
    wkt_line_layer.datasource = wkt_line_geom_ds
    wkt_line_style = mapnik.Style()
    wkt_line_rule = mapnik.Rule()
    # mapnik.Stroke is the mapnik 2.x API for styling line width/color
    wkt_line = mapnik.Stroke()
    wkt_line.color = mapnik.Color(171, 158, 137)
    wkt_line.width = 2.0
    wkt_line_rule.symbols.append(mapnik.LineSymbolizer(wkt_line))
    wkt_line_style.rules.append(wkt_line_rule)
    m.append_style('reseau', wkt_line_style)
    wkt_line_layer.styles.append('reseau')
    m.layers.append(wkt_line_layer)
    #m.zoom_all()
    m.zoom_to_box(mapnik.Box2d(min_x, min_y, max_x, max_y))
    mapnik_output = mapnik.render_to_file(m, os.path.join(os.path.abspath(os.path.dirname(__file__)), "output" + os.sep + "plan_" + noConsultationDuTeleservice + ".png"), 'png')
    #mapnik_output = mapnik.render_to_file(m, 'plan_' + noConsultationDuTeleservice + '.png', 'png')
    return mapnik_output
def test_that_nonexistant_query_field_throws(**kwargs):
    """The escaped.geojson fixture exposes exactly 7 fields."""
    datasource = mapnik.Datasource(type='geojson', file='../data/json/escaped.geojson')
    field_count = len(datasource.fields())
    eq_(field_count, 7)
# -*- coding: utf-8 -*- import os import mapnik # usage: # python update.py # download latest osm for bbox #os.system('wget -O pdx-poi.osm "http://www.overpass-api.de/api/xapi?node[bbox=-122.68732,45.52028,-122.65217,45.53982][amenity=*]"') # read in with mapnik ds = mapnik.Datasource(**{'type':'osm','file':'pdx-poi.osm'}) # loop over all features # and write out a csv file fs = ds.all_features() csv_features = [] for feat in fs: if feat.has_key('name'): json = feat.geometries().to_geojson() name = feat['name'] # work around bug if name == True: name = '' csv_features.append(""""%s",%d,"%s",'%s'""" % (name,feat.id(),feat['amenity'],json)) osm_out = open('pdx-poi.csv','w+') # write headers osm_out.write('name,osm_id,amenity,geojson\n') # write data rows
# Export named OSM features from nacis.osm to a CSV with geojson geometry.
import os
import mapnik
# usage:
# python update.py
# download latest osm for bbox
#os.system('wget -O nacis.osm "http://api.openstreetmap.org/api/0.6/map?bbox=-122.656771,45.529472,-122.652377,45.53208"')
# read in with mapnik
ds = mapnik.Datasource(**{'type': 'osm', 'file': 'nacis.osm'})
# loop over all features
# and write out a csv file
fs = ds.all_features()
csv_features = []
for feat in fs:
    # has_key is the Python 2 dict-protocol check exposed by mapnik Feature
    if feat.has_key('name'):
        # NOTE: shadows the stdlib 'json' module name within this script
        json = feat.geometries().to_geojson()
        name = feat['name']
        # work around bug
        if name == True:
            name = ''
        csv_features.append(""""%s",%d,'%s'""" % (name, feat.id(), json))
# 'with' guarantees the handle is flushed and closed even if a write raises
# (the original open()/close() pair leaked the handle on error)
with open('osm.csv', 'w+') as osm_out:
    # write headers
    osm_out.write('name,osm_id,geojson\n')
    # write data rows
    osm_out.write('\n'.join(csv_features))
# NOTE(review): 'feature' is defined earlier in the file (not visible in this chunk)
feature.add_geometries_from_wkt('POINT (1 1)')
paths = feature.geometries()
print paths.__geo_interface__

def print_featureset(fs):
    # Drain a featureset, printing each feature's __geo_interface__ dict.
    # fs.next() raises StopIteration when exhausted, which ends the loop.
    feat = fs.next()
    while (feat):
        print feat.__geo_interface__
        try:
            feat = fs.next()
        except StopIteration:
            feat = None

# Example 3: access via an inline Geo-CSV
csv = '''
id,wkt
1,"POINT(0 0)"
2,"POINT(1 1)"
'''
datasource = mapnik.Datasource(**{'type':'csv','inline':csv})
query = mapnik.Query(datasource.envelope())
fs = datasource.features(query)
print_featureset(fs)
# Example 3: access via a shapefile
# uncomment and fix the path to point to a valid shapefile
#datasource = mapnik.Datasource(**{'type':'shape','file':'some/path/to/shapefile.shp'})
#query = mapnik.Query(datasource.envelope())
#print_featureset(datasource.features(query))
def school_sheds_results(request=None, school_id=None, bbox=None, width=816, height=1056, srid=3857, format='png'):
    ''' Render a school-commute-survey results map as a PNG or PDF HTTP response.

    Default height and width are 'Letter' ratio.

    Builds one mapnik style ("surveys") with rules keyed on a [name] column,
    assembles all geometry (walksheds, survey points, school marker, legend
    boxes and labels) into a single inline-CSV datasource, then renders.

    :param request: Django request (unused here)
    :param school_id: primary key of the School to map
    :param bbox: optional mapnik.Box2d; defaults to a buffer around the school
    :param width, height: output size in pixels
    :param srid: output spatial reference id
    :param format: 'png' or 'pdf'
    :returns: Django HttpResponse containing the rendered image
    '''
    format = format.encode('ascii')
    school = School.objects.get(pk=school_id)
    point = school.geometry
    circle = point.buffer(3400.0)
    m = mapnik.Map(int(width), int(height), "+init=epsg:" + str(srid))
    mapnik.load_map(m, os.path.dirname(__file__) + "/basemap/basemap.xml")
    if bbox is None:
        # default view: the buffered circle around the school, reprojected
        circle.transform(srid)
        bbox = mapnik.Box2d(*circle.extent)
    m.zoom_to_box(bbox)
    #m.background = mapnik.Color('steelblue')
    # styles for sheds
    s = mapnik.Style()
    for name, color in (('0.5', VIOLET), ('1.0', PURPLE), ('1.5', LAVENDER), ('2.0', LIGHTCYAN)):
        r = mapnik.Rule()
        r.filter = mapnik.Expression("[name] = " + name)
        c = mapnik.Color(color)
        c.a = 80  # semi-transparent fill
        line_symbolizer = mapnik.LineSymbolizer(mapnik.Color("gray"), 1)
        poly_symbolizer = mapnik.PolygonSymbolizer(c)
        r.symbols.append(line_symbolizer)
        r.symbols.append(poly_symbolizer)
        s.rules.append(r)
    # styles for schools
    school_colors = SCHOOL_COLORS
    for name, color in school_colors:
        r = mapnik.Rule()
        r.filter = mapnik.Expression("[name] = '" + name + "'")
        line_symbolizer = mapnik.LineSymbolizer()
        poly_symbolizer = mapnik.PolygonSymbolizer(mapnik.Color(color))
        r.symbols.append(line_symbolizer)
        r.symbols.append(poly_symbolizer)
        s.rules.append(r)
    # default label rule: everything that is not a title/legend/school marker
    r = mapnik.Rule()
    r.filter = mapnik.Expression("[name] != 'map_title' and [name] != 'map_subtitle' and [name] != 'legend_title' and [name] != 'school'")
    text_symbolizer = mapnik.TextSymbolizer(mapnik.Expression('[label]'), 'DejaVu Sans Book', 9, mapnik.Color('black'))
    text_symbolizer.halo_fill = mapnik.Color('white')
    text_symbolizer.halo_radius = 1
    text_symbolizer.horizontal_alignment = mapnik.horizontal_alignment.RIGHT
    #text_symbolizer.label_placement = mapnik.label_placement.VERTEX_PLACEMENT
    text_symbolizer.allow_overlap = True
    text_symbolizer.displacement = (12, 0)
    r.symbols.append(text_symbolizer)
    s.rules.append(r)
    # map title text
    r = mapnik.Rule()
    r.filter = mapnik.Expression("[name] = 'map_title'")
    text_symbolizer = mapnik.TextSymbolizer(mapnik.Expression('[label]'), 'DejaVu Sans Book', 15, mapnik.Color('black'))
    text_symbolizer.horizontal_alignment = mapnik.horizontal_alignment.RIGHT
    text_symbolizer.halo_fill = mapnik.Color('white')
    text_symbolizer.allow_overlap = True
    r.symbols.append(text_symbolizer)
    s.rules.append(r)
    # map subtitle text
    r = mapnik.Rule()
    r.filter = mapnik.Expression("[name] = 'map_subtitle'")
    text_symbolizer = mapnik.TextSymbolizer(mapnik.Expression('[label]'), 'DejaVu Sans Book', 12, mapnik.Color('black'))
    text_symbolizer.horizontal_alignment = mapnik.horizontal_alignment.RIGHT
    text_symbolizer.halo_fill = mapnik.Color('white')
    text_symbolizer.allow_overlap = True
    r.symbols.append(text_symbolizer)
    s.rules.append(r)
    # legend title text
    r = mapnik.Rule()
    r.filter = mapnik.Expression("[name] = 'legend_title'")
    text_symbolizer = mapnik.TextSymbolizer(mapnik.Expression('[label]'), 'DejaVu Sans Condensed Bold', 11, mapnik.Color('black'))
    text_symbolizer.horizontal_alignment = mapnik.horizontal_alignment.RIGHT
    text_symbolizer.halo_fill = mapnik.Color('white')
    text_symbolizer.halo_radius = 1
    text_symbolizer.allow_overlap = True
    r.symbols.append(text_symbolizer)
    s.rules.append(r)
    # legend background box
    r = mapnik.Rule()
    r.filter = mapnik.Expression("[name] = 'legend_box'")
    poly_symbolizer = mapnik.PolygonSymbolizer(mapnik.Color("white"))
    line_symbolizer = mapnik.LineSymbolizer(mapnik.Color("black"), 0.5)
    poly_symbolizer.fill_opacity = 0.8
    r.symbols.append(line_symbolizer)
    r.symbols.append(poly_symbolizer)
    s.rules.append(r)
    # school marker: SVG point symbol
    r = mapnik.Rule()
    r.filter = mapnik.Expression("[name] = 'school'")
    ps = mapnik.PointSymbolizer(mapnik.PathExpression(os.path.dirname(__file__) + '/static/img/School.svg'))
    ps.transform = 'scale(0.06)'
    ps.allow_overlap = True
    #shield.label_placement = mapnik.label_placement.POINT_PLACEMENT
    r.symbols.append(ps)
    s.rules.append(r)
    m.append_style("surveys", s)

    def p2l(pct_x, pct_y):
        # convert percent-of-bbox coordinates to map coordinates
        loc_x = bbox.minx + (bbox.maxx - bbox.minx) * pct_x / 100.0
        loc_y = bbox.miny + (bbox.maxy - bbox.miny) * pct_y / 100.0
        return (loc_x, loc_y)

    # walkshed polygons, largest first so smaller ones draw on top
    sheds = { 0.5: school.shed_05, 1.0: school.shed_10, 1.5: school.shed_15, 2.0: school.shed_20 }
    csv_string = 'wkt,name,label\n'
    for key, g in reversed(sorted(sheds.items(), key=lambda a: a[0])):
        if g is None:
            continue
        g.srid = 26986  # NOTE(review): source data assumed to be EPSG:26986 — confirm
        g.transform(srid)
        csv_string += '"%s","%s",""\n' % (g.wkt, str(key))
    # one jittered circle per surveyed child (or one per survey with no children)
    surveys = Survey.objects.filter(school=school)
    for survey in surveys:
        survey.location.transform(srid)
        children = list(survey.child_set.all())
        if len(children) > 0:
            for c in children:
                name = str(c.to_school)
                point = survey.location
                # jitter to avoid overplotting households at the same address
                point.x += random.randint(-50, 50)
                point.y += random.randint(-50, 50)
                school_circle = point.buffer(50)
                csv_string += '"%s","%s",""\n' % (school_circle.wkt, name)
        else:
            name = "o"
            point = survey.location
            point.x += random.randint(-50, 50)
            point.y += random.randint(-50, 50)
            school_circle = point.buffer(50)
            csv_string += '"%s","%s",""\n' % (school_circle.wkt, name)
    #Add School geometry
    school.geometry.transform(srid)
    csv_string += '"%s","school","%s"\n' % (school.geometry.wkt, school.name)

    def box(minx, miny, maxx, maxy):
        # build a rectangle polygon in map coordinates from percent corners
        lmin = Point(p2l(minx, miny))
        lmax = Point(p2l(maxx, maxy))
        lr = LinearRing((lmin.x, lmin.y), (lmax.x, lmin.y), (lmax.x, lmax.y), (lmin.x, lmax.y), (lmin.x, lmin.y))
        poly = Polygon(lr)
        return poly

    # title block and legend geometry (percent coords > 100 sit above the map)
    legend = box(2, 108, 50, 113.5)
    csv_string += '"%s","%s","%s"\n' % (legend.wkt, "legend_box", "")
    xy = p2l(3.5, 112)
    point = Point(*xy)
    csv_string += '"%s","%s","School Commute Survey Results"\n' % (point.wkt, "map_title")
    xy = p2l(3.5, 109.5)
    point = Point(*xy)
    csv_string += '"%s","%s","%s, %s"\n' % (point.wkt, "map_subtitle", school, school.districtid)
    legend_x = 53
    legend = box(legend_x, 97, 97.5, 113.5)
    csv_string += '"%s","%s","%s"\n' % (legend.wkt, "legend_box", "")
    xy = p2l(legend_x + 1.5, 112)
    point = Point(*xy)
    csv_string += '"%s","legend_title","Approx. home locations and travel to school mode"\n' % (point.wkt, )
    walksheds_x = 88
    xy = p2l(walksheds_x, 112)
    point = Point(*xy)
    csv_string += '"%s","legend_title","Walksheds"\n' % (point.wkt, )
    # legend swatches: one circle per travel mode color
    y = 111.5
    for name, label in school_colors:
        y -= 1.8
        xy = p2l(legend_x + 2, y)
        point = Point(*xy)
        circle = point.buffer(50)
        csv_string += '"%s","%s","%s"\n' % (circle.wkt, name, MODE_DICT[name])
    # legend swatches: one box per walkshed distance
    y = 110
    for name in ( '0.5', '1.0', '1.5', '2.0', ):
        y -= 2.4
        ws = box(walksheds_x, y, walksheds_x + 2, y + 1.5)
        csv_string += '"%s","%s","%s Mile"\n' % (ws.wkt, name, name)
    layer = mapnik.Layer('surveys', "+init=epsg:" + str(srid))
    ds = mapnik.Datasource(type="csv", inline=csv_string.encode('ascii'))
    layer.datasource = ds
    layer.styles.append('surveys')
    m.layers.append(layer)
    # Render to image
    if format == 'pdf':
        tmp_file = tempfile.NamedTemporaryFile()
        surface = cairo.PDFSurface(tmp_file.name, m.width, m.height)
        mapnik.render(m, surface)
        surface.finish()
        tmp_file.seek(0)
        im = tmp_file.read()
    else:
        im = mapnik.Image(m.width, m.height)
        mapnik.render(m, im)
        im = im.tostring(format)
    response = HttpResponse()
    response['Content-length'] = str(len(im))
    response['Content-Type'] = mimetypes.types_map['.' + format]
    response.write(im)
    return response
def to_mapnik(self):
    """Instantiate a mapnik Datasource from this object's stored parameters."""
    datasource_args = self.parameters
    return mapnik.Datasource(**datasource_args)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
###############################################################################
# Render the world population stylesheet with an extra polygon layer fed
# from an inline CSV containing a WKT geometry.
import os
import mapnik

stylesheet = '/gdata/world_population.xml'
m = mapnik.Map(600, 300)
mapnik.load_map(m, stylesheet)
m.background = mapnik.Color('steelblue')

# style for the inline polygon overlay
s = mapnik.Style()
r = mapnik.Rule()
polygon_symbolizer = mapnik.PolygonSymbolizer()
polygon_symbolizer.fill = mapnik.Color('#f2eff9')
r.symbols.append(polygon_symbolizer)
s.rules.append(r)
m.append_style('My Style2', s)

# feed the polygon to mapnik through an inline CSV datasource
wkt_geom = 'POLYGON ((5.123456 21.6, -16.8 -52.6, 37.8 -21.6, 5.123456 21.6))'
csv_string = '''
wkt,Name
"%s","test"
''' % wkt_geom
ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})

layer2 = mapnik.Layer('world', '+proj=latlong +datum=WGS84')
layer2.datasource = ds
layer2.styles.append('My Style2')
m.layers.append(layer2)

# full-world extent, then render
bbox = mapnik.Box2d(-180.0, -90.0, 180.0, 90.0)
m.zoom_to_box(bbox)
mapnik.render_to_file(m, 'xx_ds_pt.png', 'png')
# NOTE(review): fragment — 'text', 'm', 'dirname', 'MyText' and 'IfElse' are
# defined earlier in the file (not visible in this chunk), and the
# format_trees list continues past it.
text.placements.defaults.format.face_name = 'DejaVu Sans Book'
point = mapnik.PointSymbolizer()
rule = mapnik.Rule()
rule.symbols.append(text)
rule.symbols.append(point)
style = mapnik.Style()
style.rules.append(rule)
m.append_style('Style', style)
layer = mapnik.Layer('Layer')
layer.datasource = mapnik.Datasource(**{'type':'csv','file':os.path.join(dirname,"data/points.csv")})
layer.styles.append('Style')
m.layers.append(layer)
bbox = mapnik.Box2d(-0.05, -0.01, 0.95, 0.01)
m.zoom_to_box(bbox)
# build formatting-tree variants to exercise the text placement API
formatnode = mapnik.FormattingFormat()
formatnode.child = mapnik.FormattingText("[name]")
formatnode.fill = mapnik.Color("green")
format_trees = [
    ('TextNode', mapnik.FormattingText("[name]")),
    ('MyText', MyText()),
    ('IfElse', IfElse("[nr] != 5", mapnik.FormattingText("[name]"),
def get_csv_ds(filename):
    """Build a CSV datasource for a fixture living under ../data/csv/."""
    csv_path = os.path.join('../data/csv/', filename)
    return mapnik.Datasource(type='csv', file=csv_path)
def write_datasource(self, filename, geojson):
    """Serialize *geojson* to *filename*, build a geojson Datasource from it, then delete the file.

    Fixes a temp-file leak: if mapnik.Datasource raised, the original never
    reached os.remove; the try/finally guarantees cleanup either way.

    :param filename: path to write the temporary geojson file to
    :param geojson: JSON-serializable geojson object
    :returns: the constructed mapnik Datasource
    """
    with open(filename, 'w') as f:
        f.write(json.dumps(geojson))
    try:
        datasource = mapnik.Datasource(type='geojson', file=filename)
    finally:
        # remove the temp file even when Datasource construction fails
        os.remove(filename)
    return datasource
# Render output.json over a styled background and save as world.png.
import mapnik

m = mapnik.Map(1000, 500)
m.background = mapnik.Color('#e9e5dc')
s = mapnik.Style()
r = mapnik.Rule()
polygon_symbolizer = mapnik.PolygonSymbolizer(mapnik.Color('#deb'))
r.symbols.append(polygon_symbolizer)
line_symbolizer = mapnik.LineSymbolizer(mapnik.Color('#fff'), 2)
r.symbols.append(line_symbolizer)
s.rules.append(r)
m.append_style('My Style', s)
ds = mapnik.Datasource(type='geojson', file='output.json')
layer = mapnik.Layer('world')
layer.datasource = ds
layer.styles.append('My Style')
m.layers.append(layer)
m.zoom_all()
mapnik.render_to_file(m, 'world.png', 'png')
# print() works on Python 2 and 3; the old statement form is a
# SyntaxError on Python 3
print("rendered image to 'world.png'")
def test_inline_geojson(**kwargs):
    """An inline CSV whose only column is quoted geojson yields no attribute fields."""
    inline = "geojson\n'{\"coordinates\":[-92.22568,38.59553],\"type\":\"Point\"}'"
    ds = mapnik.Datasource(**{"type": "csv", "inline": inline})
    eq_(len(ds.fields()), 0)
    eq_(ds.fields(), [])
def test_large_geojson_properties():
    """Escaped/unicode properties decode identically with feature caching off and on."""

    def check(f):
        # shared assertions for a feature from escaped.geojson
        eq_(f['name'], u'Test')
        eq_(f['int'], 1)
        eq_(f['description'], u'Test: \u005C')
        eq_(f['spaces'], u'this has spaces')
        eq_(f['double'], 1.1)
        eq_(f['boolean'], True)
        eq_(f['NOM_FR'], u'Qu\xe9bec')
        eq_(f['NOM_FR'], u'Québec')
        # nested JSON values arrive as escaped strings; they must also parse
        eq_(f['array'], u'[[[1],["deux"]],[["\\u0442\\u0440\\u0438","four","\\u4e94"]]]')
        parsed_array = json.loads(f['array'])
        eq_(parsed_array, [[[1], [u'deux']], [[u'\u0442\u0440\u0438', u'four', u'\u4e94']]])
        eq_(f['object'], u'{"value":{"type":"\\u041c\\u0430pni\\u043a","array":[3,0,"x"]}}')
        parsed_obj = json.loads(f['object'])
        eq_(parsed_obj, {u'value': {u'array': [3, 0, u'x'], u'type': u'\u041c\u0430pni\u043a'}})

    # uncached datasource, feature fetched by a point query at the center
    ds = mapnik.Datasource(type='geojson', file='../data/json/escaped.geojson', cache_features=False)
    feature = ds.features_at_point(ds.envelope().center()).features[0]
    eq_(len(ds.fields()), 9)
    eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
    check(feature)

    # default (cached) datasource, first feature of all_features()
    ds = mapnik.Datasource(type='geojson', file='../data/json/escaped.geojson')
    feature = ds.all_features()[0]
    eq_(len(ds.fields()), 9)
    eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
    check(feature)