def to_geojson(self, json_path=None, places=True, massifs=True, circuits=True, boulders=False):
    """Serialise places, massifs and circuits as a GeoJSON FeatureCollection.

    When *json_path* is given the collection is written there (UTF-8);
    otherwise the GeoJSON text is returned. With *boulders* set, circuits are
    exported as-is instead of via their ``to_feature()`` conversion.
    """
    features = []
    if places:
        features.extend(p for p in self.places if p)
    if massifs:
        features.extend(m for m in self.massifs if m)
    if circuits:
        if boulders:
            features.extend(c for c in self.circuits if c)
        else:
            features.extend(c.to_feature() for c in self.circuits if c)
    feature_collections = geojson.FeatureCollection(features)
    if not geojson.is_valid(feature_collections):
        raise ValueError('Non valid GeoJSON')
    # Fixme: crs geojson.named API
    kwargs = dict(indent=2, ensure_ascii=False, sort_keys=True)
    if json_path is None:
        return geojson.dumps(feature_collections, **kwargs)
    with open(json_path, 'w', encoding='utf8') as f:
        geojson.dump(feature_collections, f, **kwargs)
def save_path():
    """Persist the global fly path line to fly_path_line.json as GeoJSON.

    Best-effort: any failure is printed rather than propagated.
    """
    try:
        with open('fly_path_line.json', 'w') as file:
            geojson.dump(fly_path_line, file)
        # Bug fix: py2 print statements are syntax errors on py3.
        print('path line: ', fly_path_line)
    except Exception as e:
        print(str(e))
def write_to_file(self):
    """Write self.shape to geojson/<area_type>/<filename>.geojson, and
    optionally convert it to TopoJSON alongside."""
    filename = self._get_filename()
    geojson_filename = 'geojson/{0}/{1}.geojson'.format(self.area_type, filename)
    if not os.path.exists(os.path.dirname(geojson_filename)):
        os.makedirs(os.path.dirname(geojson_filename))
    with open(geojson_filename, 'w') as sink:
        geojson.dump(self.shape, sink, indent=None)
    if self.topojson:
        topojson_filename = 'topojson/{0}/{1}.topojson'.format(self.area_type, filename)
        if not os.path.exists(os.path.dirname(topojson_filename)):
            os.makedirs(os.path.dirname(topojson_filename))
        # Bug fix: `topojson -o <output> <input>` -- the .topojson path is the
        # output and the freshly written .geojson is the input. The original
        # passed them reversed, clobbering the geojson file.
        local('topojson -o {0} {1} -p'.format(topojson_filename, geojson_filename))
    puts('{0}\t...{1}\t...done'.format(filename, os.getpid()))
def contour_to_geojson(contour, min_angle_deg=2, geojson_filepath=None, strdump=False, ndigits=3, unit='', stroke_width=3):
    """Transform matplotlib.contour to geojson.

    Each contour path with at least 6 vertices becomes a LineString feature
    styled with its level's edge colour. Returns the GeoJSON string when
    strdump is set or no path is given; otherwise writes geojson_filepath.
    """
    contour_levels = contour.levels
    collections = contour.collections
    contour_index = 0
    assert len(contour_levels) == len(collections)
    line_features = []
    for collection in collections:
        paths = collection.get_paths()
        color = collection.get_edgecolor()
        for path in paths:
            v = path.vertices
            if len(v) < 6:
                # too few points to be a meaningful contour line
                continue
            coordinates = keep_high_angle(v, min_angle_deg)
            if ndigits:
                coordinates = np.around(coordinates, ndigits)
            line = LineString(coordinates.tolist())
            properties = {
                "stroke-width": stroke_width,
                "stroke": rgb2hex(color[0]),
                "title": "%.2f" % contour_levels[contour_index] + ' ' + unit,
            }
            line_features.append(Feature(geometry=line, properties=properties))
        contour_index += 1
    feature_collection = FeatureCollection(line_features)
    if strdump or not geojson_filepath:
        # Bug fix: the original serialised `collection` (the last matplotlib
        # collection from the loop) instead of the assembled FeatureCollection.
        return geojson.dumps(feature_collection, sort_keys=True, separators=(',', ':'))
    with open(geojson_filepath, 'w') as fileout:
        geojson.dump(feature_collection, fileout, sort_keys=True, separators=(',', ':'))
def diffme(original_file, new_file, new_points_f, deleted_points_f, id_field):
    """Diff two GeoJSON point files and write added/deleted features.

    Features are keyed either by `id_field` or, when it is None, by a hash of
    their coordinates. Added points go to new_points_f, removed points to
    deleted_points_f; both handles are closed afterwards.
    """
    original = geojson.load(original_file)
    new = geojson.load(new_file)
    # Load all the points into a dict
    original_layer = loadpoints(original.features, id_field)
    new_layer = loadpoints(new.features, id_field)
    # TODO: Check that CRS is identical.
    original_guids = set(original_layer.keys())
    new_guids = set(new_layer.keys())

    def _select(features, guids):
        # Bug fix: py3 `filter` returns a lazy iterator which geojson cannot
        # serialise -- materialise the selection as a list.
        if id_field is None:
            return [f for f in features
                    if hash_coords(*f.geometry.coordinates) in guids]
        return [f for f in features if f.properties[id_field] in guids]

    # Find all the points that were added
    added_guids = new_guids - original_guids
    new_points = geojson.FeatureCollection([])
    new_points.crs = new.crs
    new_points.features = _select(new.features, added_guids)
    geojson.dump(new_points, new_points_f)
    new_points_f.close()

    # ...and the points that were deleted
    deleted_guids = original_guids - new_guids
    deleted_points = geojson.FeatureCollection([])
    deleted_points.crs = original.crs
    deleted_points.features = _select(original.features, deleted_guids)
    geojson.dump(deleted_points, deleted_points_f)
    deleted_points_f.close()
def saveAsGeoJSON(self, outputFilePath):
    """Dump the mesh as a FeatureCollection of hexagonal Polygon features."""
    try:
        from geojson import Feature, Polygon, FeatureCollection, dump
    except ImportError:
        raise ImportError(
            """ ERROR: Could not find the GeoJSON Python library.""")
    collection = FeatureCollection([])
    for j in range(self._nrows):
        for i in range(self._ncols):
            v = self.getCellVertexes(i, j)
            # Repeat vertex 0 so each hexagon ring is explicitly closed.
            ring = [v[0], v[1], v[2], v[3], v[4], v[5], v[0]]
            cell = Feature(
                geometry=Polygon([ring]),
                properties={self.__value_field: str(self._mesh[i][j])})
            collection.features.append(cell)
    with open(outputFilePath, 'w') as fp:
        dump(collection, fp)
def _export_envelope(self, output_dir, cross_sections):
    """Export convex-hull envelopes around cross-sections.

    Writes one Polygon feature per cross-section (its face points' convex
    hull) to <output_dir>/cross_section_volumes.geojson.
    """
    css_hulls = []
    for css in cross_sections:
        pointlist = np.array([
            point["geometry"]["coordinates"]
            for point in css.get_point_list("face")
        ])
        # construct envelope
        try:
            hull = ConvexHull(pointlist)
            css_hulls.append(
                Feature(
                    properties={"name": css.name},
                    geometry=Polygon(
                        [list(map(tuple, pointlist[hull.vertices]))]),
                ))
        except IndexError:
            self.set_logger_message(f"No Hull Exported For {css.name}")
    # (Removed an `output` dict the original built but never used.)
    with open(os.path.join(output_dir, "cross_section_volumes.geojson"), "w") as f:
        geojson.dump(FeatureCollection(css_hulls), f, indent=2)
def generate_currency_spec(name, locations, bootstrappers):
    """Write a <name>.json currency spec and return its file name.

    The spec is a FeatureCollection of the given location geometries with a
    `currency_meta` entry carrying the name and bootstrapper list.
    """
    features = [geojson.Feature(geometry=loc) for loc in locations]
    gj = geojson.FeatureCollection(features)
    gj['currency_meta'] = {'name': name, 'bootstrappers': bootstrappers}
    fname = name + '.json'
    with open(fname, 'w') as outfile:
        geojson.dump(gj, outfile)
    return fname
def generateJson():
    """Export January-2013 daily summaries (with their stations) to weather.json.

    Builds one Point feature per summary from the station location.
    """
    # summaries = Station.objects.all().order_by('?')[:15]
    # summaries = DailySummary.objects.all()
    summaries = DailySummary.objects.select_related('station')\
        .exclude(station__isnull=True)\
        .order_by('station')\
        .filter(Q(date__gte='2013-01-01') & Q(date__lte='2013-01-31'))[:310]
    for summary in summaries:
        # Bug fix: py2 print statements are syntax errors on py3.
        print('\n' + str(summary.station.usaf), str(summary.station.wban),
              str(summary.station.location))
    dicts = summaries.values()
    features = []
    # todo: optimize the querying here
    for i in range(len(summaries)):  # xrange is py2-only
        if i % 50 == 0 and i > 0:
            print('Processed', i, 'summaries into GeoJSON.')
        location = summaries[i].station.location
        point = Point((location[0], location[1]))
        props = dicts[i]  # renamed: `dict` shadowed the builtin
        props['date'] = props['date'].isoformat()
        features.append(Feature(geometry=point, properties=props))
    collection = FeatureCollection(features)
    with open('weather.json', 'w') as out:  # `file` shadowed the py2 builtin
        geojson.dump(collection, out, sort_keys=False)
def handle(self, *args, **options):
    """Export geojson + topojson files for every matching election group."""
    try:
        os.mkdir(options['output'])
    except FileExistsError:
        pass
    # With no date bounds at all, default to future elections only.
    if options['from'] or options['to']:
        elections = Election.objects.all().filter(group_type='election')
    else:
        elections = Election.objects.future().filter(group_type='election')
    if options['from']:
        elections = elections.filter(poll_open_date__gte=options['from'])
    if options['to']:
        elections = elections.filter(poll_open_date__lte=options['to'])
    out_dir = options['output']
    for election in elections:
        self.stdout.write("Exporting elections for group %s" % election)
        data = self.export_election(election)
        gj_path = os.path.join(out_dir, '%s.json' % election.election_id)
        with open(gj_path, 'w') as output_file:
            geojson.dump(data, output_file)
        tj_path = os.path.join(out_dir, '%s-topo.json' % election.election_id)
        self.topojson_convert(gj_path, tj_path)
        tj_simple_path = os.path.join(
            out_dir, '%s-topo-simplified.json' % election.election_id)
        self.topojson_simplify(tj_path, tj_simple_path)
def save_geojson(self, features, data_file='test.json', data_dir: Path = None, append=False):
    """Saves geojson object to datapath.

    Falls back to GEOJSON_PATH when data_dir is missing, and retries with a
    sanitised (or random) file name under GEOJSON_PATH if the first write
    fails.
    """
    mode = 'a' if append else 'w'
    if not data_dir:
        data_dir = GEOJSON_PATH
    elif not data_dir.is_dir():
        try:
            data_dir.mkdir()
        except OSError:
            # Bug fix: was a bare `except:` -- narrowed to filesystem errors
            # so programming errors are no longer swallowed. A failed mkdir
            # is surfaced by the open() below via the retry path.
            pass
    filepath = data_dir / data_file
    try:
        with open(filepath, mode) as f:
            geojson.dump(features, f)
        print('saved: ', filepath)
    except OSError:
        # Narrowed from a bare except: only path/IO problems trigger the
        # safe-name retry; anything else propagates.
        print('saving didnt work, attempting to save with a safe name')
        clean_n = "".join(
            [c for c in filepath.stem if c.isalpha() or c.isdigit() or c == ' ']
        ).rstrip()
        # random 8 letters
        rngl = token_hex(8)
        clean_n = clean_n + filepath.suffix if clean_n else rngl + filepath.suffix
        filepath = GEOJSON_PATH / clean_n
        with open(filepath, mode) as f:
            geojson.dump(features, f)
def create_geojson(coords, siteid, outdir):
    """Create GeoJSON file from geometry extracted from BETY

    Parameters
    ----------
    coords (str) -- geometry from BETY sites
    siteid (str) -- siteid
    outdir (str) -- path where the output file has to be stored

    Returns
    -------
    Absolute path to the output file
    output GeoJSON file is saved in the specified directory.
    """
    geometry = json.loads(coords)
    site_feature = Feature(geometry=geometry, properties={"name": siteid})
    collection = FeatureCollection([site_feature])
    if not os.path.exists(outdir):
        os.makedirs(outdir, exist_ok=True)
    out_path = os.path.join(outdir, siteid + ".geojson")
    with open(out_path, "w") as f:
        dump(collection, f)
    return os.path.abspath(out_path)
def contourf_to_geojson(contourf, min_angle_deg=None, strdump=False, geojson_filepath=None, ndigits=3, unit='', fill_opacity=.9, stroke_width=1):
    """Transform matplotlib.contourf to geojson.

    Every filled-contour ring becomes a Polygon feature styled with the
    level's face colour. Returns a compact GeoJSON string when strdump is set
    or no path is given; otherwise writes geojson_filepath.
    """
    contour_levels = contourf.levels
    polygon_features = []
    level_idx = 0
    for coll in contourf.collections:
        face_rgba = coll.get_facecolor()
        for path in coll.get_paths():
            for ring in path.to_polygons():
                if min_angle_deg:
                    ring = keep_high_angle(ring, min_angle_deg)
                if ndigits:
                    ring = np.around(ring, ndigits)
                properties = set_properties(stroke_width, rgb2hex(face_rgba[0]),
                                            fill_opacity, contour_levels,
                                            level_idx, unit)
                polygon_features.append(
                    Feature(geometry=Polygon(coordinates=[ring.tolist()]),
                            properties=properties))
        level_idx += 1
    collection = FeatureCollection(polygon_features)
    if strdump or not geojson_filepath:
        return geojson.dumps(collection, sort_keys=True, separators=(',', ':'))
    with open(geojson_filepath, 'w') as fileout:
        geojson.dump(collection, fileout, sort_keys=True, separators=(',', ':'))
def _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize): if not serialize: return feature_collection if strdump or not geojson_filepath: return geojson.dumps(feature_collection, sort_keys=True, separators=(',', ':')) with open(geojson_filepath, 'w') as fileout: geojson.dump(feature_collection, fileout, sort_keys=True, separators=(',', ':'))
def purge_old_tweets(self, file_name):
    """Drop tweets older than four hours from json_data/<file_name>.

    Best-effort maintenance: missing or unreadable files are silently
    ignored.
    """
    # Read in the current GeoJSON file
    file_path = 'json_data/' + str(file_name)
    try:
        with open(file_path, 'r') as file:
            json_file = json.load(file)
        # Bug fix: build a keep-list instead of removing items from the list
        # we are iterating (which skips elements).
        kept = []
        now = datetime.utcnow()
        for feature in json_file["features"]:
            tweeted_at = parser.parse(
                feature["properties"]["time"]).replace(tzinfo=None)
            # Bug fix: the original computed `then - now`, which is negative
            # for past tweets, so nothing was ever purged. Age is now - then.
            if now - tweeted_at > timedelta(hours=4):
                print("old tweet purged from", file_name)
            else:
                kept.append(feature)
        json_file["features"] = kept
        with open(file_path, 'w') as file:
            geojson.dump(json_file, file)
    except Exception:
        # deliberate best-effort: a corrupt/missing file is not fatal
        pass
def to_geojson(out_path, fatalities):
    """Writes a CalTopo-compatible GeoJSON file to out_path.

    Each fatality becomes a Point feature whose marker colour shades from
    dark red (earliest date) to bright red (latest date).
    """
    dates = [f.date for f in fatalities]
    earliest = min(dates)
    latest = max(dates)
    # Bug fix: guard against all fatalities sharing one date, which made
    # num_days == 0 and crashed the gradient with a ZeroDivisionError.
    num_days = max((latest - earliest).days, 1)

    def _gradient(date: datetime.date) -> str:
        # Map the date's position in [earliest, latest] onto #8000000..#ff0000.
        since_earliest = (date - earliest).days
        red_ratio = since_earliest / num_days
        r_value = math.floor(red_ratio * 127) + 128
        return '#%02x%02x%02x' % (r_value, 0, 0)

    features = []
    for fatality in fatalities:  # renamed: `f` was reused for the feature and the file too
        description_lines = [fatality.event_url]
        if fatality.forecast_url:
            description_lines.append(fatality.forecast_url)
        feature = geojson.Feature(
            geometry=geojson.Point((fatality.longitude, fatality.latitude)),
            properties={
                'title': fatality.date.isoformat(),
                'description': '\n'.join(description_lines),
                'marker-color': _gradient(fatality.date),
                'marker-symbol': 'danger',
            })
        features.append(feature)
    feature_collection = geojson.FeatureCollection(features)
    with open(out_path, 'w') as out:
        geojson.dump(feature_collection, out)
def makeClusterPolygons(self, clusters):
    """Build convex-hull polygons for each cluster and dump them as a JS var.

    Clusters come from clusters['data_inert']; the noise cluster (-1) and any
    cluster with fewer than 3 points are skipped.
    """
    # Bug fix: dict.keys() is a view on py3 and has no .remove() -- make a list.
    clusterids = list(clusters['data_inert'].keys())
    if -1 in clusterids:
        clusterids.remove(-1)
    poly = []
    for k in clusterids:
        points = np.asarray(clusters['data_inert'][k])
        if k >= 0 and points.shape[0] > 2:
            hull = ConvexHull(points)
            hullvert = list(hull.vertices)
            coords = [(x, y) for x, y in points[hullvert, :]]
            poly.append((coords,))
    mp = MultiPolygon(poly)
    color = '#ff0000'
    category = 'pca'
    st = {"weight": 2, "color": color, "opacity": 0,
          "fillColor": color, "fillOpacity": 0.3}
    pr = {"name": category, "popupContent": category, "style": st}
    ft = Feature(geometry=mp, properties=pr)
    outfilename = 'sample-geojson.js'
    with open(outfilename, 'w') as outfile:
        # Bug fix: py2 `print>>outfile` is a syntax error on py3.
        print("var data = ", file=outfile)
        geojson.dump(ft, outfile)
def writeJSON(connection):
    """Dump the six busiest active high-priority sites to data.json as GeoJSON.

    Returns the cursor so the caller can keep using the connection.
    """
    cursor = connection.cursor()
    # We want the half-dozen high-priority sites with the most people within a
    # 5km radius. Those sites are (as of 2016) all in Montreal.
    result = cursor.execute(
        '''SELECT FederalSiteIdentifier, PopulationCount_KM5, Name_EN,
        Location_Municipality, Location_Latitude, Location_Longitude
        FROM sites
        WHERE SiteStatus_Status_EN LIKE 'Active'
        AND Classification_Code LIKE '1'
        ORDER BY CAST(PopulationCount_KM5 AS integer) DESC LIMIT 0,6; '''
    )
    rows = result.fetchall()
    features = []
    for row in rows:
        site_lat = float(row[4].strip())
        site_lon = float(row[5].strip())
        # NOTE(review): GeoJSON positions are (lon, lat) per RFC 7946; this
        # writes (lat, lon). Confirm downstream consumers before swapping.
        my_point = geojson.Point((site_lat, site_lon))
        my_feature = geojson.Feature(geometry=my_point, properties={
            "FederalSiteIdentifier": row[0],
            "PopulationCount_KM5": row[1],
            "Name_EN": row[2],
            "Location_Municipality": row[3]
        })
        features.append(my_feature)
    my_feature_collection = geojson.FeatureCollection(features)
    with open('data.json', 'w') as f:
        # Bug fix: the py2 `print geojson.dump(...)` just printed the None
        # return value; the dump call alone is what matters.
        geojson.dump(my_feature_collection, f)
    connection.commit()
    # Bug fix: py2 print statement -> py3 function call.
    print("geojson has been successfully dumped into data.json.")
    return (cursor)
def timeMapsForPharmacies():
    """Fetch Dresden pharmacies and save a 5-minute walking time map per pharmacy."""
    osm_query = OsmDataQuery("amenity health", OsmObjectType.ALL,
                             ['"amenity"~"pharmacy"'])
    pharmacies = next(OverPassHelper().directFetch(dresdenAreaId, [osm_query]))
    timeMaps = retrieveTimeMaps(pharmacies["features"],
                                travelTime=300,
                                transportation="walking")
    fileName = "out/data/timeMapsPerPharmacy.json"
    with open(fileName, 'w', encoding='UTF-8') as outfile:
        logging.info("Saving at {}".format(fileName))
        geojson.dump(timeMaps, outfile)
def save_state(self, outcome):
    """Snapshot the ownership of every comune into the next geojson save file.

    Loads state_<n>.geojson, stamps `outcome` as the event description,
    refreshes owner name/colour per feature, then writes state_<n+1>.geojson.
    (Original note, translated: outcome is forced by the caller because
    resources are not fetched yet.)
    """
    with open("./resources/state_" + str(self.state) + ".geojson",
              "r", encoding="utf-8") as old_save_file:
        whole_data = json.load(old_save_file)
    whole_data["event_description"] = outcome
    data = whole_data["data"]
    self.old_outcome = outcome
    for feature in data["features"]:
        props = feature["properties"]
        comune = self.provincia.getComuneByName(props["comune"])
        # an unowned comune owns itself for display purposes
        holder = comune.owned_by if comune.owned_by is not None else comune
        props["owner"] = holder.name
        props["color"] = holder.color
    whole_data["data"] = data
    # increment save index and write the new snapshot
    self.state += 1
    with open("./resources/state_" + str(self.state) + ".geojson",
              "w", encoding="utf-8") as f:
        geojson.dump(whole_data, f)
def writeJs(data):
    """Dump a user's trajectory, grid borders and stay points as JS variables.

    Writes `userlocation`, `gridBorder` and `stayPoints` into
    app/static/borders_cc8.js for the front-end map.
    """
    points = [(rec.lng, rec.lat) for rec in data]
    ids = [getGridIdByLatLng(float(p[0]), float(p[1])) for p in points]
    print(ids)
    borders = [getBorderByGridId(gid) for gid in ids]
    print(borders)
    grid_border = [
        grid2dict(Grid(gid, b[0], b[1], b[2], b[3]))
        for gid, b in zip(ids, borders)
    ]
    track = LineString(tuple(points))
    feature_linestring = Feature(
        geometry=track, properties={"popupContent": str(data[0].imsi)})
    markers = [{'lng': rec.lng, 'lat': rec.lat,
                'time': rec.utime, 'imsi': rec.imsi} for rec in data]
    #with open('app/static/borders.js', 'w') as f:
    with open('app/static/borders_cc8.js', 'w') as f:
        f.write('var userlocation=[')
        f.write('\n')
        geojson.dump(feature_linestring, f)
        f.write('];')
        f.write('\n')
        f.write('var gridBorder = ')
        json.dump(grid_border, f)
        f.write(';')
        f.write('\n')
        f.write('var stayPoints=')
        f.write('\n')
        json.dump(markers, f)
        f.write(';')
def generate_features(self):
    """Export rooms, room geometry, connections, throwables and spawns, one
    geojson file each, under <output_folder>/<name>/."""
    # Create target directory if it doesn't exist
    target = os.path.join(output_folder, self.name)
    if not os.path.exists(target):
        os.mkdir(target)

    def _write(suffix, collection):
        # one `<name><suffix>` file per collection
        with open(os.path.join(target, self.name + suffix), 'w') as myout:
            geojson.dump(collection, myout)

    _write("_rooms.geojson", geojson.FeatureCollection(
        [r.generate_room_feature(target) for r in self.rooms.values()]))
    _write("_room_geometry.geojson", geojson.GeometryCollection(
        [c.to_geometry_feature() for c in self.rooms.values()]))
    _write("_connections.geojson", geojson.FeatureCollection(
        [c.to_feature() for c in self.connections]))
    _write("_throwables.geojson", geojson.FeatureCollection(
        [t.to_feature() for t in self.throwables]))
    # some spawns fail to convert and return a falsy value -- drop those
    _write("_spawns.geojson", geojson.FeatureCollection(
        [f for f in [s.to_feature() for s in self.spawns] if f]))
def main1():
    """Convert a z,x,y CSV tile list (argv[1]) into a FeatureCollection of
    tile footprint polygons (argv[2])."""
    input = sys.argv[1]
    output = sys.argv[2]
    pyramid = Pyramid(levels=range(0, 22))
    features = list()
    # Bug fix: csv.reader needs text mode on py3 (was 'rb').
    with open(input, 'r') as fp:
        reader = csv.reader(fp)
        for row in reader:
            z, x, y = tuple(map(int, row))
            tile_index = pyramid.create_tile_index(z, x, y, range_check=False)
            envelope = tile_index.envelope
            coordinates = [[
                envelope.leftbottom.make_tuple(),
                envelope.lefttop.make_tuple(),
                envelope.righttop.make_tuple(),
                envelope.rightbottom.make_tuple(),
                # Bug fix: GeoJSON polygon rings must be closed -- the ring
                # has to end where it starts (this point was commented out).
                envelope.leftbottom.make_tuple(),
            ]]
            polygon = geojson.Polygon(coordinates=coordinates)
            feature = geojson.Feature(geometry=polygon,
                                      properties=dict(z=z, x=x, y=y))
            features.append(feature)
    collection = geojson.FeatureCollection(features=features)
    # Bug fix: geojson.dump writes str, so the sink must be text mode (was 'wb').
    with open(output, 'w') as fp:
        geojson.dump(collection, fp,
                     # indent=2
                     )
def es_geoshape_to_geojson(index, doc_type, geoshape_field, outfile, query=None):
    """Export an Elasticsearch geoshape field as a GeoJSON FeatureCollection.

    input: ES index, doctype, and field that is mapped as a geoshape
    output: GeoJSON FeatureCollection written to `outfile`; all other
    _source fields become the feature properties.
    """
    # Bug fix: the default was a mutable `{}`; use a None sentinel instead
    # (behaviour unchanged -- an empty query matches everything).
    if query is None:
        query = {}
    es_full_url = 'http://' + ES_username + ':' + ES_password + '@' + ES_url + ':9200'
    es = Elasticsearch(es_full_url)
    # ES geoshape type name -> geojson constructor
    shape_ctors = {
        'point': geojson.Point,
        'linestring': geojson.LineString,
        'polygon': geojson.Polygon,
        'multipolygon': geojson.MultiPolygon,
    }
    features = []
    docs = helpers.scan(es, index=index, doc_type=doc_type, query=json.dumps(query))
    for doc in docs:
        shape_type = doc['_source'][geoshape_field]['type'].lower()
        ctor = shape_ctors.get(shape_type)
        if ctor is None:
            # Bug fix: an unrecognised type used to raise a NameError (or
            # silently reuse the previous doc's shape); skip it explicitly.
            continue
        shape = ctor(doc['_source'][geoshape_field]['coordinates'])
        #print geojson.is_valid(shape)
        props = deepcopy(doc['_source'])
        props.pop(geoshape_field, None)
        features.append(geojson.Feature(geometry=shape, properties=props))
    with open(outfile, 'w') as out:
        geojson.dump(geojson.FeatureCollection(features), out)
def make_shape():
    """Write a fixed test polygon to a NamedTemporaryFile and return the file.

    The file is created with delete=False so the caller owns its lifetime.
    """
    ring = [
        [-74.827880859375, 46.240651955001695],
        [-74.3280029296875, 46.027481852486645],
        [-73.7786865234375, 46.02366774426006],
        [-73.1195068359375, 46.12274903582433],
        [-72.7130126953125, 46.32796494040746],
        [-72.6690673828125, 46.694667307773116],
        [-73.037109375, 46.86770273172814],
        [-73.5589599609375, 46.87145819560722],
        [-73.927001953125, 46.78877728793222],
        [-74.080810546875, 46.56641407568593],
        [-73.86657714843749, 46.437856895024204],
        [-73.5479736328125, 46.47191632087041],
        [-73.267822265625, 46.58906908309182],
        [-73.6798095703125, 46.649436163350245],
        [-73.333740234375, 46.717268685073954],
        [-72.9876708984375, 46.55130547880643],
        [-73.267822265625, 46.392411189814645],
        [-73.62487792968749, 46.1912395780416],
        [-74.1357421875, 46.0998999106273],
        [-74.827880859375, 46.240651955001695],
    ]
    geo_def = geojson.Polygon([ring])
    # raise Warning(geo_def)
    # raise Exception(geo_def.errors())
    temp = tempfile.NamedTemporaryFile(suffix=".json", delete=False)
    with open(temp.name, "w") as f:
        geojson.dump(geo_def, f, indent=4)
    return temp
def write_to(data, property_names, output_file):
    '''Write list of tuples to geojson. First entry of each tuple should be
    geometry in hex coordinates and the rest properties.

    Args:
        data: List of tuples.
        property_names: List of strings. Should be same length as the number
            of properties.
        output_file (str): Output file name.
    '''
    geojson_features = []
    for entry in data:
        coords_in_hex, properties = entry[0], entry[1:]
        geometry = loads(coords_in_hex, hex=True)
        property_dict = dict(zip(property_names, properties))
        if geometry.geom_type == 'Polygon':
            coords = [list(geometry.exterior.coords)]  # brackets required
            geojson_feature = geojson.Feature(geometry=geojson.Polygon(coords),
                                              properties=property_dict)
        elif geometry.geom_type == 'Point':
            coords = list(geometry.coords)[0]
            geojson_feature = geojson.Feature(geometry=geojson.Point(coords),
                                              properties=property_dict)
        geojson_features.append(geojson_feature)
    feature_collection = geojson.FeatureCollection(geojson_features)
    # Bug fix: geojson.dump writes str, so the file must be text mode --
    # 'wb' raises TypeError on py3.
    with open(output_file, 'w') as f:
        geojson.dump(feature_collection, f)
def split(input_file, file_1, file_2, no_in_first_file):
    '''Split a geojson in two separate files.

    Args:
        input_file (str): Input filename.
        file_1 (str): Output file name 1.
        file_2 (str): Output file name 2.
        no_in_first_file (int): Number of features from input_file that go
            to file_1; the remainder go to file_2.
    '''
    # load the source feature collection
    with open(input_file) as f:
        collection = geojson.load(f)
    feats = collection['features']
    head = feats[:no_in_first_file]
    tail = feats[no_in_first_file:]
    for path, chunk in ((file_1, head), (file_2, tail)):
        with open(path, 'w') as f:
            geojson.dump(geojson.FeatureCollection(chunk), f)
def filter_by_property(input_file, output_file, property_name, values):
    '''Create a file containing only features with specified property value(s)
    from input_file.

    INPUT   input_file (str): File name.
            output_file (str): Output file name.
            property_name (str): Name of the feature property to filter by.
            values (list): Value(s) a feature may have for property_name if
                it is to be included in output_file.
    '''
    filtered_feats = []
    if not output_file.endswith('.geojson'):
        output_file += '.geojson'
    # Load feature list
    with open(input_file) as f:
        feature_collection = geojson.load(f)
    # Filter feats by property_name
    for feat in feature_collection['features']:
        if feat['properties'][property_name] in values:
            filtered_feats.append(feat)
    feature_collection['features'] = filtered_feats
    # Save filtered file.
    # Bug fix: the original called geojson.dump(f) -- no data argument, so
    # the filtered collection was never written. Also open in text mode:
    # geojson.dump writes str, not bytes.
    with open(output_file, 'w') as f:
        geojson.dump(feature_collection, f)
def purge_old_tweets(self, file_name):
    """Remove tweets older than four hours from json_data/<file_name>.

    Best-effort: a missing or unreadable file is silently ignored.
    """
    # Read in the current GeoJSON file
    file_path = 'json_data/' + str(file_name)
    try:
        with open(file_path, 'r') as file:
            json_file = json.load(file)
        now = datetime.utcnow()
        fresh_features = []
        # Bug fix: do not remove from the list being iterated (skips items);
        # collect the survivors instead.
        for feature in json_file["features"]:
            time_then = parser.parse(
                feature["properties"]["time"]).replace(tzinfo=None)
            # Bug fix: age must be now - then; the original used then - now,
            # which is negative for past tweets, so the purge never fired.
            if now - time_then > timedelta(hours=4):
                print("old tweet purged from", file_name)
            else:
                fresh_features.append(feature)
        json_file["features"] = fresh_features
        with open(file_path, 'w') as file:
            geojson.dump(json_file, file)
    except Exception:
        # deliberate best-effort: failures are non-fatal
        pass
def to_geojson(hexgrid, filename):
    """Write out the hexgrid assignment to geoJSON format.

    Args:
        hexgrid (Hexgrid): Hexgrid object that should have an assignment. So
            the `fit()` function should have been run.
        filename (str): Destination path for the GeoJSON file.
    """
    def to_polygon(code):
        gridcoords = hexgrid.assignment[code]
        hexagon = hexgrid.grid[gridcoords]
        # Bug fix: copy before popping -- the original popped 'centroid' from
        # hexgrid.objects[code] itself, mutating the hexgrid as a side effect.
        properties = dict(hexgrid.objects[code])
        # drop the centroid, that was something that I calculated.
        properties.pop('centroid', None)
        # also stick the code on because I want that code.
        properties['code'] = code
        return geojson.Polygon(mapping(hexagon.to_poly())['coordinates'],
                               properties=properties)

    polygons = [to_polygon(code) for code in hexgrid.assignment.keys()]
    bbox = [
        hexgrid.extent['min_x'],
        hexgrid.extent['min_y'],
        hexgrid.extent['max_x'],
        hexgrid.extent['max_y'],
    ]
    collection = geojson.GeometryCollection(polygons, bbox=bbox)
    with open(filename, 'w') as f:
        geojson.dump(collection, f, sort_keys=True)
def to_geojson(self, path_or_buf=None, type='LineString'):
    """Format and save the object to a geojson file

    Example:
        .. code-block:: python

            >>> log.to_geojson('diff-trajectory.json')

    Args:
        path_or_buf (str or file): The path or the file-like object to save
            the file (default is None)
        type (str or RepType): Multiple type can be used with | or '|' for
            string (default is 'LineString')

    Returns:
        str or None : Return the geojson as a string if path_or_buf is None
    """
    geo_obj = self.to_geojson_obj(type)
    if path_or_buf is None:
        return geojson.dumps(geo_obj)
    if isinstance(path_or_buf, str):
        # Bug fix: use a context manager so the handle is closed even when
        # geojson.dump raises (the original leaked it on error).
        with open(path_or_buf, 'w+') as f:
            geojson.dump(geo_obj, f)
    else:
        geojson.dump(geo_obj, path_or_buf)
def exportCentersToTextFile(self, filename):
    """
    Record cluster centers to text file.

    Parameters
    ----------
    filename : string path
        Recording file name.

    Notes
    -----
    Uses JSON data format. Rows of ``self.clusterCenters`` are expected to
    be (lat, lon, id); ``self.population`` supplies the matching 'pop'.
    """
    cc = map(lambda x, y: (np.append(x, y)).tolist(),
             self.clusterCenters, self.population)
    cc = [{'lat': i[0], 'lon': i[1], 'id': i[2], 'pop': i[3]} for i in cc]
    try:
        path = os.path.dirname(filename)
        # exist_ok=True replaces the race-prone exists()/makedirs() pair
        if path:
            os.makedirs(path, exist_ok=True)
        with open(filename, 'w') as file_:
            json.dump(cc, file_)
    except IOError as e:
        # report, don't crash: export is best-effort
        print('{}'.format(e))
def Write_Geojson_Crop_Pre(data, output_file, ts_for_vis, time_window):
    # Write trajectory segments cropped to the visualisation window
    # [ts_for_vis, ts_for_vis + time_window] as a GeoJSON FeatureCollection
    # of LineStrings.
    #
    # Each row of `data` follows the layout below; item[8] is a WKT-like
    # linestring whose points are linearly apportioned across the
    # [unix_time_pre, unix_time] interval to crop partial overlaps.
    #headers = ['unix_time', 'car_id', 'osm_id', 'gid', 'unix_time_pre', 'gid_pre', 'pick_or_drop', 'speed', 'linestring']
    features = []
    for item in data:
        points_in_line = []
        time_pre = item[4]
        time_current = item[0]
        if time_current == time_pre:
            # zero-duration segment: nothing to interpolate, skip
            continue
        # extract the "x y, x y, ..." payload between the parentheses
        line = item[8]
        line = line.split('(')[1]
        line = line.split(')')[0]
        lst = line.split(',')
        for i in range(0,len(lst)):
            tmp = lst[i].split()
            points_in_line.append([float(tmp[0]),float(tmp[1])])
        start_index = 0
        end_index = len(points_in_line)
        # Crop by assuming points are evenly spread over [time_pre, time_current]:
        # segment starts before the window -> drop a proportional prefix
        if time_pre < ts_for_vis and time_current <= ts_for_vis + time_window:
            start_index = int((1 - (time_current - ts_for_vis) / float(time_current - time_pre))*end_index)
            time_pre = ts_for_vis
        # segment ends after the window -> drop a proportional suffix
        elif time_pre >= ts_for_vis and time_current > ts_for_vis + time_window:
            end_index = int((ts_for_vis + time_window - time_pre) / float(time_current - time_pre)*end_index)
            time_current = ts_for_vis + time_window
        if len(range(start_index,end_index)) < 2:
            # fewer than 2 points left: not a valid LineString
            continue
        # +8h shift: timestamps rendered in UTC+8 local time
        time_string = datetime.datetime.utcfromtimestamp(time_current + 3600*8).strftime('%Y-%m-%d %H:%M:%S')
        features.append(geojson.Feature(geometry=geojson.LineString(points_in_line[start_index:end_index]), properties={'ABSTIME':time_current,'TIME':time_string,'CAR_ID':item[1], 'OSM_ID': item[2], 'GID': item[3], 'GID_pre': item[5], 'Pick_or_drop':item[6], "ABS_TIME_PRE":time_pre,"Speed":item[7]}))
    geom_in_geojson = geojson.FeatureCollection(features)
    with open(output_file, 'w') as o:
        geojson.dump(geom_in_geojson, o)
def main(input, output):
    """This script converts GeoMixer vector tile data to GeoJSON format.

    \b
    Convert input_fle.json to output_file.geojson:

        gmx2geojson input_fle.json output_file.geojson
    """
    # GeoMixer upper-case geometry type -> GeoJSON type; unknown types pass through
    gmx_types = {
        'MULTIPOLYGON': 'MultiPolygon',
        'POLYGON': 'Polygon',
        'MULTILINESTRING': 'MultiLineString',
        'LINESTRING': 'LineString',
        'MULTIPOINT': 'MultiPoint',
        'POINT': 'Point',
    }
    source = geojson.load(input)
    features = list()
    for item in source.get('values'):
        attrs = OrderedDict()
        geom = None
        for idx, ele in enumerate(item):
            if isinstance(ele, dict):
                # the dict element is the geometry; normalise its type name
                geom = ele
                geom['type'] = gmx_types.get(geom['type'], geom['type'])
            else:
                # everything else becomes a numbered property
                attrs['property%s' % (idx,)] = ele
        features.append(geojson.Feature(geometry=geom, properties=attrs))
    collection = geojson.FeatureCollection(features)
    geojson.dump(collection, output)
def save_geojson(df):
    """Write HF-radar sites (points plus optional coverage wedges) to
    hfradar.geojson.

    Each dataframe row becomes a Point feature; rows with both a StartAngle
    and a SpreadAngle additionally get a wedge geometry for the coverage.
    """
    features = []
    for r, s in df.iterrows():
        lat = s['Latitude']
        lon = s['Longitude']
        dis = s['range']
        angle = s['StartAngle']
        theta = s['SpreadAngle']
        x = s.fillna(0).copy()
        # Bug fix: Series.iteritems() was removed in pandas 2.0 -- use items().
        feature = geojson.Feature(
            geometry=geojson.Point([lon, lat]),
            properties={k.lower(): v for (k, v) in x.items()})
        features.append(feature)
        if not pd.isnull(angle) and not pd.isnull(theta):
            x = s.fillna(0).copy()
            feature = geojson.Feature(
                geometry=wedge(dis, angle, theta, lat, lon),
                properties={k.lower(): v for (k, v) in x.items()})
            features.append(feature)
    fc = geojson.FeatureCollection(features)
    with open('hfradar.geojson', 'w') as f:
        geojson.dump(fc, f, sort_keys=True, indent=2, separators=(",", ": "))
def save_geojson(paths, savepath):
    """Write each officer's paths as department-coloured LineStrings.

    `paths` maps "name|dept" keys to lists of coordinate sequences. Returns
    the FeatureCollection that was written to `savepath`.
    """
    palette = {
        'DOF': '#1f78b4',
        'CTA': '#33a02c',
        'Speed': '#6a3d9a',
        'Red light': '#a6cee3',
        'Chicago Parking Meter': '#b2df8a',
        'Miscellaneous/Other': '#cab2d6',
        'Streets and San': '#e31a1c',
        'LAZ': '#fb9a99',
        'CPD': '#fdbf6f',
        'SERCO': '#ff7f00'
    }
    features = []
    for officer, geoms in paths.items():
        officer_dept = officer.split('|')[1]
        color = palette[officer_dept]
        if not geoms:
            continue
        for geom in geoms:
            ls = geojson.LineString(geom)
            feature = geojson.Feature(
                geometry=ls, properties={'stroke': color, 'stroke-width': 1})
            features.append(feature)
    results = geojson.FeatureCollection(features)
    # Fix: `with` guarantees the handle closes even if dump raises
    # (the original used manual open/close).
    with open(savepath, 'w') as fh:
        geojson.dump(results, fh)
    return results
def generateJson():
    """Export all located earthquakes to seismic.json as a FeatureCollection."""
    # summaries = Station.objects.all().order_by('?')[:15]
    # summaries = DailySummary.objects.all()
    earthquakes = Earthquake.objects.all().exclude(location__isnull=True)
    dicts = earthquakes.values()
    features = []
    # todo: optimize the querying here
    for i in range(len(earthquakes)):  # xrange is py2-only
        if i % 50 == 0 and i > 0:
            # Bug fix: py2 print statement -> py3 function call.
            print('Processed', i, 'earthquakes into GeoJSON.')
        location = earthquakes[i].location
        point = Point((location[0], location[1]))
        props = dicts[i]  # renamed: `dict` shadowed the builtin
        props['date'] = props['date'].isoformat()
        del props['location']
        features.append(Feature(geometry=point, properties=props))
    collection = FeatureCollection(features)
    with open('seismic.json', 'w') as out:  # `file` shadowed the py2 builtin
        geojson.dump(collection, out, sort_keys=False)
def Write_Geojson_Pre(data, output_file):
    """Serialise matched trajectory rows as a FeatureCollection of LineStrings.

    Row layout: ['unix_time', 'car_id', 'osm_id', 'gid', 'unix_time_pre',
    'gid_pre', 'pick_or_drop', 'speed', 'linestring'].
    """
    features = []
    for row in data:
        # pull the "x y, x y, ..." payload out of the WKT-style parentheses
        payload = row[8].split('(')[1].split(')')[0]
        coords = []
        for pair in payload.split(','):
            parts = pair.split()
            coords.append([float(parts[0]), float(parts[1])])
        # timestamps are shifted +8h (UTC+8) before formatting
        readable = datetime.datetime.utcfromtimestamp(
            row[0] + 3600 * 8).strftime('%Y-%m-%d %H:%M:%S')
        props = {
            'ABSTIME': row[0],
            'TIME': readable,
            'CAR_ID': row[1],
            'OSM_ID': row[2],
            'GID': row[3],
            'GID_pre': row[5],
            'Pick_or_drop': row[6],
            "ABS_TIME_PRE": row[4],
            "Speed": row[7],
        }
        features.append(geojson.Feature(geometry=geojson.LineString(coords),
                                        properties=props))
    geom_in_geojson = geojson.FeatureCollection(features)
    with open(output_file, 'w') as o:
        geojson.dump(geom_in_geojson, o)
def df_to_geojson(df, properties, SN, lat, lon):
    """
    Method: df_to_geojson(df, properties, SN, lat, lon)
    Purpose: Iterates through the DF in order to create the properties for the Geojson file
    Require:
        df: The Dataframe to be read
        properties: The properties of the geojson
        SN: List of serial numbers
        lat: The latitude coordinate
        lon: The longitude coordinate
    Version: 05/2021, MJB: Documentation
    """
    start_time = time.time()
    df = df.fillna(999)  # 999 is the missing-value sentinel
    print('writing geojson')
    props = {'depth': [], 'SN': SN, 'time': df.index.map(str).to_list(), 'temp': []}
    for prop in properties:
        props['depth'].append(prop)
        # Column values for this depth, in row order.
        props['temp'].append([row[str(prop)] for _, row in df.iterrows()])
    # GeoJSON positions are (longitude, latitude) per RFC 7946; the original
    # passed (lat, lon), which places points transposed on a map.
    point = Point((lon, lat))
    feature = Feature(geometry=point, properties=props)
    output_filename = '../src/output_files/dataset.geojson'
    with open(output_filename, 'w') as output_file:  # Crashes when opened with text editor
        dump(feature, output_file)
    print('geojson written')
    print("--- %s seconds ---" % (time.time() - start_time))
def write_geojson(features, outFileName, srs=None):
    """Hand-write a GeoJSON FeatureCollection to *outFileName*.

    The wrapper object is emitted manually so that an optional "crs" member
    (built from *srs*) can be placed before the features array.
    """
    features = geo_features(features)
    with open(outFileName, 'w') as f:
        # FeatureCollection header
        f.write('{\n"type": "FeatureCollection",\n')
        # spatial ref spec
        if srs:
            f.write('"crs": ')
            json.dump(spatialref.geojson_crs(srs), f, indent=2)
            f.write(',\n')
        # features header
        f.write('"features": [\n')
        # features — compare by position, not value: the original
        # `feature != features[-1]` dropped the comma after ANY feature equal
        # to the last one, producing invalid JSON when features repeat.
        last = len(features) - 1
        for idx, feature in enumerate(features):
            geojson.dump(feature, f, indent=2)
            if idx != last:
                f.write(',')
            f.write('\n\n')
        # close features
        f.write(']\n')
        # close FeatureCollection
        f.write('}\n')
def saveAsGeoJSON(self, outputFilePath):
    """Write the mesh as a GeoJSON FeatureCollection of polygonal cells."""
    try:
        from geojson import Feature, Polygon, FeatureCollection, dump
    except ImportError:
        raise ImportError("""
        ERROR: Could not find the GeoJSON Python library.""")

    collection = FeatureCollection([])
    for row in range(self._nrows):
        for col in range(self._ncols):
            verts = self.getCellVertexes(col, row)
            # Six vertices plus the first one repeated to close the ring.
            ring = [verts[k] for k in range(6)] + [verts[0]]
            collection.features.append(
                Feature(
                    geometry=Polygon([ring]),
                    properties={self.__value_field: str(self._mesh[col][row])}))

    with open(outputFilePath, 'w') as fp:
        dump(collection, fp)
def modify_file(self, file_name, coordinates, tweet_text, date_object, user):
    """Append a tweet Feature to the GeoJSON file 'json_data/<file_name>',
    creating the file with a fresh FeatureCollection if it does not exist."""
    file_path = 'json_data/' + str(file_name)
    feature = geojson.Feature(
        geometry=geojson.Point(coordinates),
        properties={
            "tweet": tweet_text,
            "user": user,
            "time": str(date_object)
        })
    # Read in the current GeoJSON file and append to the existing
    # FeatureCollection; if it does not exist, start a new collection.
    try:
        with open(file_path, 'r') as fh:
            json_file = json.load(fh)
        json_file["features"].append(feature)
    except IOError:
        # The original also wrote the new collection here with json.dump,
        # then immediately rewrote it below — the single final write suffices.
        json_file = geojson.FeatureCollection([feature])
    # Write out the new GeoJSON file
    with open(file_path, 'w') as fh:
        geojson.dump(json_file, fh)
def exportPointsToTextFile(self, filename):
    """
    Record points with labels to text file.

    Parameters
    ----------
    filename : string path
        Recording file name.

    Notes
    -----
    Uses JSON data format. I/O errors are reported on stdout, not raised.
    """
    # One record per point: coordinates from self.X, cluster id from
    # self.labels (original built this by index-mutating a range list).
    records = [
        {'lat': point[0], 'lon': point[1], 'clusterId': label}
        for point, label in zip(self.X, self.labels)
    ]
    try:
        directory = os.path.dirname(filename)
        if directory:
            # exist_ok avoids the racy exists()-then-makedirs check.
            os.makedirs(directory, exist_ok=True)
        with open(filename, 'w') as file_:
            json.dump(records, file_)
    except IOError as e:
        print('{}'.format(e))
def output_to_file(self, json_content, step_num):
    """Dump *json_content* to 'geojson_<step>.geojson' in the output dir.

    Returns the full path of the file written.
    """
    basename = "geojson_{0:06d}.geojson".format(step_num)
    filename = os.path.join(self.output_dir, basename)
    with open(filename, "w") as outfile:
        dump(json_content, outfile, indent=True)
    return filename
def mergeme(inputs, output, no_dupe_handling, id_field, id_property):
    """Merge multiple GeoJSON files into one FeatureCollection written to *output*.

    Duplicates may be skipped either by feature ``id`` (*id_field*) or by a
    named property (*id_property*); the two options are mutually exclusive.
    Every input must share the CRS of the first file.
    """
    if no_dupe_handling:
        assert (not id_field) and (not id_property)
    else:
        assert (not id_field) or (not id_property)
        assert not (id_field and id_property)

    known_ids = set()
    crs = None
    output_layer = geojson.FeatureCollection([])

    # Flatten the list of inputs
    inputs = list(itertools.chain.from_iterable(inputs))

    for i, layer_f in enumerate(inputs):
        print("Processing input file #%d..." % i)  # print() — was Py2 print stmt
        layer = geojson.load(layer_f)

        # FIXME: this requires the CRS be specified on a "layer" level. GeoJSON allows this to be omitted, and this should include a check to ensure it is omitted for all in this case.

        # Some broken GeoJSON files do weird things...
        if isinstance(layer.crs, list):
            layer.crs = layer.crs[0]
        if isinstance(layer.crs.properties, list):
            # Properties arrive as a flat [key, value, key, value, ...] list;
            # rebuild the mapping. `//` keeps integer division on Python 3.
            newprops = {}
            for x in range(len(layer.crs.properties) // 2):
                newprops[layer.crs.properties[x * 2]] = layer.crs.properties[(x * 2) + 1]
            layer.crs.properties = newprops

        # We don't care about per-geometry CRS, these can mingle
        if i == 0:
            # first file sets the CRS!
            crs = layer.crs.properties['name']
            output_layer.crs = layer.crs
        else:
            assert layer.crs.properties['name'] == crs, (
                'CRS of files must match. File has CRS %r, expected %r'
                % (layer.crs.properties['name'], crs))

        # We have a matching CRS, start merging geometries.
        for geometry in layer.features:
            if not no_dupe_handling:
                if id_property and geometry.properties[id_property] in known_ids:
                    # Geometry is already present, skip
                    continue
                elif id_field and geometry.id in known_ids:
                    # Geometry is already present, skip
                    continue

            # Geometry is new, add to list
            output_layer.features.append(geometry)
            if id_property:
                known_ids.add(geometry.properties[id_property])
            elif id_field:
                known_ids.add(geometry.id)

        print("OK! (%d total geometries written, %d read from this file)"
              % (len(output_layer.features), len(layer.features)))

    # all files complete
    geojson.dump(output_layer, output)
    print("Files merged!")
def filter_polygon_size(shapefile, output_file, min_polygon_hw=30, max_polygon_hw=224):
    '''
    Creates a geojson file containing only acceptable side dimensions for
    polygons.
    INPUT   (1) string 'shapefile': name of shapefile with original samples
            (2) string 'output_file': name of file in which to save selected
                polygons. This should end in '.geojson'
            (3) int 'min_polygon_hw': minimum acceptable side length (in pixels)
                for a given polygon
            (4) int 'max_polygon_hw': maximum acceptable side length (in pixels)
                for a given polygon
    OUTPUT  (1) a geojson file (output_file.geojson) containing only polygons
                of acceptable side dimensions
    '''
    # load polygons
    with open(shapefile) as f:
        data = geojson.load(f)
    total = float(len(data['features']))

    # find indices of acceptable polygons
    ix_ok, ix = [], 0
    print('Extracting image ids...')
    img_ids = find_unique_values(shapefile, property_name='image_id')

    print('Filtering polygons...')
    for img_id in img_ids:
        print('... for image {}'.format(img_id))
        img = geoio.GeoImage(img_id + '.tif')

        # cycle thru polygons
        for chip, properties in img.iter_vector(vector=shapefile,
                                                properties=True,
                                                filter=[{'image_id': img_id}],
                                                mask=True):
            # Decide BEFORE unpacking the shape: the original did
            # `chan, h, w = np.shape(chip)` first, which raises on a None
            # chip (np.shape(None) is ()) before the None check could run.
            if chip is None:
                acceptable = False
            else:
                chan, h, w = np.shape(chip)
                acceptable = (min(h, w) >= min_polygon_hw
                              and max(h, w) <= max_polygon_hw)
            if acceptable:
                ix_ok.append(ix)
            ix += 1
            # add percent complete to stdout
            sys.stdout.write('\r%{0:.2f}'.format(100 * ix / total) + ' ' * 20)
            sys.stdout.flush()

    print('Saving...')
    ok_polygons = [data['features'][i] for i in ix_ok]
    np.random.shuffle(ok_polygons)
    # Keep the collection's first key untouched and replace the second with
    # the filtered features (list() needed for Python 3 dict views).
    keys = list(data.keys())
    filtrate = {keys[0]: data[keys[0]], keys[1]: ok_polygons}

    # save new geojson — 'w' text mode, since geojson.dump writes str on Py3
    with open(output_file, 'w') as f:
        geojson.dump(filtrate, f)

    print('Saved {} polygons to {}'.format(str(len(ok_polygons)), output_file))
def encode_to_raw_json(self, feature_collection, csv_f):
    """
    Encode a feature collection and dump into JSON file.
    :param feature_collection: FeatureCollection object
    :param csv_f: csv file name
    """
    clean_name = str(path.splitext(csv_f)[0]) + ".json"
    # "w" (text) instead of "wb": dump() writes str, which a binary-mode
    # handle rejects on Python 3.
    with open(path.join(self.uk_postcodes, clean_name), "w") as json_outfile:
        dump(feature_collection, json_outfile)
def output_to_file(self, json_content, step_num):
    """Write *json_content* to 'mass_balance_<step>.json' in the output dir.

    Returns the full path of the file written.
    """
    out_path = os.path.join(self.output_dir,
                            'mass_balance_{0:06d}.json'.format(step_num))
    with open(out_path, 'w') as fh:
        dump(json_content, fh, indent=True)
    return out_path
def zipDMA_togeojson(zipcodes, one_dma_id, to_geojson=False):
    """Collect the ZIP-code features for one DMA into a FeatureCollection.

    Relies on module-level `zips` (ZCTA features) and `dmametadata` (DMA
    names). When *to_geojson* is true, also writes '<DmaName><id>.geojson'.
    Returns the FeatureCollection.
    """
    zipfeatures = [j for j in zips.features
                   if j['properties']['ZCTA5CE10'] in zipcodes]
    zipfc = geojson.FeatureCollection(zipfeatures)
    # NOTE(review): DataFrame.ix is removed in modern pandas — this should
    # become .loc or .iloc; confirm whether one_dma_id is a label or position.
    dma_filename = (dmametadata.ix[one_dma_id]['dma_name']
                    .replace(',', '').replace(' ', '') + one_dma_id)
    if to_geojson:
        # 'w' text mode (geojson.dump writes str) and a `with` block so the
        # handle is closed — the original leaked an unclosed binary handle.
        with open(dma_filename + '.geojson', 'w') as fh:
            geojson.dump(zipfc, fh)
    return zipfc
def writeMultiPolygonFeatureCollection(self, polygons, colors, varname):
    """Emit `var <varname> = <collection>` JavaScript to self.outfile.

    The collection holds one styled Polygon Feature per (polygon, color) pair.
    NOTE(review): the dict lacks a "type": "FeatureCollection" member, so the
    output is not strictly valid GeoJSON — confirm the JS consumer is fine
    with that before adding it.
    """
    featureCollection = {'features': []}
    for polygon, color in zip(polygons, colors):
        st = {"fillcolor": color, "color": color}
        pr = {"style": st}
        ft = Feature(geometry=Polygon(polygon), properties=pr)
        featureCollection['features'].append(ft)
    # print(..., file=...) replaces the Python-2-only `print>>` syntax.
    print("var %s = " % (varname), file=self.outfile)
    geojson.dump(featureCollection, self.outfile)
def bbox(inputs, output):
    """Write one rectangular Feature per input file's bounding box to *output*.

    Every input must declare a layer-level bbox and share the CRS of the
    first file; each output Feature carries the source filename and index.
    """
    crs = None
    output_layer = geojson.FeatureCollection([])

    # Flatten the list of inputs
    inputs = list(itertools.chain.from_iterable(inputs))

    for i, layer_f in enumerate(inputs):
        print("Processing input file #%d..." % i)  # print() — was Py2 print stmt
        layer = geojson.load(layer_f)

        # FIXME: this requires the CRS be specified on a "layer" level. GeoJSON allows this to be omitted, and this should include a check to ensure it is omitted for all in this case.

        # Some broken GeoJSON files do weird things...
        if isinstance(layer.crs, list):
            layer.crs = layer.crs[0]
        if isinstance(layer.crs.properties, list):
            # Flat [key, value, ...] list back into a mapping; `//` keeps
            # integer division on Python 3.
            newprops = {}
            for x in range(len(layer.crs.properties) // 2):
                newprops[layer.crs.properties[x * 2]] = layer.crs.properties[(x * 2) + 1]
            layer.crs.properties = newprops

        # We don't care about per-geometry CRS, these can mingle
        if i == 0:
            # first file sets the CRS!
            crs = layer.crs.properties['name']
            output_layer.crs = layer.crs
        else:
            assert layer.crs.properties['name'] == crs, (
                'CRS of files must match. File has CRS %r, expected %r'
                % (layer.crs.properties['name'], crs))

        # We have a matching CRS, start processing the file
        assert len(layer.bbox) == 4, 'File must have a bounding box'

        # Rectangle from the [minx, miny, maxx, maxy] bbox, ring closed by
        # repeating the first corner.
        output_layer.features.append(
            geojson.Feature(
                geometry=geojson.Polygon(
                    coordinates=[[
                        [layer.bbox[0], layer.bbox[1]],
                        [layer.bbox[0], layer.bbox[3]],
                        [layer.bbox[2], layer.bbox[3]],
                        [layer.bbox[2], layer.bbox[1]],
                        [layer.bbox[0], layer.bbox[1]]
                    ]]
                ),
                properties=dict(
                    id=i,
                    filename=layer_f.name
                ),
                id=i
            )
        )

    # all files complete
    geojson.dump(output_layer, output)
    print("Bounding boxes drawn!")
def df_to_geojson(df, out_file):
    """Write rows of *df* (columns: lon, lat, date, time, temp) to *out_file*
    as a GeoJSON FeatureCollection of Point features."""
    # A plain loop replaces the original df.apply with a side-effecting
    # lambda, whose return value was discarded — apply is meant for
    # transformations, not for mutating an external list.
    features = []
    for _, row in df.iterrows():
        features.append(geojson.Feature(
            geometry=geojson.Point((row["lon"], row["lat"])),
            properties=dict(date=row["date"],
                            time=row["time"],
                            temp=row["temp"])))
    with open(out_file, 'w', encoding='utf8') as fp:
        geojson.dump(geojson.FeatureCollection(features), fp,
                     sort_keys=True, ensure_ascii=False)
def route_export(output_fh):
    """Serialize the route() waypoints to *output_fh* as a GeoJSON layer
    containing a single LineString feature."""
    # assume WGS84 CRS
    coords = [[wp.longitude, wp.latitude] for wp in route()]
    line = geojson.LineString()
    line.coordinates = coords
    layer = geojson.FeatureCollection([geojson.Feature(id=1, geometry=line)])
    layer.crs = geojson.crs.Named('urn:ogc:def:crs:OGC:1.3:CRS84')
    geojson.dump(layer, output_fh)
def makePointsToMultiPointFeatureCollection(self, points, colors, radii, outfilename):
    """Write `var data = <collection>` JavaScript to *outfilename*, with one
    styled circle Feature per (color, radius, point) triple.

    Points are array-likes (``.tolist()`` is called on each one).
    """
    featureCollection = {'features': []}
    for color, radius, point in zip(colors, radii, points):
        st = {"fillOpacity": 1, "fillcolor": color, "color": color,
              "radius": radius, "stroke": False}
        pr = {"style": st}
        ft = Feature(geometry=Point(point.tolist()), properties=pr)
        featureCollection['features'].append(ft)
    # `with` replaces the manual open/close pair; print(file=...) replaces
    # the Python-2-only `print>>` syntax.
    with open(outfilename, 'w') as outfile:
        print("var data = ", file=outfile)
        geojson.dump(featureCollection, outfile)
def __init__(self, f):
    """Dump feature *f* to a named temporary file and keep the closed handle
    (its .name is used later; the destructor removes the file)."""
    # note: we clean up after ourselves in the destructor
    # below (20150827/thisisaaronland)

    # mode="w": geojson.dump writes text, while NamedTemporaryFile defaults
    # to binary ('w+b'), which raises TypeError on Python 3.
    fh = tempfile.NamedTemporaryFile(mode="w", delete=False)
    geojson.dump(f, fh)
    fh.close()

    logging.debug("entempified %s" % fh.name)
    self.fh = fh