def update_geojson_choropleth(self):
    """Annotate the central-London GeoJSON with postcode stubs, market
    labels and cascaded averages, then save a copy.

    Reads ``central_london.geojson`` next to the script, enriches each
    feature's properties, and writes the result to ``pyg_test.geojson``.
    """
    geo_data = pygeoj.load(script_file_path + "/central_london.geojson")
    # Pre-compute per-level averages once; average_cascade picks the most
    # specific level that has data for a given postcode.
    avg_area = self.df_disposals.groupby('area_code').mean()
    avg_district = self.df_disposals.groupby('district_code').mean()
    avg_sector = self.df_disposals.groupby('sector_code').mean()
    for feature in geo_data:
        ppt = feature.properties
        area, district, sector = extract_postcode_stubs(ppt['name'])
        ppt['area'] = area
        ppt['district'] = district
        ppt['sector'] = sector
        mkts = [m for m in extract_markets(ppt['name']) if m is not None]
        ppt['sub_market'] = str(mkts[0])
        ppt['wider_market'] = str(mkts[-1])
        # Drop CartoDB bookkeeping columns; pass a default so a feature
        # lacking them does not raise KeyError (the original would).
        ppt.pop('column_1495195118190', None)
        ppt.pop('cartodb_id', None)
        # One loop replaces four copy-pasted average_cascade calls.
        for field in ('rent', 'rates', 'size_min', 'size_max'):
            ppt['average_' + field] = self.average_cascade(
                field, area, district, sector,
                avg_area, avg_district, avg_sector)
    geo_data.save("pyg_test.geojson")
def json_to_txt(jsonfile, maskland):
    """Flatten a drift GeoJSON into parallel lists of coordinates and
    drift distances.

    :param jsonfile: path to a GeoJSON file of drift features with
        lat1/lon1/lat2/lon2/drift_m properties
    :param maskland: when truthy, skip features whose start point lies
        on land according to mask_land_basemap()
    :returns: (lats1, lons1, lats2, lons2, drift_m) lists
    """
    drift_json = pygeoj.load(jsonfile)
    lats1, lons1, lats2, lons2, drift_m = [], [], [], [], []
    for n, feature in enumerate(drift_json, start=1):
        print('new feature', n)
        props = feature._data['properties']
        # Land mask: mask_land_basemap() returns 1 for land points.
        if maskland and mask_land_basemap(props['lat1'], props['lon1']) == 1:
            print('land')
            continue
        lats1.append(props['lat1'])
        lons1.append(props['lon1'])
        lats2.append(props['lat2'])
        lons2.append(props['lon2'])
        # np.float was removed in NumPy 1.24; the builtin is equivalent.
        drift_m.append(float(props['drift_m']))
    return lats1, lons1, lats2, lons2, drift_m
def returnCordinates(path):
    """Scan the GeoJSON files in *path* and return the overall extent.

    :param path: directory containing .geojson files
    :returns: [min(lon), max(lon), min(lat), max(lat)] over all Point,
        LineString and Polygon (outer ring) coordinates found
    """
    files = os.listdir(path)
    longitudes = []
    latitudes = []
    # NOTE(review): files[1:] skips the first directory entry; listdir
    # order is arbitrary, so this looks fragile -- confirm intent.
    for name in files[1:]:
        if name.endswith("housenumbers.geojson"):
            # four geojson files skiped,this line will be change
            continue
        # os.path.join replaces the hard-coded Windows '\\' separator.
        filepath = os.path.join(path, name)
        try:
            geofile = pygeoj.load(filepath=filepath)
        except Exception:
            # unreadable / non-GeoJSON file: ignore and keep scanning
            continue
        for feature in geofile:
            coordinates = feature.geometry.coordinates
            gtype = feature.geometry.type
            if gtype == "Point":
                longitudes.append(coordinates[0])
                latitudes.append(coordinates[1])
            elif gtype == "LineString":
                for coordinate in coordinates:
                    longitudes.append(coordinate[0])
                    latitudes.append(coordinate[1])
            elif gtype == "Polygon":
                # outer ring only
                for coordinate in coordinates[0]:
                    longitudes.append(coordinate[0])
                    latitudes.append(coordinate[1])
    return [
        np.min(longitudes),
        np.max(longitudes),
        np.min(latitudes),
        np.max(latitudes)
    ]
def jsonCase(filepath):
    """Compute the convex hull of all coordinates in a GeoJSON file.

    @param filepath Full path to GeoJSON
    @returns (convex_hull(points), None) on success,
             (None, "File Error!") on any failure
    """
    try:
        myGeojson = pygeoj.load(filepath=filepath)
        pointList = []

        def getCoordinates(listInList):
            """Flatten arbitrarily nested coordinate lists to positions."""
            coordinates = []
            for sublist in listInList:
                if isinstance(sublist, list):
                    if isinstance(sublist[0], list):
                        # still nested: recurse one level deeper
                        coordinates.extend(getCoordinates(sublist))
                    else:
                        # listInList is already a list of positions
                        coordinates.extend(listInList)
                        break
                else:
                    # listInList is itself a single position
                    coordinates.append(listInList)
                    break
            return coordinates

        for features in myGeojson:
            pointList.extend(
                list(map(tuple, getCoordinates(features.geometry.coordinates))))
        return (convex_hull(pointList), None)
    # keep the original catch-all contract: any failure is a "File Error"
    except Exception:
        return (None, "File Error!")
def update(request, modelo):
    """Apply per-feature create/update/delete operations, encoded in the
    properties of a GeoJSON string ("data"), to *modelo*, then return the
    model's current contents serialized as GeoJSON.

    Raises ValidationError when "data" is missing, not a str, or not
    valid GeoJSON.
    """
    datos = request.data.get("data")
    if datos is None:
        raise ValidationError({"data":"es necesario en geojson"})
    if not isinstance(datos, str):
        raise ValidationError({"data":"debe ser un str"})
    # normalize single quotes so json.loads accepts the payload
    datos = datos.replace("'", "\"")
    try:
        datos = json.loads(datos)
        geo = pygeoj.load(data=datos)
        data = []
        # keep only features flagged with an operation in their properties
        for i in geo._data["features"]:
            nuevo = i["properties"].get("nuevo")
            modificar = i["properties"].get("modificar")
            eliminar = i["properties"].get("eliminar")
            if nuevo is not None or modificar is not None or eliminar is not None:
                data.append(i)
        geo._data["features"] = data
        importer = CapaImporter(geo, None, None,
                                verificar_nombre=False,
                                verificar_categoria=False)
        importer.alterar_registros(modelo)
        # re-serialize the full table as the response body
        queryset = modelo.objects.all()
        data = serialize('geojson', queryset, geometry_field='geom')
        data = json.loads(data)
        return Response(data)
    #except json.decoder.JSONDecodeError as e:
    #    raise ValidationError({"mensaje": "json invalido, "+e})
    except ValueError as e:
        print(e)
        raise ValidationError({"mensaje": "el geojson es invalido"})
def AddRegion2LanLongData(LanLogData, GeojsonRegion, CsvFileName):
    """Tag each (lng, lat) row of the CSV *LanLogData* with the name of
    the GeoJSON region that contains it, then write the result to
    *CsvFileName* (';'-separated, UTF-8)."""
    from shapely.geometry import shape, Point
    import pygeoj
    import pandas as pd
    import numpy as np

    frame = pd.read_csv(LanLogData, index_col=0)
    regions = pygeoj.load(GeojsonRegion)
    # longitude/latitude columns only
    coords = frame.loc[:, ["lng", "lat"]]
    frame["Region"] = pd.Series(np.nan, index=frame.index)
    region_col = len(frame.columns) - 1
    for row_pos in range(len(coords.index)):
        pair = coords.iloc[row_pos]
        location = Point(pair.values[0], pair.values[1])
        # test every polygon; the last containing region wins
        for feature in regions:
            if shape(feature.geometry).contains(location):
                frame.iloc[row_pos, region_col] = feature.properties["name"]
    return frame.to_csv(CsvFileName, sep=";", encoding="UTF-8")
def getBoundingBox(name, path):
    """Echo the bounding box of <path>/<name>.json via click.

    Falls back to wrapping a bare geometry/feature in a FeatureCollection
    when the file is not already one; echoes "File not Found" on any
    other failure.
    """
    # os.path.join replaces the Windows-only "%s\%s.json" formatting.
    filepath = os.path.join(path, "%s.json" % name)
    try:
        myGeojson = pygeoj.load(filepath=filepath)
        click.echo(myGeojson.bbox)
    except ValueError:
        # Not a FeatureCollection: wrap the raw JSON in one.
        # `with` closes the handle the original leaked.
        with open(filepath, "rb") as handle:
            myJson = json.load(handle)
        myGeojson = {"type": "FeatureCollection", "features": []}
        myGeojson.get("features").append(myJson)
        myGeojson = pygeoj.load(data=myGeojson)
        click.echo(myGeojson.bbox)
    except Exception:
        click.echo("File not Found")
def main(argv):
    """Optimize district values with NSGA-II and write all solutions
    into the input GeoJSON's feature properties.

    argv: [inputfile, outputfile]
    """
    help_message = 'test.py <inputfile> <outputfile>'
    if len(argv) < 2:
        print(help_message)
        sys.exit(2)
    inputfile = argv[0]
    outputfile = argv[1]
    print("Reading data from " + inputfile)
    # Setting up the user defined problem in pygmo
    prob = pg.problem(TestOptimizer(inputfile))
    solution_size = 8
    pop = pg.population(prob, size=solution_size)
    # Non-dominated sorting GA
    algo = pg.algorithm(pg.nsga2(gen=40))
    pop = algo.evolve(pop)
    # Optimal vectors and corresponding fitness values
    fits, vectors = pop.get_f(), pop.get_x()
    print("Writing output to " + outputfile)
    jsonfile = pygeoj.load(filepath=inputfile)
    num_districts = len(jsonfile)
    counter = 0
    for feature in jsonfile:
        # Build one dict with every solution's value for this district.
        # The original assigned feature.properties inside the solution
        # loop, discarding all but the last solution.
        feature.properties = {
            "sol" + str(sol): str(vectors[sol][counter])
            for sol in range(solution_size)
        }
        counter += 1
    jsonfile.save(outputfile)
def ogr2ogrCase(filepath):
    """Method for extracting the crs of a valid GeoJSON file\n
    @param filepath Full path to GeoJSON
    @returns a boundingbox as an array in a tuple in WGS84, formated like ([minLong, minLat, maxLong, maxLat], None)
    """
    try:
        myGeojson = pygeoj.load(filepath=filepath)
        crs = myGeojson.crs['properties']['name']
        if crs.find('EPSG') == -1:
            return (None, "No reference system found or not as EPSG Code")
        epsg = int(crs.split(':')[-1])
        pointList = []
        for features in myGeojson:
            # Coordinates are extracted and transformed into WGS84;
            # CRSTransform expects (lat, lon, epsg), i.e. point[1], point[0].
            # Plain loop replaces the original triple-nested map/lambda.
            for point in map(tuple, getCoordinatesFromGeoJson(
                    features.geometry.coordinates)):
                pointList.append(CRSTransform(point[1], point[0], epsg))
        return (convex_hull(pointList), None)
    except Exception:
        return (None, "File Error!")
def geojsonCase(filepath):
    """Extract the bounding box of a valid GeoJSON file.

    @param filepath Full path to GeoJSON
    @returns ([minLong, minLat, maxLong, maxLat], None) on success,
             (None, "File Error!") on any failure
    """
    try:
        loaded = pygeoj.load(filepath=filepath)
        return (loaded.bbox, None)
    except:
        return (None, "File Error!")
def coord_parse(point):
    """Return a one-element list holding the coordinates of the first
    feature built from *point* (a mapping convertible to GeoJSON), or an
    empty list when there are no features."""
    parsed = pygeoj.load(data=dict(point))
    first = next(iter(parsed), None)
    return [] if first is None else [first.geometry.coordinates]
def getCRS(filePath):
    ''' extracts EPSG number of the taken coordinate reference system (short: crs), as standard the crs WGS84 is used. \n
    input "filePath": type string, file path to geojson File \n
    returns the epsg code of the used coordinate reference system: type int, EPSG number of taken crs
    '''
    def extractAfterKeyword(searchParam, gjsonContent):
        ''' searches for the value fo the dict entry with keyword which is given as input \n
        input "searchParam": type string, keyword for which is searched in the dict \n
        input "gjsonContent": type dict, Content of geojson File
        '''
        # Depth-first walk over nested dicts/lists; matches are appended
        # to the enclosing `extracted` list (a closure variable defined
        # in the except-branch below before this helper is called).
        if type(gjsonContent) == dict:
            for keyContent, valueContent in gjsonContent.items():
                if keyContent == searchParam:
                    extracted.append(valueContent)
                if type(valueContent) == dict or type(valueContent) == list:
                    extractAfterKeyword(searchParam, valueContent)
        if type(gjsonContent) == list:
            for element in gjsonContent:
                extractAfterKeyword(searchParam, element)

    try:
        gjsonContent = pygeoj.load(filePath)
        crsCode = gjsonContent.crs
        if not crsCode:
            # No crs member: GeoJSON defaults to WGS84
            return hf.WGS84_EPSG_ID
        else:
            for key, value in crsCode.items():
                if key == "properties":
                    try:
                        if value["name"] == "urn:ogc:def:crs:OGC:2:84":
                            return hf.WGS84_EPSG_ID
                        elif value["name"]:
                            # first numeric ':'-separated segment wins,
                            # e.g. "EPSG:25832" -> 25832
                            splittedCrs = value["name"].split(":")
                            for elem in splittedCrs:
                                try:
                                    if int(elem) is not None:
                                        crsCode = int(elem)
                                        return crsCode
                                except:
                                    pass
                    except:
                        pass
        #formats like urn:ogc:def:crs:EPSG::25832
        return hf.WGS84_EPSG_ID
    except:
        # pygeoj could not parse the file: fall back to a raw recursive
        # search for a "crs" member in the plain JSON content.
        gjsonContent = extractContentFromPath(filePath)
        #4326 is the standard epsg after http://wiki.geojson.org/GeoJSON_draft_version_6#Specification
        crsCode = hf.WGS84_EPSG_ID
        extracted = []
        extractAfterKeyword("crs", gjsonContent)
        if len(extracted) != 0:
            if type(extracted[0]) == dict and "properties" in extracted[
                    0] and "code" in extracted[0]["properties"]:
                crsCode = extracted[0]["properties"]["code"]
        return crsCode
def _return_geo_walk_poly(geo_file):
    """Return a {"type", "coordinates"} dict describing the geometry of
    the first feature in *geo_file*, or None when the file is empty."""
    for feature in pygeoj.load(geo_file):
        return {
            "type": feature.geometry.type,
            "coordinates": feature.geometry.coordinates
        }
def geojsonCase(filepath):
    """Method for extracting the boundingbox of a valid GeoJSON file\n
    @param filepath Full path to GeoJSON
    @returns a boundingbox as an array in a tuple in WGS84, formated like ([minLong, minLat, maxLong, maxLat], None)
    """
    try:
        loaded = pygeoj.load(filepath=filepath)
        bbox = loaded.bbox
    except:
        return (None, "File Error!")
    return (bbox, None)
def from_file(filepath, encoding="utf8"):
    """Read a vector file (.shp or .geojson/.json) and return
    (fields, rows, geometries, crs).

    Python 2 code: `str` here is a byte string, hence the decode()
    helper. Raises Exception for unsupported extensions.
    """
    def decode(value):
        # Decode byte strings with the requested encoding; pass
        # everything else (unicode, numbers, None) through unchanged.
        if isinstance(value, str):
            return value.decode(encoding)
        else:
            return value

    # shapefile
    if filepath.lower().endswith(".shp"):
        shapereader = pyshp.Reader(filepath)
        # load fields, rows, and geometries
        # Field name is first value in field, first value in shapereader is delete flag
        fields = [decode(field[0]) for field in shapereader.fields[1:]]
        rows = [[decode(value) for value in record]
                for record in shapereader.iterRecords()]

        def getgeoj(obj):
            """ Get list of geojson features and capture bbox if alreaday calculated"""
            # .__Geo_interface__ returns geojson dict
            geoj = obj.__geo_interface__
            # Shapefiles store feature bounding boxes - except points obvy
            if hasattr(obj, "bbox"):
                geoj["bbox"] = obj.bbox
            return geoj

        geometries = [getgeoj(shape) for shape in shapereader.iterShapes()]
        # load projection string from .prj file if exists
        if os.path.lexists(filepath[:-4] + ".prj"):
            # NOTE(review): file handle is never closed -- consider `with`
            crs = open(filepath[:-4] + ".prj", "r").read()
        else:
            crs = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
        return fields, rows, geometries, crs
    # geojson file
    elif filepath.lower().endswith((".geojson", ".json")):
        geojfile = pygeoj.load(filepath)
        # load fields, rows, and geometries
        fields = [decode(field) for field in geojfile.common_attributes]
        rows = [[decode(feat.properties[field]) for field in fields]
                for feat in geojfile]
        geometries = [feat.geometry.__geo_interface__ for feat in geojfile]
        # load crs
        crs = geojfile.crs
        return fields, rows, geometries, crs
    else:
        raise Exception(
            "Could not create vector data from the given filepath:"
            " the filetype extension is either missing or not supported")
def isValid(filePath):
    '''
    Checks whether it is valid GML or not. \n
    input "path": type string, path to file which shall be extracted \n
    output true if file is valid, false if not
    '''
    try:
        # Convert to GeoJSON; loading the result validates the conversion.
        ogr2ogr.main(["","-f", "GeoJSON", "outputV.json", filePath])
        # The loaded object itself is unused; the original bound it to a
        # never-read variable.
        pygeoj.load(filepath="outputV.json")
        # NOTE(review): outputV.json is left on disk -- confirm other
        # callers do not rely on it before adding cleanup.
        return True
    except Exception:
        # narrowed from a bare except so Ctrl-C etc. still propagate
        raise Exception('The gml file from ' + filePath + ' has no valid gml Attributes')
def parse(**kwargs):
    """Record the bounding box of a GeoJSON file in the metadata dict.

    Keyword args: 'p' (file path), 'md' (master metadata dict),
    'is_debug' (debug flag for status_note). Returns the updated
    metadata dict; on any failure logs via status_note and returns None.
    """
    try:
        path_file = kwargs.get('p', None)
        MASTER_MD_DICT = kwargs.get('md', None)
        is_debug = kwargs.get('is_debug', None)
        gj = pygeoj.load(filepath=path_file)
        # only append when the spatial/files structure already exists
        if 'spatial' in MASTER_MD_DICT:
            if 'files' in MASTER_MD_DICT['spatial']:
                MASTER_MD_DICT['spatial']['files'].append({"name": path_file,
                                                           "bbox": gj.bbox})
        return MASTER_MD_DICT
    except Exception as exc:
        status_note(['! error while parsing geojson ', str(exc)], d=is_debug)
def get_bbox(self):
    """
    Corrected geometry of campaign.

    :return: corrected coordinated
    :rtype: [str]
    """
    if not self.geometry:
        return []
    # work on a deep copy so the stored geometry is left untouched
    corrected = copy.deepcopy(self.geometry)
    corrected['features'][0]['geometry']['coordinates'][0] = \
        self.corrected_coordinates()
    return pygeoj.load(data=corrected).bbox
def neighborList(filename):
    """Load neighborhood polygons from *filename* and return a list of
    {'Name': <pri_neigh property>, 'Geometry': <shapely geometry>} dicts.
    """
    # NOTE(review): hard-coded, machine-specific working directory plus a
    # process-wide chdir side effect -- confirm this is intended.
    os.chdir('C:\\Users\\galli_000\\Desktop\\gitfolder\\HW1')
    testfile = pygeoj.load(filename)
    neighborList = []
    for feature in testfile:
        multipoly = shape(feature.geometry)
        name = feature.properties['pri_neigh']
        temp = {'Name':name, 'Geometry':multipoly}
        neighborList.append(temp)
    #print('Checking lasted: ', final - orig)
    #print('Data >> Neighborhood\n')
    return neighborList
def getBoundingBox(filePath):
    '''
    extract bounding box from gml \n
    input "filepath": type string, file path to gml file \n
    returns bounding box of the file: type list, length = 4 , type = float, schema = [min(longs), min(lats), max(longs), max(lats)]
    '''
    # convert the GML to a temporary GeoJSON file, read it, clean up
    ogr2ogr.main(["", "-f", "GeoJSON", "outputB.json", filePath])
    converted = pygeoj.load(filepath="outputB.json")
    os.remove("outputB.json")
    bbox = converted.bbox
    if bbox is None:
        raise Exception('The gml file from ' + filePath + ' has no BoundingBox')
    return bbox
def sql_single_min(query):
    """Execute *query* and return "NAME STATE COST" formatted from the
    last row (expects 'name', 'state' and 'avg_total_payments' columns);
    returns "" when the query yields no rows.
    """
    # The original loaded static/data/states.geojson into an unused
    # variable and round-tripped each string through encode/decode
    # (a no-op); both removed.
    sql = text(query)
    result = db.engine.execute(sql)
    min_hospital = ""
    for row in result:
        mincost = float(row['avg_total_payments'])
        min_hospital = "%s %s %.2f" % (row['name'], row['state'], mincost)
    return min_hospital
def readGeoJSON(self, filename):
    """Accumulate district population, case and death statistics from a
    GeoJSON file of districts.

    Updates self.num_districts, the running totals and the per-district
    lists (population, num_positive, num_deaths).
    """
    jsonfile = pygeoj.load(filepath=filename)
    self.num_districts = len(jsonfile)
    for feature in jsonfile:
        props = feature.properties
        ewz = props['EWZ']        # residents per district
        cases = props['cases']
        deaths = props['deaths']
        # the original also read props['recovered'] into an unused local
        self.total_population += ewz
        self.population.append(ewz)
        self.total_num_positive += cases
        self.num_positive.append(cases)
        self.total_num_deaths += deaths
        self.num_deaths.append(deaths)
def getVectorRepresentation(filePath):
    '''
    extracts coordinates from gml File (for vector representation) \n
    input "filepath": type string, file path to gml file \n
    returns extracted coordinates of content: type list, list of lists with length = 2
    '''
    # convert the GML to a temporary GeoJSON file, read it, clean up
    ogr2ogr.main(["", "-f", "GeoJSON", "outputV.json", filePath])
    converted = pygeoj.load(filepath="outputV.json")
    coords = converted.get_feature(0).geometry.coordinates[0]
    os.remove("outputV.json")
    if coords is None:
        raise Exception('The gml file from ' + filePath + ' has no VectorRepresentation')
    return coords
def getBoundingBox(filePath):
    '''
    extract bounding box from geojson content \n
    input "filePath": type string, file path to geojson File \n
    returns bounding box: type list, length = 4 , type = float, schema = [min(longs), min(lats), max(longs), max(lats)]
    '''
    bbox = None
    try:
        # content is already a FeatureCollection
        bbox = pygeoj.load(data=convert3dto2d(filePath)).bbox
    except ValueError:
        # single geometry: wrap it into a FeatureCollection first
        collection = {"type": "FeatureCollection",
                      "features": [convert3dto2d(filePath)]}
        bbox = pygeoj.load(data=collection).bbox
    if not bbox:
        raise Exception("Bounding box could not be extracted")
    return bbox
def load_lsoa_polygons(self):
    """Map each LSOA id (LSOA11CD) to its outer-ring coordinates as a
    pair of lists [xs, ys], read from LondonLSOA.geojson."""
    lsoa_coords = {}
    for feature in pygeoj.load(filepath="LondonLSOA.geojson"):
        # first ring of the first polygon
        ring = feature.geometry.coordinates[0][0]
        xs = [pt[0] for pt in ring]
        ys = [pt[1] for pt in ring]
        lsoa_coords[feature.properties['LSOA11CD']] = [xs, ys]
    return lsoa_coords
def merge_geojson(cls, folder, file_name, files):
    """Merge the features of several GeoJSON files into one file saved
    as <folder>/<file_name>.json and return its full path."""
    merged = pygeoj.new()
    for source_path in files:
        for feature in pygeoj.load(source_path):
            merged.add_feature(obj=feature)
    destination = os.path.join(folder, file_name + ".json")
    # recompute per-feature and collection-level bounding boxes
    merged.add_all_bboxes()
    merged.update_bbox()
    merged.save(destination)
    return destination
def analysis_add_trafficzone(request):
    """AJAX endpoint: count the traffic zones in an uploaded GeoJSON and
    return them (count, filename, serialized GeoJSON) as JSON."""
    if request.is_ajax() and request.POST:
        trafficzone_filename = request.POST.get('trafficzone_filename')
        file_path = "media/trafficzones/" + str(trafficzone_filename)
        geofile = pygeoj.load(file_path)
        data = {}
        # len() replaces the original manual counting loop
        data['no_trafficzones'] = len(geofile)
        data['trafficzone_filename'] = trafficzone_filename
        data['zone_geojson'] = geojson.dumps(geofile)
        return HttpResponse(json.dumps(data), content_type='application/json')
    return HttpResponse("Non ajax post request")
def importar(self, request, *args, **kwargs):
    """Import the GeoJSON layer supplied in request.data['data'] into a
    new capa named request.data['nombre'] (categoria defaults to 1)."""
    capa = self.request.data.get('data')
    # validation inlined (original used a one-off nested function)
    if capa is None:
        raise ValidationError({"data":"es necesario la capa"})
    nombre = self.request.data.get("nombre")
    if nombre is None:
        raise ValidationError({"nombre": "es requerido"})
    categoria = self.request.data.get("categoria")
    if categoria is None:
        categoria = 1
    geo = pygeoj.load(data=json.loads(capa))
    CapaImporter(geo, nombre, categoria).importar_tabla()
    return Response()
def stats(args):
    """Dispatch the `stats` subcommand.

    --country: merge every department .geojson under STATS_PATH into
    france.geojson, replacing each city geometry by its bounding-box
    rectangle. --department / --insee / --name: (re)build municipality
    lists; --name resolves names to INSEE codes and recurses.
    """
    if args.country:
        france = []
        files = [path.join(STATS_PATH, x) for x in os.listdir(STATS_PATH)
                 if x.endswith('.geojson') and x != "france.geojson"]
        files.sort()
        counter = 0
        for json_path in files:
            counter += 1
            log.info("{:.2f}% Treating {}".format(100 * counter / len(files), json_path))
            with open(json_path) as fd:
                department = geojson.load(fd)
            for city in department.features:
                if city.geometry:
                    # pygeoj bboxes are [xmin, ymin, xmax, ymax]; the
                    # original unpacked them as [x1, x2, y1, y2], which
                    # produced degenerate rectangles.
                    [x1, y1, x2, y2] = pygeoj.load(data=FeatureCollection([city])).bbox
                    city.geometry = Polygon([[(x1, y1), (x1, y2), (x2, y2), (x2, y1)]])
                france.append(city)
        json_path = path.join(STATS_PATH, "france.geojson")
        with open(json_path, 'w') as fd:
            fd.write(geojson.dumps(FeatureCollection(france), indent=1))
    elif args.department:
        vectorized = get_vectorized_insee(args.department)
        build_municipality_list(args.department.zfill(2),
                                vectorized,
                                force_download=args.force,
                                umap=args.umap)
    elif args.insee:
        vectorized = {}
        for insee in args.insee:
            # Format of INSEE if [0-9]{2-3}[0-9]{3}
            # the first part is the department number, the second the city unique id
            # We just need to junk the last 3 caracters
            department = insee[:-3]
            vectorized[department] = get_vectorized_insee(department)
            build_municipality_list(department,
                                    vectorized[department],
                                    given_insee=insee,
                                    force_download=args.force,
                                    umap=args.umap)
    elif args.name:
        # if we got a name, we must find the associated INSEE
        args.insee = []
        for name in args.name:
            args.insee.append(get_insee_for(name))
        stats(args)
    else:
        log.critical("Unhandled case")
def ogr2ogrCase(filepath):
    """Method for extracting the crs of a valid GeoJSON file\n
    @param filepath Full path to GeoJSON
    @returns a boundingbox as an array in a tuple in WGS84, formated like ([minLong, minLat, maxLong, maxLat], None)
    """
    try:
        loaded = pygeoj.load(filepath=filepath)
        crs_name = loaded.crs['properties']['name']
        if 'EPSG' not in crs_name:
            return (None, "No reference system found or not as EPSG Code")
        epsg = int(crs_name.split(':')[-1])
        bbox = loaded.bbox
        # transform both bbox corners into WGS84
        wgsBbox = CRSTransform(bbox[1], bbox[0], epsg)
        wgsBbox.extend(CRSTransform(bbox[3], bbox[2], epsg))
        return (wgsBbox, None)
    except:
        return (None, "File Error!")
def GenerateNeighborhoodsAdjacency(CityShapeFileLocation, CityShapeFile):
    """Build an adjacency list of neighborhoods from a GeoJSON shape file.

    Returns a list of [District_N, Municipali, Adj] entries where Adj is
    the list of [District_N, Municipali] pairs whose polygons intersect
    the entry's polygon (O(n^2) pairwise shapely intersection tests).
    """
    #Khorshed worked on it
    directory = CityShapeFileLocation
    files = [CityShapeFile]
    for filename in files[:]:
        testfile = pygeoj.load(directory + filename)
        count = 0
        a = []      # polygon coordinate arrays, in feature order
        aa = []     # OBJECTID per feature
        pair = []   # [District_N, Municipali] per feature
        for feature in testfile:
            a.append(feature.geometry.coordinates)
            aa.append(feature.properties['OBJECTID'])
            pair.append([
                feature.properties['District_N'],
                feature.properties['Municipali']
            ])
            count = count + 1
        p = []
        output = []
        # NOTE(review): a[i - 1] assumes OBJECTIDs are exactly 1..N in
        # feature order -- confirm for this dataset.
        for i in aa:
            p0 = Polygon(a[i - 1][0])
            p = a
            intpoly = []
            for j in range(len(p)):
                if p0.intersects(Polygon(p[j][0])) == True:
                    if aa[j] != i:
                        intpoly.append(aa[j])
            output.append([i, intpoly])
        FArray = []
        Adj = []
        for ii in range(len(output)):
            Adj = []
            for k in range(len(output[ii][1])):
                Adj.append([
                    pair[output[ii][1][k] - 1][0],
                    pair[output[ii][1][k] - 1][1]
                ])
            FArray.append([pair[ii][0], pair[ii][1], Adj])
        # returns after the first (only) file in the list
        return FArray
def index(request):
    """Django view: render the carbon-monoxide emission map.

    On POST with a valid GeoJSON upload, use the uploaded geometry;
    otherwise fall back to a hard-coded default polygon.
    """
    coord = ''
    if request.method == "POST":
        form = ImportGeojsonfileForm(request.POST, request.FILES)
        if form.is_valid():
            geoJson = request.FILES['import_file']
            data = json.load(geoJson)
            # pygeoj.load(filepath=None, data=...) builds from parsed JSON
            a = pygeoj.load(None, data)
            for feature in a:
                # NOTE(review): only the last feature's coordinates
                # survive this loop -- confirm single-feature uploads.
                coord = feature.geometry.coordinates
                gtype = feature.geometry.type
                print(gtype)
    # file_ = open(os.path.join(PROJECT_ROOT, 'filename'))
    form = ImportGeojsonfileForm()
    if (coord != ''):
        geometry = ee.Geometry.MultiPolygon(coord)
    else:
        # default study area when no geometry was uploaded
        geometry = ee.Geometry.Polygon(
            [[[84.97873462030498, 27.87424989960898],
              [84.74527514764873, 27.563032482426987],
              [85.20670092889873, 27.385144636789754],
              [85.69833911249248, 27.368071821574812],
              [85.95102465936748, 27.57277145564543],
              [86.04166186639873, 27.840253798218345],
              [85.70657885858623, 27.973748526646474],
              [85.55826342889873, 27.95676744082692],
              [85.29733813592998, 27.910662458572368]]])
    context = {
        # "tile2020" : getTile2020(geometry),
        "tile2019": getTile2019(geometry),
        "tile2018": getTile2018(geometry),
        "tile2017": getTile2017(geometry),
        "band_viz": getVisParam(),
        # "form" : form,
        "title": "Carbon Monoxide Emission",
        "startDate": '2020-04-01',
        "endDate": '2020-04-24',
        "form": form,
    }
    return render(request, 'index.html', context)
def __init__(self, random_mode=RandomMode.UNIFORM_DISTRIBUTION_RANDOM, sigma=0.01):
    """Set up the geo-tagging helper: open the shelve cache, load the
    state-abbreviation table and city GeoJSON, then build the
    place/coordinate/user inference caches.

    :param random_mode: strategy for picking a point inside a polygon
    :param sigma: spread parameter used by the random point generation
    """
    # by default we set UNIFORM_DISTRIBUTION_RANDOM as Point in Polygon.
    self._random_mode = random_mode
    self.sigma = sigma
    # NOTE(review): shelve handle is never closed explicitly -- confirm
    # its lifetime is meant to match the process.
    self.shelve = shelve.open('geo_tag.shelve')
    try:
        # self.abbrev_us_state is the relation between the state's full name and abbrev.
        with open('us_state_abbrev.json') as json_file:
            self._abbrev_us_state = json.load(json_file)
        # self.city_datafile is a big json.
        self._city_json_file = pygeoj.load(filepath="city.json")
        # infer on place
        self._init_city_state_mapping()
        # infer on coordinate
        self._init_coord_tree_cache()
        # infer on user
        self._init_location_coordinate_mapping()
    except (ValueError, FileExistsError, FileNotFoundError) as err:
        # NOTE(review): exit(1) in a constructor kills the whole process
        # on a missing data file -- consider raising instead.
        logger.critical(err)
        exit(1)
def readGeoJSON(self, filename):
    """Accumulate per-district population, case, death and mortality
    statistics from a GeoJSON file of districts.

    Updates self.num_districts plus the running totals and per-district
    lists (population, num_positive, num_deaths, mortality_rate).
    """
    jsonfile = pygeoj.load(filepath=filename)
    self.num_districts = len(jsonfile)
    for feature in jsonfile:
        props = feature.properties
        residents = props['EWZ']
        cases = props['cases']
        deaths = props['deaths']
        mortality_rate = props['death_rate']
        # the original also read props['recovered'] and props['KFL']
        # (area in km²) into unused locals; removed
        #t_type = feature.properties['Thünen-Typ'] #indicator if the area is rural and has low economy
        self.total_population += residents
        self.population.append(residents)
        self.total_num_positive += cases
        self.num_positive.append(cases)
        self.total_num_deaths += deaths
        self.num_deaths.append(deaths)
        self.total_mortality_rate += mortality_rate
        self.mortality_rate.append(mortality_rate)
def sql_min(query):
    """Run *query* (expects 'state' and 'min' columns), write each
    state's minimum cost into the matching feature's `name` property in
    static/data/states.geojson, save it, and return the updated file as
    a JSON string."""
    data = pygeoj.load('static/data/states.geojson')
    sql = text(query)
    result = db.engine.execute(sql)
    for row in result:
        # The original built 'US.' + str(value.encode("utf-8")), which on
        # Python 3 yields "US.b'XX'" and never matches; use the text value.
        state = 'US.' + row['state']
        mincost = float(row['min'])
        for feature in data:
            if feature.properties["code_hasc"] == state:
                feature.properties["name"] = "%.2f" % mincost
    data.save('static/data/states.geojson')
    with open('static/data/states.geojson') as f:
        saved = json.load(f)
    return json.dumps(saved, ensure_ascii=False)
# Script: assign a Spanish region name ("Region" column) to each
# (lng, lat) row of data/DatosAValorar.csv via point-in-polygon tests
# against data/spain-communities.geojson.
__author__ = 'david'

from shapely.geometry import shape, Point
import pygeoj
import pandas as pd
import numpy as np

DatosTrain = pd.read_csv("data/DatosAValorar.csv", sep=";", index_col=0)
js = pygeoj.load("data/spain-communities.geojson")
# Select the longitude and latitude columns
LongLat = DatosTrain.loc[:, ['lng', 'lat']]
DatosTrain["Region"] = pd.Series(np.nan, index=DatosTrain.index)
for i in range(0, len(LongLat.index)):
    PuntoCoord = LongLat.iloc[i]
    Lng = PuntoCoord.values[0]
    Lat = PuntoCoord.values[1]
    point1 = Point(Lng, Lat)
    # check each polygon to see if it contains the point
    # (last matching polygon wins; "Region" is the last column)
    for feature in js:
        polygon = shape(feature.geometry)
        if polygon.contains(point1):
            DatosTrain.iloc[i, (len(DatosTrain.columns) - 1)] = feature.properties['name']
demo_stats = demo_stats.loc[demo_stats["gender"] != 'unknown'] demo_stats = demo_stats.loc[demo_stats["age_interval"] != 'unknown'] demo_stats = demo_stats[demo_stats.category == 'es_barsandrestaurants'] demo_stats["weekday"] = demo_stats["date"].map(lambda d: (d.weekday())) demo_stats["weekday"] = demo_stats["weekday"].astype('string') gbcn = demo_stats.groupby(["gender", "age_interval", "weekday", "merchant_zipcode"]).aggregate({ "payments": np.sum }) gbcn = gbcn.reset_index() total_by_zipcode = gbcn.groupby(["gender" , "age_interval", "weekday"])["payments"].max() gbcn['payments_proportion'] = gbcn.apply(lambda row: np.true_divide(row.payments, total_by_zipcode[row.gender, row.age_interval, row.weekday]), axis=1) zip_code_geojson = pygeoj.load(filepath="../dataset/best_bcn.geojson") geojson = { "type": "FeatureCollection", "features": [] } for feature in zip_code_geojson: zipcode = feature.properties['merchant_zipcode'] feature = { "type": "Feature", "properties": { "payments_proportion": "0", "zipcode": zipcode }, "geometry": { "type": "MultiPolygon",
other={}, geometry=fullgeom.__geo_interface__) # Initiate results table results = ResultsTable() # Load contemporary table for feat in pygeoj.load("BaseData/natearthprovs_codeupdates.geojson", encoding="latin1"): if feat.properties["geonunit"] != "Vietnam": continue results.add_province(start=None, #datetime.date(year=1946, month=1, day=1), end=datetime.date(year=2014, month=12, day=31), ids={"Name": feat.properties["name"], "HASC": feat.properties["code_hasc"], "ISO": feat.properties["iso_3166_2"], "FIPS": feat.properties["fips"] }, other={}, geometry=feat.geometry) # Load events table import sys
bnd_base = bnd['base'] +"/"+ bnd["resources"][0]["path"] bnd_type = bnd['type'] meta = c_data.find({'name':match['name']})[0] # dataset base and type dset_base = meta['base'] +"/"+ meta["resources"][0]["path"] dset_type = meta['type'] result = False if meta['file_format'] in ["raster", "release"]: if bnd_type == "boundary" and dset_type == "raster": # python raster stats extract # bnd_geo = cascaded_union([shape(shp) for shp in shapefile.Reader(bnd_base).shapes()]) bnd_geo = cascaded_union([shape(shp.geometry) for shp in pygeoj.load(bnd_base)]) extract = rs.zonal_stats(bnd_geo, dset_base, stats="min max") if extract[0]['min'] != extract[0]['max']: result = True elif bnd_type == "boundary" and dset_type == "release": result = True else: print "Error - Dataset type not yet supported (skipping dataset).\n" continue # check results and update tracker if result == True:
cadastre_f = 'D:/DATA/g.meunier/Desktop/elev/emprise_bati_paris.geojson' #YOUR ELEVATION FILE AS HDR # expects to also find bil file in same folder elevation_f = "elev-decoupe75.hdr" #IF YOU WANT A SMALL PART OF PARIS (IF YOU DON'T USE ZERO IN radius_around #THE ADDRESS YOU WANT address_f = "" address_f = "20 rue Gasnier-Guy Paris" #AND THE RADIUS AROUND THIS ADDRESS IN METER (OR ZERO) radius_around = 400 #CALCULATION extrude_json = pygeoj.load(cadastre_f) elevation_file = BilParser(elevation_f) h = elevation_file.header v = elevation_file.values #test if radius_around>0 or exist if radius_around: if address_f: address_coord_lat,address_coord_lon = nominatim(address_f) extrude(extrude_json, h, v, radius_around, address_coord_lat, address_coord_lon) else: print "need an address or no radius" else: extrude(extrude_json, h, v) #COULD USE MULTITHREADING :
csvfile.close() #create dict variable that will hold only replacement key,value pairs, remove matching key,pair values #iterate through key,value pairs only select those with spelling variations LookUp={} for key,value in lookup_dict_all.iteritems(): if key!=value: LookUp.update({key:value}) else: pass print "lookup table =" print LookUp # open template geoj_template = pygeoj.load(r"C:\Users\shylander\Desktop\ICE\app\ICE_Data\Countries_Template.geojson") # save copy geoj_template.save(r"C:\Users\shylander\Desktop\ICE\app\ICE_Data\Countries.geojson") # open copy geoj = pygeoj.load(r"C:\Users\shylander\Desktop\ICE\app\ICE_Data\Countries.geojson") # open csv csvfile = io.open(r"C:\Users\shylander\Desktop\ICE\app\ICE_Data\CSV\Tabular_Data.csv",'r', encoding='utf-8', errors='ignore') # intialize reader reader = csv.DictReader(csvfile) # dump the tabular data csv into a string of geojson objects out = geojson.dumps([row for row in reader]) # convert to the geojson object into a list/array out_list_tab = geojson.loads(out) #close Tabular Data csv csvfile.close() print 'outlist ='
def render_world(crs, savename):
    """Render world country borders reprojected into *crs* and save the
    image to testrenders/<savename>.png.

    Python 2 code (urllib2; relies on zip() returning a subscriptable
    list). The downloaded world GeoJSON is cached in the module-global
    `raw`.
    """
    import urllib2
    import json
    import pygeoj
    import pyagg
    import pyproj
    import random
    # load world borders
    global raw
    if not raw:
        raw = urllib2.urlopen("https://raw.githubusercontent.com/johan/world.geo.json/master/countries.geo.json").read()
    rawdict = json.loads(raw)
    data = pygeoj.load(data=rawdict)
    # convert coordinates
    fromproj = pyproj.Proj("+init=EPSG:4326")
    toproj = pyproj.Proj(crs.to_proj4())
    for feat in data:
        if feat.geometry.type == "Polygon":
            # zip(*ring) splits a ring into (xs, ys); transform both axes
            # at once, then zip back into coordinate pairs.
            feat.geometry.coordinates = [zip(*pyproj.transform(fromproj, toproj, zip(*ring)[0], zip(*ring)[1]))
                                         for ring in feat.geometry.coordinates]
        elif feat.geometry.type == "MultiPolygon":
            feat.geometry.coordinates = [
                [zip(*pyproj.transform(fromproj, toproj, zip(*ring)[0], zip(*ring)[1]))
                 for ring in poly]
                for poly in feat.geometry.coordinates]
        feat.geometry.update_bbox()  # important to clear away old bbox
    # get zoom area
    data.add_all_bboxes()
    data.update_bbox()
    bbox = data.bbox
    ## # to avoid inf bounds and no render in satellite view
    ## xmins, ymins, xmaxs, ymaxs = zip(*(feat.geometry.bbox for feat in data))
    ## inf = float("inf")
    ## xmaxs = (xmax for xmax in xmaxs if xmax != inf)
    ## ymaxs = (ymax for ymax in ymaxs if ymax != inf)
    ## bbox = (min(xmins), min(ymins), max(xmaxs), max(ymaxs))
    # set up drawing
    c = pyagg.Canvas(1000, 1000)
    c.geographic_space()
    c.zoom_bbox(*bbox)
    c.zoom_out(1.3)
    # draw countries
    for feat in data:
        try:
            c.draw_geojson(feat.geometry,
                           fillcolor=tuple(random.randrange(255) for _ in range(3)),
                           outlinecolor="white")
        except:
            # NOTE: feat.__geo_interface__ is one level too high maybe??
            print("unable to draw?", feat.geometry)
    # draw text of the proj4 string used
    c.percent_space()
    c.draw_text(crs.to_proj4(), (50, 10))
    # save
    c.save("testrenders/" + savename + ".png")
def from_file(filepath, encoding="utf8", **kwargs):
    """Load a vector dataset from *filepath* into plain Python lists.

    Supported by extension: .shp (via pyshp), .geojson/.json (via pygeoj),
    and geometry-less tables .txt/.csv/.xls (.xlsx raises
    NotImplementedError); any other extension raises UnknownFileError.

    Optional kwargs:
        select: predicate(rowdict) -> bool; keeps only matching rows.
        delimiter: forces the csv delimiter instead of sniffing it.
        sheet: worksheet name for .xls files (default: first sheet).
        geokey: callable(rowdict) -> geometry dict, built per table row.
        xfield/yfield: column names holding point x/y coordinates.

    Returns (fields, rows, geometries, crs): field-name list, list of
    row value-lists, list of GeoJSON geometry dicts (or None per row),
    and a crs string (proj4 text, geojson crs, or None for tables).
    """
    # TODO: for geoj and delimited should detect and force consistent
    # field types in similar manner as when saving
    select = kwargs.get("select")

    # Python 2: str is the byte type, so decode bytes to unicode with the
    # requested encoding; pass every other value through unchanged.
    def decode(value):
        if isinstance(value, str):
            return value.decode(encoding)
        else:
            return value

    # shapefile
    if filepath.endswith(".shp"):
        shapereader = pyshp.Reader(filepath, **kwargs)  # TODO: does pyshp take kwargs?

        # load fields, rows, and geometries (rows/geometries stay lazy
        # generators until materialized at the end)
        fields = [decode(fieldinfo[0]) for fieldinfo in shapereader.fields[1:]]
        rows = ( [decode(value) for value in record] for record in shapereader.iterRecords() )
        # Wrap a pyshp shape as GeoJSON, preserving its bbox when present.
        def getgeoj(obj):
            geoj = obj.__geo_interface__
            if hasattr(obj, "bbox"):
                geoj["bbox"] = list(obj.bbox)
            return geoj
        geometries = (getgeoj(shape) for shape in shapereader.iterShapes())
        rowgeoms = itertools.izip(rows, geometries)

        # load projection string from .prj file if exists; otherwise fall
        # back to plain WGS84 lon/lat
        if os.path.lexists(filepath[:-4] + ".prj"):
            crs = open(filepath[:-4] + ".prj", "r").read()
        else:
            crs = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"

    # geojson file
    elif filepath.endswith((".geojson",".json")):
        geojfile = pygeoj.load(filepath, encoding=encoding, **kwargs)

        # load fields, rows, and geometries; only attributes common to
        # all features become fields
        fields = [field for field in geojfile.common_attributes]
        rows = ([feat.properties[field] for field in fields] for feat in geojfile)
        geometries = (feat.geometry.__geo_interface__ for feat in geojfile)
        rowgeoms = itertools.izip(rows, geometries)

        # load crs
        crs = geojfile.crs

    # table files without geometry
    elif filepath.endswith((".txt",".csv",".xls",".xlsx")):

        # txt or csv
        if filepath.endswith((".txt",".csv")):
            delimiter = kwargs.get("delimiter")
            # NOTE(review): "rU" (universal newlines) is Python 2 style
            # and the file object is never explicitly closed
            fileobj = open(filepath, "rU")
            if delimiter is None:
                # no delimiter given: sniff it from the file content,
                # then rewind so the reader starts from the top
                dialect = csv.Sniffer().sniff(fileobj.read())
                fileobj.seek(0)
                rows = csv.reader(fileobj, dialect)
            else:
                rows = csv.reader(fileobj, delimiter=delimiter)
            # Coerce a cell: numeric (comma tolerated as decimal mark,
            # whole floats become ints), "NULL" -> None, else unicode text.
            # NOTE(review): bare except here swallows all errors from the
            # float conversion attempt.
            def parsestring(string):
                try:
                    val = float(string.replace(",","."))
                    if val.is_integer():
                        val = int(val)
                    return val
                except:
                    if string.upper() == "NULL":
                        return None
                    else:
                        return string.decode(encoding)
            rows = ([parsestring(cell) for cell in row] for row in rows)
            # first row is the header
            fields = next(rows)

        # excel
        elif filepath.endswith((".xls",".xlsx")):
            if filepath.endswith(".xls"):
                import xlrd
                wb = xlrd.open_workbook(filepath, encoding_override=encoding, on_demand=True)
                if "sheet" in kwargs:
                    sheet = wb.sheet_by_name(kwargs["sheet"])
                else:
                    sheet = wb.sheet_by_index(0)
                rows = ([cell.value for cell in row] for row in sheet.get_rows())
                # first row is the header
                fields = next(rows)
            elif filepath.endswith(".xlsx"):
                raise NotImplementedError()

        # Tables have no stored geometry: build it per row from geokey, or
        # from xfield/yfield point coordinates, or attach None.
        geokey = kwargs.get("geokey")
        xfield = kwargs.get("xfield")
        yfield = kwargs.get("yfield")
        if geokey:
            rowgeoms = ((row,geokey(dict(zip(fields,row)))) for row in rows)
        elif xfield and yfield:
            # Build a Point geometry from the row's x/y columns; retries
            # with comma-as-decimal-mark, warns and yields None on failure.
            def xygeoj(row):
                rowdict = dict(zip(fields,row))
                x,y = rowdict[xfield],rowdict[yfield]
                try:
                    x,y = float(x),float(y)
                    geoj = {"type":"Point", "coordinates":(x,y)}
                except:
                    try:
                        x,y = float(x.replace(",",".")),float(y.replace(",","."))
                        geoj = {"type":"Point", "coordinates":(x,y)}
                    except:
                        warnings.warn("Could not create point geometry from xfield and yfield values {x} and {y}".format(x=repr(x), y=repr(y)))
                        geoj = None
                return geoj
            rowgeoms = ((row,xygeoj(row)) for row in rows)
        else:
            rowgeoms = ((row,None) for row in rows)
        crs = None

    else:
        raise UnknownFileError("Could not create vector data from the given filepath: the filetype extension is either missing or not supported")

    # filter if needed
    if select:
        rowgeoms = ( (row,geom) for row,geom in rowgeoms if select(dict(zip(fields,row))) )

    # load to memory in lists (drains all the lazy generators above)
    rows,geometries = itertools.izip(*rowgeoms)
    rows = list(rows)
    geometries = list(geometries)

    return fields, rows, geometries, crs
import maps_api
import geometry as gm
import time
from matplotlib.path import Path
import matplotlib.patches as patches
import pygeoj
import square_meter_price as sqm

# Map each departement name to the list of its commune names
# (filled from the GeoJSON file loaded below).
sectors = {"Bouches-du-Rhône":[]}

### values given by the form filled in on the internet
# NOTE(review): presumably user-chosen search settings (competing place
# type and a demographic weighting factor) — verify against the callers.
concurrent_place_type = 'parking'
coeff_demography = 2

# One entry per commune: its boundary as a list of (lon, lat) tuples.
coords_communes_list = []
file13 = pygeoj.load("Data/france-geojson/departements/13/communes.geojson")
for feature in file13:
    sectors["Bouches-du-Rhône"].append(feature.properties['nom'])
    # Convert the first ring of the geometry from [lon, lat] lists into
    # (lon, lat) tuples.
    # NOTE(review): only coordinates[0] is read, so any additional rings
    # (holes / MultiPolygon parts) are ignored — confirm this is intended.
    correct_coords = []
    for [ln,lat] in feature.geometry.coordinates[0]:
        correct_coords.append((ln,lat))
    coords_communes_list.append(correct_coords)

def refresh_database():
    # Refresh stored data for every commune of every departement.
    for departement,commune in sectors.iteritems():
        for i in range(len(commune)):
            print('refreshing data of '+ commune[i])
            # Presumably a resume point: only communes at/after the index
            # of 'Istres' are processed — TODO confirm (body continues
            # beyond this chunk).
            if i>=commune.index('Istres'):