            continue
        dl = 0.001  # length of side
        lat = float(sen['location']['latitude'])
        lon = float(sen['location']['longitude'])
        if args.polygons:
            # create rectangle, in clockwise direction
            p = geojson.Polygon([[(lon, lat), (lon, lat + dl),
                                  (lon + dl, lat + dl), (lon + dl, lat),
                                  (lon, lat)]])
        else:
            p = geojson.Point((lon, lat))
        # assemble properties
        prop = {}
        prop['sensorid'] = sen['sensor']['id']
        prop['sensorname'] = sen['sensor']['sensor_type']['name']
        prop['timestamp'] = sen['timestamp']
        for pr in sen['sensordatavalues']:
            prop[pr['value_type']] = float(pr['value'])
        f = geojson.Feature(geometry=p, properties=prop)
        featl.append(f)

    # create a feature collection from feature list
    featcoll = geojson.FeatureCollection(featl)
    # empty feature list
    if not args.outfilename:
        # to stdout
        print(geojson.dumps(featcoll))
    else:
        with open(args.outfilename, 'w') as o:
            geojson.dump(featcoll, o)
def output_from_shapes(items, filename):
    """
    Write a list of geometries in 3857 projection to file in 4326 projection.
    Used for debugging purposes.
    At the moment this has only been used to output intersection buffers,
    so the resulting output typically won't contain any properties.

    Args:
        items - list of (geometry, properties) tuples
        filename - output file
    Returns:
        nothing, writes to file
    """
    output = []
    for item, properties in items:
        if item.type == 'Polygon':
            coords = [x for x in item.exterior.coords]
            reprojected_coords = [[
                get_reproject_point(x[1], x[0], transformer_3857_to_4326,
                                    coords=True) for x in coords
            ]]
        elif item.type == 'MultiLineString':
            lines = [x for x in item]
            reprojected_coords = []
            for line in lines:
                reprojected_coords.append([
                    get_reproject_point(x[1], x[0], transformer_3857_to_4326,
                                        coords=True) for x in line.coords
                ])
        elif item.type == 'LineString':
            coords = [x for x in item.coords]
            reprojected_coords = [
                get_reproject_point(x[1], x[0], transformer_3857_to_4326,
                                    coords=True) for x in coords
            ]
        elif item.type == 'Point':
            reprojected_coords = get_reproject_point(
                item.y, item.x, transformer_3857_to_4326, coords=True)
        else:
            print("{} not supported, skipping".format(item.type))
            continue
        output.append({
            'type': 'Feature',
            'geometry': {
                'type': item.type,
                'coordinates': reprojected_coords
            },
            'properties': properties
        })
    with open(filename, 'w') as outfile:
        geojson.dump(geojson.FeatureCollection(output), outfile)
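# The two reprojection helpers used above are not defined in this snippet.
# A minimal sketch of what they might look like with pyproj (hypothetical
# names and signatures, matching the (y, x) argument order seen above):
from pyproj import Transformer

# always_xy=True keeps the (x, y)/(lon, lat) axis order predictable
transformer_3857_to_4326 = Transformer.from_crs("EPSG:3857", "EPSG:4326",
                                                always_xy=True)

def get_reproject_point(y, x, transformer, coords=False):
    lon, lat = transformer.transform(x, y)
    if coords:
        return [lon, lat]  # GeoJSON coordinate order
    return lat, lon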
def r2v(outputpath, inimage, fpnumber, src):
    if fpnumber.endswith('.png'):
        fpnumberr = fpnumber[:-4]
    else:
        fpnumberr = fpnumber
    outresult = inimage.split(".")[0] + ".geojson"
    if 'win' in str(inimage):
        os.chdir(outputpath)
        outresult = str(fpnumberr) + '_window.geojson'
    elif 'wall' in str(inimage):
        os.chdir(outputpath)
        outresult = str(fpnumberr) + '_wall.geojson'
    elif 'stair' in src:
        os.chdir(outputpath)
        outresult = str(fpnumberr) + '_stair.geojson'
    else:
        os.chdir(outputpath)
        outresult = str(fpnumberr) + '_lift.geojson'

    src_ds = gdal.Open(outputpath + fpnumber + src)  # read image
    # Image GeoTransform
    originX, pixelWidth, x_rotation, originY, y_rotation, pixelHeight = \
        src_ds.GetGeoTransform()
    # Image EPSG
    proj = osr.SpatialReference(wkt=src_ds.GetProjection())
    crs = "EPSG:{}".format(proj.GetAttrValue('AUTHORITY', 1))
    img = src_ds.ReadAsArray()  # image as array
    imgbin = img == 255  # create a boolean binary image
    imgbin = imgbin[1, :, :]
    img_thin = thin(imgbin)  # the thinned image
    if img_thin.sum() == 0:
        return False
    img_thin = img_as_ubyte(img_thin)  # convert to 8-bit uint (0-255 range)
    img_thin = np.where(img_thin == 255)  # indices of white pixels

    # Calculate the center point of each cell
    points = []
    for i, idY in enumerate(img_thin[0]):
        idX = img_thin[1][i]
        cX = idX
        cY = idY
        points.append((cX, cY))
    maxD = np.sqrt(2.0 * pixelWidth * pixelWidth)
    tree = spatial.cKDTree(points)
    groups = tree.query_ball_tree(tree, maxD)
    out_lines = []
    for x, ptlist in enumerate(groups):
        for pt in ptlist:
            if pt > x:
                out_lines.append(
                    LineString([Point(points[x]), Point(points[pt])]))
    # Merge the contiguous lines
    merged_lines = linemerge(MultiLineString(out_lines))
    # Define the crs
    crs = {"type": "name", "properties": {"name": crs}}
    # Create a feature collection of linestrings
    feature_collection = []
    if isinstance(merged_lines, LineString):
        merged_lines = merged_lines.simplify(maxD, True)
        feature_collection.append(
            geojson.Feature(
                geometry=geojson.LineString(merged_lines.coords[:])))
    else:
        for line in merged_lines:
            # Simplify the result to try to remove zigzag effect
            line = line.simplify(maxD, True)
            feature_collection.append(
                geojson.Feature(geometry=geojson.LineString(line.coords[:])))
    feature_collection = geojson.FeatureCollection(feature_collection, crs=crs)
    # Save the output to a geojson file
    with open(outresult, 'w') as outfile:
        os.chdir(outputpath)
        geojson.dump(feature_collection, outfile)
    return feature_collection
def main(args):
    with open(args.features) as fp:
        collection = geojson.load(fp)

    shapes = [shapely.geometry.shape(feature["geometry"])
              for feature in collection["features"]]
    del collection

    graph = UndirectedGraph()
    idx = make_index(shapes)

    def buffered(shape, args):
        projected = project_wgs_el(shape)
        buffered = projected.buffer(args.threshold)
        unprojected = project_el_wgs(buffered)
        return unprojected

    def unbuffered(shape, args):
        projected = project_wgs_el(shape)
        unbuffered = projected.buffer(-1 * args.threshold)
        unprojected = project_el_wgs(unbuffered)
        return unprojected

    for i, shape in enumerate(tqdm(shapes, desc="Building graph",
                                   unit="shapes", ascii=True)):
        embiggened = buffered(shape, args)
        graph.add_edge(i, i)
        nearest = [j for j in idx.intersection(embiggened.bounds, objects=False)
                   if i != j]
        for t in nearest:
            if embiggened.intersects(shapes[t]):
                graph.add_edge(i, t)

    components = list(graph.components())
    assert sum([len(v) for v in components]) == len(shapes), \
        "components capture all shape indices"

    features = []
    for component in tqdm(components, desc="Merging components",
                          unit="component", ascii=True):
        embiggened = [buffered(shapes[v], args) for v in component]
        merged = unbuffered(union(embiggened), args)

        if merged.is_valid:
            # Orient exterior ring of the polygon in counter-clockwise direction.
            if isinstance(merged, shapely.geometry.polygon.Polygon):
                merged = shapely.geometry.polygon.orient(merged, sign=1.0)
            elif isinstance(merged, shapely.geometry.multipolygon.MultiPolygon):
                merged = [shapely.geometry.polygon.orient(geom, sign=1.0)
                          for geom in merged.geoms]
                merged = shapely.geometry.MultiPolygon(merged)
            else:
                print("Warning: merged feature is neither Polygon nor MultiPolygon, skipping",
                      file=sys.stderr)
                continue

            # equal-area projection; round to full m^2, we're not that precise anyway
            area = int(round(project_ea(merged).area))
            feature = geojson.Feature(geometry=shapely.geometry.mapping(merged),
                                      properties={"area": area})
            features.append(feature)
        else:
            print("Warning: merged feature is not valid, skipping", file=sys.stderr)

    collection = geojson.FeatureCollection(features)

    with open(args.out, "w") as fp:
        geojson.dump(collection, fp)
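# `make_index`, `project_wgs_el`, `project_el_wgs`, `project_ea`, `union`, and
# `UndirectedGraph` are helpers from the surrounding project. As one plausible
# sketch, `make_index` could be an R-tree over shape bounds so that candidate
# neighbours are found quickly (hypothetical implementation):
import rtree.index

def make_index(shapes):
    index = rtree.index.Index()
    for i, shape in enumerate(shapes):
        # R-tree entries only need the bounding box; exact intersection
        # tests happen afterwards against the buffered geometry
        index.insert(i, shape.bounds)
    return index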
def upload_multiple_polygon_shp(request):
    return_obj = {'success': False}
    # Check if it's an ajax post request
    if request.is_ajax() and request.method == 'POST':
        file_list = request.FILES.getlist('files')
        # Initializing an empty geojson string.
        geojson_string = ''
        temp_dir = None  # so the finally block is safe if mkdtemp fails
        try:
            # Storing the uploaded files in a temporary directory
            temp_dir = tempfile.mkdtemp()
            for f in file_list:
                f_name = f.name
                f_path = os.path.join(temp_dir, f_name)
                with open(f_path, 'wb') as f_local:
                    f_local.write(f.read())
            app_workspace = app.get_app_workspace()
            for file in os.listdir(temp_dir):
                # Reading the shapefile only
                if file.endswith(".shp"):
                    f_path = os.path.join(temp_dir, file)
                    omit = ['SHAPE_AREA', 'SHAPE_LEN']
                    with fiona.open(f_path) as source:
                        project = functools.partial(
                            pyproj.transform,
                            pyproj.Proj(**source.crs),
                            pyproj.Proj(init='epsg:3857'))
                        for f in source:
                            geojson_string = ''
                            return_obj = {'success': False}
                            features = []
                            district = (f['properties']['NAME_3'])
                            # Getting the shape of the shapefile
                            shape = shapely.geometry.shape(f['geometry'])
                            # Transforming the shapefile
                            projected_shape = shapely.ops.transform(project, shape)
                            # Remove the properties we don't want
                            props = f['properties']  # props is a reference
                            for k in omit:
                                if k in props:
                                    del props[k]
                            # Creating a geojson feature by extracting properties
                            # through the fiona and shapely.geometry modules
                            feature = geojson.Feature(id=f['id'],
                                                      district=district,
                                                      geometry=projected_shape,
                                                      properties=props)
                            features.append(feature)
                            fc = geojson.FeatureCollection(features)
                            geometry = fc["features"][0]["geometry"]
                            shape_obj = shapely.geometry.asShape(geometry)
                            poly_bounds = shape_obj.bounds
                            # Returning the bounds and the geo_json object as a json object
                            return_obj["bounds"] = poly_bounds
                            return_obj["geo_json"] = fc
                            return_obj["success"] = True
                            district_json = json.dumps(return_obj)
                            crops_dir = os.path.join(app_workspace.path, 'crops')
                            if not os.path.exists(crops_dir):
                                os.mkdir(crops_dir)
                            crop_dir = os.path.join(crops_dir, 'xxxxxxxxxxx')
                            if not os.path.exists(crop_dir):
                                os.mkdir(crop_dir)
                            # Name of the file is its crop_name
                            file_name = district + '.json'
                            file_path = os.path.join(crop_dir, file_name)
                            # Write json
                            with open(file_path, 'w') as f:
                                f.write(district_json)
        except:
            return 'error'
        finally:
            # Delete the temporary directory once the geojson string is created
            if temp_dir is not None:
                if os.path.exists(temp_dir):
                    shutil.rmtree(temp_dir)
    return JsonResponse(return_obj)
def section_to_geojson(section, tl):
    """
    This is the trickiest part of the visualization.
    The section is basically a collection of points with a line through them.
    So the representation is a feature collection in which one feature is the
    line, and the rest are the point features.

    :param section: the section to be converted
    :return: a feature collection which is the geojson version of the section
    """
    ts = esta.TimeSeries.get_time_series(section.user_id)
    entry_it = ts.find_entries(["analysis/recreated_location"],
                               esda.get_time_query_for_trip_like(
                                   "analysis/cleaned_section",
                                   section.get_id()))

    # TODO: Decide whether we want to rewrite this to use dataframes throughout
    # instead of python arrays. dataframes insert nans. We could use fillna to
    # fill with default values, but if we are not actually using dataframe
    # features here, it is unclear how much that would help.
    feature_array = []
    section_location_entries = [ecwe.Entry(entry) for entry in entry_it]
    if len(section_location_entries) != 0:
        logging.debug("first element in section_location_array = %s" %
                      section_location_entries[0])

        if not ecc.compare_rounded_arrays(
                section.data.end_loc.coordinates,
                section_location_entries[-1].data.loc.coordinates,
                digits=4):
            logging.info("section_location_array[-1].data.loc %s != section.data.end_loc %s even after df.ts fix, filling gap" % \
                         (section_location_entries[-1].data.loc, section.data.end_loc))
            assert(False)
            last_loc_doc = ts.get_entry_at_ts("background/filtered_location",
                                              "data.ts", section.data.end_ts)
            if last_loc_doc is None:
                logging.warning("can't find entry to patch gap, leaving gap")
            else:
                last_loc_entry = ecwe.Entry(last_loc_doc)
                logging.debug("Adding new entry %s to fill the end point gap between %s and %s" %
                              (last_loc_entry.data.loc,
                               section_location_entries[-1].data.loc,
                               section.data.end_loc))
                section_location_entries.append(last_loc_entry)

    points_line_feature = point_array_to_line(section_location_entries)
    points_line_feature.id = str(section.get_id())
    points_line_feature.properties.update(copy.copy(section.data))
    # Update works on dicts, convert back to a section object to make the modes
    # work properly
    points_line_feature.properties = ecwcs.Cleanedsection(
        points_line_feature.properties)
    points_line_feature.properties["feature_type"] = "section"

    if eac.get_section_key_for_analysis_results() == esda.INFERRED_SECTION_KEY:
        ise = esds.cleaned2inferred_section(section.user_id, section.get_id())
        if ise is not None:
            logging.debug("mapped cleaned section %s -> inferred section %s" %
                          (section.get_id(), ise.get_id()))
            logging.debug("changing mode from %s -> %s" %
                          (points_line_feature.properties.sensed_mode,
                           ise.data.sensed_mode))
            points_line_feature.properties["sensed_mode"] = str(ise.data.sensed_mode)
        else:
            points_line_feature.properties["sensed_mode"] = str(
                points_line_feature.properties.sensed_mode)
    else:
        points_line_feature.properties["sensed_mode"] = str(
            points_line_feature.properties.sensed_mode)

    _del_non_derializable(points_line_feature.properties, ["start_loc", "end_loc"])

    # feature_array.append(gj.FeatureCollection(points_feature_array))
    feature_array.append(points_line_feature)

    return gj.FeatureCollection(feature_array)
        if not inner:
            return geojson.MultiPolygon([(o['coordinates'], ) for o in outer])
        if len(outer) == 1:
            polygons = [outer[0]['coordinates']]
            for way in inner:
                polygons.append(way['coordinates'])
            return geojson.MultiPolygon([polygons])
        raise ValueError('Unknown inner/outer configuration %r' % relation)
    else:
        raise ValueError('Unknown relation type %r' % relation)


if __name__ == '__main__':
    HERE = os.path.dirname(os.path.abspath(__file__))
    OSM = os.path.join(HERE, 'karlsruhe.osm')
    GEOJSON = os.path.join(HERE, 'coordinates.geojson')

    with open(OSM, 'r') as f:
        streets, relations = parse_osm(f)

    features = []
    for name, ways in streets.iteritems():
        features.append(geojson.Feature(geometry=ways2geometry(ways), id=name))
    for name, props in relations.iteritems():
        features.append(
            geojson.Feature(geometry=relation2geometry(props), id=name))
    collection = geojson.FeatureCollection(features)

    with codecs.open(GEOJSON, 'w', encoding='utf8') as f:
        geojson.dump(collection, f)
def pipeline_start(pipeline, root, logger):
    """
    Walk a tile directory and report about available tiles.

    Called from local data dir as: ddd ~/ddd/pipelines/osm/osm_tile_info.py
    """
    source_dir = settings.DDD_WORKDIR + "/ddd_http/"
    #source_regex = r"output/ddd_http/([0-9]+)/([0-9]+)/([0-9]+)(.*)"
    source_regex = r".*(17)/([0-9]+)/([0-9]+)(.*)"

    logger.info("Finding output results from: %s (%s)" % (source_dir, source_regex))

    #use_file = "/tmp/tiles.txt"
    use_file = None
    if use_file:
        listing = open(use_file, "r").read().split("\n")
    else:
        listing = glob.glob(source_dir + "**/*.glb", recursive=True)
        listing = [f[len(source_dir):] for f in listing]

    features = []
    feature_idx = {}

    for filename in listing:
        #dirname = os.path.dirname(filename)
        #basename = os.path.basename(filename)

        if not filename.endswith(".glb"):
            continue
        if filename.endswith(".uncompressed.glb"):
            continue

        #print(filename)
        #logger.debug(filename)
        matches = re.match(source_regex, filename)
        if matches:
            x, y, z = int(matches.group(2)), int(matches.group(3)), matches.group(1)
            if z == '.':
                z = 17
            else:
                z = int(z)
            data = {"z": z, "x": x, "y": y, "remainder": matches.group(4)}
            #logger.debug(data)

            tile = Tile.from_google(x, y, zoom=z)
            point_min, point_max = tile.bounds
            min_lat, min_lon = point_min.latitude_longitude
            max_lat, max_lon = point_max.latitude_longitude
            center_lat = (min_lat + max_lat) / 2.0
            center_lon = (min_lon + max_lon) / 2.0
            center = (center_lon, center_lat)
            area = ddd.rect([min_lon, min_lat, max_lon, max_lat]).geom

            file_path = os.path.join(source_dir, filename)
            mtime = datetime.datetime.fromtimestamp(os.path.getmtime(file_path))
            age = datetime.datetime.now() - mtime

            feature = geojson.Feature(
                geometry=area,
                properties={"available": True,  #exists > 0,
                            "name": filename,
                            "z": z,
                            "x": x,
                            "y": y,
                            "mtime": str(mtime),
                            "size": os.stat(file_path).st_size if os.path.exists(file_path) else None})

            if (z, x, y) not in feature_idx:
                feature_idx[(z, x, y)] = feature
                features.append(feature)
            else:
                pass

    feature_collection = geojson.FeatureCollection(features)
    dump = geojson.dumps(feature_collection, sort_keys=True, indent=4)

    print(dump + "\n")
    logger.info("Found %d files." % len(features))
def process_institutions(stack):
    key = 'stack:institutions'
    try:
        institutions_cards = _cache.get(key)
    except KeyError:
        CRS = '+ellps=GRS80 +k=1.00007 +lat_0=31.73439361111111 +lon_0=35.20451694444445 +no_defs +proj=tmerc +units=m +x_0=219529.584 +y_0=626907.39'
        projector = pyproj.Proj(CRS)

        def proj():
            def func(row):
                row['lon'], row['lat'] = projector(row['X'], row['Y'], inverse=True)
            return DF.Flow(
                DF.add_field('lon', 'number'),
                DF.add_field('lat', 'number'),
                func,
                DF.delete_fields(['X', 'Y'])
            )

        def translate_kind():
            translations = {
                'מרפאה': 'מרפאות',
                'איצטדיון': 'איצטדיון',
                'ספרייה': 'ספריות',
                'בית ספר': 'בתי ספר',
                'מועדון קהילתי כולל מרכז צעירים': 'מועדון קהילתי',
                'בית כנסת': 'בתי כנסת',
                'מועדון נוער': 'מועדון נוער',
                'אולם מופעים, היכל תרבות': 'מוסדות תרבות',
                'מועדון קשישים, מרכז לאזרחים ותיקים,מרכז יום לקשישים': 'מרכזי פעילות לקשישים',
            }

            def func(row):
                row['kind'] = translations[row['kind']]
            return func

        institutions_cards = DF.Flow(
            *[DF.load(f) for f in glob.glob('institutions/*xlsx')],
            DF.concatenate(dict(
                kind=['סוג המוסד'],
                title=['שם המוסד'],
                address=['כתובת'],
                X=[],
                Y=[]
            )),
            translate_kind(),
            proj(),
            DF.add_field('feature', 'object', lambda r: geojson.Feature(
                properties=dict(title=r['title'], address=r['address']),
                geometry=geojson.Point(coordinates=[float(r['lon']), float(r['lat'])])
            )),
            DF.delete_fields(['title', 'lon', 'lat', 'address']),
            DF.join_with_self('concat', ['kind'], dict(
                title=dict(name='kind'),
                features=dict(name='feature', aggregate='array')
            )),
            DF.sort_rows('{title}', reverse=True),
            DF.add_field('pointGeometry', 'object',
                         lambda r: geojson.FeatureCollection(features=r['features'])),
            DF.add_field('content', 'string', ' '),
            DF.delete_fields(['features']),
            # DF.printer(tablefmt='html')
        ).results()[0][0]
        _cache.set(key, institutions_cards)

    stack.update(dict(
        map=True,
    ))
    stack.setdefault('cards', [])
    current_cards = dict(
        (c['title'], c) for c in stack['cards']
    )
    for card in institutions_cards:
        current_card = current_cards.pop(card['title'], None)
        if current_card is not None:
            card['content'] = current_card['content']
        else:
            print('SPURIOUS CARD for INSTITUTIONS', card['title'])
    stack['cards'] = [
        c for c in stack['cards'] if c['title'] in current_cards
    ] + institutions_cards
def data_geojson(self):
    style = {}
    global_style = utils.get_feature_styles(self.context)
    style["fill"] = global_style["polygoncolor"]
    style["stroke"] = global_style["linecolor"]
    style["width"] = global_style["linewidth"]
    if global_style.get("marker_image", None):
        img = get_marker_image(self.context, global_style["marker_image"])
        style["image"] = img
    else:
        style["image"] = None

    json_result = []
    self.pc = api.portal.get_tool("portal_catalog")
    for contact in self.get_contacts():
        brain = self.pc.unrestrictedSearchResults(UID=contact.UID())[0]
        if contact.use_parent_address:
            brain = self.pc.unrestrictedSearchResults(
                UID=contact.aq_parent.UID())[0]
        if brain.zgeo_geometry == Missing.Value:
            continue
        if brain.collective_geo_styles == Missing.Value:
            continue
        if brain.collective_geo_styles.get(
            "use_custom_styles", False
        ) and brain.collective_geo_styles.get("marker_image", None):
            img = get_marker_image(
                self.context, brain.collective_geo_styles["marker_image"]
            )
            style["image"] = img
        geom = {
            "type": brain.zgeo_geometry["type"],
            "coordinates": brain.zgeo_geometry["coordinates"],
        }
        if geom["coordinates"]:
            if geom["type"]:
                classes = geom["type"].lower() + " "
            else:
                classes = ""
            address = get_address(contact)
            number = ""
            if address.get("number", None):
                number = ", {0}".format(address["number"])
            formatted_address = "{0}{1}<br />{2} {3}".format(
                safe_utf8(address.get("street") or ""),
                number,
                address.get("zip_code") or "",
                safe_utf8(address.get("city") or ""),
            )
            img = ""
            if self.context.see_logo_in_popup:
                acc = getattr(contact, "logo", None)
                if acc and acc.filename:
                    img = "{0}/@@images/logo/thumb".format(contact.absolute_url())
            classes += brain.getPath().split("/")[-2].replace(".", "-")
            json_result.append(
                geojson.Feature(
                    id=contact.id.replace(".", "-"),
                    geometry=as_shape(geom),
                    style=style,
                    properties={
                        "title": brain.Title,
                        "description": brain.Description,
                        "style": style,
                        "url": brain.getURL(),
                        "classes": classes,
                        "image": img,
                        "address": formatted_address,
                    },
                )
            )
    feature_collection = geojson.FeatureCollection(json_result)
    feature_collection.update({"title": self.context.title})
    return geojson.dumps(feature_collection)
def section_to_geojson(section, tl):
    """
    This is the trickiest part of the visualization.
    The section is basically a collection of points with a line through them.
    So the representation is a feature collection in which one feature is the
    line, and the rest are the point features.

    :param section: the section to be converted
    :return: a feature collection which is the geojson version of the section
    """
    ts = esta.TimeSeries.get_time_series(section.user_id)
    entry_it = ts.find_entries(["analysis/recreated_location"],
                               esda.get_time_query_for_trip_like(
                                   "analysis/cleaned_section",
                                   section.get_id()))

    # TODO: Decide whether we want to rewrite this to use dataframes throughout
    # instead of python arrays. dataframes insert nans. We could use fillna to
    # fill with default values, but if we are not actually using dataframe
    # features here, it is unclear how much that would help.
    feature_array = []
    section_location_entries = [ecwe.Entry(entry) for entry in entry_it]
    if len(section_location_entries) != 0:
        logging.debug("first element in section_location_array = %s" %
                      section_location_entries[0])

        # Fudge the end point so that we don't have a gap because of the
        # ts != write_ts mismatch
        # TODO: Fix this once we are able to query by the data timestamp
        # instead of the metadata ts
        if section_location_entries[-1].data.loc != section.data.end_loc:
            logging.info("section_location_array[-1].data.loc %s != section.data.end_loc %s even after df.ts fix, filling gap" % \
                         (section_location_entries[-1].data.loc, section.data.end_loc))
            last_loc_doc = ts.get_entry_at_ts("background/filtered_location",
                                              "data.ts", section.data.end_ts)
            if last_loc_doc is None:
                logging.warning("can't find entry to patch gap, leaving gap")
            else:
                last_loc_entry = ecwe.Entry(last_loc_doc)
                logging.debug("Adding new entry %s to fill the end point gap between %s and %s" %
                              (last_loc_entry.data.loc,
                               section_location_entries[-1].data.loc,
                               section.data.end_loc))
                section_location_entries.append(last_loc_entry)

    points_line_feature = point_array_to_line(section_location_entries)

    # If this is the first section, we already start from the trip start. But
    # we actually need to start from the prior place. Fudge this too. Note also
    # that we may want to figure out how to handle this properly in the model
    # without needing fudging. TODO: Unclear how exactly to do this
    if section.data.start_stop is None:
        # This is the first section. So we need to find the start place of the
        # parent trip
        parent_trip = tl.get_object(section.data.trip_id)
        start_place_of_parent_trip = tl.get_object(parent_trip.data.start_place)
        points_line_feature.geometry.coordinates.insert(
            0, start_place_of_parent_trip.data.location.coordinates)

    points_line_feature.id = str(section.get_id())
    points_line_feature.properties = copy.copy(section.data)
    points_line_feature.properties["feature_type"] = "section"
    points_line_feature.properties["sensed_mode"] = str(
        points_line_feature.properties.sensed_mode)

    _del_non_derializable(points_line_feature.properties, ["start_loc", "end_loc"])

    # feature_array.append(gj.FeatureCollection(points_feature_array))
    feature_array.append(points_line_feature)

    return gj.FeatureCollection(feature_array)
def colexification(args):
    args.api._log = args.log
    threshold = args.threshold or 1
    edgefilter = args.edgefilter
    words = {}

    def clean(word):
        return ''.join([w for w in word if w not in '/,;"'])

    varieties = args.api.db.varieties
    lgeo = geojson.FeatureCollection([v.as_geojson() for v in varieties])
    args.api.json_dump(lgeo, 'app', 'source', 'langsGeo.json')

    app_source = args.api.existing_dir('app', 'source')
    for p in Path(__file__).parent.joinpath('app').iterdir():
        target_dir = app_source.parent if p.suffix == '.html' else app_source
        shutil.copy(str(p), str(target_dir / p.name))

    args.log.info('Adding nodes to the graph')
    G = nx.Graph()
    for concept in args.api.db.iter_concepts():
        G.add_node(concept.id, **concept.as_node_attrs())

    args.log.info('Adding edges to the graph')
    for v_, forms in tqdm(args.api.db.iter_wordlists(varieties),
                          total=len(varieties), leave=False):
        cols = full_colexification(forms)

        for k, v in cols.items():
            for formA, formB in combinations(v, r=2):
                # check for identical concept resulting from word-variants
                if formA.concepticon_id != formB.concepticon_id:
                    words[formA.gid] = [formA.clics_form, formA.form]
                    if not G[formA.concepticon_id].get(formB.concepticon_id, False):
                        G.add_edge(
                            formA.concepticon_id,
                            formB.concepticon_id,
                            words=set(),
                            languages=set(),
                            families=set(),
                            wofam=[],
                        )
                    G[formA.concepticon_id][formB.concepticon_id]['words'].add(
                        (formA.gid, formB.gid))
                    G[formA.concepticon_id][formB.concepticon_id]['languages'].add(
                        v_.gid)
                    G[formA.concepticon_id][formB.concepticon_id]['families'].add(
                        v_.family)
                    G[formA.concepticon_id][formB.concepticon_id]['wofam'].append(
                        '/'.join([
                            formA.gid, formB.gid, formA.clics_form, v_.gid,
                            v_.family, clean(formA.form), clean(formB.form)
                        ]))
    args.api.json_dump(words, 'app', 'source', 'words.json')

    edges = {}
    for edgeA, edgeB, data in G.edges(data=True):
        edges[edgeA, edgeB] = (len(data['families']), len(data['languages']),
                               len(data['words']))

    ignore_edges = []
    for edgeA, edgeB, data in G.edges(data=True):
        data['WordWeight'] = len(data['words'])
        data['words'] = ';'.join(
            sorted(['{0}/{1}'.format(x, y) for x, y in data['words']]))
        data['FamilyWeight'] = len(data['families'])
        data['families'] = ';'.join(sorted(data['families']))
        data['LanguageWeight'] = len(data['languages'])
        data['languages'] = ';'.join(data['languages'])
        data['wofam'] = ';'.join(data['wofam'])
        if edgefilter == 'families' and data['FamilyWeight'] < threshold:
            ignore_edges.append((edgeA, edgeB))
        elif edgefilter == 'languages' and data['LanguageWeight'] < threshold:
            ignore_edges.append((edgeA, edgeB))
        elif edgefilter == 'words' and data['WordWeight'] < threshold:
            ignore_edges.append((edgeA, edgeB))

    G.remove_edges_from(ignore_edges)

    nodenames = {
        r[0]: r[1]
        for r in args.api.db.fetchall(
            "select distinct concepticon_id, concepticon_gloss from parametertable"
        )
    }

    table = Table('ID A', 'Concept A', 'ID B', 'Concept B', 'Families',
                  'Languages', 'Words')
    count = 0
    for (nodeA, nodeB), (fc, lc, wc) in sorted(edges.items(),
                                               key=lambda i: i[1],
                                               reverse=True):
        if (nodeA, nodeB) not in ignore_edges:
            table.append(
                [nodeA, nodenames[nodeA], nodeB, nodenames[nodeB], fc, lc, wc])
            count += 1
            if count >= 10:
                break
    print(table.render(tablefmt='simple'))

    args.api.save_graph(G, args.graphname or 'network', threshold, edgefilter)
if __name__ == '__main__':
    HERE = os.path.dirname(os.path.abspath(__file__))
    NAMES = os.path.join(HERE, 'names.json')
    COORDINATES = os.path.join(HERE, 'coordinates.geojson')
    MERGED = os.path.join(HERE, 'streetnames.geojson')

    with codecs.open(COORDINATES, 'r', encoding='utf8') as f:
        coordinates = geojson.load(f)
    features = {}
    for feature in coordinates['features']:
        features[normalize_name(feature.id)] = feature

    with codecs.open(NAMES, 'r', encoding='utf8') as f:
        names = json.load(f)

    complete_features = []
    for name, props in names.iteritems():
        if not (props['year'] or props['previous'] or props['info']):
            print 'No information about "%s"' % name
            continue
        try:
            feature = features[normalize_name(name)]
            feature.properties = props
        except KeyError:
            print 'Could not find coordinates for "%s"' % name
            continue
        complete_features.append(feature)

    collection = geojson.FeatureCollection(complete_features)
    with codecs.open(MERGED, 'w', encoding='utf8') as f:
        geojson.dump(collection, f)
def load_stac_items(self, filename, recursive=False, max_connections=100,
                    provider=None, productType=None, timeout=HTTP_REQ_TIMEOUT,
                    **kwargs):
    """Loads STAC items from a geojson file / STAC catalog or collection, and
    converts them to a SearchResult.

    Features are parsed using eodag provider configuration, as if they were
    the response content to an API request.

    :param filename: A filename containing features encoded as a geojson
    :type filename: str
    :param recursive: Browse recursively in child nodes if True
    :type recursive: bool
    :param max_connections: Maximum number of connections for HTTP requests
    :type max_connections: int
    :param provider: Data provider
    :type provider: str
    :param productType: Data product type
    :type productType: str
    :param timeout: (optional) Timeout in seconds for each internal HTTP request
    :type timeout: float
    :param dict kwargs: Parameters that will be stored in the result as search criteria
    :returns: The search results encoded in `filename`
    :rtype: :class:`~eodag.api.search_result.SearchResult`
    """
    features = fetch_stac_items(
        filename,
        recursive=recursive,
        max_connections=max_connections,
        timeout=timeout,
    )
    nb_features = len(features)
    feature_collection = geojson.FeatureCollection(features)
    feature_collection["context"] = {
        "limit": nb_features,
        "matched": nb_features,
        "returned": nb_features,
    }

    plugin = next(
        self._plugins_manager.get_search_plugins(product_type=productType,
                                                 provider=provider))
    # save plugin._request and mock it to make it return the loaded static results
    plugin_request = plugin._request
    plugin._request = lambda url, info_message=None, exception_message=None: \
        MockResponse(feature_collection, 200)

    # save preferred_provider and use provided one instead
    preferred_provider, _ = self.get_preferred_provider()
    self.set_preferred_provider(provider)
    products, _ = self.search(productType=productType, **kwargs)
    # restore preferred_provider
    self.set_preferred_provider(preferred_provider)

    # restore plugin._request
    plugin._request = plugin_request

    return products
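# Hedged usage sketch: load_stac_items is a method on eodag's
# EODataAccessGateway; the filename, provider, and productType values below
# are placeholders, not taken from this snippet.
from eodag import EODataAccessGateway

dag = EODataAccessGateway()
results = dag.load_stac_items(
    "items.geojson",              # any geojson file of STAC items
    provider="earth_search",      # assumed provider name
    productType="S2_MSI_L1C",     # assumed product type
)
print(len(results), "products loaded")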
# expected = int(float(line_str[7]))
# if type == 'P':
#     p = geojson.Polygon(GEOM)
# else:
#     p = geojson.MultiPolygon(GEOM)
# f = geojson.Feature(geometry=p, properties={'county': county, 'population': population, 'illness': illness, 'SMR': SMR, 'mortality': mortality, 'expected': expected})
# features_l.append(f)
#geodata = geojson.dumps(geojson.FeatureCollection(features=features_l))
#print geodata

for line in lines[1:]:
    line_str = line.strip().split()
    county = line_str[0]
    type = line_str[1]
    GEOM = json.loads(line_str[2])
    age_Ad = float(line_str[3])
    incidence = float(line_str[4])
    if type == 'P':
        p = geojson.Polygon(GEOM)
    else:
        p = geojson.MultiPolygon(GEOM)
    f = geojson.Feature(geometry=p,
                        properties={
                            'county': county,
                            'age_Ad': age_Ad,
                            'incidence': incidence
                        })
    features_l.append(f)

geodata = geojson.dumps(geojson.FeatureCollection(features=features_l))
print geodata
def save_json_features(self, fc, json_file):
    with open(json_file, mode='w+') as f:
        fs = geojson.FeatureCollection(fc)
        dump = geojson.dumps(fs, sort_keys=True)
        f.write(dump)
def garden_collection(gardens, user=None):
    """Get GeoJSON FeatureCollection for the given gardens"""
    return geojson.FeatureCollection(
        features=[garden_feature(g, user) for g in gardens])
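# `garden_feature` is defined elsewhere in this project. A minimal sketch of
# what it might look like (hypothetical attribute names):
import geojson

def garden_feature(garden, user=None):
    return geojson.Feature(
        id=garden.pk,
        geometry=geojson.Point((garden.longitude, garden.latitude)),
        properties={
            "name": garden.name,
            # flag gardens belonging to the viewing user, if any
            "own": user is not None and garden.added_by_id == user.pk,
        },
    )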
def rank():
    su.enable()
    index = rtree.index.Index()
    house_properties = []

    with open("geo_value_clp.json", "r") as house_file:
        houses = geojson.loads(house_file.read())
    for ix, feature in enumerate(houses['features']):
        point = sg.shape(feature['geometry'])
        index.insert(id=ix, coordinates=(point.bounds))
        props = feature['properties']
        props['geom'] = point
        props['trees'] = 0
        house_properties.append(props)
        if ix % 10000 == 0:
            print ix
    del houses

    with fiona.collection("sfutc_m.shp", "r") as tree_file:
        tree_polys = [sg.shape(feature['geometry']) for feature in tree_file]
    for ix, poly in enumerate(tree_polys):
        if not poly.is_valid:
            # try to fix invalidities
            poly = poly.buffer(0)
            poly = so.unary_union(poly)
            if not poly.is_valid:
                continue
        # let canopy 'radiate' on surrounding houses
        size_factor = math.sqrt(poly.area) / 3.14
        #print size_factor
        bpoly = poly.buffer(50 + math.sqrt(size_factor), resolution=8)
        # first get rough estimate of houses
        for hit in index.intersection(bpoly.bounds):
            point = house_properties[hit]['geom']
            # check more thoroughly
            if bpoly.contains(point):
                house_properties[hit]['trees'] += size_factor
        if ix % 500 == 0:
            print ix
    del index
    del tree_polys

    geo_features = []
    print 'houses to convert: ' + str(len(house_properties))
    for idx, house in enumerate(house_properties):
        if idx % 10000 == 0:
            print idx
        point = house['geom']
        geometry = geojson.Point((point.x, point.y))
        properties = {
            'addr': house['addr'],
            're': house['re'],
            'rei': house['rei'],
            'trs': house['trees']
        }
        gj_house = geojson.Feature(geometry=geometry, properties=properties)
        geo_features.append(gj_house)

    print 'writing geojson'
    gj = geojson.FeatureCollection(geo_features)
    dump = geojson.dumps(gj)
    with open('geo_value_trees.json', 'w') as f:
        f.write(dump)
    print 'done'
    del gj
    del geo_features
    del dump
    return
def get_places(self, asGeoJsonString=True):
    from . import place

    places = {}

    def updatePlace(key, eventype, en_event, he_event):
        if not places.get(key):
            places[key] = {"type": eventype,
                           "en_events": [en_event],
                           "he_events": [he_event]}
        else:
            places[key]["en_events"] += [en_event]
            places[key]["he_events"] += [he_event]
            if places[key]["type"] != eventype:
                places[key]["type"] = "mixed"

    # get set of all places, birth, death, composition, publication
    if getattr(self, "birthPlace", None):
        updatePlace(self.birthPlace, "birth", "Born", u"נולד")

    for i in self.get_indexes():  # todo: Commentaries
        if getattr(i, "compPlace", None):
            tp = i.composition_time_period()
            if tp:
                en_string = "{} composed {}".format(i.get_title("en"),
                                                    tp.period_string("en"))
                he_string = i.get_title("he") + u" נתחבר " + tp.period_string("he")
            else:
                en_string = "{} composed".format(i.get_title("en"))
                he_string = i.get_title("he") + u" " + u"נתחבר"
            updatePlace(i.compPlace, "composed", en_string, he_string)
        if getattr(i, "pubPlace", None):
            tp = i.publication_time_period()
            if tp:
                en_string = "{} first published {}".format(i.get_title("en"),
                                                           tp.period_string("en"))
                he_string = i.get_title("he") + u" נדפס לראשונה " + tp.period_string("he")
            else:
                en_string = "{} first published".format(i.get_title("en"))
                he_string = i.get_title("he") + u" " + u"נדפס לראשונה"
            updatePlace(i.pubPlace, "published", en_string, he_string)

    if getattr(self, "deathPlace", None):
        updatePlace(self.deathPlace, "death", "Died", u"נפטר")

    if not places:
        return None

    for key, data in places.items():
        p = place.Place().load({"key": key})
        if not p:
            logger.warning(u"Found a bad {} place key '{}' for {}".format(
                data["type"], key, self.primary_name("en")))
            del places[key]
            continue
        data["en_name"] = p.primary_name("en")
        data["he_name"] = p.primary_name("he")
        data["point"] = p.point_location()

    if not asGeoJsonString:
        return places

    features = []
    for key, data in places.iteritems():
        if data.get("point"):
            loc = data.pop("point")
            features.append(geojson.Feature(geometry=loc, id=key, properties=data))
    return geojson.dumps(geojson.FeatureCollection(features))
def get_sympo_zone2(self):
    """
    Return the sympo zones for the department concerned.
    """
    # read the zones sympos file for the corresponding department
    fname_mask = '../GeoData/zones_sympo_multiples/' + self.dept + '_mask_zones_sympos.nc'
    da_mask = xr.open_dataarray(fname_mask)
    fileresult = '../zonage_homogeneous_criterion/' + self.dept + '_' + \
        self.date.strftime("%Y%m%d%H%M") + '_' + self.distance_choice + '.csv'
    print(fileresult)
    try:
        f = open(fileresult)
        # for some reason, zones from this column are considered float
        # otherwise (41_202001260000_compas.csv)
        dfres2 = pd.read_csv(fileresult,
                             sep=',',
                             index_col=0,
                             na_filter=True,
                             dtype={'zone_winning_comp': str})
        dfres2 = dfres2.fillna(9999)
        # added function call
        self.dfres2 = dfres2
        self.zones_homogenes()
        #print(self.zs_l)
        #print(self.temps_l)
        # end of addition
    except IOError:
        print("File not accessible2")
        self.zs_l = ["departement"]
        if hasattr(self, "dfres2"):
            # we suppress the attribute in order to ignore the condition
            # in update_chor_html
            del self.dfres2

    # when the combined zone is not part of the "zones_sympo_combined_dpt.json"
    # we need to create it
    with open("../GeoData/ZonesSympo/zones_sympo_4326.json", "r") as fp:
        poly_geo = json.load(fp)
    # list of the zone sympo ids in the json file (zones_sympo_4326.json)
    feature = []
    zs_json = [
        poly_geo["features"][i]["properties"]["id"]
        for i in range(len(poly_geo["features"]))
    ]
    for ival, val in enumerate(self.zs_l):
        if not (val in list(da_mask.id.values)):
            #print('when do we end up in this case?', val, self.step)
            val_l = val.split('+')
            for j, zs in enumerate(val_l):
                if j == 0:
                    # init shape
                    id_json = zs_json.index(zs)
                    shape = sh.asShape(poly_geo['features'][id_json]['geometry'])
                else:
                    id_json = zs_json.index(zs)
                    shape = shape.union(
                        sh.asShape(poly_geo['features'][id_json]['geometry']))
            feature.append(
                geojson.Feature(geometry=shape, properties={'id': val}))
    # for the sake of not bugging the function ipyl.Choropleth
    self.toto = dict(
        zip(self.zs_l, list(np.arange(len(self.zs_l)).astype(float))))
    data = geojson.FeatureCollection(feature)
    zsympo = "../GeoData/ZonesSympo/zones_sympo_combined_" + self.dept + ".json"
    with open(zsympo) as geojson1:
        poly_geojson = json.load(geojson1)
    if not len(feature) == 0:
        feature_extend = poly_geojson["features"] + data["features"]
    else:
        feature_extend = poly_geojson["features"]
    new_poly = {}
    new_poly["type"] = poly_geojson["type"]
    #new_poly["crs"] = poly_geojson["crs"]
    new_poly["features"] = []
    for x in feature_extend:
        if x["properties"]["id"] in self.zs_l:
            x['id'] = x["properties"]["id"]
            new_poly["features"].append(x)
    self.region_geo2 = new_poly
    if hasattr(self, "da"):
        #print('entering da branch')
        self.aggregate()
def save(self, out):
    collection = geojson.FeatureCollection(self.features)

    with open(out, 'w') as fp:
        geojson.dump(collection, fp)
        if 'type2_fast' in chargepoint['availability']:
            type2_count += chargepoint['availability']['type2_fast']
        if 'type1_standard' in chargepoint['availability']:
            type1_count += chargepoint['availability']['type1_standard']
        if charge_kind == '':
            charge_kind = chargepoint['kind']
        elif charge_kind != chargepoint['kind']:
            print(str(node['id']) + " is a complex location")
            continue

    point = geojson.Point((node['lng'], node['lat']))
    parking_spaces = len(node['charge_availability'])
    feature_properties = {
        'access': 'customers',
        'amenity': 'charging_station',
        'capacity': parking_spaces,
        'name': node['public_name'],
        'opening_hours': '24/7',
        'operator': 'Source London',
    }
    if type2_count > 0:
        feature_properties['socket:type2'] = type2_count
    if type1_count > 0:
        feature_properties['socket:type1'] = type1_count
    feature = geojson.Feature(geometry=point, properties=feature_properties)
    feature_list.append(feature)

coll = geojson.FeatureCollection(feature_list)
f = open('sl_osm.geojson', 'w')
f.write(geojson.dumps(coll))
def geojson_deployments(dir, outfile='cproof-deployments.geojson'):
    props = ['deployment_start', 'deployment_end', 'platform_type',
             'glider_model', 'glider_name', 'glider_serial',
             'deployment_name', 'project', 'institution', 'comment']
    subdirs = glob.glob(dir + '/*')
    features = []
    kml = simplekml.Kml()

    np.random.seed(20190101)
    print('subdirs', subdirs)
    colornum = 0
    for d in subdirs:
        _log.info(d)
        if os.path.isdir(d):
            subdirs2 = glob.glob(d + '/*')
            for d2 in subdirs2:
                _log.info(d2)
                if os.path.isdir(d2):
                    try:
                        nc = glob.glob(d2 + '/L2-gridfiles/*.nc')
                        with xr.open_dataset(nc[0]) as ds:
                            _log.info(f'opened {nc[0]}')
                            att = ds.attrs
                            good = (ds.longitude < -125)
                            line = np.vstack((ds.longitude[good],
                                              ds.latitude[good])).T
                            ls = geojson.LineString(line.tolist())
                            feat = geojson.Feature(geometry=ls)
                            for prop in props:
                                if prop in ds.attrs.keys():
                                    feat.properties[prop] = ds.attrs[prop]
                                else:
                                    feat.properties[prop] = ''

                            # get URL....
                            feat.properties['url'] = (
                                'http://cproof.uvic.ca/gliderdata/deployments/'
                                + d2[2:])
                            # get color:
                            cols = np.random.randint(0, 200, 3)
                            # cols = pygu.get_html_non_blue(colornum)
                            colornum += 1
                            feat.properties['color'] = '#%02X%02X%02X' % (
                                cols[0], cols[1], cols[2])
                            if ds['time'][-1] > np.datetime64(
                                    datetime.datetime.now()) - np.timedelta64(2, 'D'):
                                feat.properties['active'] = True
                            else:
                                feat.properties['active'] = False

                            features += [feat]

                            # make the kml:
                            pnt = kml.newpoint(coords=[line[-1]])
                            pnt.style.iconstyle.icon.href = 'http://cproof.uvic.ca/deployments/assets/images/slocum_glider.png'
                            coords = []
                            for thelon, thelat in zip(ds.longitude.values,
                                                      ds.latitude.values):
                                coords += [(thelon, thelat)]
                            pnt.timestamp.when = f'{ds.time.values[-1]}'[:-3]
                            ls = kml.newlinestring(coords=coords,
                                                   name=att['deployment_name'])
                            ls.timespan.begin = f'{ds.time.values[0]}'[:-3]
                            ls.timespan.end = f'{ds.time.values[-1]}'[:-3]
                            ls.style.linestyle.color = 'ee' + '%02X%02X%02X' % (
                                cols[2], cols[1], cols[0])
                            ls.style.linestyle.width = 3
                            kml.save(d2[2:] + '/' + att['deployment_name'] + '.kml')
                    except:
                        _log.info(f'Could not find grid file {d2}')

    feature_collection = geojson.FeatureCollection(features)
    with open(outfile, 'w') as fout:
        s = geojson.dumps(feature_collection)
        fout.write(s)
    features.append(
        geojson.Feature(geometry=feature.geometry,
                        id=id_counter,
                        properties={'id': id_counter}))

    last_speed = -1
    time_samples = len(time_indices)
    for time_index in range(time_samples):
        x = x_offset * time_samples + time_index
        time_id = str(time_indices[time_index])
        observations = 0
        deviation = 0
        if time_id in samples[geom_id]:
            last_speed = samples[geom_id][time_id][0]
            observations = samples[geom_id][time_id][1]
            deviation = samples[geom_id][time_id][2]
        if last_speed == -1:
            last_speed = find_average(samples[geom_id])
        pixels[x, y] = (int(last_speed * 1.5), observations, 0, 255)
    id_counter += 1

out_image.save(OUT_IMAGE)

print 'A .json file containing the geometry for the samples data will be generated at', OUT_GEOJSON
out_geojson = open(OUT_GEOJSON, 'w')
out_geojson.write(
    geojson.dumps(geojson.FeatureCollection(features), sort_keys=True))
out_geojson.close()
def database():
    lat = flask.request.args.get('lat', default=None)
    long = flask.request.args.get('long', default=None)
    # radius = flask.request.args.get('radius', default=None)
    if lat is None or long is None:
        return flask.render_template(
            "error.html",
            error_title="No lat or long",
            error="Bitte gib die Parameter ?lat und &long an")
    else:
        try:
            config = configparser.ConfigParser()
            config.read("config.ini")
            host = config.get("postgres", "host")
            db = config.get("postgres", "db")
            user = config.get("postgres", "user")
            password = config.get("postgres", "password")
            key = config.get("graphhopper", "key")
            # the user and password values were masked in the source; this is
            # the obvious reconstruction from the config values read above
            connect_str = "dbname=" + db + " user=" + user + " host=" + host + \
                " password=" + password
            # use our connection values to establish a connection
            conn = psycopg2.connect(connect_str)
            # create a psycopg2 cursor that can execute queries
            cursor = conn.cursor()
            cursor.execute(
                ("""SELECT *,
                           ST_Distance(haltestellen.geom,
                                       ST_MakePoint(<long>, <lat>)::geography) as distance,
                           st_asgeojson(geom)
                    FROM public.haltestellen
                    ORDER BY distance
                    LIMIT 5;
                 """).replace("<lat>", lat).replace("<long>", long))
            # cursor.execute("""SELECT * FROM public.haltestellen WHERE ST_DWithin(haltestellen.geom, ST_MakePoint(6.9881160312996728, 50.96659064159747)::geography, 10000);""")

            rows = cursor.fetchall()
            features = []
            start_geo = lat + "%2C" + long
            # each row gets its walking distance/time appended, so convert the
            # result tuples to lists first (the original appended to `rows`
            # itself, which would loop forever)
            rows = [list(row) for row in rows]
            for col in rows:
                # col[12] is the st_asgeojson(geom) string; parse it to get the
                # stop's lat/lng for the routing request (the original indexed
                # single characters of the raw string)
                coords = json.loads(col[12])["coordinates"]
                finish_geo = str(coords[1]) + "%2C" + str(coords[0])
                url = "https://graphhopper.com/api/1/route?point=" + start_geo + \
                    "&point=" + finish_geo + "&vehicle=foot&instructions=false" + \
                    "&points_encoded=false&calc_points=false&locale=de&key=" + key
                req = urllib.request.Request(url)
                page = urllib.request.urlopen(req)
                routing_json = json.load(page)
                col.append(routing_json["paths"][0]["distance"])
                col.append(routing_json["paths"][0]["time"] / 1000 / 60)

            for col in rows:
                properties = {}
                properties['gid'] = col[0]
                properties['name'] = col[1]
                properties['knotennummer'] = str(col[2])
                properties['typ'] = col[3]
                properties['nr_stadtteil'] = str(col[4])
                properties['stadtteil'] = col[5]
                properties['nr_stadtbez'] = col[6]
                properties['stadtbez'] = col[7]
                properties['hyperlink'] = col[8]
                properties['distance'] = col[11]
                properties['foot_distance'] = col[13]
                properties['foot_time'] = col[14]
                geo = col[12]
                feature = geojson.Feature(geometry=geojson.loads(geo),
                                          properties=properties)
                features.append(feature)

            collection = geojson.FeatureCollection(features)
            resp = flask.Response(geojson.dumps(collection),
                                  status=200,
                                  mimetype='application/json')
            return resp
        except psycopg2.DatabaseError as e:
            print("Uh oh, can't connect. Invalid dbname, user or password?")
            print(e)
            return ""
def create_features(height, width):
    """
    Function that calculates the midpoint coordinates of each hexagon in the
    transformed picture.
    """
    # determine size of grid circles from image and step size in x direction
    radius = (height / 10)
    x_step = np.cos(np.deg2rad(30)) * radius
    origins = []
    column = []
    # determine x and y coordinates of gridcells midpoints
    for a in range(1, 16):  # range reflects gridsize in x direction
        x = (x_step * a)
        for b in range(1, 11):  # range reflects gridsize in y direction
            if a % 2 == 0:
                if b == 10:
                    continue
                y = (radius * b)
            else:
                y = (radius * (b - 0.5))
            origins.append([x, y])
            column.append(a)
    origins = np.array(origins)
    board_cells = len(origins)

    """
    code to add ghost cells.
    """
    y_jump = radius / 2
    dist = y_jump / np.cos(np.deg2rad(30))
    x_jump = dist / 2
    features = []
    for i, (x, y) in enumerate(origins):
        # determine all the corner points of the hexagon
        point1 = [x + dist, y]
        point2 = [x + x_jump, y + y_jump]
        point3 = [x - x_jump, y + y_jump]
        point4 = [x - dist, y]
        point5 = [x - x_jump, y - y_jump]
        point6 = [x + x_jump, y - y_jump]
        # create a geojson polygon for the hexagon
        polygon = geojson.Polygon(
            [[point1, point2, point3, point4, point5, point6, point1]])
        feature = geojson.Feature(id=i, geometry=polygon)
        feature.properties["z_changed"] = True
        feature.properties["landuse_changed"] = True
        feature.properties["column"] = column[i]
        feature.properties["tygron_id"] = i
        # these x and y centers are not actually relevant --> features are
        # transformed to other coordinates.
        feature.properties["x_center"] = int(round(x))
        feature.properties["y_center"] = int(round(y))
        feature.properties["ghost_hexagon"] = False
        features.append(feature)

    x_left = origins[0][0] - (x_step * 5)
    ghost_origins = []
    ghost_columns = []
    next_column = max(column)
    for a in range(1, 5):  # range reflects gridsize in x direction
        x = x_left + (x_step * a)
        next_column += 1
        for b in range(1, 11):  # range reflects gridsize in y direction
            if a % 2 == 0:
                if b == 10:
                    continue
                y = (radius * b)
            else:
                y = (radius * (b - 0.5))
            ghost_origins.append([x, y])
            ghost_columns.append(next_column)
    x_right = origins[-1][0] + x_step
    for a in range(0, 4):  # range reflects gridsize in x direction
        x = x_right + (x_step * a)
        next_column += 1
        for b in range(1, 11):  # range reflects gridsize in y direction
            if a % 2 == 0:
                if b == 10:
                    continue
                y = (radius * b)
            else:
                y = (radius * (b - 0.5))
            ghost_origins.append([x, y])
            ghost_columns.append(next_column)
    """
    for b in range(1, 11):
        x = x_left
        y = (radius * (b - 0.5))
        ghost_origins.append([x, y])
        ghost_columns.append(next_column)
    x_left += x_step
    next_column += 1
    for b in range(1, 10):
        x = x_left
        y = radius * b
        ghost_origins.append([x, y])
        ghost_columns.append(next_column)
    next_column += 1
    for b in range(1, 10):
        x = x_right
        y = radius * b
        ghost_origins.append([x, y])
        ghost_columns.append(next_column)
    x_right += x_step
    next_column += 1
    for b in range(1, 11):
        x = x_right
        y = (radius * (b - 0.5))
        ghost_origins.append([x, y])
        ghost_columns.append(next_column)
    #vert_middle = width / 2
    #min_column = min(column)
    """
    for i, (x, y) in enumerate(ghost_origins):
        # determine all the corner points of the hexagon
        point1 = [x + dist, y]
        point2 = [x + x_jump, y + y_jump]
        point3 = [x - x_jump, y + y_jump]
        point4 = [x - dist, y]
        point5 = [x - x_jump, y - y_jump]
        point6 = [x + x_jump, y - y_jump]
        # create a geojson polygon for the hexagon
        polygon = geojson.Polygon(
            [[point1, point2, point3, point4, point5, point6, point1]])
        ghost_id = i + board_cells
        feature = geojson.Feature(id=ghost_id, geometry=polygon)
        feature.properties["z_changed"] = True
        feature.properties["landuse_changed"] = True
        feature.properties["column"] = ghost_columns[i]
        feature.properties["tygron_id"] = None
        # these x and y centers are not actually relevant --> features are
        # transformed to other coordinates.
        feature.properties["x_center"] = int(round(x))
        feature.properties["y_center"] = int(round(y))
        feature.properties["ghost_hexagon"] = True
        features.append(feature)

    # create geojson featurecollection with all hexagons.
    features = geojson.FeatureCollection(features)
    with open('ghost_cells_test.geojson', 'w') as f:
        geojson.dump(features, f, sort_keys=True, indent=2)
    return features, origins, radius
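# Hedged usage sketch: the height/width values below are placeholders for the
# dimensions of the transformed board image.
features, origins, radius = create_features(height=1000, width=1500)
print(len(features["features"]), "hexagons, radius =", radius)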
            Icon = "star"
            Color = "#1dcc70"  # green
        else:
            Units = result[2]
            Icon = Units
            Color = "#754aed"  # purple
        newFeature = geojson.Feature(geometry=Geometry,
                                     properties={
                                         "date": result[0],
                                         "units": Units,
                                         "address": result[3],
                                         "zip": result[1],
                                         "marker-color": Color,
                                         "marker-symbol": Icon,
                                         "marker-size": "small"
                                     })
        allZIPPoints.append(newFeature)

    here = os.path.dirname(os.path.realpath(__file__))
    filename = "%s.geojson" % ZIP.rstrip()
    filePath = os.path.join(here, "../geojson", filename)
    newGeoJSONFile = open(filePath, "w")
    ZIPcollection = geojson.FeatureCollection(allZIPPoints)
    geojson.dump(ZIPcollection, newGeoJSONFile)
    newGeoJSONFile.close()

database.commit()
database.close()
ZIPfile.close()
def get_geojson_dump(features):
    # build a feature collection
    feature_collection = geojson.FeatureCollection(features)
    return geojson.dumps(feature_collection)
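# Hedged usage sketch with two toy point features:
import geojson

features = [
    geojson.Feature(geometry=geojson.Point((-122.42, 37.77)),
                    properties={"name": "a"}),
    geojson.Feature(geometry=geojson.Point((-122.41, 37.78)),
                    properties={"name": "b"}),
]
print(get_geojson_dump(features))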
bus_table = {}
gen_count = {}
bus_features = []
for x in buses:
    bus_table[x['Bus ID']] = x
    gen_count[x['Bus ID']] = 0  # key by the bus id, not the literal string
    xy = x['lng'], x['lat']
    props = deepcopy(x)
    props.pop('lng')
    props.pop('lat')
    geom = geojson.Point(xy)
    f = geojson.Feature(geometry=geom, properties=props)
    bus_features.append(f)

bus_collect = geojson.FeatureCollection(features=bus_features)
with open('../../FormattedData/GIS/bus.geojson', 'w') as io:
    json.dump(bus_collect, io, indent=4)

##### Process branches #####
branch_df = pd.read_csv('../../SourceData/branch.csv')
branches = list(branch_df.T.to_dict().values())
branch_features = []
for x in branches:
    bf = bus_table[x['From Bus']]
    bt = bus_table[x['To Bus']]
    pf = bf['lng'], bf['lat']
def show(data_objects, **options):
    """Returns geojs scene for JupyterLab display

    :param data_objects: list of GeoData objects to display, in front-to-back
        rendering order.
    :param options: options passed to jupyterlab_geojs.Scene instance.
    :return: jupyterlab_geojs.Scene instance if running Jupyter; otherwise
        returns data_objects for default display
    """
    if not data_objects:
        return None

    if not is_loaded():
        return data_objects

    # (else)
    if not hasattr(data_objects, '__iter__'):
        data_objects = [data_objects]
    #print(data_objects)

    scene = jupyterlab_geojs.Scene(**options)
    scene.create_layer('osm')
    # feature_layer = scene.create_layer('feature')
    feature_layer = None
    combined_bounds = None
    # Reverse order so that first item ends on top
    for data_object in reversed(data_objects):
        # Create map feature
        #print(data_object._getdatatype(), data_object._getdataformat())
        # type is vector, format is [.json, .geojson, .shp, pandas]
        """
        data = data_object.get_data()
        # Can only seem to get json *string*; so parse into json *object*
        json_string = data.to_json()
        json_object = json.loads(json_string)
        feature = feature_layer.create_feature('geojson', json_object)
        #print(json_object)
        feature.enableToolTip = True  # doesn't work
        geometry = data['geometry']
        bounds = geometry.total_bounds
        """
        meta = data_object.get_metadata()
        #print(meta)
        meta_bounds = meta.get('bounds').get('coordinates')[0]
        #print(meta_bounds)
        assert meta_bounds, 'data_object missing bounds'

        # Bounds format is [xmin, ymin, xmax, ymax]
        bounds = [
            meta_bounds[0][0], meta_bounds[0][1],
            meta_bounds[2][0], meta_bounds[2][1]
        ]
        #print(bounds)
        if combined_bounds is None:
            combined_bounds = bounds
        else:
            combined_bounds[0] = min(combined_bounds[0], bounds[0])
            combined_bounds[1] = min(combined_bounds[1], bounds[1])
            combined_bounds[2] = max(combined_bounds[2], bounds[2])
            combined_bounds[3] = max(combined_bounds[3], bounds[3])

        # print('options:', options)
        rep = options.get('representation')
        if rep == 'outline':
            # Create polygon object
            rect = [
                [bounds[0], bounds[1]],
                [bounds[2], bounds[1]],
                [bounds[2], bounds[3]],
                [bounds[0], bounds[3]],
                [bounds[0], bounds[1]],
            ]
            geojs_polygon = geojson.Polygon([rect])
            properties = {
                'fillColor': '#fff',
                'fillOpacity': 0.1,
                'stroke': True,
                'strokeColor': '#333',
                'strokeWidth': 2
            }
            geojson_feature = geojson.Feature(geometry=geojs_polygon,
                                              properties=properties)
            geojson_collection = geojson.FeatureCollection([geojson_feature])
            # print(geojson_collection)

            if feature_layer is None:
                feature_layer = scene.create_layer('feature')
            feature = feature_layer.create_feature('geojson',
                                                   geojson_collection,
                                                   **options)
        #elif isinstance(data_object, GirderDataObject) and \
        elif data_object.__class__.__name__ == 'GirderDataObject' and \
                data_object._getdatatype() == 'raster':
            # Use large-image display - only admin can tell if it is installed
            #print(data_object._getdatatype(), data_object._getdataformat())
            tile_url = data_object._get_tile_url()
            print('tile_url', tile_url)
            tile_layer = scene.create_layer('tile', url=tile_url)

    #print(combined_bounds)
    corners = [[combined_bounds[0], combined_bounds[1]],
               [combined_bounds[2], combined_bounds[1]],
               [combined_bounds[2], combined_bounds[3]],
               [combined_bounds[0], combined_bounds[3]]]
    scene.set_zoom_and_center(corners=corners)
    #display(scene)
    return scene