def site_geojson_generator(params_list):
    """
    Generator which yields strings that, when concatenated, form a valid
    geojson FeatureCollection. The first yielded chunk is the start of the
    geojson object; each subsequent chunk is a geojson Feature followed by a
    comma; the final chunk is the last feature (if any) plus the closing
    brace and bracket.

    This is a little tricky, as we need to omit the last comma to make valid
    JSON, thus we use a lagging generator, similar to
    http://stackoverflow.com/questions/1630320/
    Based on https://blog.al4.co.nz/2016/01/streaming-json-with-flask/

    :param params_list: iterable of query-parameter dicts, one per request to
        the NWIS sites service
    :yield String: successive chunks of the FeatureCollection document
    """
    yield '{"crs":{"type": "name","properties": {"name": "urn:ogc:def:crs:EPSG::4326"}},' \
          '"type": "FeatureCollection","features": [\n'
    prev_feature = None
    with Session() as session:
        for params in params_list:
            # BUG FIX: header value was 'gzip,default' — 'default' is not a
            # valid content-coding; 'deflate' was clearly intended.
            site_resp = session.get(NWIS_SITES_SERVICE_ENDPOINT, params=params,
                                    headers={'Accept-Encoding': 'gzip,deflate'},
                                    stream=True)
            if site_resp.status_code == 200:
                for site_feature in site_feature_generator(site_resp.iter_lines(decode_unicode=True)):
                    # Lag by one feature so we know which one is last.
                    if prev_feature:
                        yield geojson_dumps(prev_feature) + ', \n'
                    prev_feature = site_feature
            else:
                msg = create_request_resp_log_msg(site_resp)
                app.logger.warning(msg)
    # Got all of the features so yield the last one closing the geojson object
    if prev_feature:
        yield geojson_dumps(prev_feature) + ']}'
    else:
        yield ']}'
def test_valid_data(self):
    """A fully populated station record round-trips to a GeoJSON Point
    feature with the expected remapped property names."""
    site_record = {
        'dec_lat_va': '45.0',
        'dec_long_va': '-100.0',
        'dec_coord_datum_cd': 'NAD83',
        'station_nm': 'Black River',
        'agency_cd': 'USGS',
        'site_no': '12345',
        'huc_cd': '03120312',
        'site_tp_cd': 'ST',
    }
    expected_properties = {
        'stationName': 'Black River',
        'agencyCode': 'USGS',
        'siteNumber': '12345',
        'hucCode': '03120312',
        'SiteTypeCode': 'ST',
        'SiteType': 'Stream',
        'siteId': 'USGS-12345',
        'url': 'https://waterdata.usgs.gov/nwis/inventory?agency_code=USGS&site_no=12345'
    }
    # Serialize then re-parse to exercise the full dump/load round trip.
    feature = get_site_feature(site_record)
    parsed = geojson_loads(geojson_dumps(feature))
    self.assertEqual(parsed['properties'], expected_properties)
    self.assertEqual(parsed['geometry']['type'], 'Point')
def test_valid_data(self):
    """Verify get_site_feature maps NWIS column names onto GeoJSON
    properties and yields Point geometry."""
    record = {
        'dec_lat_va': '45.0',
        'dec_long_va': '-100.0',
        'dec_coord_datum_cd': 'NAD83',
        'station_nm': 'Black River',
        'agency_cd': 'USGS',
        'site_no': '12345',
        'huc_cd': '03120312',
        'site_tp_cd': 'ST',
    }
    expected = {
        'stationName': 'Black River',
        'agencyCode': 'USGS',
        'siteNumber': '12345',
        'hucCode': '03120312',
        'SiteTypeCode': 'ST',
        'SiteType': 'Stream',
        'siteId': 'USGS-12345',
        'url': 'https://waterdata.usgs.gov/nwis/inventory?agency_code=USGS&site_no=12345'
    }
    round_tripped = geojson_loads(geojson_dumps(get_site_feature(record)))
    self.assertEqual(round_tripped['properties'], expected)
    self.assertEqual(round_tripped['geometry']['type'], 'Point')
def main():
    """Parse CLI arguments, load the OxPoints RDF dumps into a graph, and
    write the requested GeoJSON feed to stdout.

    Exits with an error message when the requested function name is unknown.
    """
    import argparse
    parser = argparse.ArgumentParser(description='Produce GeoJSON feeds to be used by TileMill')
    parser.add_argument('oxpoints_file', type=argparse.FileType('r'),
                        help='Main RDF dump expected in {default_serialization} format'.format(default_serialization=DEFAULT_SERIALIZATION))
    parser.add_argument('oxpoints_shape', type=argparse.FileType('r'),
                        help='Shapes RDF dump expected in text/turtle format')
    parser.add_argument('function',
                        help="Function (should be one of '{available_functions}')".format(available_functions=', '.join(FUNCTIONS)))
    # BUG FIX: this option was declared with action='store_true', which made
    # it a boolean flag — passing it set rdf_serialization to True instead of
    # a serialization name, breaking graph.parse(format=...). It now stores
    # the given value and defaults to DEFAULT_SERIALIZATION.
    parser.add_argument('--rdf-serialization', dest='rdf_serialization',
                        default=DEFAULT_SERIALIZATION,
                        help='RDF serialization, defaults to text/turtle')
    ns = parser.parse_args()
    sys.stderr.write("==> Loading data into the graph\n")
    graph = rdflib.Graph()
    graph.parse(ns.oxpoints_file, format=ns.rdf_serialization)
    graph.parse(ns.oxpoints_shape, format=ns.rdf_serialization)
    sys.stderr.write("==> Data loaded\n")
    # Iterating FUNCTIONS directly replaces the Python-2-only .iterkeys().
    if ns.function not in FUNCTIONS:
        sys.exit("Incorrect function (should be one of '{available_functions}')".format(available_functions=', '.join(FUNCTIONS)))
    else:
        collection = FUNCTIONS[ns.function](graph)
        sys.stdout.write(geojson_dumps(collection))
poly_coords.append({ 'x': cr['x'], 'y': cr['y'], 'latitude': cr['lat'], 'longitude': cr['long'] }) poly_geo_coords.append((cr['long'], cr['lat'])) # add final closing point poly_geo_coords.append((poly[0]['long'], poly[0]['lat'])) final_coords.append(poly_coords) geo_feature = Feature(geometry=Polygon([poly_geo_coords], precision=15)) geo_features.append(geo_feature) geo_feature_collection = FeatureCollection(geo_features) geo_feature_collection_dump = geojson_dumps(geo_feature_collection, sort_keys=True) # new_ctrs=[] # new_ctrs_debug=[] # for cidx in range(len(contours)): # contour= contours[cidx] # peri = cv2.arcLength(contour, True) # approx = cv2.approxPolyDP(contour, 0.04 * peri, True) # new_ctrs_debug.append({ # 'peri': peri, # 'approx': approx, # 'coords': json.dumps(list(map(lambda x: x[0], ctr_json['contours'][cidx]))) # }) # new_ctrs.append(approx) with open(json_contour_filepath, 'w') as outfile:
def run():
    """Scan the English Wikipedia dump, collect geo-located wikilink targets
    for each page, and write one folder per qualifying page containing the
    page text and (optionally) a GeoJSON map of linked places.

    Best-effort batch job: any exception aborts the scan and is printed.
    """
    try:
        reset_data_directory()
        title2coords = load_coordinates_dictionary()
        page_count = 0
        for page in get_english_wikipedia_pages():
            page_count += 1
            if page_count % 100000 == 0:
                print("page_count:", page_count)
            page_id = page.find("id").text
            page_title = page.find("title").text
            page_text = page.find("revision/text").text
            if page_id and page_title and page_text:
                if page_title not in blacklist and not page_text.startswith(
                        "#REDIRECT"):
                    features = []
                    # this accidentally picks up wikilinks inside of tags
                    # BUG FIX: raw strings — the previous plain strings
                    # contained invalid escape sequences like "\[" which are
                    # deprecated and warn on modern Python.
                    for link in findall(r"(?<={{)[^|}]+", page_text) + findall(
                            r"(?<=\[\[)[^|}\]]+", page_text):
                        cleaned_title = clean_title(link)
                        if cleaned_title in title2coords:
                            place = title2coords[cleaned_title]
                            geometry = Point((float(place['longitude']),
                                              float(place['latitude'])))
                            properties = {}
                            for propname in [
                                    "enwiki_title", "wikidata_id",
                                    "geonames_id", "osm_id"
                            ]:
                                value = place[propname]
                                if value:
                                    properties[propname] = value
                            features.append(
                                Feature(geometry=geometry, properties=properties))
                    # NOTE(review): requires at least TWO geo features before
                    # writing output — presumably single-point maps are not
                    # interesting; confirm this is intentional (> 1 vs > 0).
                    if len(features) > 1:
                        feature_collection = FeatureCollection(features)
                        map_as_string = geojson_dumps(feature_collection,
                                                      sort_keys=True)
                        path_to_folder = join(path_to_data, page_id)
                        mkdir(path_to_folder)
                        if create_geojson:
                            path_to_map = join(path_to_folder,
                                               page_id + ".geojson")
                            with open(path_to_map, "wb") as f:
                                f.write(map_as_string.encode("utf-8"))
                        path_to_text = join(path_to_folder, page_id + ".txt")
                        with open(path_to_text, "wb") as f:
                            f.write(page_text.encode("utf-8"))
    except Exception as e:
        print(e)
def index(self):
    """Fan a text query out to the GeoNames and OpenAddresses search threads
    and merge both result sets into one GeoJSON-style FeatureCollection.

    Returns a JSON string (or JSONP when a 'callback' request parameter is
    present); returns an error string when 'query' is missing.
    """
    if 'query' in request.params:
        query = request.params['query']
    else:
        return 'ERROR: Use a query parameter'
    #log.warning(type(query))
    # Launch both backend searches in parallel, then wait up to 3s each.
    searchList = []
    threadGeonames = searchThread('geonames',query)
    searchList.append(threadGeonames)
    threadGeonames.start()
    threadOpenAddresses = searchThread('openaddresses',query)
    searchList.append(threadOpenAddresses)
    threadOpenAddresses.start()
    for search in searchList:
        search.join(3)
    rowsDict = {}
    featuresArray = []
    # Manage GeoNames search result ('ko' marks a failed backend call)
    if threadGeonames.json != 'ko':
        geonamesJson = json_loads(threadGeonames.json)
        # Create a GeoJSON response
        # Iterate over json result
        for geoname in geonamesJson['geonames']:
            featureDict = {}
            featureDict.update(type='Feature')
            featurePropertiesDict = {}
            # Create three attributes display / origin / description
            displayText = geoname['name'] + ' (' + geoname['countryName'] + ',' + geoname['fcodeName'] + ')'
            featurePropertiesDict.update({'display': displayText})
            featurePropertiesDict.update({'origin': 'geonames'})
            featureDict.update(properties=featurePropertiesDict)
            # Create geometry
            pointCoordinate = geojson.Point([geoname['lng'], geoname['lat']])
            geojsonCoordinate = geojson_dumps(pointCoordinate)
            # BUG FIX: was eval(geojsonCoordinate) — parsing JSON with eval
            # is unsafe and unnecessary; json_loads handles it correctly.
            featureDict.update(geometry=json_loads(geojsonCoordinate))
            featuresArray.append(featureDict)
    # Manage OpenAddresses search result
    if threadOpenAddresses.json != 'ko':
        openaddressesJson = json_loads(threadOpenAddresses.json)
        # Create a GeoJSON response
        # Iterate over json result
        for openaddress in openaddressesJson['features']:
            featureDict = {}
            featureDict.update(type='Feature')
            featurePropertiesDict = {}
            # Create three attributes display / origin / description
            display = [openaddress['properties']['street'],openaddress['properties']['housenumber'],openaddress['properties']['city']]
            featurePropertiesDict.update({'display': " ".join([n for n in display if n is not None])})
            featurePropertiesDict.update({'origin': 'openaddresses'})
            featureDict.update(properties=featurePropertiesDict)
            # Create geometry
            pointCoordinate = geojson.Point([openaddress['geometry']['coordinates'][0], openaddress['geometry']['coordinates'][1]])
            geojsonCoordinate = geojson_dumps(pointCoordinate)
            # BUG FIX: same eval() -> json_loads() replacement as above.
            featureDict.update(geometry=json_loads(geojsonCoordinate))
            featuresArray.append(featureDict)
    rowsDict.update(type='FeatureCollection')
    rowsDict.update(features=featuresArray)
    if 'callback' in request.params:
        # JSONP response
        response.headers['Content-Type'] = 'text/javascript; charset=utf-8'
        return request.params['callback'] + '(' + json_dumps(rowsDict) + ');'
    else:
        response.headers['Content-Type'] = 'application/json'
        return json_dumps(rowsDict)
def get(self, *args, **kwargs):
    """Autocomplete search endpoint: look up schools, clusters, blocks,
    districts, pincodes, assemblies and parliaments matching 'q', grouped
    by entity type (max 3 matches per group), and return them as JSON.

    Raises ValueError when filters[academic_year] is not a valid session.
    """
    school_api = olap_entities.School()
    params = self.request.GET
    results = []
    query = params.get('q')
    session = params.get('filters[academic_year]')
    if session not in settings.VALID_SESSIONS:
        raise ValueError('Session not valid')
    (SchoolModel, ClusterModel, BlockModel, DistrictModel, PincodeModel,
     AssemblyModel, ParliamentModel) = get_models(session, "all")

    # Schools are special-cased: they match on name OR code and carry a
    # serialized GeoJSON feature when a centroid is available.
    schools = SchoolModel.objects.filter(
        Q(school_name__icontains=query) | Q(school_code__icontains=query)
    ).order_by('school_name')[:3]
    if schools.count() > 0:
        children = []
        for school in schools:
            feature = {}
            if school.centroid is not None:
                feature = school_api._get_geojson(school)
            children.append({
                'type': 'school',
                'id': school.school_code,
                'text': school.school_name,
                'feature': geojson_dumps(feature),
            })
        results.append({'text': 'Schools', 'children': children})

    # The remaining entity groups all share the same shape, so drive them
    # from a spec table instead of six copy-pasted stanzas:
    # (group label, child type, model, lookup/order attribute, wrap text in str()).
    group_specs = [
        ('Clusters', 'cluster', ClusterModel, 'cluster_name', False),
        ('Blocks', 'block', BlockModel, 'block_name', False),
        ('Ed. Dept. Districts', 'district', DistrictModel, 'district', False),
        ('Pincodes', 'pincode', PincodeModel, 'pincode', True),
        ('Assembly Constituencies', 'assembly', AssemblyModel, 'assembly_name', True),
        ('Parliamentary Constituencies', 'parliament', ParliamentModel, 'parliament_name', True),
    ]
    for label, child_type, model, attr, stringify in group_specs:
        matches = model.objects.filter(
            **{attr + '__icontains': query}).order_by(attr)[:3]
        if matches.count() > 0:
            children = []
            for obj in matches:
                value = getattr(obj, attr)
                children.append({
                    'type': child_type,
                    'id': value,
                    # Pincodes/assemblies/parliaments were str()-wrapped in
                    # the original; preserve that exactly.
                    'text': str(value) if stringify else value,
                })
            results.append({'text': label, 'children': children})

    json_results = json.dumps(results)
    return self.get_json_response(json_results)
def to_geojson_str(cls, d=None):
    """Serialize *d* (a GeoJSON-compatible mapping) to a GeoJSON string.

    :param d: mapping to serialize; defaults to an empty mapping
    :return: GeoJSON string
    """
    # BUG FIX: the default was the mutable `d=dict()`, a shared instance
    # across all calls. Use the None-sentinel idiom instead; callers that
    # relied on the empty-dict default get identical output.
    return geojson_dumps(d if d is not None else {})
def get_geojson_carto_gs(self, filepath: tuple = (), opts: dict = {}) -> dict:
    """Extract building footprints from a Carto-grayscale basemap image and
    write them out as a GeoJSON FeatureCollection.

    Pipeline: load the image, mask it in HSV space using the preset's gray
    fill/border colors, find contours on the mask, translate pixel contours
    to geographic coordinates via the sibling GeoTIFF, then write a contour
    JSON file and a GeoJSON file to the configured result paths.

    :param filepath: (image path, GeoTIFF path), both relative to BASE_DIR
    :param opts: pass a truthy 'return_polygon_data' to embed the parsed
        GeoJSON in the returned dict
    :return: dict with 'file_path', 'file_size', 'polygon_total' and
        optionally 'geojson'
    """
    # [Step - 1] Get image + check color sampling
    img_path = os.path.join(BASE_DIR, filepath[0])
    # NOTE(review): img_tiff_path and img_base_path appear unused here; the
    # GeoTIFF path is re-derived from img_path below — confirm intent.
    img_tiff_path = os.path.join(BASE_DIR, filepath[1])
    img_extension = os.path.splitext(img_path)[1]
    img_name = ntpath.basename(img_path).replace(img_extension, '')
    img_base_path = img_path.replace(ntpath.basename(img_path), '')
    color_preset = self.data['color_presets'][self.options['color_preset']]
    logger.info('Color Preset (Carto Grayscale): ', {'color_preset': color_preset})
    do_contour_normalization = bool(
        color_preset['building']['normalize_contours']
    ) if 'normalize_contours' in color_preset['building'] else False
    image = cv2.imread(img_path, 1)  # 1 = load as 3-channel BGR
    fc_bgr_building_gray = color_preset['building']['fill']['gray']
    fc_hsv_building_gray = bgr_color_to_hsv(fc_bgr_building_gray)
    # The border color is either derived from the fill color ('relative')
    # or given explicitly as a color string.
    if color_preset['building']['border']['type'] == 'relative':
        fc_hsv_building_gray_darker = self.transform_relative_color(
            fc_hsv_building_gray,
            color_preset['building']['border']['value']['gray'])
    else:
        fc_hsv_building_gray_darker = self.transform_color_string_to_float(
            color_preset['building']['border']['value']['gray'])
    logger.debug(
        self.logger_base_text + 'Color Info', {
            'fill_color_bgr': {
                'gray': fc_bgr_building_gray
            },
            'fill_color_hsv': {
                'gray': fc_hsv_building_gray
            },
            'border_color_hsv': {
                'gray': fc_hsv_building_gray_darker
            }
        })
    # [Step-2] Do masking on HSV Image
    img_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    hsv = cv2.cvtColor(img_rgb, cv2.COLOR_RGB2HSV)
    mask_gray = cv2.inRange(hsv, fc_hsv_building_gray,
                            fc_hsv_building_gray_darker)
    final = cv2.bitwise_or(image, image, mask=mask_gray)
    # [Step-3] Find Contours
    # Output paths come from '<placeholder>' templates in the config.
    json_contour_filepath = self.data['file']['json_contour'].replace(
        '<result_path>', self.data['path']['result']).replace(
            '<img_name>', img_name).replace('<preset>', 'carto-gs')
    json_contour_debug_filepath = self.data['file'][
        'json_contour_debug'].replace('<result_path>',
                                      self.data['path']['result']).replace(
                                          '<img_name>', img_name).replace(
                                              '<preset>', 'carto-gs')
    geojson_filepath = self.data['file']['geojson'].replace(
        '<result_path>', self.data['path']['result']).replace(
            '<img_name>', img_name).replace('<preset>', 'carto-gs')
    final_gray = cv2.cvtColor(final, cv2.COLOR_BGR2GRAY)
    final_blurred = cv2.GaussianBlur(final_gray, (3, 3), 0)
    ret, final_thresh = cv2.threshold(final_blurred, 127, 255, 0)
    contours, hierarchy = cv2.findContours(final_thresh, cv2.RETR_EXTERNAL,
                                           cv2.CHAIN_APPROX_SIMPLE)
    # contour normalization
    if do_contour_normalization:
        contours = self.normalize_contours(contours)
    # Round-trip through JSON to turn numpy contour arrays into plain lists.
    ctr_json_str = json.dumps(
        {
            'contours': contours,
            'hierarchy': hierarchy
        },
        default=json_np_default_parser)
    ctr_json = json.loads(ctr_json_str)
    ctr_points = []
    for cidx in range(len(ctr_json['contours'])):
        ctr_points.append(
            list(map(lambda x: x[0], ctr_json['contours'][cidx])))
    # [Step - 4] Find Contours Geographic Coordinates
    geotiff_image = img_path.replace(img_extension, '.tif')
    translate_coords = GeoTiffProcessor.get_multi_polygon_axis_point_coordinates(
        geotiff_image, ctr_points, {'debug': False})
    final_coords = []
    geo_features = []
    for poly in translate_coords['coords']:
        poly_coords = []
        poly_geo_coords = []
        for cr in poly:
            poly_coords.append({
                'x': cr['x'],
                'y': cr['y'],
                'latitude': cr['lat'],
                'longitude': cr['long']
            })
            poly_geo_coords.append((cr['long'], cr['lat']))
        # add final closing point
        poly_geo_coords.append((poly[0]['long'], poly[0]['lat']))
        final_coords.append(poly_coords)
        geo_feature = Feature(
            geometry=Polygon([poly_geo_coords], precision=15))
        geo_features.append(geo_feature)
    geo_feature_collection = FeatureCollection(geo_features)
    geo_feature_collection_dump = geojson_dumps(geo_feature_collection,
                                                sort_keys=True)
    with open(json_contour_filepath, 'w') as outfile:
        json.dump(final_coords, outfile)
    with open(geojson_filepath, 'w') as outfile:
        outfile.write(geo_feature_collection_dump)
    # [Step-5] Draw contours to original image clone
    final_wctrs = copy(image)
    for c in contours:
        cv2.drawContours(final_wctrs, [c], 0,
                         color_preset['building']['contour'], 2)
    # Build result
    polygon_len = len(ctr_points)
    r = {
        'file_path': geojson_filepath,
        'file_size':
        str(get_file_size(geojson_filepath, SIZE_UNIT.KB)) + ' KB',
        'polygon_total': polygon_len
    }
    if 'return_polygon_data' in opts and bool(opts['return_polygon_data']):
        r['geojson'] = json.loads(geo_feature_collection_dump)
    if self.options['save_result']:
        # Persist every intermediate image for offline inspection.
        result_ftemplate = self.data['path'][
            'result'] + img_name + '-carto-gs-<fnm>' + img_extension
        self.write_image_results(
            result_ftemplate, '<fnm>',
            [('step-1-2-hsv-building-gray', fc_hsv_building_gray),
             ('step-2-image-bgr', image), ('step-3-image-rgb', img_rgb),
             ('step-4-0-hsv', hsv), ('step-4-1-hsv-mask-gray', mask_gray),
             ('step-5-final', final), ('step-6-image-gray', final_gray),
             ('step-7-final-blurred', final_blurred),
             ('step-8-final-thresh', final_thresh),
             ('step-9-image-final-with-contours', final_wctrs)])
    if self.options['show_result']:
        # Display every intermediate image interactively.
        show_image_results([
            ("Step - 1-1 (HSV Gray Color)",
             np.uint8([[fc_hsv_building_gray]])),
            ("Step - 2 (Image - BGR)", image),
            ("Step - 3 ( Image - RGB)", img_rgb),
            ("Step - 4-0 (HSV)", hsv),
            ("Step - 4-1 (HSV - Gray)", mask_gray),
            ("Step - 5 (Final)", final),
            ("Step - 6 (Final - Gray)", final_gray),
            ("Step - 7 (Final - Gray Blurred)", final_blurred),
            ("Step - 8 (Final - Gray Thresh)", final_thresh),
            ("Step - 9 (Final - with contours)", final_wctrs)
        ])
        # [Step - ending] Clean - up
        del contours, hierarchy, image, img_rgb, hsv, final, final_gray, final_wctrs, final_blurred, final_thresh, mask_gray, fc_hsv_building_gray
        return r
    else:
        # [Step - ending] Clean - up
        del contours, hierarchy, image, img_rgb, hsv, final, final_gray, final_wctrs, final_blurred, final_thresh, mask_gray, fc_hsv_building_gray
        return r
def get_geojson_osm(self, filepath: tuple = (), opts: dict = {}) -> dict:
    """Extract building footprints from an OSM basemap image and write them
    out as a GeoJSON FeatureCollection.

    Pipeline: load the image, optionally sharpen/adjust contrast per the
    color preset, mask it in HSV space using the preset's building fill and
    border colors, find contours on the mask, translate pixel contours to
    geographic coordinates via the supplied GeoTIFF, then write a contour
    JSON file and a GeoJSON file to the configured result paths.

    :param filepath: (image path, GeoTIFF path), both relative to BASE_DIR
    :param opts: pass a truthy 'return_polygon_data' to embed the parsed
        GeoJSON in the returned dict
    :return: dict with 'file_path', 'file_size', 'polygon_total' and
        optionally 'geojson'
    """
    # [Step - 1] Get image + check color sampling
    img_path = os.path.join(BASE_DIR, filepath[0])
    img_tiff_path = os.path.join(BASE_DIR, filepath[1])
    img_extension = os.path.splitext(img_path)[1]
    img_name = ntpath.basename(img_path).replace(img_extension, '')
    img_base_path = img_path.replace(ntpath.basename(img_path), '')
    color_preset = self.data['color_presets'][self.options['color_preset']]
    logger.info('Color Preset (OSM): ', {'color_preset': color_preset})
    do_contour_normalization = bool(
        color_preset['building']['normalize_contours']
    ) if 'normalize_contours' in color_preset['building'] else False
    image_origin = cv2.imread(img_path, 1)  # 1 = load as 3-channel BGR
    # Optional pre-processing steps driven by the preset.
    # NOTE(review): sharp_img is computed but image_origin is then re-copied
    # from itself, so the sharpened image is only saved, not used — confirm.
    if 'sharp_image' in color_preset['building']:
        sharp_img = self.unsharp_mask(
            image_origin, **color_preset['building']['sharp_image'])
        image_origin = copy(image_origin)
    image_new_contrast = []
    if 'adjust_contrast' in color_preset['building']:
        image = cv2.convertScaleAbs(
            image_origin,
            alpha=color_preset['building']['adjust_contrast']['alpha'],
            beta=color_preset['building']['adjust_contrast']['beta'])
        # Extra contrast variants kept only for the saved debug output.
        image_new_contrast = [
            cv2.convertScaleAbs(image_origin, alpha=1.0, beta=-10),
            cv2.convertScaleAbs(image_origin, alpha=1.0, beta=-20),
            cv2.convertScaleAbs(image_origin, alpha=1.0, beta=-30),
            cv2.convertScaleAbs(image_origin, alpha=1.0, beta=-50),
            cv2.convertScaleAbs(image_origin, alpha=1.0, beta=-60)
        ]
    else:
        image = copy(image_origin)
    light_brown = np.uint8([[color_preset['building']['fill']]])
    # Enhance image (ref: https://chrisalbon.com/machine_learning/preprocessing_images/enhance_contrast_of_greyscale_image/)
    # image = cv2.imread('images/plane_256x256.jpg', cv2.IMREAD_GRAYSCALE)
    # image_enhanced = cv2.equalizeHist(image)
    # Convert BGR to HSV for masking
    color_codes = []
    hsv_fill_color = cv2.cvtColor(light_brown, cv2.COLOR_BGR2HSV)
    # hsv_fill_color = cv2.cvtColor(light_brown, color_preset['building']['masking_color_mode'])
    # for index in hsv_fill_color:
    #     color_codes = index[0]
    color_codes = hsv_fill_color[0][0]
    # [Step - 2] Do masking on HSV Image
    img_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # img_rgb =copy(image)
    hsv = cv2.cvtColor(img_rgb, cv2.COLOR_RGB2HSV)
    # hsv = cv2.cvtColor(img_rgb, color_preset['building']['masking_color_mode'])
    fill_color = (float(color_codes[0]), float(color_codes[1]),
                  float(color_codes[2]))
    find_border_color = []
    # The border color is either derived channel-by-channel from the fill
    # color ('relative', using '+N'/'-N' offset strings) or given explicitly.
    if color_preset['building']['border']['type'] == 'relative':
        temp = []
        for idx, bbv in enumerate(
                color_preset['building']['border']['value'], 0):
            if bbv[0] == '+':
                temp.append(float(color_codes[idx]) + float(bbv[1:]))
            elif bbv[0] == '-':
                temp.append(float(color_codes[idx]) - float(bbv[1:]))
            else:
                temp.append(float(bbv))
        border_color = tuple(temp)
        # border_color = (float(color_codes[0]) + color_preset['building']['border']['value'][0], float(color_codes[1]) + color_preset['building']['border']['value'][1], float(color_codes[2]) + color_preset['building']['border']['value'][2])
    else:
        find_border_color = cv2.cvtColor(
            np.uint8([[color_preset['building']['border']['value']]]),
            cv2.COLOR_BGR2HSV)
        # find_border_color = cv2.cvtColor(np.uint8([[color_preset['building']['border']['value']]]), color_preset['building']['masking_color_mode'])
        border_color = (float(find_border_color[0][0][0]),
                        float(find_border_color[0][0][1]),
                        float(find_border_color[0][0][2]))
    logger.debug(
        self.logger_base_text + 'Color Info', {
            'fill_color': fill_color,
            'border_color': border_color,
            'float_border_color': find_border_color,
            'hsv_fill_color_codes': color_codes,
            'hsv_fill_color': hsv_fill_color
        })
    mask = cv2.inRange(hsv, fill_color, border_color)
    final = cv2.bitwise_and(image, image, mask=mask)
    # self.data['path']['result']
    # self.data['file']['json_contour']
    # Output paths come from '<placeholder>' templates in the config.
    json_contour_filepath = self.data['file']['json_contour'].replace(
        '<result_path>', self.data['path']['result']).replace(
            '<img_name>', img_name).replace('<preset>', 'osm')
    json_contour_debug_filepath = self.data['file'][
        'json_contour_debug'].replace('<result_path>',
                                      self.data['path']['result']).replace(
                                          '<img_name>', img_name).replace(
                                              '<preset>', 'osm')
    geojson_filepath = self.data['file']['geojson'].replace(
        '<result_path>', self.data['path']['result']).replace(
            '<img_name>', img_name).replace('<preset>', 'osm')
    final_gray = cv2.cvtColor(final, cv2.COLOR_BGR2GRAY)
    final_blurred = cv2.GaussianBlur(final_gray, (5, 5), 0)
    ret, final_thresh = cv2.threshold(final_blurred, 127, 255, 0)
    contours, hierarchy = cv2.findContours(final_thresh, cv2.RETR_EXTERNAL,
                                           cv2.CHAIN_APPROX_SIMPLE)
    # contour normalization
    if do_contour_normalization:
        contours = self.normalize_contours(contours)
    # Round-trip through JSON to turn numpy contour arrays into plain lists.
    ctr_json_str = json.dumps(
        {
            'contours': contours,
            'hierarchy': hierarchy
        },
        default=json_np_default_parser)
    ctr_json = json.loads(ctr_json_str)
    ctr_points = []
    for cidx in range(len(ctr_json['contours'])):
        ctr_points.append(
            list(map(lambda x: x[0], ctr_json['contours'][cidx])))
    # [Step - 4] Find Contours Geographic Coordinates
    geotiff_image = img_tiff_path
    translate_coords = GeoTiffProcessor.get_multi_polygon_axis_point_coordinates(
        geotiff_image, ctr_points, {'debug': False})
    final_coords = []
    geo_features = []
    for poly in translate_coords['coords']:
        poly_coords = []
        poly_geo_coords = []
        for cr in poly:
            poly_coords.append({
                'x': cr['x'],
                'y': cr['y'],
                'latitude': cr['lat'],
                'longitude': cr['long']
            })
            poly_geo_coords.append((cr['long'], cr['lat']))
        # add final closing point
        poly_geo_coords.append((poly[0]['long'], poly[0]['lat']))
        final_coords.append(poly_coords)
        geo_feature = Feature(
            geometry=Polygon([poly_geo_coords], precision=15))
        geo_features.append(geo_feature)
    geo_feature_collection = FeatureCollection(geo_features)
    geo_feature_collection_dump = geojson_dumps(geo_feature_collection,
                                                sort_keys=True)
    with open(json_contour_filepath, 'w') as outfile:
        json.dump(final_coords, outfile)
    with open(geojson_filepath, 'w') as outfile:
        outfile.write(geo_feature_collection_dump)
    # [Step - 5] Draw contours to original image clone
    final_wctrs = copy(
        image
    )  # final_wctrs = copy(image_origin)# final_wctrs = copy(final)
    for c in contours:
        cv2.drawContours(final_wctrs, [c], 0,
                         color_preset['building']['contour'], 2)
    # Build result
    polygon_len = len(ctr_points)
    r = {
        'file_path': geojson_filepath,
        'file_size':
        str(get_file_size(geojson_filepath, SIZE_UNIT.KB)) + ' KB',
        'polygon_total': polygon_len
    }
    if 'return_polygon_data' in opts and bool(opts['return_polygon_data']):
        r['geojson'] = json.loads(geo_feature_collection_dump)
    if self.options['save_result']:
        # Persist every intermediate image for offline inspection.
        result_ftemplate = self.data['path'][
            'result'] + img_name + '-<fnm>' + img_extension
        if 'sharp_image' in color_preset['building']:
            cv2.imwrite(
                result_ftemplate.replace('<fnm>', 'step-0-sharpen-1'),
                sharp_img)
        if 'adjust_contrast' in color_preset['building']:
            cv2.imwrite(
                result_ftemplate.replace('<fnm>', 'step-0-contrast-1'),
                image_new_contrast[0])
            cv2.imwrite(
                result_ftemplate.replace('<fnm>', 'step-0-contrast-2'),
                image_new_contrast[1])
            cv2.imwrite(
                result_ftemplate.replace('<fnm>', 'step-0-contrast-3'),
                image_new_contrast[2])
            cv2.imwrite(
                result_ftemplate.replace('<fnm>', 'step-0-contrast-4'),
                image_new_contrast[3])
            cv2.imwrite(
                result_ftemplate.replace('<fnm>', 'step-0-contrast-5'),
                image_new_contrast[4])
        cv2.imwrite(
            result_ftemplate.replace('<fnm>', 'step-1-hsv-light-color'),
            hsv_fill_color)
        cv2.imwrite(result_ftemplate.replace('<fnm>', 'step-2-image-bgr'),
                    image)
        cv2.imwrite(result_ftemplate.replace('<fnm>', 'step-3-image-rgb'),
                    img_rgb)
        cv2.imwrite(result_ftemplate.replace('<fnm>', 'step-4-hsv'), hsv)
        cv2.imwrite(result_ftemplate.replace('<fnm>', 'step-5-final'),
                    final)
        cv2.imwrite(result_ftemplate.replace('<fnm>', 'step-6-image-gray'),
                    final_gray)
        cv2.imwrite(
            result_ftemplate.replace('<fnm>', 'step-7-final-blurred'),
            final_blurred)
        cv2.imwrite(
            result_ftemplate.replace('<fnm>', 'step-8-final-thresh'),
            final_thresh)
        cv2.imwrite(
            result_ftemplate.replace('<fnm>',
                                     'step-9-image-final-with-contours'),
            final_wctrs)
    if self.options['show_result']:
        # Display every intermediate image interactively.
        cv2.imshow("Step - 1 (HSV Light Color)", hsv_fill_color)
        cv2.imshow("Step - 2 (Image - BGR)", image)
        cv2.imshow("Step - 3 ( Image - RGB)", img_rgb)
        cv2.imshow("Step - 4 (HSV)", hsv)
        cv2.imshow("Step - 5 (Final)", final)
        cv2.imshow("Step - 6 (Final - Gray)", final_gray)
        cv2.imshow("Step - 7 (Final - Gray Blurred)", final_blurred)
        cv2.imshow("Step - 8 (Final - Gray Thresh)", final_thresh)
        cv2.imshow("Step - 9 (Final - with contours)", final_wctrs)
        # cv2.imshow("Step - 10 (Final - with shape contours)", final_shape_ctrs)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
        # [Step - ending] Clean - up
        del contours, hierarchy, image, hsv_fill_color, img_rgb, hsv, final, final_gray, final_wctrs, final_blurred, final_thresh, ctr_json, ctr_json_str, final_coords, geo_features, ctr_points
        return r
    else:
        # [Step - ending] Clean - up
        del contours, hierarchy, image, hsv_fill_color, img_rgb, hsv, final, final_gray, final_wctrs, final_blurred, final_thresh, ctr_json, ctr_json_str, final_coords, geo_features, ctr_points
        return r
def print_geojson(feature_collection: FeatureCollection) -> None:
    """Pretty-print *feature_collection* as indented GeoJSON to stdout.

    Non-serializable values fall back to their str() form via default=str.
    """
    geojson_data = geojson_dumps(
        feature_collection,
        sort_keys=False,
        indent=4,
        default=str,
    )
    print(f'Geojson data: \n{geojson_data}')
def as_json(self):
    """Serialize this object's dict representation as a GeoJSON string."""
    payload = self.as_dict()
    return geojson_dumps(payload)