def test_panoids_belgrade():
    """test_panoids_belgrade

    Checks that each expected Belgrade panoid is present in the
    streetview.panoids() response, with coordinates and capture date
    matching via the aeq() approximate-equality helper. The raw response
    is dumped to a .actual file so expectations can be refreshed when
    Google re-shoots the location.
    """
    lat = 44.7807774
    lon = 20.4631614
    info = streetview.panoids(lat, lon)
    expected = [
        {'lat': 44.78080446275501, 'panoid': 'H4gnGehUMXHbEszHFtTvDA',
         'year': 2013, 'lon': 20.46312104308652, 'month': 11},
        {'lat': 44.7807773932411, 'panoid': 'NFSzU4sTH3HR4J6QFgKFmw',
         'year': 2015, 'lon': 20.4631613851401, 'month': 6},
    ]
    # Save the actual response for offline inspection / refreshing expectations.
    with open(os.path.join(TESTDIR, 'test_panoids_belgrade.actual'), 'w') as output:
        json.dump(info, output, indent=4, sort_keys=True)
    ids = {pano['panoid']: pano for pano in info}
    for example in expected:
        # Fixed typo in the failure message ("fine" -> "find").
        assert example['panoid'] in ids, \
            "Could not find expected panoid {} in response".format(example['panoid'])
        actual = ids[example['panoid']]
        aeq(actual['lat'], example['lat'])
        aeq(actual['lon'], example['lon'])
        aeq(actual['year'], example['year'])
        aeq(actual['month'], example['month'])
def download(start_idx, end_idx, api_key):
    """Download one random Street View image per sampled shapefile feature.

    Appends "lon,lat,heading,panoid" rows to roads_dictionnary.csv for the
    sample indices [start_idx, end_idx) and downloads one randomly chosen
    panorama per location.

    NOTE(review): relies on module-level globals `base_dir`, `layer`,
    `ransample` and `DIRECTORY` — confirm they are initialised before calling.
    """
    with open(os.path.join(base_dir, "roads_dictionnary.csv"), 'a') as f:
        for i in range(start_idx, end_idx):  #range(0,tt.shape[0])
            # Progress as a percentage of the requested index range.
            print('%.2f' % ((i - start_idx) * 100 / (end_idx - start_idx)) + " %")
            # get feature geometry and bearing from shapefile
            feature = layer[ransample[i]]
            lon = feature.GetGeometryRef().GetX()
            lat = feature.GetGeometryRef().GetY()
            heading = feature.GetField("BEARING")
            # Get the number of panaoramas at the location
            panIds = streetview.panoids(lat, lon)
            # Randomly select one of the n panoramas at this location
            if len(panIds) > 0:
                pid = random.randint(0, len(panIds) - 1)
                f.write("{},{},{},{}\n".format(lon, lat, heading, panIds[pid]["panoid"]))
                img = streetview.api_download(panIds[pid]["panoid"], heading, DIRECTORY, api_key, fov=80, pitch=0)
def GetRandomPanoIds(self):
    """Pick one random pano id for every not-yet-downloaded row of self._list.

    Rows flagged download == 'n' are queried via streetview.panoids(); on
    success the row gets the chosen panoid (plus year/month when present) and
    is marked 'r'; rows with no panoramas are marked 'x'. Progress is printed
    and the list is checkpointed to CSV every 50 rows.
    """
    print('Fetching panos info (this step may take several minutes)')
    counter = 0
    start_time = time.time()
    filter = self._list["download"] == "n"
    for index, row in self._list[filter].iterrows():
        # get all pano ids for each image
        # NOTE(review): assumes column 1 is longitude and column 2 is
        # latitude — confirm against the list's schema.
        panoids = streetview.panoids(lat=row[2], lon=row[1])
        if panoids:
            # choose a random pano id to download
            panoid = random.choice(panoids)
            self._list.at[index, 'panoid'] = panoid['panoid']
            self._list.at[index, 'download'] = 'r'
            # year/month are optional keys in the panoids() response
            if 'year' in panoid:
                self._list.at[index, 'year'] = panoid['year']
            if 'month' in panoid:
                self._list.at[index, 'month'] = panoid['month']
        else:
            self._list.at[index, 'download'] = 'x'
        counter = counter + 1
        if counter % 50 == 0:
            print("Fetched "+ str(counter) + " panoids in "+ str(time.time() - start_time) + " seconds")
            start_time = time.time()
            # saving the list with selected panoids every 50 rows
            self._list.to_csv(self._output, index=False)
def test_panoids_belgrade():
    """Check that known Belgrade panoramas appear in the panoids() response."""
    latitude, longitude = 44.7807774, 20.4631614
    response = streetview.panoids(latitude, longitude)
    known_panos = [
        {'lat': 44.78080446275501, 'panoid': 'H4gnGehUMXHbEszHFtTvDA',
         'year': 2013, 'lon': 20.46312104308652, 'month': 11},
        {'lat': 44.7807773932411, 'panoid': 'NFSzU4sTH3HR4J6QFgKFmw',
         'year': 2015, 'lon': 20.4631613851401, 'month': 6},
    ]
    # Each known panorama dict must appear verbatim in the live response.
    for pano in known_panos:
        assert pano in response
def test_panoids_sydney():
    """Check that known Sydney panoramas appear in the panoids() response."""
    latitude, longitude = -33.8843298, 151.1666392
    response = streetview.panoids(latitude, longitude)
    known_panos = [
        {'lat': -33.88433247600134, 'panoid': 'KTnyIFMvOh9uUqDVEQdP2w',
         'year': 2007, 'lon': 151.1666428923351, 'month': 12},
        {'lat': -33.88433963284601, 'panoid': 'z8KUYeQ2l-O5zdzmSwqPRQ',
         'year': 2013, 'lon': 151.1666253718159, 'month': 7},
        {'lat': -33.88432975440379, 'panoid': 'UQrvQ_b_TwO1ylks9VI9rA',
         'year': 2014, 'lon': 151.1666391815143, 'month': 5},
    ]
    # Each known panorama dict must appear verbatim in the live response.
    for pano in known_panos:
        assert pano in response
def start():
    """Pick a random global location with Street View coverage, download its
    panorama, render a "tiny planet" image and a cropped tile, and return the
    truncated coordinates plus the country name.

    NOTE(review): `your_API_key`, `tiny`, `bestImage`, `Nominatim`, `Image`,
    `np` and `streetview` must all be provided by the enclosing module.
    """
    # Your Google Stret view API key here
    key = your_API_key
    dirc = "Image"
    # Getting the closest panorama ID of random lat/lon
    lat = random.randint(-9000000, 9000000) / 100000
    lon = random.randint(-18000000, 18000000) / 100000
    # NOTE(review): expects a patched streetview.panoids() that returns a
    # (panoids, lat, lon) tuple — the upstream function returns only a list.
    panoids, lat, lon = streetview.panoids(lat=lat, lon=lon)
    panoid = panoids[0]['panoid']
    # Getting Country of the panorama
    locator = Nominatim(user_agent="myGeocoder")
    # Country is assumed to be the last whitespace-separated token of the
    # reverse-geocoded address string.
    location = str(locator.reverse("{}, {}".format(lat, lon))).split(" ")
    country = location[len(location) - 1]
    # Download of 4 tiles of the panorama
    streetview.download_flats(panoid, key=key, flat_dir=dirc, fov=90, width=640, height=640)
    # Download of the panorama "photosphere"
    panorama = streetview.download_panorama_v3(panoid, zoom=3, disp=False)
    # Transforming the "photosphere" into a "tiny planet" and exporting it
    tiny.input_shape = panorama.shape
    final_image = tiny.warp(panorama, tiny.little_planet_3, output_shape=tiny.output_shape)
    file_name = "Image/Panorama.jpg"
    final_image = Image.fromarray((final_image * 255).round().astype(np.uint8), 'RGB')
    final_image.save(file_name)
    # Determining which of the 4 tiles has the most information
    path = streetview.tiles_info(panoid)[0][2]
    path = path[:len(path) - 7]
    path += bestImage.main(path)
    path = "Image/2017_" + path
    # Cropping of the exported tile to get rid of google tags and exporting it
    img = Image.open(path)
    area = (11, 0, 629, 618)
    cropped_img = img.crop(area)
    cropped_img.save("Image/imageToPost.jpg")
    # Truncate (not round) the coordinates to 5 decimal places for the caller.
    return int(lat * 100000) / 100000, int(lon * 100000) / 100000, country
def runOttawa(n_loc):
    """
    Downloads Google StreetView images of random points in Ottawa
    :param n_loc: number of locations to download. CAUTION: some points have
        no images, so it's not the exact number of subdirectories created
    """
    DIRECTORY = "../voteimages"
    ds = ogr.Open('C:/Users/msawada/Desktop/Urban_RAT/Urban_RAT_inventory.dbf')
    layer = ds.GetLayer()
    for i in range(n_loc):
        print('%.2f' % (i * 100 / n_loc) + " %")
        # BUG FIX: randint's upper bound is inclusive, so randint(0, len(layer))
        # could index one past the last feature; randrange excludes the end.
        index = random.randrange(len(layer))
        feature = layer[index]
        lon = feature.GetGeometryRef().GetX()
        lat = feature.GetGeometryRef().GetY()
        bearing_road = feature.bearing
        if bearing_road is None:  # was "== None"
            heading = ''
        else:
            # Look perpendicular to the road, randomly left or right.
            heading = bearing_road + 90 * np.sign(random.random() - 0.5)
        folder = DIRECTORY
        panIds = streetview.panoids(lat, lon)
        if len(panIds) > 0:
            for pan in panIds:
                img = streetview.api_download(pan["panoid"], heading, folder, API_KEY, fov=80, pitch=0, year=pan["year"])
                if img is not None:  # was "!= None"
                    # Compute and save a 64x64 GIST descriptor next to the image.
                    full_size = misc.imread(img)
                    resized = misc.imresize(full_size, (64, 64))
                    desc = gist.extract(resized)
                    np.savetxt(img + '.txt', desc)
def panoid_history(coordinates, save=True, visual_save=False):
    """Collect historical Street View panoids for a list of coordinates.

    :param coordinates: iterable of (longitude, latitude) pairs — lon first.
    :param save: pickle the accumulated history to 'panorama_hitorical_pickle'.
    :param visual_save: also write a human-readable text dump.

    NOTE(review): appends to a module-level `panorama_history` list, so
    repeated calls accumulate results across invocations.
    """
    for long, lat in coordinates:
        # print("lat:", lat)
        panoids = streetview.panoids(lat=lat, lon=long)
        panorama_history.append(panoids)
    if save:
        with open('panorama_hitorical_pickle', 'wb') as f:
            pickle.dump(panorama_history, f)
    if visual_save:
        with open('panorama_history.txt', 'w') as f:
            # writing to a file - human readable
            for item in panorama_history:
                f.write("%s\n" % item)
def getPointsOfInterest(pos):
    """Collect dated panoramas near each position.

    :param pos: 2-D array-like of shape (n, 2) with [lat, lon] rows.
    :return: list of panoid dicts that carry a 'year' key, de-duplicated by
        panoid, in first-seen order.
    """
    seen_ids = set()  # set gives O(1) membership tests (was a list)
    points_of_interest = []
    # Iterate rows directly instead of indexing by range(len(pos)),
    # and avoid shadowing the builtin `id`.
    for row in pos:
        lat, lon = row[0], row[1]
        for panoid in streetview.panoids(lat, lon):
            # Only keep panoramas that carry capture-date metadata.
            if 'year' in panoid and panoid['panoid'] not in seen_ids:
                seen_ids.add(panoid['panoid'])
                points_of_interest.append(panoid)
    return points_of_interest
def runOttawa():
    """
    Downloads Google StreetView images of features 3000-5999 of the Ottawa
    shapefile layer, saving each location's images plus 64x64 GIST
    descriptors in a per-coordinate subdirectory.
    CAUTION: some points have no images, so it's not the exact number of
    subdirectories created.
    """
    # Raw string avoids accidental escape sequences in the Windows path.
    DIRECTORY = r"D:\Amaury\Desktop\ottawa_image_db"
    ds = ogr.Open(
        'C:/Users/msawada/Desktop/Urban_RAT/Urban_RAT_inventory_4326.dbf')
    layer = ds.GetLayer()
    n_loc = len(layer)
    # done: range(3000)
    for i in range(3000, 6000):
        print('%.2f' % (i * 100 / n_loc) + " %")
        feature = layer[i]
        lon = feature.GetGeometryRef().GetX()
        lat = feature.GetGeometryRef().GetY()
        folder = DIRECTORY + '/%2.6f,%2.6f' % (lat, lon)
        panIds = streetview.panoids(lat, lon)
        if len(panIds) > 0:
            # exist_ok avoids the race between the existence check and makedirs.
            os.makedirs(folder, exist_ok=True)
            for pan in panIds:
                img = streetview.api_download(pan["panoid"], folder, API_KEY, fov=80, pitch=0, year=pan["year"], month=pan["month"])
                if img is not None:  # was "!= None"
                    full_size = misc.imread(img)
                    resized = misc.imresize(full_size, (64, 64))
                    desc = gist.extract(resized)
                    np.savetxt(img + '64.txt', desc)
def test_panoids_sydney():
    """test_panoids_sydney

    Checks that each expected Sydney panoid is present in the response with
    matching coordinates and capture date (aeq = approximate equality).
    The raw response is dumped to a .actual file for refreshing expectations.
    """
    lat = -33.8843298
    lon = 151.1666392
    info = streetview.panoids(lat, lon)
    expected = [
        # 2007 pano no longer returned by Google; kept disabled for reference.
        # {
        #     'lat': -33.88433247600134,
        #     'panoid': 'KTnyIFMvOh9uUqDVEQdP2w',
        #     'year': 2007,
        #     'lon': 151.1666428923351,
        #     'month': 12
        # },
        {'lat': -33.88433963284601, 'panoid': 'z8KUYeQ2l-O5zdzmSwqPRQ',
         'year': 2013, 'lon': 151.1666253718159, 'month': 7},
        {'lat': -33.88432975440379, 'panoid': 'UQrvQ_b_TwO1ylks9VI9rA',
         'year': 2014, 'lon': 151.1666391815143, 'month': 5},
    ]
    # pylint:disable=invalid-name
    with open(os.path.join(TESTDIR, 'test_panoids_sydney.actual'), 'w') as f:
        json.dump(info, f, indent=4, sort_keys=True)
    ids = {pano['panoid']: pano for pano in info}
    for example in expected:
        # Fixed typo in the failure message ("fine" -> "find").
        assert example['panoid'] in ids, \
            "Could not find expected panoid {} in response".format(example['panoid'])
        actual = ids[example['panoid']]
        aeq(actual['lat'], example['lat'])
        aeq(actual['lon'], example['lon'])
        aeq(actual['year'], example['year'])
        aeq(actual['month'], example['month'])
def download_gsv(self, gsv_key, start=0, end=-1):
    """
    Downloads Google Street View (GSV) images points in Ottawa from a shapefile layer.
    WARNING: the filename format is currently not compatible with the class Image definition.
    :param gsv_key: GSV API key
    :type gsv_key: str
    :param start: index of the feature of the layer from where download starts
    :type start: int
    :param end: index of the feature of the layer from where download ends
    :type end: int
    """
    # Creation of directory
    self.gsv_dir = safe_folder_creation(self.gsv_dir)

    # Convert layer file
    ds = ogr.Open(self.layer_path)
    layer = ds.GetLayer()

    # Determine the number of locations to download
    loc_max = len(layer)
    if start < end < len(layer):
        stop = end
    else:
        stop = loc_max
    n_loc = stop - start

    # Display advancement of downloading
    pbar = progressbar.ProgressBar()
    for i in pbar(range(start, stop)):
        # Get location
        feature = layer[i]
        lon = feature.GetGeometryRef().GetX()
        lat = feature.GetGeometryRef().GetY()

        # Get the closest panorama from the location
        pano_id = streetview.panoids(lat, lon, closest=True)

        # Check if there is a pano (truthiness instead of len())
        if pano_id:
            # Build "<lon>_<lat>_<panoid>_<YYYY-MM-01T00-00-00>" filename;
            # {:02d} zero-pads the month (replaces the previous if/else).
            image_key = pano_id[0]["panoid"]
            image_date = "{}-{:02d}-01T00-00-00".format(pano_id[0]["year"], pano_id[0]["month"])
            image_lon = "{0:.6f}".format(lon)
            image_lat = "{0:.6f}".format(lat)
            image_filename = '{}_{}_{}_{}'.format(image_lon, image_lat, image_key, image_date)

            # Download one image
            try:
                streetview.api_download(image_key, 90, self.gsv_dir, gsv_key, fov=80, pitch=0, fname=image_filename)
                self.nb_gsv += 1
            except Exception as err:
                # Best-effort: log the failure and continue with the next feature.
                print(err)
                print("Error on feature {}, lat = {}, lon = {} ".format(i, lat, lon))
                continue

    # Display information
    print("Number of locations : {}".format(n_loc))
    print("Number of images downloaded : {}".format(self.nb_gsv))
    print("Ratio : {}%".format((self.nb_gsv / n_loc) * 100))
def test_panoids_sanfransico():
    """Check that known San Francisco panoramas appear in the panoids() response."""
    latitude, longitude = 37.7743002, -122.4283573
    response = streetview.panoids(latitude, longitude)
    known_panos = [
        {'lat': 37.77432243711459, 'panoid': 'mOIblLGQqLpZUDne_VLAdQ', 'year': 2007, 'lon': -122.4283616654013, 'month': 11},
        {'lat': 37.77429785272119, 'panoid': 'm5xxmNmdd-g0y8Y-kpmb8Q', 'year': 2008, 'lon': -122.4283486679392, 'month': 5},
        {'lat': 37.77432814229113, 'panoid': 'DgYMRMl9pMkPojc_aFaWOw', 'year': 2011, 'lon': -122.4283649736079, 'month': 2},
        {'lat': 37.77426806398835, 'panoid': 'QO-svL6NrTqiocGSKWFK4w', 'year': 2011, 'lon': -122.4283685897149, 'month': 4},
        {'lat': 37.77426374081248, 'panoid': '11HdGr2_t8BIZJ-56mJm9A', 'year': 2013, 'lon': -122.428371097386, 'month': 11},
        {'lat': 37.7743035331551, 'panoid': '1G148Vno08mtwAaQ_roRqg', 'year': 2014, 'lon': -122.4283477300864, 'month': 3},
        {'lat': 37.7742959110328, 'panoid': 'fp0uFJfqO2e0uEWhbDkIMQ', 'year': 2014, 'lon': -122.4283817194891, 'month': 5},
        {'lat': 37.77426752482152, 'panoid': 'ag5GcSl7BmhYVmXNYYvcmw', 'year': 2014, 'lon': -122.4283597323728, 'month': 7},
        {'lat': 37.77426966585636, 'panoid': 'wHBAtZTAqJHtrXqqIRNfgw', 'year': 2014, 'lon': -122.4283659734244, 'month': 10},
        {'lat': 37.77429993268145, 'panoid': '3Eng_G9SqCYWnQydcZTP3A', 'year': 2015, 'lon': -122.428373031613, 'month': 1},
        {'lat': 37.77432015149672, 'panoid': 'Hqhy1zUzdzwyhoLD1naHUQ', 'year': 2015, 'lon': -122.4283974703461, 'month': 6},
        {'lat': 37.7742707230769, 'panoid': 'VlSQppuJiOrEXPl0uEFUpg', 'year': 2015, 'lon': -122.428350877336, 'month': 7},
        {'lat': 37.77429208676779, 'panoid': 'q4XZZgs2zwWtc6eNblwbng', 'year': 2015, 'lon': -122.4283729863899, 'month': 10},
        {'lat': 37.77432525957246, 'panoid': '84EbbbrnJI-Dnj7ZFgXj3A', 'year': 2016, 'lon': -122.4283551268714, 'month': 6},
        {'lat': 37.77432618270405, 'panoid': 'e8LRJ1mcvgXXywxz2af_og', 'year': 2017, 'lon': -122.4283773760892, 'month': 2},
        {'lat': 37.77430015050959, 'panoid': 'atLD3spRNleJ-50dqXZDmw', 'year': 2017, 'lon': -122.4283573289222, 'month': 4},
    ]
    # Each known panorama dict must appear verbatim in the live response.
    for pano in known_panos:
        assert pano in response
        # NOTE(review): this if/else continues an enclosing loop (defined above
        # this chunk) that iterates duel ids and resolves each id to a lat/lon.
        if len(dfrows) > 0:
            llat = dfrows[0:1].left_lat.values[0]
            llong = dfrows[0:1].left_long.values[0]
            downloadList.append([id, llat, llong])
        else:
            # id was not found as a left_id; look it up on the right side.
            dfrows = duelsDF_head[duelsDF_head.right_id == id]
            rlat = dfrows[0:1].right_lat.values[0]
            rlong = dfrows[0:1].right_long.values[0]
            downloadList.append([id, rlat, rlong])

# SECURITY NOTE(review): hard-coded API key committed to source — rotate it
# and load it from configuration/environment instead.
API_KEY = 'AIzaSyC_cKyaxPoDPTtN4IgiOJ_e_9ytbMDk4lE'
# # Output folder for panaorama images
for row in downloadList:
    ppidLeftLat, ppidLeftLon = row[1], row[2]
    panIds = streetview.panoids(ppidLeftLat, ppidLeftLon)
    # Randomly select one of the n panaormas at this location
    if len(panIds) > 0:
        # pid = random.randint(0, len(panIds) - 1)
        # Deterministic choice: takes the last panorama in the list.
        pid = len(panIds) - 1
        img = api_download2(panIds[pid]["panoid"], row[0], 0, DIRECTORY, API_KEY, fov=80, pitch=0, year=panIds[pid]["year"])

# result_y = []
# for index, row in duelsDF.head(3000).iterrows():  # .iloc([50:]).iterrows()
#     ppidLeft = row['left_id']
#     ppidLeftLat = row['left_lat']
#     ppidLeftLon = row['left_long']
#     ppidRight = row['right_id']
#     ppidRightLat = row['right_lat']
#     ppidRightLon = row['right_long']
# Minimal example: download a single Street View image for a fixed Sydney location.
import streetview
# NOTE(review): this is a local secrets.py holding the API key (svkey),
# not the stdlib 'secrets' module — it shadows the standard library.
import secrets

panoids = streetview.panoids(lat=-33.85693857571269, lon=151.2144895142714)
direc = './'  # where to put image. put in current working dir
panoid = panoids[0]['panoid']  # get first image from this location
heading = 0  # see docs for info on this. controls which direction we're looking
flat_dir = direc
key = secrets.svkey
r = streetview.api_download(panoid, heading, flat_dir, key)
print('%.2f' % (i * 100 / prog) + " %") # get feature geometry and bearing from shapefile feature = layer[ransample[i]] lon = feature.GetGeometryRef().GetX() lat = feature.GetGeometryRef().GetY() heading = feature.GetField("BEARING") f.write("{},{},{}\n".format(lon, lat, heading)) # Get the number of panaoramas at the location panIds = streetview.panoids(lat, lon) # Randomly select one of the n panoramas at this location if len(panIds) > 0: pid = random.randint(0, len(panIds) - 1) img = streetview.api_download(panIds[pid]["panoid"], heading, DIRECTORY, API_KEY, fov=80, pitch=0, year=panIds[pid]["year"]) prog += 1
def test_panoids_sanfransico():
    """test_panoids_sanfransico

    Checks that each expected San Francisco panoid is present in the response
    with matching coordinates and capture date (aeq = approximate equality).
    The raw response is dumped to a .actual file for refreshing expectations.

    Panoramas Google no longer returns (previously expected, now disabled):
    QO-svL6NrTqiocGSKWFK4w (2011-04), 11HdGr2_t8BIZJ-56mJm9A (2013-11),
    ag5GcSl7BmhYVmXNYYvcmw (2014-07), wHBAtZTAqJHtrXqqIRNfgw (2014-10),
    VlSQppuJiOrEXPl0uEFUpg (2015-07), atLD3spRNleJ-50dqXZDmw (2017-04).
    """
    lat = 37.7743002
    lon = -122.4283573
    info = streetview.panoids(lat, lon)
    expected = [
        {'lat': 37.77432243711459, 'panoid': 'mOIblLGQqLpZUDne_VLAdQ', 'year': 2007, 'lon': -122.4283616654013, 'month': 11},
        {'lat': 37.77429785272119, 'panoid': 'm5xxmNmdd-g0y8Y-kpmb8Q', 'year': 2008, 'lon': -122.4283486679392, 'month': 5},
        {'lat': 37.77432814229113, 'panoid': 'DgYMRMl9pMkPojc_aFaWOw', 'year': 2011, 'lon': -122.4283649736079, 'month': 2},
        {'lat': 37.7743035331551, 'panoid': '1G148Vno08mtwAaQ_roRqg', 'year': 2014, 'lon': -122.4283477300864, 'month': 3},
        {'lat': 37.7742959110328, 'panoid': 'fp0uFJfqO2e0uEWhbDkIMQ', 'year': 2014, 'lon': -122.4283817194891, 'month': 5},
        {'lat': 37.77429993268145, 'panoid': '3Eng_G9SqCYWnQydcZTP3A', 'year': 2015, 'lon': -122.428373031613, 'month': 1},
        {'lat': 37.77432015149672, 'panoid': 'Hqhy1zUzdzwyhoLD1naHUQ', 'year': 2015, 'lon': -122.4283974703461, 'month': 6},
        {'lat': 37.77429208676779, 'panoid': 'q4XZZgs2zwWtc6eNblwbng', 'year': 2015, 'lon': -122.4283729863899, 'month': 10},
        {'lat': 37.77432525957246, 'panoid': '84EbbbrnJI-Dnj7ZFgXj3A', 'year': 2016, 'lon': -122.4283551268714, 'month': 6},
        {'lat': 37.77432618270405, 'panoid': 'e8LRJ1mcvgXXywxz2af_og', 'year': 2017, 'lon': -122.4283773760892, 'month': 2},
    ]
    # pylint:disable=invalid-name
    with open(os.path.join(TESTDIR, 'test_panoids_sanfransico.actual'), 'w') as f:
        json.dump(info, f, indent=4, sort_keys=True)
    ids = {pano['panoid']: pano for pano in info}
    for example in expected:
        # Fixed typo in the failure message ("fine" -> "find").
        assert example['panoid'] in ids, \
            "Could not find expected panoid {} in response".format(example['panoid'])
        actual = ids[example['panoid']]
        aeq(actual['lat'], example['lat'])
        aeq(actual['lon'], example['lon'])
        aeq(actual['year'], example['year'])
        aeq(actual['month'], example['month'])
def fetch(request):
    """Django view: fetch Street View images for POSTed coordinates.

    Accepts either a JSON array of {lat, lng} points or a single "lat,lng"
    string in the 'latlng' POST field. For each point it looks up panoramas
    (2018 or newer only), downloads up to `max_fetch` images across six
    headings, and returns the saved file paths plus panorama coordinates
    as JSON.
    """
    if request.method == "POST":
        latlng = request.POST.get('latlng', False)
        if not latlng:
            return JsonResponse({'status':'error','message':'Latitude, Longitude Not Provided Properly'}, status=404)
        if not settings.GOOGLE_MAP_STREET_API:
            print("NO GOOGLE MAP STREET API IN SERVER")
            return JsonResponse({'status':'error','message':'No Google Map API Available'}, status=404)
        # Resolve the output directory (fall back to PROJECT_FOLDER when the
        # relative media path does not exist).
        saveto = None
        if not os.path.exists(os.path.join('media/street_view_images')):
            saveto = os.environ.get('PROJECT_FOLDER','') + '/media/street_view_images'
        else:
            saveto = os.path.join('media/street_view_images')
        filelist = []
        latlngList = []
        panoFileLatLngList = []
        panoIdsList = []
        try:
            # If Map Polygon points array arrives
            latlngList = json.loads(latlng)
        except Exception as e:
            # If Point of string lat,lng arrives
            latlngList = [x.strip() for x in latlng.split(',')]
            latlngList = [{"lat":latlngList[0], "lng":latlngList[1]}]
        count = 0
        max_fetch = 200
        if settings.DEBUG:
            # Keep development requests cheap.
            max_fetch = 5
        for coords in latlngList:
            if not coords.get('lat', False) or not coords.get('lng', False):
                continue # If lat, lng does not exists some how
            print('Trying to fetch streetview image at: ' + str(coords.get('lat')) + ',' + str(coords.get('lng')))
            if (count > max_fetch):
                break
            panoids = streetview.panoids(lat=str(coords.get('lat')), lon=str(coords.get('lng')))
            # print(panoids)
            for pano in panoids:
                if (count < max_fetch):
                    # IF panoroma is older then 2018 ignore it
                    if pano.get('year', False) and pano.get('year') < 2018:
                        continue
                    # Skip panoramas already downloaded during this request.
                    if pano.get('panoid') in panoIdsList:
                        continue
                    panoIdsList = panoIdsList + [pano.get('panoid')]
                    # GET ALL Tiles images info
                    # print(streetview.tiles_info(pano.get('panoid')))
                    # heading recommendation: 0, 90, 180, or 270
                    for heading in [30, 220, 0, 90, 180, 270]:
                        file = None
                        try:
                            file = streetview.api_download(pano.get('panoid'), heading, saveto, settings.GOOGLE_MAP_STREET_API, year=pano.get('year','now'))
                        except Exception as e:
                            # Best-effort: a failed heading does not abort the request.
                            print('Failed to download this streetview image')
                        if file:
                            filelist.append(file)
                            panoFileLatLngList.append(str(pano.get('lat'))+','+str(pano.get('lon')))
                            count += 1
                else:
                    messages.info(request, 'Max Street View Images ('+str(max_fetch)+') was fetched earlier.')
                    break
        return JsonResponse({'status':'ok','message':'Images Saved','data':filelist,'coords':panoFileLatLngList}, status=200)
    else:
        return JsonResponse({'status':'error','message':'Invalid Request'}, status=404)
def GSVpanoMetadataCollector(samplesFeatureClass, ouputTextFolder, batchNum, greenmonth, year=""):
    '''
    This function is used to call the Google API url to collect the metadata of
    Google Street View Panoramas. The input of the function is the shpfile of the create sample site, the output is the generate panoinfo matrics stored in the text file

    Parameters:
        samplesFeatureClass: the shapefile of the create sample sites
        batchNum: the number of sites proced every time. If batch size is 1000, the code will save metadata of every 1000 point to a txt file.
        ouputTextFolder: the output folder for the panoinfo
        greenmonth: a list of the green season, for example in Boston, greenmonth = ['05','06','07','08','09']
        year: optional. if specified, only panos dated in that year or older will be returned
    '''
    # Function-local imports keep the GDAL/urllib dependencies optional for
    # callers that never invoke this collector.
    import urllib
    import xmltodict
    from osgeo import ogr, osr, gdal
    import time
    import os,os.path
    import math
    import streetview
    import pprint

    if not os.path.exists(ouputTextFolder):
        os.makedirs(ouputTextFolder)

    driver = ogr.GetDriverByName('ESRI Shapefile')
    if driver is None:
        print('Driver is not available.')

    dataset = driver.Open(samplesFeatureClass)
    if dataset is None:
        print('Could not open %s' % (samplesFeatureClass))

    layer = dataset.GetLayer()
    sourceProj = layer.GetSpatialRef()
    targetProj = osr.SpatialReference()
    targetProj.ImportFromEPSG(4326)
    # change the projection of shapefile to the WGS84
    # if GDAL version is 3.0 or above
    if gdal.__version__.startswith('2.') is False:
        targetProj.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform = osr.CoordinateTransformation(sourceProj, targetProj)

    # loop all the features in the featureclass
    feature = layer.GetNextFeature()
    featureNum = layer.GetFeatureCount()
    batch = math.ceil(featureNum/batchNum)

    for b in range(batch):
        # for each batch process num GSV site
        start = b*batchNum
        end = (b+1)*batchNum
        if end > featureNum:
            end = featureNum
        ouputTextFile = 'Pnt_start%s_end%s.txt'%(start,end)
        ouputGSVinfoFile = os.path.join(ouputTextFolder,ouputTextFile)

        # skip over those existing txt files
        if os.path.exists(ouputGSVinfoFile):
            continue
        time.sleep(1)

        with open(ouputGSVinfoFile, 'w') as panoInfoText:
            # process num feature each time
            for i in range(start, end):
                feature = layer.GetFeature(i)
                geom = feature.GetGeometryRef()
                # trasform the current projection of input shapefile to WGS84
                #WGS84 is Earth centered, earth fixed terrestrial ref system
                geom.Transform(transform)
                lon = geom.GetX()
                lat = geom.GetY()
                # get the meta data of panoramas
                urlAddress = r'http://maps.google.com/cbk?output=xml&ll=%s,%s'%(lat,lon)
                # Throttle to stay polite to the endpoint.
                time.sleep(0.05)
                # the output result of the meta data is a xml object
                metaDataxml = urllib.request.urlopen(urlAddress)
                metaData = metaDataxml.read()
                data = xmltodict.parse(metaData)
                # in case there is not panorama in the site, therefore, continue
                if data['panorama']==None:
                    continue
                else:
                    panoInfo = data['panorama']['data_properties']
                    panoDate, panoId, panoLat, panoLon = getPanoItems(panoInfo)
                    # Fall back to the streetview package when the default pano
                    # is outside the green season or a year filter was given.
                    if check_pano_month_in_greenmonth(panoDate, greenmonth) is False or year != "":
                        panoLst = streetview.panoids(lon=lon, lat=lat)
                        sorted_panoList = sort_pano_list_by_date(panoLst)
                        panoDate, panoId, panoLat, panoLon = get_next_pano_in_greenmonth(sorted_panoList, greenmonth, year)
                    print('The coordinate (%s,%s), panoId is: %s, panoDate is: %s'%(panoLon,panoLat,panoId, panoDate))
                    lineTxt = 'panoID: %s panoDate: %s longitude: %s latitude: %s\n'%(panoId, panoDate, panoLon, panoLat)
                    panoInfoText.write(lineTxt)
        # NOTE(review): redundant — the with-statement already closes the file.
        panoInfoText.close()
# Resume fetching GSV panoids for DB points after the last searched id.
points = get_points_from_db("google_ams_whole_clipped_40", last_searched_id)
with open(not_inserted_file, "r") as f:
    lines = f.read().splitlines()
session, GSVTable = pois_storing_functions.setup_db(
    "gsv_ams_whole_clipped_40", "gsv_ams_whole_clipped_count", "gsv")
# For each point --> search nearby in google
for row_number, places_id, point_id, point_lat, point_lng in points:
    # Persist progress so a crash can resume from the last point.
    get_map_points_to_search.log_last_searched_point(logfile, row_number)
    found_flag = False
    print("POINT: ", row_number, places_id, point_id, point_lat, point_lng)
    panoids = streetview.panoids(lat=point_lat, lon=point_lng)
    if not panoids:
        # Record misses so they are not retried on the next run.
        with open(not_inserted_file, "a") as text_file:
            print(
                f"Not found for point \n row number: {row_number}, point_id: {point_id}, "
                f"place id: {places_id}", file=text_file)
            # NOTE(review): redundant — the with-statement closes the file.
            text_file.close()
        continue
    print(panoids)
    # For only last year's images
    # max_year_panoid = max(panoids, key=lambda x : x["year"] if "year" in x.keys() else 0)
    # print(max_year_panoid)
    # download_img(max_year_panoid, places_id, point_id, session, GSVTable,
    for img in panoids:
        if "year" in img:
            # NOTE(review): loop body continues beyond this chunk.
def runOttawa(n_loc):
    """
    Downloads Google StreetView images of random points in the shapefile, those
    points have weighted probabilities to be chosen in their attributes.
    :param n_loc: number of locations to download. CAUTION: some points have no
    images, so it's not the exact number of subdirectories created
    """
    DIRECTORY = "trainottawa"
    # Opening the layer and fetching the weights, and the normalizing them
    ds = ogr.Open('ottawashp/ottawa_4326_clipped_weighted_points.shp')
    layer = ds.GetLayer()
    weights = np.array([feature.weight for feature in layer])
    weights_sum = np.sum(weights)
    weights = weights / weights_sum
    indices = np.array(range(len(layer)))
    for i in range(n_loc):
        print('%.2f' %(i*100/n_loc) + " %")
        # We select randomly the index according to the weights
        index = int(choice(indices, p=weights))
        feature = layer[index]
        # We fetch the coordinates
        lon = feature.GetGeometryRef().GetX()
        lat = feature.GetGeometryRef().GetY()
        # We fetch the bearing of the road
        bearing_road = feature.bearing
        # If there is non, we let the heading at its default value
        if bearing_road == None:
            heading = ''
        # Else, we take randomly a heading of +90 or -90 degrees, so the image faces a building
        else:
            heading = bearing_road + 90*np.sign(random.random() - 0.5)
        # Creating the folder name
        folder = DIRECTORY + '/new-%2.6f,%2.6f' %(lat,lon)
        oldfolder = DIRECTORY + '/%2.6f,%2.6f' %(lat,lon)
        # using the streetview module to fetch the panoids corresponding to the position
        panIds = streetview.panoids(lat, lon)
        if len(panIds) > 0:
            # We only proceed if the folder doesn't already exist
            if not os.path.exists(oldfolder):
                if not os.path.exists(folder):
                    os.makedirs(folder)
                # We download each image
                for pan in panIds:
                    img = streetview.api_download(pan["panoid"], heading, folder, API_KEY, fov=90, pitch=10, year=pan["year"])
                    if img != None:
                        # We process and the GIST vector at different scales, and save them
                        # as text files in the same folder
                        full_size = misc.imread(img)
                        resized = misc.imresize(full_size, (256,256))
                        desc = gist.extract(resized)
                        np.savetxt(img + '256.txt', desc)
                        resized = misc.imresize(full_size, (128,128))
                        desc = gist.extract(resized)
                        np.savetxt(img + '128.txt', desc)
                        resized = misc.imresize(full_size, (64,64))
                        desc = gist.extract(resized)
                        np.savetxt(img + '64.txt', desc)
                        resized = misc.imresize(full_size, (32,32))
                        desc = gist.extract(resized)
                        np.savetxt(img + '32.txt', desc)
                        # Processing and saving the dense sift vector
                        # NOTE(review): `img` is a file path here, yet it is
                        # passed to misc.imresize — looks like this should
                        # resize `full_size` instead; confirm.
                        sift_desc = dsift(misc.imresize(img, (64,64)))
                        np.savetxt(img + '.nfo', sift_desc)
# Build parallel id/lat/lon lists from the "wealthy" duels CSV, then download
# the most recent panorama for each coordinate pair.
# NOTE(review): `lats` is initialised before this chunk; every CSV row
# contributes two ids (row[0], row[1]), two lats (row[3], row[5]) and two
# longs (row[4], row[6]), keeping the three lists index-aligned.
longs = []
ids = []
with open(wealthyDir, 'r') as csvfileReader:
    reader = csv.reader(csvfileReader, delimiter=',')
    for row in reader:
        ids.append(row[0])
        ids.append(row[1])
        #if row[3] not in lats:
        lats.append(row[3])
        #if row[5] not in lats:
        lats.append(row[5])
        #if row[4] not in longs:
        longs.append(row[4])
        #if row[6] not in longs:
        longs.append(row[6])
    # NOTE(review): dead call — the reader is already exhausted here; if the
    # intent was to skip a header row, this should run before the loop.
    reader.__next__()
for i in range(len(lats)):
    panIds = streetview.panoids(lats[i], longs[i])
    if len(panIds) > 0:
        # Take the last panorama in the returned list.
        pid = len(panIds) - 1
        img = api_download2(panIds[pid]["panoid"], ids[i], 0, imgDir, API_KEY, fov=80, pitch=0, year=panIds[pid]["year"])
# Iterate the dataset CSV of (lat, lon) rows and, for each coordinate, query
# nearby Street View panoramas, counting exact-latitude matches.
# NOTE(review): `datafilename` and `streetview` come from earlier in the
# enclosing script; `checkpoint` appears to be used further below this chunk.
targetLon = None
# load dataset csv which contains lat lon information.
with open(datafilename) as csvDataFile:
    csvReader = csv.reader(csvDataFile)
    num_true_data = -1
    num_count = 0
    for row in csvReader:
        num_count += 1
        targetLat = float(row[0])
        targetLon = float(row[1])
        ### step 1: Get near panoids. via roughly giving a gps coordinate [latitude, longitue], it returns near panoids.
        panoIdsNearTargetGPS = streetview.panoids(lat=targetLat, lon=targetLon)
        nPanoIdsNearTargetGPS = len(panoIdsNearTargetGPS)
        print(nPanoIdsNearTargetGPS, 'nearby panorama images are detected.')
        #for n in range(nPanoIdsNearTargetGPS):
        #    print(panoIdsNearTargetGPS[n])
        # Slect a image panoid which exactly corresponds to the target Lat and Lon coordinate that you've specified.
        checkpoint = 0
        for n in range(nPanoIdsNearTargetGPS):
            # Exact float equality: only matches when the pano metadata echoes
            # the queried latitude bit-for-bit.
            if panoIdsNearTargetGPS[n]['lat'] == targetLat:
                num_true_data += 1
                # FIX: the original used Python 2 print statements here
                # ('print "..."'), which are syntax errors under Python 3;
                # print() calls behave identically in both.
                print("Selected image: ")
                print(panoIdsNearTargetGPS[n])