def GeoCode(GeoCoder, strAddr):
    """Geocode an address with the selected provider.

    Parameters
    ----------
    GeoCoder : str
        Provider name: 'google', 'bing', 'census'; any other value falls
        back to the Yahoo provider.
    strAddr : str
        Free-form address string to geocode.

    Returns
    -------
    tuple
        (lat, lng, address, GeoCoder, neighborhood, quality, accuracy,
        confidence).  Fields a provider does not supply are None; on any
        failure a tuple of Nones (plus the provider name) is returned.
    """
    # Bing requires an API key; it is read from configuration rather than
    # hard-coded in source.
    strBingMapKey = cfg.getConfigValue(r"Geocoder/BingKey")
    try:
        if GeoCoder == 'google':
            g = geocoder.google(strAddr)
            return (g.lat, g.lng, g.address, GeoCoder, g.neighborhood,
                    g.quality, g.accuracy, None)
        elif GeoCoder == 'bing':
            g = geocoder.bing(strAddr, key=strBingMapKey)
            return (g.lat, g.lng, g.address, GeoCoder, g.neighborhood,
                    g.quality, g.accuracy, g.confidence)
        elif GeoCoder == 'census':
            cg = CensusGeocode()
            j = cg.onelineaddress(strAddr)
            try:
                return (j[0]['coordinates']['y'], j[0]['coordinates']['x'],
                        j[0]['matchedAddress'], GeoCoder,
                        None, None, None, None)
            except (IndexError, KeyError, TypeError):
                # Was a bare except: — narrowed to the lookup errors an
                # empty/malformed census response can actually raise.
                return (None, None, None, GeoCoder, None, None, None, None)
        else:
            g = geocoder.yahoo(strAddr)
            return (g.lat, g.lng, g.json['address'], GeoCoder,
                    g.neighborhood, g.quality, g.accuracy, None)
    except Exception:
        # Was a bare except: — Exception keeps the best-effort behavior
        # while letting KeyboardInterrupt/SystemExit propagate.
        print('error encountered when geocoding address: {0}'.format(strAddr))
        traceback.print_exc()
        return (None, None, None, GeoCoder, None, None, None, None)
def uploaded_file(filename):
    """Geocode every value in the ADDRESS column of an uploaded CSV.

    Returns a JSON response of [address, geocoder_result] pairs.
    """
    csv_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
    frame = pd.read_csv(csv_path)
    results = []
    for address in frame.ADDRESS:
        geocoded = geocoder.yahoo(address)
        print(geocoded)
        results.append([address, geocoded])
    return jsonify(results)
def getgeo_yahoo(ip_list):
    """Resolve each IP/location in ip_list via the Yahoo geocoder.

    Parameters
    ----------
    ip_list : list of str

    Returns
    -------
    list
        One [ip, lat, lng] entry per input; lat/lng are "" when the
        lookup returns no coordinates.
    """
    latlng_set = []
    total = len(ip_list)  # hoisted: invariant across the loop
    # enumerate replaces the hand-rolled `count = 1; ... count += 1`.
    for count, ip in enumerate(ip_list, start=1):
        geoinfo = geocoder.yahoo(ip)
        print("[yahoo] {} ({}/{}) country: {}".format(
            ip, count, total, geoinfo.json.get('country')))
        if geoinfo.latlng:
            latlng_set.append([ip, geoinfo.latlng[0], geoinfo.latlng[1]])
        else:
            print("{} not found".format(ip))
            latlng_set.append([ip, "", ""])
    return latlng_set
def getgeo_yahoo(ip_list):
    """Look up each entry of ip_list with the Yahoo geocoder.

    Returns a list of [ip, lat, lng] rows; unresolvable entries carry
    empty strings in both coordinate slots.
    """
    resolved = []
    position = 1
    for ip in ip_list:
        info = geocoder.yahoo(ip)
        print("[yahoo] {} ({}/{}) country: {}".format(
            ip, position, len(ip_list), info.json.get('country')))
        coords = info.latlng
        if coords:
            resolved.append([ip, coords[0], coords[1]])
        else:
            print("{} not found".format(ip))
            resolved.append([ip, "", ""])
        position += 1
    return resolved
def process_request(self, complete_location, geo_engine):
    '''
    Description
        Gets the latitude and longitude of a complete location, given a
        specific geocoding engine.
    -----------------------------
    Parameters
    -----------------------------
        complete_location : String
            The location's complete address (including city and country)
        geo_engine : String
            Engine geocoder will use. Can be any of the following
            (case-insensitive):
                - arcgis (free)
                - google (request limit)
                - bing (api-key)
                - yahoo
    -----------------------------
    Return
        coordinates : numpy array
            Array with the latitude and longitude (in that order);
            ``self.defult_location`` when nothing is found or on error.
    '''
    try:
        # Normalize once instead of calling .lower() in every branch.
        engine = geo_engine.lower()
        if engine == 'google':
            g = geocoder.google(complete_location)
        elif engine == 'arcgis':
            g = geocoder.arcgis(complete_location)
        elif engine == 'yahoo':
            g = geocoder.yahoo(complete_location)
        elif engine == 'bing':
            # SECURITY: this API key is committed in source; it should be
            # moved to configuration/environment and rotated.
            g = geocoder.bing(complete_location, key = 'NbUiTxM3ZsZof0mfRrts~iwCj0mVkMdct_nmYdghchg~AkdUKbDkypy_4zuKgSyuGcSxXw13Z2CNtjMMsYfjJOPZGZ1sEu1KpAp9xoW3h30a')
        else:
            raise Exception('No support for engine: ' + geo_engine)

        # If nothing is located, the default location is returned.
        if len(g) == 0:
            return self.defult_location
        return np.array(g.latlng)
    except Exception as e:
        print(e)
        return self.defult_location
def test_geocoder(address, method_name='geocoder'):
    """
    geocoder - can use various providers (Google, OSM, etc.)
    https://geocoder.readthedocs.org/
    - can output GeoJSON

    Tries OSM first, then falls back through Google, ArcGIS and Yahoo
    until one of them returns coordinates.
    """
    # The copy-pasted fallback chain is replaced by a loop over the same
    # providers in the same order; behavior (including the ValueError if
    # every provider fails to resolve) is unchanged.
    g_geocoder = geocoder.osm(address)
    for fallback in (geocoder.google, geocoder.arcgis, geocoder.yahoo):
        if g_geocoder.latlng != []:
            break
        g_geocoder = fallback(address)
    (latitude, longitude) = g_geocoder.latlng
    print('{0}\t{1} {2}'.format(method_name, latitude, longitude))
    print(pprint(g_geocoder.geojson))
isfirst = True headers = [] thereader = csv.reader(csvinput, delimiter=',') for row in thereader: counter +=1 if isfirst and HAS_HEADERS: tempoutput.append(row + ["lat", "lon"]) headers = row isfirst = False continue dictedinput = dict(zip(headers, row)) #OSM was not accurate, switch to google g = geocoder.yahoo(dictedinput['Address'] + "," + dictedinput['City'] + "," + dictedinput['State'] + "," + dictedinput['Zip Code']) if DEBUG: print g print "looking at ", dictedinput['Address'] + "," + dictedinput['City'] + "," + dictedinput['State'] + "," + dictedinput['Zip Code'] if g.lat and g.lng: tempoutput.append(row + [g.lat, g.lng]) successdict['success'] += 1 if DEBUG: print g.lat, g.lng else: g = geocoder.google(dictedinput['Address'] + "," + dictedinput['City'] + "," + dictedinput['State'] + "," + dictedinput['Zip Code']) if g.lat and g.lng: tempoutput.append(row + [g.lat, g.lng]) successdict['success'] += 1 if DEBUG:
def test_yahoo():
    """Smoke test: the Yahoo provider resolves the module-level
    `location` and reports the expected `city`."""
    result = geocoder.yahoo(location)
    assert result.ok
    assert result.city == city
def get_coords(text):
    """
    Returns (latitude, longitude, elevation) triple geocoded text.

    Geocodes *text* via OSM, prints the coordinates, then looks up the
    elevation with get_elevation (defined elsewhere in this project).
    """
    g = geocoder.osm(text)
    lat = g.json['lat']
    lon = g.json['lng']
    print("%.4f, %.4f" % (lat, lon))
    h = get_elevation(lat, lon)
    return (lat, lon, h)

# NOTE(review): everything below looks like interactive/REPL residue —
# the bare `g.json` expressions have no effect when run as a script.
g = geocoder.arcgis('Lysa hora, okres Frydek')
g.json
g = geocoder.yahoo('Lysa hora, okres Frydek')
g.json
h = get_elevation(g.json['lat'], g.json['lng'])
get_coords('Lysa hora, okres Frydek')

# https://cgiarcsi.community/data/srtm-90m-digital-elevation-database-v4-1/
# http://www.viewfinderpanoramas.org/dem3.html
# SECURITY NOTE(review): API key committed in source — should be moved to
# configuration/environment and rotated.
graphhopper_api_key = "3aa82fd4-5229-4983-9ec3-27a49339cd4e"
# Two sample GraphHopper geocode requests (Berlin, then "lysa hora").
geocode_url = "https://graphhopper.com/api/1/geocode?q=berlin&locale=de&debug=true&key=%s" % graphhopper_api_key
r = requests.get(geocode_url).json()
geocode_url = "https://graphhopper.com/api/1/geocode?q=lysa hora&locale=cs&debug=true&key=%s" % graphhopper_api_key
r = requests.get(geocode_url).json()
def crawl_geo_data(self, provider: str = 'arcgis') -> dict:
    """
    Crawl continuous geo data based on categorical geo data

    :param provider: str
        Name of the provider to use:
            -> arcgis: ArcGis
            -> google: Google Maps
            -> bing / baidu / freegeoip / osm / tomtom / yahoo
    :return: Dictionary containing the results of the geo-coding
        (location -> provider json on 'OK', 'NaN' on 'ERROR')
    :raises HappyLearningUtilsException: unsupported provider, or an
        unknown request status that is neither OK/ERROR/REQUEST_DENIED
    """
    # Dispatch table replaces the long if/elif chain; every provider is
    # invoked with identical arguments.  (The original `_g: geocoder`
    # annotations were bogus — geocoder is a module, not a type.)
    _providers: dict = {'arcgis': geocoder.arcgis,
                        'google': geocoder.google,
                        'bing': geocoder.bing,
                        'baidu': geocoder.baidu,
                        'freegeoip': geocoder.freegeoip,
                        'osm': geocoder.osm,
                        'tomtom': geocoder.tomtom,
                        'yahoo': geocoder.yahoo,
                        }
    _geo: dict = {}
    for loc in self.location:
        # As in the original, an unsupported provider raises on the first
        # iteration (an empty self.location still returns {}).
        if provider not in _providers:
            raise HappyLearningUtilsException(
                'Provider "{}" for geocoding not supported'.format(provider))
        _g = _providers[provider](location=loc, maxRows=1, method='geocode')
        _status: str = _g.status
        if _status.find('OK') >= 0:
            _geo.update({loc: _g.json})
        elif _status.find('ERROR') >= 0:
            # Mark failed lookups explicitly instead of dropping them.
            _geo.update({loc: 'NaN'})
        else:
            # REQUEST_DENIED is silently skipped; anything else is fatal.
            if _status.find('REQUEST_DENIED') < 0:
                raise HappyLearningUtilsException(
                    'Unknown request error "{}"'.format(_g.status))
    if self.full_path is not None:
        DataExporter(obj=_geo, file_path=self.full_path).file()
    return _geo
try: arcpy.AddMessage( "Getting coordinates from external source...(" + str(i + 1) + "/" + str(len(street)) + ")") if geocodeMethod == "google": g = geocoder.google(street[i] + " " + house[i] + ", " + city[i]) if geocodeMethod == "osm": g = geocoder.osm(street[i] + " " + house[i] + ", " + city[i]) if geocodeMethod == "geoOttawa": g = geocoder.ottawa(street[i] + " " + house[i] + ", " + city[i]) if geocodeMethod == "yahoo": g = geocoder.yahoo(street[i] + " " + house[i] + ", " + city[i]) xCoords.append(g.x) yCoords.append(g.y) except: arcpy.AddMessage(street[i] + " " + house[i] + ", " + city[i] + " not found") # add coordinates to .csv-file with open(os.path.dirname(inTable) + '/temp_misAddPos.csv', 'wb') as csvfile: spamwriter = csv.writer(csvfile, delimiter=';', quotechar='|', quoting=csv.QUOTE_MINIMAL)
isfirst = True headers = [] thereader = csv.reader(csvinput, delimiter=',') for row in thereader: counter += 1 if isfirst and HAS_HEADERS: tempoutput.append(row + ["lat", "lon"]) headers = row isfirst = False continue dictedinput = dict(zip(headers, row)) #OSM was not accurate, switch to google g = geocoder.yahoo(dictedinput['Address'] + "," + dictedinput['City'] + "," + dictedinput['State'] + "," + dictedinput['Zip Code']) if DEBUG: print g print "looking at ", dictedinput['Address'] + "," + dictedinput[ 'City'] + "," + dictedinput['State'] + "," + dictedinput[ 'Zip Code'] if g.lat and g.lng: tempoutput.append(row + [g.lat, g.lng]) successdict['success'] += 1 if DEBUG: print g.lat, g.lng else: g = geocoder.google(dictedinput['Address'] + "," + dictedinput['City'] + "," +
def getLocation(place):
    """Return the Yahoo geocoder result for *place*."""
    result = geocoder.yahoo(place)
    return result
def combine_score_info(mn_hospital, hosp_refiend_score_file, combined_file, interplate=False, parse_location=False):
    """Combine score location and score information in MN.

    Joins the refined hospital score table against the MN hospital table
    (matched by upper-cased hospital name) and writes a tab-separated
    combined file with location, type and rating columns.

    Parameters
    ----------
    mn_hospital : str
        Path to the tab-separated MN hospital file (indexed by column 1).
    hosp_refiend_score_file : str
        Path to the tab-separated refined score file.
    combined_file : str
        Output path for the combined tab-separated file.
    interplate : bool
        When True, fill missing patient_rating / percentage_of_10 values
        from the mean of hospitals with the same overall_rating.
    parse_location : bool
        When True, geocode addresses (Google, falling back to Yahoo) for
        rows whose lat/long are missing; exits the process if both fail.

    NOTE(review): uses the long-deprecated pandas ``.ix`` indexer
    throughout — this code requires an old pandas version.
    """
    score = pd.read_csv(hosp_refiend_score_file, sep='\t')
    hosp = pd.read_csv(mn_hospital, sep='\t', index_col=1)
    #print(score)
    combine = []
    for index, row in score.iterrows():
        name = row['name']
        search_name = name.upper()
        #print(name)
        if search_name in hosp.index:
            info = {}
            info['name'] = name
            info['city'] = row['city']
            info['county'] = hosp.ix[search_name, 'County Name']
            try:
                # Location looks like "<address> (<lat>, <long>)" — split
                # on commas; duplicate index entries return a Series whose
                # .split fails, so those records are skipped.
                loc_info = hosp.ix[search_name, 'Location'].split(',')
            except:
                #print(hosp.ix[search_name, 'Location'])
                print('duplicate records:', name)
                continue
            # Strip the (upper-cased) city name out of the street address.
            address = loc_info[0].replace(row['city'].upper(), '').strip()
            info['address'] = address
            try:
                lat = loc_info[1].split('(')[1]
                long = loc_info[2].strip(' )')
            except:
                # lat and long code are not provided...
                if parse_location:
                    print (name, address)
                    place = address + ', ' + row['city'] + ', MN'
                    ## if quote == 10:
                    ##     print("needs to wait for a minute")
                    ##     time.sleep(60) #
                    ##     quote = 0
                    ## quote += 1
                    # Google first; fall back to Yahoo when Google returns
                    # no parsed address.
                    location = geocoder.google(place)
                    parsed_address = location.address
                    if not parsed_address:
                        print('query yahoo...')
                        location = geocoder.yahoo(place)
                        parsed_address = location.address
                    else:
                        print('query google')
                    if not parsed_address:
                        # Both geocoders failed — abort the whole run.
                        print(name, address)
                        exit(1)
                    lat = location.lat
                    long = location.lng
                else:
                    lat = ''
                    long = ''
                    #print('no lat/long:', name)
            info['lat'] = lat
            info['lng'] = long
            info['zipcode'] = hosp.ix[search_name, 'ZIP Code']
            info['type'] = hosp.ix[search_name, 'Hospital Type']
            info['overall_rating'] = row['overall_rating']
            info['patient_rating'] = row['patient_rating']
            info['percentage_of_10'] = row['percentage_of_10']
            combine.append(info)
        elif name == 'Mayo Clinic Methodist Hospital':
            # Special-cased: this hospital is absent from the MN table, so
            # its location data is hard-coded here.
            info = {}
            info['name'] = name
            info['city'] = 'Rochester'
            info['county'] = 'OLMSTED'
            info['address'] = '201 West Center Street'.upper()
            info['lat'] = 44.0234914
            info['lng'] = -92.465934
            info['zipcode'] = 55902
            info['type'] = 'Acute Care Hospitals'
            info['overall_rating'] = row['overall_rating']
            info['patient_rating'] = row['patient_rating']
            info['percentage_of_10'] = row['percentage_of_10']
            combine.append(info)
        else:
            # cannot find hospital information.
            print ("not found:", name)
            continue
    combine = pd.DataFrame(combine)
    if interplate:
        # Fill missing ratings from the mean of the same overall_rating
        # group; hard-coded defaults (4.0 / 0.7) when the mean is NaN too.
        score_group = combine.groupby('overall_rating')
        score_mean = score_group.mean()
        print(score_mean)
        for index, row in combine.iterrows():
            score = row['overall_rating']
            patient_rating = row['patient_rating']
            percent = row['percentage_of_10']
            if math.isnan(patient_rating):
                patient_rating = score_mean.ix[score, 'patient_rating']
                if math.isnan(patient_rating):
                    print(row['name'])
                    combine.ix[index, 'patient_rating'] = 4.0
                else:
                    combine.ix[index, 'patient_rating'] = patient_rating
            if math.isnan(percent):
                percent = score_mean.ix[score, 'percentage_of_10']
                if math.isnan(percent):
                    print(row['name'])
                    combine.ix[index, 'percentage_of_10'] = 0.7
                else:
                    combine.ix[index, 'percentage_of_10'] = percent
    combine['patient_rating'] = combine['patient_rating'].map(lambda x:'%.0f' % x)
    combine.to_csv(combined_file, sep='\t', index=False, float_format='%.2f')