def getLatLon(codigo_pais, latitud, longitud, localizacion):
    """Resolve a (lat, lon, source_tag) triple for a tweet.

    Resolution order:
      1. Coordinates supplied by Twitter itself            -> tag "T"
      2. Nominatim lookup of the free-text location        -> tag "L"
      3. Country-code fallback after a failed location try -> tag "LM"
      4. Nominatim lookup of the country code alone        -> tag "C"

    Returns the three-element list, or "" when nothing could be resolved
    (kept as "" rather than None for backward compatibility with callers).
    """
    if latitud != "" and longitud != "":
        return [latitud, longitud, "T"]

    geolocator = Nominatim()

    def _lookup(query, tag):
        # geocode() returns None on failure; it never returns "" — the
        # original's extra `!= ""` checks were dead code.
        match = geolocator.geocode(query, timeout=60)
        if match is not None:
            return [match.latitude, match.longitude, tag]
        return ""

    if localizacion != "":
        result = _lookup(localizacion, "L")
        if result == "":
            # Free-text location unresolvable; fall back to the country code.
            result = _lookup(codigo_pais, "LM")
        return result
    if codigo_pais != "":
        return _lookup(codigo_pais, "C")
    return ""
def api_v1_canvas(request):
    """POST endpoint: resolve a location (free text or raw GPS string) and
    hand it to parse_places_api, returning the produced filename as JSON.

    text_location, when present, is geocoded via Nominatim; otherwise the
    raw gps_location string is used as-is.
    """
    # BUG FIX: `filename` was unbound when an exception fired before the
    # parse_places_api call, making the final HttpResponse raise NameError.
    filename = None
    if request.method == 'POST':
        try:
            geolocator = Nominatim()
            location = None
            access_token = request.POST.get('access_token')
            text_location = request.POST.get('text_location')
            gps_location = request.POST.get('gps_location').decode('utf8')
            if text_location:
                geo = geolocator.geocode(text_location)
                if hasattr(geo, 'latitude') and hasattr(geo, 'longitude'):
                    location = str(geo.latitude) + "," + str(geo.longitude)
            else:
                location = str(gps_location)
            filename = parse_places_api(location, access_token)
        except Exception as e:
            # Python 3 syntax (was the Python-2-only "except Exception,e").
            print(e)
    return HttpResponse(json.dumps({'success': True, 'filename': filename}),
                        content_type="application/javascript; charset=utf-8")
def set_location(set_type=None):
    """Render the set_location page; on POST, geocode the submitted address
    and persist the coordinates on the current user."""
    user = User.query.filter_by(id=g.user.id).first()
    if request.method == 'POST':
        input_location = request.form['address']
        geolocator = Nominatim()
        try:
            location = geolocator.geocode(input_location)
            user.loc_input = input_location
            # When geocode() returns None, the AttributeError below aborts
            # the commit and we fall through to the error rendering.
            user.loc_latitude = location.latitude
            user.loc_longitude = location.longitude
            db.session.commit()
        except Exception:
            # FIX: was a bare "except:", which also swallowed SystemExit /
            # KeyboardInterrupt.
            location = None
        return render_template(
            'set_location.html', location=location, type=set_type
        )
    loc_input = user.loc_input if set_type else None
    return render_template(
        'set_location.html', type=set_type, current_loc=loc_input
    )
def get_ll(postal_code):
    """Geocode *postal_code* and return "lat, lon" as a string, or None
    when the input is None or the lookup finds nothing."""
    if postal_code is None:  # FIX: was "== None"
        return None
    geolocator = Nominatim()
    # Nominatim seems to need full names here; abbreviations may not resolve.
    location = geolocator.geocode(postal_code)
    if location is None:  # FIX: was "== None"
        return None
    return str(location.latitude) + ', ' + str(location.longitude)
def tags():
    """Tag an image URL with Clarifai, then suggest two nearby venues via
    Foursquare based on the geocoded street address in the query string."""
    url = request.args["url"]
    clarifai_api = ClarifaiApi()  # assumes environment variables are set.
    result = clarifai_api.tag_image_urls(url)
    results = result['results'][0]["result"]["tag"]["classes"]
    # SECURITY NOTE(review): Foursquare client id/secret are hard-coded;
    # move them to environment variables / config before shipping.
    client = foursquare.Foursquare(
        client_id='JEK02X44TGMNJSE0VC1UBEB4FRNNW3UMFQ4IQOPI4BAR2GXA',
        client_secret='A2Z50VTUHHXEUYJBHCQKB1LXTNVVBYBQR4SDASVTXTWUMWXS')
    address = request.args["address"]  # address is a plain string
    geolocator = Nominatim()
    location = geolocator.geocode(address)
    # CLEANUP: was str(location.latitude) + str(", ") + ... — the literal
    # needs no str() wrapping.
    newLocation = str(location.latitude) + ", " + str(location.longitude)
    foursquare_dictionary = client.venues.explore(params={
        'll': newLocation,
        'v': '20160402',
        'query': results[0] + ',' + results[1] + ',' + results[2]})
    # Hoist the deep indexing that was repeated four times.
    items = foursquare_dictionary['groups'][0]['items']
    # First place to eat
    address1 = items[0]['venue']['location']['formattedAddress']
    name = items[0]['venue']['name']
    # Second place to eat
    address2 = items[1]['venue']['location']['formattedAddress']
    name1 = items[1]['venue']['name']
    return render_template('tags.html',
                           newAddress1=newAddress(address1), name=name,
                           newAddress2=newAddress(address2), name1=name1,
                           newLocation=newLocation)
def find_coordinates(self):
    """Get and set longitude and Latitude.

    Scrape the individual posting page; if no coordinates are found there,
    cascade precision: try the posted location, then the user location,
    else fall back to (0, 0).  Sets self.coordinates = [lat, lon].
    """
    self.coordinates = []
    geolocator = Nominatim()
    follow_page = requests.get(self.url)
    follow_soup = BeautifulSoup(follow_page.text, "html.parser")
    location = follow_soup.find("div", class_="viewposting")
    if location is not None:
        # Coordinates embedded in the page itself.
        lat = location['data-latitude']
        lon = location['data-longitude']
    else:
        lat = lon = None
        # FIX: geocode each candidate once — the original issued TWO
        # network calls per candidate (one for .latitude, one for .longitude).
        for query in (self.location, self.user_location):
            try:
                match = geolocator.geocode(query)
                lat, lon = match.latitude, match.longitude
                break
            except Exception:
                continue
        if lat is None:
            lat = 0  # 38.9047 would be DC
            lon = 0  # -77.0164
    self.coordinates.append(lat)
    self.coordinates.append(lon)
def set_description(sender, instance, created=False, **kwargs):
    """post-save hook: reverse-geocode the instance's location into a
    human-readable address and store it as the description.

    The lookup is attempted twice (the original had the whole block
    copy-pasted as a manual retry); on total failure a date-based
    placeholder description is stored instead.
    """
    if not created:
        return
    for _attempt in range(2):
        try:
            geolocator = Nominatim()
            lat = instance.location.latitude
            lon = instance.location.longitude
            print('.....{}, {}........'.format(lat, lon))
            loc = geolocator.reverse([lat, lon])
            print(loc.address)
            instance.description = loc.address
            instance.save()
            return
        except Exception:
            # Was a bare "except:"; retry once, then give up below.
            continue
    print('......didnt work.......')
    instance.description = 'Location created on {}'.format(instance.date_added)
    instance.save()
def get(self, height, weight, city, state):
    """Return (as a JSON string) the IDs of the 5 most similar players by
    cosine similarity over (height, weight, latitude, longitude)."""
    combined = pd.read_csv("newamazingdata.csv", encoding='ISO-8859-1')
    location = str(city) + ' ' + str(state)
    geolocator = Nominatim()
    # BUG FIX: was geocode(location[0]), which geocoded only the FIRST
    # CHARACTER of the "city state" string.
    place = geolocator.geocode(location)
    latitude = place.latitude
    longitude = place.longitude
    users = [float(height), float(weight), latitude, longitude]
    players = combined[["height", "weight", "latitude", "longitude"]]
    result = []
    for index in range(0, len(players)):
        result.append(1 - distance.cosine(users, players.iloc[index]))
    # Indices of the 5 highest similarities, best first.
    result = sorted(range(len(result)), key=lambda i: result[i])[-5:]
    result.reverse()
    ids = [combined.ID.iloc[index] for index in result]
    ids = str(ids)
    # FIX: close the output file deterministically.
    with open('reply.json', 'w') as outfile:
        json_stuff = json.dumps(ids)
        json.dump(json_stuff, outfile)
    return json_stuff
def geo_tweets():
    # Walk all tweets in `db`, geocode each tweet's 'geo' coordinates via
    # Nominatim, and save an enriched copy (with a 'place-mod' address block)
    # into `db1`, skipping tweets whose ids already exist there.
    # NOTE(review): Python 2 code (print statements); relies on module-level
    # db, db1 and not_exist().
    geolocator = Nominatim()
    count =1
    # NOTE(review): tweets_list is assigned but never used.
    tweets_list = db1.view('sentiment-analysis/get_tweet_ids')
    for doc in db.view('sentiment-analysis/get_alltweets'):
        tw=doc.value
        # not_exist() apparently returns the STRING "true", not a boolean.
        if (not_exist(tw['id'])== "true"):
            loc_point= tw['geo']
            #print loc_point
            try:
                location = geolocator.geocode(loc_point['coordinates'])
                #print type(location)
                try:
                    # location may be None; the attribute access below then
                    # raises and lands in the inner except.
                    print count ,location.address
                    modified_place={'geo_address': location.address}
                    place_update = {'place-mod': modified_place}
                    new_tw = tw.copy()
                    new_tw.update(place_update)
                    new_tw.update({'_id': tw['id_str']})
                    try:
                        db1.save(new_tw)
                    except:
                        print ("Tweet " + tw['id_str'] + " already exists !!!! " )
                except:
                    print ("Returned Location is Empty !!!! " )
            except GeocoderTimedOut as e:
                print("Error: geocode faied on input %s with message ")
                print loc_point['coordinates']
        else:
            print count, 'ALREADY EXIST!!'
        # NOTE(review): assuming the counter increments every iteration —
        # the collapsed source is ambiguous about this indent level.
        count =count+1
def getTrackCountryOfOrigin(billboard_df_final):
    """Reverse-geocode each row's (latitude, longitude) into a country and,
    for US artists, a state.

    Returns [track_country_of_origin, track_state_of_origin]: two lists
    aligned with the dataframe rows; unresolvable rows get "".
    """
    geolocator = Nominatim()
    track_state_of_origin = []
    track_country_of_origin = []
    for index_artist, row in billboard_df_final.iterrows():
        country = ""
        state = ""
        if (not pd.isnull(row['latitude'])) & (not pd.isnull(row['longitude'])):
            try:
                location = geolocator.reverse(
                    str(row['latitude']) + ',' + str(row['longitude']),
                    language='en')
                state = location.raw['address']['state']
                # Normalise a couple of odd spellings in the Nominatim data.
                if state == "Puerto Rico":
                    country = "Puerto Rico"
                else:
                    country = location.raw['address']['country']
                if country == "The Netherlands":
                    country = "Netherlands"
            except Exception:
                # FIX: py3 print() (was a py2 print statement) and a
                # narrowed except (was bare).  Missing 'state'/'country'
                # keys land here too — keep the row blank.
                print(row["Artist(s)"])
                country = ""
                state = ""
        track_country_of_origin.append(country)
        if country == "United States of America":
            track_state_of_origin.append(state)
        else:
            track_state_of_origin.append("")
    return [track_country_of_origin, track_state_of_origin]
def make_LatLonDict(tweet_lat_lon_dict, tweet_location_unique):
    """Geocode every unseen place name into (lat, lon), report failures,
    and dump the accumulated dictionary to a JSON file."""
    geolocator = Nominatim()
    # Turn the user defined location place name into a geographic (lat,lon) location
    for i, placeName in enumerate(tweet_location_unique):
        if placeName not in tweet_lat_lon_dict:
            # BUG FIX: placeGeo must be reset every iteration — previously a
            # geocode() exception left the PREVIOUS place's result (or an
            # unbound name on the first failure) to be stored for this place.
            placeGeo = None
            try:
                placeGeo = geolocator.geocode(placeName)
            except Exception as E:
                print("exception happened", type(E), E)
            if i % 20 == 0:
                print(i)
            if placeGeo is not None:
                tweet_lat_lon_dict[placeName] = (placeGeo.latitude, placeGeo.longitude)
            else:
                tweet_lat_lon_dict[placeName] = (None, None)
    # How many location place names were not able to be turned into (lat,lon)
    # locations?  Which names were they?
    print("The total (lat,lon) dictionary is %d items long." % (len(tweet_lat_lon_dict)))
    bad_place_names = [k for k, v in tweet_lat_lon_dict.items() if v == (None, None)]
    print("Of these, %d were unable to be converted into (lat,lon) coordinates. These were specifically the following locations: \n" % (len(bad_place_names)))
    print("\n".join(bad_place_names))
    # Write out the dictionary of place names and corresponding latitudes &
    # longitudes to a JSON file.
    with open('../data/Twitter_Zika_PlaceName_Geo_info.json', 'w') as twitterGeo_JSONFile:
        json.dump(tweet_lat_lon_dict, twitterGeo_JSONFile, indent=2)
def login():
    """Log a user into the PokeAPI from a JSON payload, anchored at the
    geocoded location (falling back to Düsseldorf, Germany)."""
    login_name = request.json['email']
    service = request.json['service']
    password = request.json['password']
    location = request.json['location']
    geolocator = Nominatim()
    location = geolocator.geocode(location)
    if not location:
        # Unresolvable user location: anchor at a known-good default.
        location = geolocator.geocode("Düsseldorf, Germany")
    if not location:
        # ROBUSTNESS FIX: previously an unhandled AttributeError (HTTP 500)
        # when even the fallback lookup failed (e.g. geocoder down).
        return jsonify({'status': 'error', 'message': 'Could not resolve a location.'})
    latitude = location.latitude
    longitude = location.longitude
    try:
        logged_in = pokeapi.login(service, login_name, password,
                                  lat=latitude, lng=longitude,
                                  app_simulation=True)
    except AuthException as e:
        return jsonify({'status': 'error', 'message': e.__str__()})
    time.sleep(1)
    if not logged_in:
        return jsonify({'status': 'error', 'message': 'Failed to login. If the Pokemon GO Servers are online, your credentials may be wrong.'})
    else:
        return jsonify({'status': 'ok'})
def current_temp_will(self, message=None, location=None):
    """what's the temperature (in _______): I know how to tell you the temperature!

    With no location, reports Lincoln, NE; otherwise geocodes *location*
    first.  The composed reply is cached via self.save() under
    "local_temp" or "<address>_temp" and spoken when a message is present.
    """
    geolocator = Nominatim()
    if location is None:
        forecast = forecastio.load_forecast(FORECAST_API_KEY, LINCOLN_LAT,
                                            LINCOLN_LNG, units="us")
        currently = forecast.currently()
        temp = currently.temperature
        feels_like = currently.apparentTemperature
        combined = "It's currently %sF and feels like %sF here in Lincoln, NE" % (temp, feels_like)
        self.save("local_temp", combined)
        if message:
            return self.say(combined, message=message)
    else:
        geolocation = geolocator.geocode(location)
        lat = geolocation.latitude
        lng = geolocation.longitude
        forecast = forecastio.load_forecast(FORECAST_API_KEY, lat, lng, units="us")
        currently = forecast.currently()
        temp = currently.temperature
        feels_like = currently.apparentTemperature
        full_address = geolocation.address
        combined = "It's currently %s degrees and feels like %s degrees in %s" % (temp, feels_like, full_address)
        self.save(full_address + "_temp", combined)
        if message:
            # CONSISTENCY FIX: return the reply like the no-location branch
            # does (the original dropped the return here).
            return self.say(combined, message=message)
def convertgeo(city):
    """Geocode *city* with Nominatim and return (latitude, longitude),
    or None when the lookup finds nothing."""
    match = Nominatim().geocode(city)
    if not match:
        return None
    return match.latitude, match.longitude
def geocode_w(txt):
    """Rate-limited Nominatim lookup: sleep roughly 16 seconds (with
    Gaussian jitter), geocode *txt*, and return the raw geopy result."""
    time.sleep(16 + scipy.randn(1))
    locator = Nominatim()
    print(txt)
    result = locator.geocode(txt)
    locator = 0  # drop the geocoder reference, exactly as the original did
    return result
def handle(self, *args, **options):
    # Management command: geocode each Event's free-text location (using a
    # LocationCache to avoid re-querying Nominatim) and store lat/lon on
    # the event.  A 5s sleep before every remote lookup keeps within
    # Nominatim's rate limits.
    geolocator = Nominatim()
    for i in filter(lambda x: x.location.strip(), Event.objects.filter(location__isnull=False)):
        if LocationCache.objects.filter(string=i.location).exists():
            location = LocationCache.objects.get(string=i.location)
        else:
            time.sleep(5)
            try:
                location = geolocator.geocode(i.location)
            except GeocoderTimedOut:
                location = None
            # Retry with just the parenthesised part of the location,
            # e.g. "Some Hall (Berlin)" -> "Berlin".
            if location is None and re.search("\(.*\)", i.location):
                time.sleep(5)
                try:
                    location = geolocator.geocode(re.search("(\(.+\))", i.location).group()[1:-1])
                except GeocoderTimedOut:
                    location = None
            # Cache the outcome — including failures (lat/lon None), so the
            # lookup is not repeated next run.
            location = LocationCache.objects.create(
                string=i.location,
                lat=location.latitude if location else None,
                lon=location.longitude if location else None,
            )
        if (location.lat, location.lon) == (None, None):
            continue
        i.lon = location.lon
        i.lat = location.lat
        i.save()
def parse(self,text,place="NYC"):
    # Turn free text into coordinates: self.preprocess() tags the address
    # parts; a 'complete' address is tried against Nominatim first with
    # GoogleV3 as fallback, while 'cross streets' go straight to GoogleV3.
    dict_addr,addr_type = self.preprocess(text)
    # SECURITY NOTE(review): unpickling a key file — fine for a trusted
    # local file, but pickle must never be used on untrusted input.
    google_key = pickle.load(open("google_api_key.pickle","r"))
    g_coder = GoogleV3(google_key)
    if addr_type == 'complete':
        # Reassemble the address from the tagged components, skipping any
        # that preprocess() did not find.
        combined_addr = []
        keys = ["AddressNumber","StreetName","StreetNamePostType","PlaceName","StateName","ZipCode"]
        for key in keys:
            try:
                combined_addr += [dict_addr[key]]
            except KeyError:
                continue
        addr = " ".join(combined_addr)
        n_coder = Nominatim()
        addr = self.pre_formatter(addr,dict_addr)
        lat_long = n_coder.geocode(addr)
        if lat_long:
            #means the request succeeded
            return lat_long
        else:
            lat_long = g_coder.geocode(addr)
            return lat_long
            #If None, means no address was recovered.
    if addr_type == 'cross streets':
        #handle case where dict_addr is more than 2 nouns long
        # NOTE(review): there is no separator before `place`, producing
        # e.g. "1st Ave and 2nd StNYC" — looks like a latent bug; confirm
        # the intended query format before changing.
        cross_addr = " and ".join(dict_addr) + place
        try:
            lat_long = g_coder.geocode(cross_addr)
            return lat_long
        except geopy.geocoders.googlev3.GeocoderQueryError:
            return None
def address_2_lonlat_hack_dev(x):
    """
    Convert an address to [lat, lon], trying the Google Maps API first and
    falling back to geopy/Nominatim.  On a rate-limit connection error the
    call sleeps a minute and retries itself.  Returns [None, None] on
    failure.
    """
    print(x)
    time.sleep(1)  # crude per-call throttle to stay under API limits
    # --------- API 1) GOOGLE MAP API
    results = get_google_results(x)
    print('google API results :', results)
    if [results['latitude'], results['longitude']] != [None, None]:
        return [results['latitude'], results['longitude']]
    # --------- API 2) GEOPY API
    try:
        geolocator = Nominatim()
        location = geolocator.geocode(x, timeout=3)
        print(location.latitude, location.longitude)
        return [location.latitude, location.longitude]
    except Exception as e:
        print(e)
        if str(e) == '[Errno 61] Connection refused':
            print('meet API request limit, try again...')
            print('sleep 1 min ...')
            time.sleep(60)
            # BUG FIX: the retry's result was discarded, so the caller got
            # an implicit None after a rate-limit retry.
            return address_2_lonlat_hack_dev(x)
        print('fail to convert address to lon & lat ')
        return [None, None]
def geolocator():
    """Geocode a fixed list of Nepali districts, echoing an HTML table row
    per district, and return the list of (latitude, longitude) pairs."""
    table_string = ""
    listAddress = []
    geolocator = Nominatim()
    locationsList = ['Banke', 'Makwanpur', 'Lalitpur']
    for i in locationsList:
        table_string += "<tr>" + \
            "<td>" + \
            "".join(i) + \
            "</td>" + \
            "</tr>\n"
        sys.stdout.write(table_string)
        location = geolocator.geocode(i)
        # NOTE(review): the original held free-standing HTML string literals
        # here (a template for a javascript loadMap(lat, lon) link); they
        # were no-op expression statements and have been dropped.
        print(location.latitude, location.longitude)
        # BUG FIX: was `a = print(...)` followed by listAddress.append(a),
        # which appended None for every district; store the coordinates.
        listAddress.append((location.latitude, location.longitude))
    return listAddress
def get_gps(address):
    """Geocode *address*; returns the geopy Location, or None on timeout
    or any other geocoder failure."""
    geolocator = Nominatim()
    try:
        location = geolocator.geocode(address, timeout=5)
    except Exception:
        # FIX: was a bare "except:", which also swallowed SystemExit /
        # KeyboardInterrupt.
        return None
    return location
def get_adr(gps):
    """Reverse-geocode *gps* (lat/lon) into a street-address string,
    or None on failure."""
    geolocator = Nominatim()
    try:
        location = geolocator.reverse(gps, timeout=5)
    except Exception:  # FIX: was a bare "except:"
        return None
    if location is None:
        # ROBUSTNESS FIX: a None result previously raised AttributeError
        # outside the try on the ".address" access.
        return None
    return location.address
def CalculateDistance(place,city_list,Path_to_Take,Distance,length):
    # Greedy nearest-neighbour tour builder (Python 2): from `place`, find
    # the closest city in city_list by vincenty miles, append it to
    # Path_to_Take, and recurse until `length` stops are collected, finally
    # closing the loop back to the starting city.
    # NOTE(review): newlist[1] (not [0]) is taken because `place` itself is
    # still in city_list at distance ~0; city_list.remove(city_list[0])
    # assumes the current place sits at index 0 — confirm with callers.
    geolocator = Nominatim();
    main_location = geolocator.geocode(place, exactly_one=True, timeout=None);
    main_coordinates = (main_location.latitude,main_location.longitude);
    list_toSort = [];
    # Distance from `place` to every remaining city.
    for x in city_list:
        place_location = geolocator.geocode(x, exactly_one=True, timeout=None);
        place_coordinates = (place_location.latitude,place_location.longitude);
        print place_coordinates;
        result = vincenty(main_coordinates,place_coordinates).miles
        print result;
        list_toSort.append({'Name':x,'Distance':result});
    newlist = sorted(list_toSort, key=lambda k: k['Distance'])
    Path_to_Take.append(newlist[1]);
    # NOTE(review): Distance is a plain int/float parameter, so this
    # accumulation is local to the current recursion frame.
    Distance += newlist[1]['Distance']
    city_list.remove(city_list[0]);
    if len(Path_to_Take) < length:
        CalculateDistance(newlist[1]['Name'],city_list,Path_to_Take,Distance,length);
    if len(Path_to_Take) == length:
        # Close the tour: distance from the first stop back to the last.
        main_location = geolocator.geocode(Path_to_Take[0]['Name'], exactly_one=True, timeout=None);
        main_coordinates = (main_location.latitude,main_location.longitude);
        place_location = geolocator.geocode(Path_to_Take[length-1]['Name'], exactly_one=True, timeout=None);
        place_coordinates = (place_location.latitude,place_location.longitude);
        result = vincenty(main_coordinates,place_coordinates).miles
        Path_to_Take.append({'Name':Path_to_Take[0]['Name'],'Distance':result});
def get_nearest(user_location, address_list, maps_api_key=None):
    '''
    This function returns the nearest address to the user location. It
    compares the user location with an existing list of addresses.

    Args:
        user_location (string): can be either the zip code or address.
        maps_api_key (string): optional google maps api key.
        address_list (tuple): list of addresses

    Returns:
        string: the nearest address to the user_location

    Raises:
        TypeError: if address_list is not a tuple.
        Exception: if address_list holds fewer than two addresses.
    '''
    # Guard clauses (the original buried these raises at the function end).
    if not isinstance(address_list, tuple):
        raise TypeError('Second parameter must be a tuple')
    if len(address_list) <= 1:
        # FIX: grammar — was "Tuple must be contain more than one address".
        raise Exception('Tuple must contain more than one address')
    if maps_api_key:
        geolocator = GoogleV3(maps_api_key, timeout=10)
    else:
        geolocator = Nominatim(timeout=10)
    user_location = geolocator.geocode(user_location)
    user_latlon = (user_location.latitude, user_location.longitude)
    geo_locations = [geolocator.geocode(address) for address in address_list]
    # Keyed by distance, so two addresses at the exact same distance
    # collapse to one entry — acceptable for "nearest" semantics.
    distance_dict = {vincenty((address.latitude, address.longitude),
                              user_latlon).miles: address
                     for address in geo_locations if address is not None}
    min_distance = min(distance_dict.items(), key=lambda k: k[0])
    return min_distance[1].address
def getGeo(self, lon, lat):
    """Resolve (lon, lat) to a GeoAssign record, consulting the local DB
    cache first and falling back to a Nominatim reverse lookup.

    Returns the record (with a 'cached' flag set accordingly), or None
    when Nominatim has no address for the coordinates.
    """
    if self.geocoderCache:
        try:
            cached = self.db.get(GeoAssign, {'lat': lat, 'lon': lon})
        except GeoAssign.DoesNotExist:
            pass
        else:
            cached['cached'] = True
            return cached
    if self.printStatus:
        print('lon: ' + str(lon) + ', lat: ' + str(lat) +
              ' not in cache - start lookup at Nominatim-API')
    response = Nominatim().reverse([lat, lon], timeout=20)
    if 'address' not in response.raw:
        # Got no results (i.e. the coordinates are incorrect).
        return None
    record = GeoAssign({
        'lat': lat,
        'lon': lon,
        'payload': response.raw['address']
    })
    self.db.save(record)
    self.db.commit()
    record['cached'] = False
    return record
def getTweets(self):
    """Search Twitter for up to 100 recent English tweets around the
    configured zip code / radius and append their text to twitter_data.txt.

    Returns self.status (set True once the fetch succeeded).
    """
    auth = tweepy.OAuthHandler(self.consumer_key, self.consumer_secret)
    auth.set_access_token(self.access_token, self.access_token_secret)
    api = tweepy.API(auth)
    geolocator = Nominatim()
    location = geolocator.geocode(self.zip_code)
    geo_code = str(location.latitude) + "," + str(location.longitude) + "," + self.radius + "mi"
    public_local_tweets = api.search(lang="en", geocode=geo_code, count=100)
    # FIX: print() call (was a Python-2 print statement).
    print(public_local_tweets)
    count = len(public_local_tweets)
    twitter_data = ""
    for each in range(0, count - 1):  # NOTE: skips the last tweet, as before
        twitter_data += public_local_tweets[each].text + " , " + "\n"
    self.status = True
    # FIX: close the file deterministically (was left open).
    with open("twitter_data.txt", "r+") as twitter_data_file:
        twitter_data = twitter_data.encode('utf-8').strip()
        twitter_data_file.write(twitter_data)
    return self.status
def get_address(location):
    """Reverse-geocode a (latitude, longitude) pair into an address string;
    returns "" when latitude is missing."""
    latitude, longitude = location
    address = ""
    if latitude is not None:
        geolocator = Nominatim()
        # BUG FIX: reverse() takes ONE query argument; the original passed
        # longitude as the second positional argument, which geopy treats
        # as `exactly_one` — the longitude was silently dropped.
        address = geolocator.reverse((latitude, longitude)).address
    return address
def claim_map(request, claim_id):
    """Render a small map around a claim's address in Uman, Ukraine.

    Tries Nominatim first; on any failure falls back to the (pygeocoder)
    Geocoder.  Exposes lat/lon plus a small bounding box to the maps.html
    template via locals().
    """
    claim = Claims.objects.get(pk=claim_id)
    address = claim.address
    try:
        geolocator = Nominatim()
        # NOTE: .encode('utf-8') + str concatenation implies Python 2 here.
        location = geolocator.geocode(address.encode('utf-8') + ", Умань, Україна")
        lat = location.latitude
        str_lat = str(lat)
        min_lat = str(lat - 0.0008)
        max_lat = str(lat + 0.0008)
        lon = location.longitude
        str_lon = str(lon)
        min_lon = str(lon - 0.00117)
        max_lon = str(lon + 0.00117)
    except Exception:
        # FIX: was a bare "except:".  Fall back to the secondary geocoder.
        results = Geocoder.geocode(address.encode('utf-8') + ", Умань, Україна")
        lat = results[0].coordinates[0]
        str_lat = str(lat)
        min_lat = str(lat - 0.0008)
        max_lat = str(lat + 0.0008)
        lon = results[0].coordinates[1]
        str_lon = str(lon)
        min_lon = str(lon - 0.00117)
        max_lon = str(lon + 0.00117)
    return render(request, 'maps.html', locals())
def example1():
    '''Simple Geocoding'''
    # Swap Nominatim for whichever geopy engine you want to use.
    from geopy.geocoders import Nominatim
    engine = Nominatim()
    loc = engine.geocode('1400 S Joyce St Arlington, VA 22202')
    # FIX: print() works on both Python 2 (single argument) and Python 3;
    # the original `print loc.address` statement was Python-2-only.
    print(loc.address)
def getCoordinatesForAllData(sourceUrl, patternUrl):
    """Fill latitudetest/longitudetest on every event document that lacks
    a latitude, by geocoding its locationCity.  Returns the mutated data."""
    data = EventsPortal.getAllEventsData(sourceUrl, patternUrl)
    # PERF FIX: hoisted — a fresh Nominatim instance was built per document.
    geolocator = Nominatim()
    for docs in data:
        if docs['latitude'] is None:  # FIX: was "== None"
            location = geolocator.geocode(docs['locationCity'])
            docs['latitudetest'] = location.latitude
            docs['longitudetest'] = location.longitude
    return data
def getCoordinates(l):
    """Geocode *l* and return a Javascript-style literal of the form
    "{latitude: <lat> , longitude: <lon> }"."""
    geolocator = Nominatim()
    location = geolocator.geocode(l, timeout=20)
    coord = "{latitude: %s , longitude: %s }" % (location.latitude, location.longitude)
    # FIX: print() call (the original py2 print statement breaks Python 3).
    print(coord)
    return coord
import os
import glob
import folium
import gpxpy
import numpy as np
import pandas as pd
from geopy.geocoders import Nominatim

# Script preamble: geocode the map-centering location, then collect GPX/FIT
# track files from the current directory (FIT files are first converted to
# CSV by fit_to_csv.py and archived into ./fit_files).
geolocator = Nominatim()
location = geolocator.geocode(
    "Montreal Quebec")  # Change this to change location centering
lat_check = float(location.raw['lat'])
lon_check = float(location.raw['lon'])
data = glob.glob('*.gpx')
fitdata = glob.glob('*.fit')
if not len(fitdata) == 0:
    print('Converting Garmin FIT files')
    # NOTE(review): os.system with fixed shell strings — mkdir/mv are
    # POSIX-only, and failures are silently ignored.
    os.system('python fit_to_csv.py')
    os.system('mkdir fit_files')
    os.system('mv *.fit ./fit_files')
csvdata = glob.glob('*.csv')
lat = []
lon = []
all_lat = []
def handle_find_venue_by_location_message(self, conversation_response):
    """ The handler for the findVenueByLocation action defined in the Watson Conversation dialog.
    Queries Foursquare for doctors based on the speciality identified by Watson Conversation
    and the location entered by the user.
    Parameters
    ----------
    conversation_response - The response from Watson Conversation
    """
    # NOTE(review): Python 2 code (print statements).
    self.calltraceNo = self.calltraceNo + 1
    print str(self.calltraceNo) + ' CALLNO handle_find_venue_by_location_message'
    if self.foursquare_client is None:
        return 'Please configure Foursquare.'
    # Get the eventType from the context to be used in the query to Foursquare
    query = ''
    if 'eventType' in conversation_response['context'].keys() and conversation_response['context']['eventType'] is not None:
        query = query + conversation_response['context']['eventType'] + ' '
    query = query  # + 'Doctor'
    # Get the location entered by the user to be used in the query.
    # NOTE(review): `location` is assembled from sys-location entities but
    # never used below — the raw input text is geocoded instead; confirm
    # whether that is intended.
    location = ''
    if 'entities' in conversation_response.keys():
        for entity in conversation_response['entities']:
            if (entity['entity'] == 'sys-location'):
                if len(location) > 0:
                    location = location + ' '
                location = location + entity['value']
    # full address lookup is not supported by Foursquare
    #location = conversation_response['input']['text']
    #address="500 East 4th St Austin, Texas"
    address = conversation_response['input']['text']
    # Geocode the user's raw text into a "lat,lon" string for Foursquare.
    geolocator = Nominatim()
    locationll = geolocator.geocode(address)
    latitude = locationll.latitude
    longitude = locationll.longitude
    print ' latitude ' + str(latitude) + ' longitude ' + str(longitude)
    print locationll
    local = str(latitude) + ',' + str(longitude)
    params = {
        'query': query,
        #'near': location,
        'll': local,
        #'radius' : 5000
        'radius': self.travelDistance
    }
    venues = self.foursquare_client.venues.search(params=params)
    if venues is None or 'venues' not in venues.keys() or len(venues['venues']) == 0:
        reply = 'Sorry, I couldn\'t find any venues near you.'
    else:
        # Recommend only the first venue returned.
        for venue in venues['venues']:
            reply = 'How about ' + venue['name'] + ' ? \n';
            break
        # The triple-quoted block below is disabled multi-venue listing code.
        """ reply = 'Here is what I found:\n';
        count = 0
        for venue in venues['venues']:
            count = count + 1
            if len(reply) > 0:
                reply = reply + '\n'
            reply = reply + str(count) + ') \t ' + venue['name'] + ' --- has tips ' + str(venue['stats']['tipCount'])
        reply = reply + '\n What venue number would you like to go ?'
        """
    return reply
from geopy.geocoders import Nominatim

# Interactive demo: look a user-supplied place up with Nominatim and print
# its coordinates, resolved address and raw response payload.
geolocator = Nominatim(user_agent='MMCA1629')
place = input("Place: ")
gn = geolocator.geocode(place)
lan = gn.latitude
lon = gn.longitude
loc = "{},{}".format(lan, lon)
separator = "\n++++++++++++++++++\n"
print(loc + separator)
print(gn.address + separator)
print(gn.raw)
from datetime import datetime
import dash_table as dt
import io
import requests
from plotly import graph_objs as go
import plotly.plotly as py
from plotly.graph_objs import *
from uszipcode import Zipcode
from uszipcode import SearchEngine
from geopy.geocoders import Nominatim

# App bootstrap for the Movin'OnUp Dash application: geocoder, zip-code
# search engine, Dash app/server handles, and API credentials.
#global ranked_data
#global map_data, ranked_data
geolocator = Nominatim(user_agent="Movin\'OnUp")  #"specify_your_app_name_here"
#data from US CENSUS
search = SearchEngine(simple_zipcode=True)
app = dash.Dash(__name__)  #__name__, external_stylesheets=external_stylesheets
app.title = 'Movin\'OnUP!'
server = app.server
# SECURITY NOTE(review): the secret key falls back to a hard-coded literal
# and the Mapbox token is committed in source — both should come only from
# the environment.
server.secret_key = os.environ.get("SECRET_KEY", "secret")
# API keys and datasets
mapbox_access_token = 'pk.eyJ1IjoiZHluZGwiLCJhIjoiY2p4M2gyYm9wMDBzbDRhbmxzYWMya2tvZCJ9.xWu9JsGNMrFmk6yiydXlqw'
# loading data
# path = '/Users/dmlee/Desktop/Insight_DS/data/'
7, 'My City', 'My State', 'My Country', ' My Name', 5, 'My Continent'
]
# NOTE(review): the lines above are the tail of a df_ch_temp list literal
# that begins before this chunk.
df_ch = df_ch_temp.T

# To close the pandas tutorial we will do an interesting example.
# The goal is to convert each address in the dataframe to its corresponding
# geographical location, in latitude and longitude.
# This process is called GeoCoding.
# We will add the Latitude and Longitude columns,
# but we will need an external package called geopy.
import geopy
dir(geopy)
from geopy.geocoders import Nominatim
nom = Nominatim()
location = nom.geocode(
    'Rua Sebastião Antônio Carlos, 260, Belo Horizonte, Minas Gerais, Brasil')
location  # no-op in a script; shows the result in a REPL/notebook
# Sometimes it might return a None object, because it was not able to find
# the specified address.
print(location.latitude, location.longitude)
df = pd.read_csv('supermarkets.csv')
# Update the address column to have the string address representation
# accepted by the geocoder.
df['Address'] = df['Address'] + ', ' + df['City'] + ', ' + df[
    'State'] + ', ' + df['Country']
# pandas makes it easy to apply functions and methods to the dataframe,
# without having to iterate through columns and rows.
# geopy demo: forward-geocode one address, then reverse-geocode two
# coordinate pairs (near Keelung, Taiwan).
from geopy.geocoders import Nominatim

geolocator = Nominatim()
location = geolocator.geocode("175 5th Avenue NYC")
print(location.address)

# CLEANUP: reuse the existing geolocator — the original constructed a
# second, identical Nominatim() instance here for no reason.
location = geolocator.reverse("25.12786, 121.7327")
print(location.address)
location = geolocator.reverse("25.12786, 121.7680")
print(location.address)
    param = raw_input("Enter a domain name: ")
    propagation.do_call(param)
elif choice == "24":
    # URL-decode a user-supplied string.
    param = raw_input("Please enter a url string: ")
    print urllib.unquote_plus(param)
elif choice == "25":
    # GeoIP country lookup via the MaxMind REST API.
    # SECURITY NOTE(review): MaxMind credentials are hard-coded in the curl
    # command here and in option 26 — move them to configuration.
    param = raw_input("Please enter an IP address: ")
    erg = shell("curl -u \"141447:Udkm6zdeXYgJ\" -X GET https://geoip.maxmind.com/geoip/v2.1/country/%s?pretty" % param)
    for i in erg.output():
        print "\033[0;249;0m" + i + "\033[0;249;0m"
elif choice == "26":
    # GeoIP city lookup, then reverse-geocode the returned lat/lon into a
    # street address with Nominatim.  The lat/lon values are scraped out of
    # the pretty-printed JSON by fixed-offset string slicing — fragile.
    georef = []
    param = raw_input("Please enter an IP address: ")
    erg = shell("curl -u \"141447:Udkm6zdeXYgJ\" -X GET https://geoip.maxmind.com/geoip/v2.1/city/%s?pretty" % param)
    for i in erg.output():
        print i
        if "latitude" in i:
            georef.append(i.lstrip()[12:-2])
        if "longitude" in i:
            georef.append(i.lstrip()[13:-2])
    geolocator = Nominatim(user_agent="specify_your_app_name_here")
    location = geolocator.reverse(georef)
    print "\n"
    print "\033[0;249;0mLast known address to which IP address is assigned: \033[0;249;0m"
    print "\n"
    print "\033[0;249;0m" + (location.address) + "\033[0;249;0m"
def on_status(self, status):
    """Tweepy stream callback: filter, enrich (geocode + gender lookup +
    sentiment score) and persist incoming tweets and their authors.

    Skips tweets mentioning stolen/star keywords; de-duplicates via
    Tweet.objects.filter / Twitter_User.objects.filter before saving.
    """
    url = "https://gender-api.com/get?name="
    # SECURITY NOTE(review): the gender-api key is hard-coded; move it to
    # settings/environment.
    key = "&key=946d64a0022933dec936848d4e68fb30c588ddd8e1d19292603b709ce0be2539"
    geopy.geocoders.options.default_timeout = None
    geolocator = Nominatim(user_agent="trek_smcc")
    if 'stolen' not in status.text and 'star' not in status.text and 'Stolen' not in status.text and 'Star' not in status.text:
        # Tweet timestamp (falls back to "now" when missing).
        try:
            tw_created_at = status.created_at
        except Exception:
            tw_created_at = datetime.now()
        tw_year = tw_created_at.strftime("%Y")
        tw_month = tw_created_at.strftime("%m")
        tw_day = tw_created_at.strftime("%d")
        tw_id = status.id_str
        # Prefer the full (extended / retweeted) text where available.
        try:
            tw_text = status.extended_tweet['full_text']
        except Exception:
            tw_text = status.text
        if hasattr(status, 'retweeted_status'):
            try:
                tw_text = status.retweeted_status.full_text
            except Exception:
                tw_text = status.retweeted_status.text
        if (tw_text.split()[0] == 'RT'):
            tw_text = ' '.join(tw_text.split()[1:])
        # Strip @mentions and https tokens.
        # NOTE(review): removing from wlist while iterating it skips the
        # element after each removal — kept as-is to preserve behaviour.
        wlist = tw_text.split()
        for word in wlist:
            if word[0] == '@':
                wlist.remove(word)
        tw_text = ' '.join(wlist)
        for word in wlist:
            if word.split(':')[0] == 'https':
                wlist.remove(word)
        tw_text = ' '.join(wlist)
        tw_user = status.user.id_str
        tweet_user_location = status.user.location
        # Geocode the author's profile location.
        tw_longitude = None
        tw_latitude = None
        try:
            # BUG FIX: was `if tweet.tweet_user_location != None:` — the
            # name `tweet` is undefined, so a NameError sent EVERY tweet
            # down the except path and coordinates were always None.
            if tweet_user_location is not None:
                location = geolocator.geocode(tweet_user_location)
                if location is not None:
                    tw_longitude = location.longitude
                    tw_latitude = location.latitude
        except Exception:
            tw_longitude = None
            tw_latitude = None
        tw_place = None  #status.place.__dict__
        tw_retweet = status.retweeted
        try:
            tw_media = status.entities.media.__dict__
        except Exception:
            tw_media = None
        try:
            tw_hashtags = status.entities.hashtags.__dict__
        except Exception:
            tw_hashtags = None
        try:
            tw_psensitive = status.possibly_sensitive
        except Exception:
            tw_psensitive = 0
        tw_score = af.score(tw_text)
        print(tw_score)
        # Gender lookup by first name via gender-api.com.
        tweet_user_user_name = status.user.name
        first_name = tweet_user_user_name.split()[0]
        URL = url + str(first_name) + key
        response = requests.get(URL)
        response = response.json()
        tw_user_gender = response["gender"]
        print(tw_user_gender)
        print(first_name)
        user_id = status.user.id
        user_screen_name = status.user.screen_name
        user_name = status.user.name
        user_location = status.user.location
        user_description = status.user.description
        # Persist the tweet unless an identical row already exists.
        if not Tweet.objects.filter(
                tweet_created_at=tw_created_at, tweet_id=tw_id,
                tweet_text=tw_text, tweet_user=tw_user,
                tweet_longitude=tw_longitude, tweet_latitude=tw_latitude,
                tweet_place=tw_place, tweet_retweeted_status=tw_retweet,
                tweet_media=tw_media, tweet_hashtags=tw_hashtags,
                tweet_possibly_sensitive=tw_psensitive,
                tweet_score=tw_score,
                tweet_user_user_name=tweet_user_user_name,
                tweet_user_location=tweet_user_location,
                tweet_year=tw_year, tweet_month=tw_month,
                tweet_day=tw_day):
            t = Tweet(tweet_created_at=tw_created_at, tweet_id=tw_id,
                      tweet_text=tw_text, tweet_user=tw_user,
                      tweet_longitude=tw_longitude,
                      tweet_latitude=tw_latitude, tweet_place=tw_place,
                      tweet_retweeted_status=tw_retweet,
                      tweet_media=tw_media, tweet_hashtags=tw_hashtags,
                      tweet_possibly_sensitive=tw_psensitive,
                      tweet_score=tw_score,
                      tweet_user_user_name=tweet_user_user_name,
                      tweet_user_location=tweet_user_location,
                      tweet_year=tw_year, tweet_month=tw_month,
                      tweet_day=tw_day)
            print(t)
            t.save()
        # Persist the author unless an identical row already exists.
        if not Twitter_User.objects.filter(
                t_id=user_id, t_screen_name=user_screen_name,
                t_user_name=user_name, t_user_location=user_location,
                t_user_description=user_description,
                t_user_gender=tw_user_gender):
            u = Twitter_User(t_id=user_id,
                             t_screen_name=user_screen_name,
                             t_user_name=user_name,
                             t_user_location=user_location,
                             t_user_description=user_description,
                             t_user_gender=tw_user_gender)
            print(u)
            # BUG FIX: was u.save(u) — passing the instance as the first
            # positional argument of Model.save() sets force_insert.
            u.save()
import requests #access APIs import folium #create map import pandas as pd #data manipulation import matplotlib.pyplot as plt #scatterplots from math import sin, cos, sqrt, asin, atan2, pow, acos #operations to convert spherical to cartesian import utm #utm module for lat/lon to x/y from geopy.geocoders import Nominatim #geolocator data from datetime import datetime #date for API requests df = pd.DataFrame(columns=['name', 'population', 'zip'], data=[]) df = df[['zip', 'population']] df = df.sort_values(by=['zip']).reset_index(drop=True) df = df.astype(int) address = 'Cleveland, OH' geolocator = Nominatim(user_agent="cle_bars") location = geolocator.geocode(address) latitude = location.latitude longitude = location.longitude map_cle = folium.Map(location=[latitude, longitude], zoom_start=12) df['lat'] = [] df['lon'] = [] url = 'https://api.foursquare.com/v2/venues/explore' params = dict( client_id='DYLLTWFBCB0RPXB3RKFNAWYZGXGJMGCKPGMPG4LEKLQ4MFSL', client_secret='TBKXLTH2RESZKL5FFAF1I5HKQF1AERA0WKRKXY444YCHT1KO', v=datetime.today().strftime('%Y%m%d'), ll='41.5051613,-81.6934446',
def getCoords(address):
    """Resolve a free-form address to a (latitude, longitude) pair.

    Performs a live Nominatim lookup; if the address cannot be resolved,
    geocode() returns None and the attribute access raises AttributeError,
    exactly as before.
    """
    locator = Nominatim(user_agent="Darksky API")
    match = locator.geocode(address)
    return match.latitude, match.longitude
import json from getUri import urify import os from geopy.geocoders import Nominatim path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)) ) inputFile = open( path + "/Monument/CleanedMonument.geojson", "r" ); geolocator = Nominatim(user_agent="1O3B0hu2EPPisdXJvpDFzDM6WOt3xWQQ") nativeFile = json.load( inputFile ); definitiveFile = [] temp = {} counters = {} for feature in nativeFile: if ( ("comune" in feature) and (feature["comune"].lower() == "milano") ): latitude = feature["wgs84_x"]; longitude = feature["wgs84_y"]; nome = feature["denominazione"]; autore = feature["autore"]; definizione = feature["definizione"]; abstract = feature["abstract"]; if ( str(definizione) != "None" ): definizione = definizione.capitalize(); else: definizione = "void" if ( str(abstract) != "None" ): abstract = abstract; else: abstract = "void"
from django.contrib.auth import get_user_model from django.shortcuts import get_object_or_404 from rest_framework import status from rest_framework.exceptions import ValidationError from order.tasks import send_notification_when_canceled_by_customer, send_notification_when_canceled_by_courier from order.models import OrderHolder, Order from order.enums import OrderStatus, CheckOutStatus from order.tasks import send_order_list_email from user.enums import UserRole User = get_user_model() User = get_user_model() geolocator = Nominatim(user_agent="yam_com") # Class for some routing staff class Route: # Method for creating JSON route when Courier retrieving Order def create_route_for_courier(obj): if not obj.order_holder.address: restaurant_location = geolocator.geocode( obj.food.restaurant.address) customer_location = geolocator.geocode( obj.order_holder.customer.address) elif obj.order_holder.address: restaurant_location = geolocator.geocode( obj.food.restaurant.address) customer_location = geolocator.geocode(obj.order_holder.address)
def handle_find_food(user_id, context, sentence, nounPhrase, message, incomingMessage, stage, location_from_memory=0): if stage == 'receive_request': # "Stage 1" contextNow = {'context':'find-food', 'location': None, 'coordinates': None, 'terms': nounPhrase, 'location_from_memory': location_from_memory } Mongo.add_context(users, g.user, contextNow) FB.send_message(app.config['PAT'], user_id, "Can you send me your location? :D") if len(g.user['yelp_location_history']) > 0: FB.send_quick_replies_yelp_suggest_location(app.config['PAT'], user_id, get_recent_locations_yelp()) return 'pseudo' elif stage == 'receive_location_gps': # "Stage 2-GPS" if location_from_memory == 1: Mongo.update_context(users, g.user, 'find-food', 'location_from_memory', 1) location = message['data'] Mongo.update_context(users, g.user, 'find-food', 'coordinates', location) FB.send_message(app.config['PAT'], user_id, NLP.oneOf(NLP.looking_replies)) FB.show_typing(app.config['PAT'], user_id) result = yelp_search(context['terms'], None, location) FB.show_typing(app.config['PAT'], user_id, 'typing_off') if result['status'] == 1: FB.send_message(app.config['PAT'], user_id, "Okay, I've found %s places:"%(len(result['businesses']))) FB.send_yelp_results(app.config['PAT'], user_id, result['businesses']) FB.send_quick_replies_yelp_search(app.config['PAT'], user_id) return 'pseudo' else: return "Sorry I couldn't find anything :(" elif stage == 'receive_location_text': # "Stage 2 - Text" if context['location'] == None and context['coordinates'] == None: context['location'] = nounPhrase try: geolocator = Nominatim() location_lookup = geolocator.geocode(nounPhrase) coords = [location_lookup.latitude, location_lookup.longitude] Mongo.update_context(users, g.user, 'find-food', 'coordinates', coords) Mongo.update_context(users, g.user, 'find-food', 'location', nounPhrase) FB.show_typing(app.config['PAT'], user_id) FB.send_message(app.config['PAT'], user_id, NLP.oneOf(NLP.looking_replies)) result = 
yelp_search(context['terms'], None, coords) except Exception, e: print e Mongo.update_context(users, g.user, 'find-food', 'location', nounPhrase) FB.send_message(app.config['PAT'], user_id, NLP.oneOf(NLP.looking_replies)) result = yelp_search(context['terms'], nounPhrase) FB.show_typing(app.config['PAT'], user_id, 'typing_off') if result['status'] == 1: # Successful search FB.send_message(app.config['PAT'], user_id, "Okay, I've found %s places:"%(len(result['businesses']))) FB.send_yelp_results(app.config['PAT'], user_id, result['businesses']) FB.send_quick_replies_yelp_search(app.config['PAT'], user_id) return 'pseudo' else: Mongo.pop_context(users, g.user) return "Sorry I can't find any results for that :(" # Follow up? else: Mongo.pop_context(users, g.user) return
# -*- coding: utf-8 -*- """ GIS 5103 Dr. Johnson Mini Project 2 Morgan Runion and Claudia Vila """ import geopy from geopy.geocoders import Nominatim from geopy import distance # instantiate the geocoder geolocator = Nominatim(user_agent="*****@*****.**") # geocode a location location = geolocator.geocode("Tallahassee, FL") # get distance between two locations newport_ri = (41.49008, -71.312796) cleveland_oh = (41.499498, -81.695391) dist = distance.distance(newport_ri, cleveland_oh) print(dist) print(location) location_lat = location.latitude location_long = location.longitude #Question 2 print(location.latitude, location.longitude)
def get_lat_long(city, state, zipc, country):
    """Geocode a city/state (optionally with a 5-digit ZIP) via Nominatim.

    Parameters:
        city:    city name.
        state:   full state/province name or 2-letter abbreviation
                 (abbreviations are expanded before geocoding).
        zipc:    ZIP code as a string; '0' or a non-5-character value
                 means "no ZIP available".
        country: country name appended to the query.

    Returns:
        (city, state, latitude, longitude, match) -- ``match`` is always
        True here (the original state cross-check was commented out).

    Raises:
        AttributeError if Nominatim returns no result (the final
        attribute reads fail), matching the original behaviour.
    """
    # geopy >= 2.0 requires an explicit user_agent; a bare Nominatim()
    # raises ConfigurationError there.
    geolocator = Nominatim(user_agent="get_lat_long")
    us_state_abbrev = {
        'Alabama': 'AL', 'Alaska': 'AK', 'Arizona': 'AZ', 'Arkansas': 'AR',
        'California': 'CA', 'Colorado': 'CO', 'Connecticut': 'CT',
        'Delaware': 'DE', 'Florida': 'FL', 'Georgia': 'GA', 'Hawaii': 'HI',
        'Idaho': 'ID', 'Illinois': 'IL', 'Indiana': 'IN', 'Iowa': 'IA',
        'Kansas': 'KS', 'Kentucky': 'KY', 'Louisiana': 'LA', 'Maine': 'ME',
        'Maryland': 'MD', 'Massachusetts': 'MA', 'Michigan': 'MI',
        'Minnesota': 'MN', 'Mississippi': 'MS', 'Missouri': 'MO',
        'Montana': 'MT', 'Nebraska': 'NE', 'Nevada': 'NV',
        'New Hampshire': 'NH', 'New Jersey': 'NJ', 'New Mexico': 'NM',
        'New York': 'NY', 'North Carolina': 'NC', 'North Dakota': 'ND',
        'Ohio': 'OH', 'Oklahoma': 'OK', 'Oregon': 'OR',
        'Pennsylvania': 'PA', 'Rhode Island': 'RI', 'South Carolina': 'SC',
        'South Dakota': 'SD', 'Tennessee': 'TN', 'Texas': 'TX',
        'Utah': 'UT', 'Vermont': 'VT', 'Virginia': 'VA',
        'Washington': 'WA', 'West Virginia': 'WV', 'Wisconsin': 'WI',
        'Wyoming': 'WY', 'Ontario': 'ON', 'Alberta': 'AB',
        'British Columbia': 'BC'}
    match = True
    if state.islower():
        state = state.upper()
    if len(state) == 2:
        # Expand an abbreviation to the full name via an inverted dict
        # (O(1) lookup) instead of the original O(n) scan over the keys.
        abbrev_to_name = {abbr: name for name, abbr in us_state_abbrev.items()}
        state = abbrev_to_name.get(state, state)
    city_state = city + ", " + state
    # Only include the ZIP in the query when it looks like a real
    # 5-character code ('0' is the caller's "missing" sentinel).
    if zipc == '0' or len(zipc) != 5:
        location = geolocator.geocode(city_state + ", " + country)
    else:
        location = geolocator.geocode(city_state + ", " + zipc + ", " + country)
    try:
        print(location.address)
    except AttributeError:
        # geocode() returned None -- no match for this query.
        print('Location is unavailable.')
    else:
        print(location.latitude, location.longitude)
    # NOTE(review): when geocoding failed this still raises
    # AttributeError, exactly like the original code did.
    latit, longit = location.latitude, location.longitude
    return city, state, latit, longit, match
from geopy.geocoders import Nominatim
import requests
import json

# Nominatim requires a per-application user_agent.
geolocator = Nominatim(user_agent="forestfireapp")

forest = input("Enter name of Forest: ").strip()

# BUG FIX: the original passed `{forest}` -- a one-element *set*, not a
# string -- to geocode(); Nominatim's query must be a string (or a
# structured dict). Pass the name itself.
location = geolocator.geocode(forest)

# Fetch the current forecast for the resolved coordinates from Dark Sky.
res = requests.get(f'https://api.darksky.net/forecast/1c9424849fc849dc22a73f5c96032111/{location.latitude},{location.longitude}')
json_data = json.loads(res.text)

#---------------Print Real-Time Data--------------#
for key, value in json_data['currently'].items():
    print(key, ' -> ', value)
import json import os # Gets the required measuremnt from RIPE ATLAS and creates the initial dictionary file # info from https://ripe-atlas-cousteau.readthedocs.io/_/downloads/en/latest/pdf/ from ripe.atlas.cousteau import Ping, Traceroute, AtlasSource, AtlasRequest, AtlasCreateRequest, AtlasLatestRequest, Probe, Measurement # Sagans sole purpose is to make RIPE Atlas measurements manageable from within Python. # https://ripe-atlas-sagan.readthedocs.io/en/latest/use.html#how-to-use-this-library # Attributes and Methods at https://ripe-atlas-sagan.readthedocs.io/en/latest/types.html measurement = Measurement(id=28380424) from ripe.atlas.sagan import Result, TracerouteResult # Opensource Geocoder from geopy.geocoders import Nominatim geolocator = Nominatim(user_agent="aswindow") # A Python library to gather IP address details (ASN, prefix, resource holder, reverse DNS) using the RIPEStat API, # with a basic cache to avoid flood of requests and to enhance performances. https://pypi.org/project/ipdetailscache/ #from pierky.ipdetailscache import IPDetailsCache #cache = IPDetailsCache() #cache.UseIXPs() #r = cache.GetIPInformation( "193.0.6.139" ) # example use #print (r) # target_address = "90 Oxford Street, Randburg" # sample target address # Discover the geo cordinates of the target location #location = geolocator.geocode(target_address) #print(location) #latitude = location.latitude #longitude = location.longitude #print ("lat is ", location.latitude) #print ("lon is ", location.longitude)
def locate(address):
    """Geocode *address* with Nominatim and return [latitude, longitude].

    Raises AttributeError when the address cannot be resolved
    (geocode() returns None), as before.
    """
    # FIX: geopy >= 1.22 warns and geopy >= 2.0 raises ConfigurationError
    # when Nominatim() is constructed without an explicit user_agent.
    geolocator = Nominatim(user_agent="locate-helper")
    location = geolocator.geocode(address)
    lat = location.latitude
    lon = location.longitude
    return [lat, lon]
def latlon_to_geolocator(data_in):
    """Reverse-geocode the coordinates of ``data_in['ip']`` and print
    the resulting location.

    ``data_in`` is expected to be a mapping with an 'ip' key -- TODO
    confirm against the caller.
    """
    # FIX: geopy >= 2.0 requires an explicit user_agent for Nominatim.
    geolocator = Nominatim(user_agent="latlon-reverse")
    # Perform the IP2Location lookup once instead of twice (the original
    # called get_all() separately for latitude and longitude).
    record = IP2LocObj.get_all(data_in['ip'])
    location = geolocator.reverse(f"{record.latitude}, {record.longitude}")
    print(location)
# Add blank space for missing cities to prevent dropping columns for n, row in enumerate(data): data[n] = "Unknown" + row if row[0] == "," else row # Split each row into a list of data data_split = [row[0:4] + row[-1:] for row in csv.reader(data)] # Find date of last update last_updated = data_split[0][-1] cities = [row for row in data_split[1:] if row[4] != '0'] # Setup the geolocator geopy.geocoders.options.default_user_agent = 'my_app/1' geopy.geocoders.options.default_timeout = 7 geolocator = Nominatim() # Find location based on my zip code location = geolocator.geocode({ "postalcode": ZIP_CODE, 'country': 'United States' }) my_loc = (location.latitude, location.longitude) # Calculate the distance between my location and the other locations in the dataset def distance_in_miles(my_loc, city_loc): miles = distance(my_loc, city_loc).miles return miles
# NOTE(review): logging, sys, OrderedDict, asyncio and Path are imported
# again at the bottom of this section -- harmless but redundant.
import wget
import joblib
import sklearn
import requests
import logging
import sys
from collections import OrderedDict
import asyncio
from pathlib import Path
from airly import Airly
from airly.measurements import Measurement

# Accumulator for weather data collected later.
# NOTE(review): named *_dict but initialised as a list -- confirm intent.
weather_dict = []

from geopy.geocoders import Nominatim

# Ask the user for a postal code or address (the prompt is in Polish:
# "Please enter a postal code or address").
address = input("Prosze podać kod pocztowy lub adres: ")
geolocator = Nominatim(user_agent="Monika")
# Network call; geocode() returns None when there is no match, in which
# case the attribute accesses below raise AttributeError.
location = geolocator.geocode(address)
print(location.address)
print((location.latitude, location.longitude))

# Resolved coordinates and a search radius, presumably for the Airly
# station lookup that continues beyond this excerpt -- verify.
LATITUDE = location.latitude
LONGITUDE = location.longitude
MAX_DIST_KM = 0.5

# Duplicated imports (see note at the top).
import logging
import sys
from collections import OrderedDict
import asyncio
from pathlib import Path
import aiohttp
#!/usr/bin/env python # -*- coding: utf-8 -*- from geopy.geocoders import Nominatim import sys sys.path.append('../') geolocator = Nominatim() def get_coord(): gpsFile = open('GPS-log.txt') temp = gpsFile.readline() gpsFile.close() value = temp.split(",") t1 = value[2] #= 48.3581516667 t2 = value[3] #= -4.56562166667 #location = geolocator.reverse(""+str(t1)+","+str(t2)+"") #res = " "+location.raw['address']['town'] return (t1, t2) def get_town(): coord = get_coord() # try : location = geolocator.reverse("" + str(coord[0]) + "," + str(coord[1]) + "") # except : # raise ValueError("Problème de connexion internet essayer plus tard") try: res = " " + location.raw['address']['village']
def train(): #Dataset from https://data.gov.sg/dataset/resale-flat-prices file_url = "https://docs.google.com/spreadsheets/d/e/2PACX-1vQ8OfO82KXoRmO0E6c58MdwsOSc8ns5Geme87SiaiqTUrS_hI8u8mYE5KIOfQe4m2m3GGf9En22xuXx/pub?gid=382289391&single=true&output=csv" data = pd.read_csv(file_url) dataframe = data.copy() #let's break date to years, months dataframe['date'] = pd.to_datetime(dataframe['month']) dataframe['month'] = dataframe['date'].apply(lambda date: date.month) dataframe['year'] = dataframe['date'].apply(lambda date: date.year) #Get number of years left on lease as a continuous number (ignoring months) dataframe['remaining_lease'] = dataframe['remaining_lease'].apply( lambda remaining_lease: remaining_lease[:2]) #Get storey range as a continuous number dataframe['storey_range'] = dataframe['storey_range'].apply( lambda storey_range: storey_range[:2]) #Concat address dataframe['address'] = dataframe['block'].map( str) + ', ' + dataframe['street_name'].map(str) + ', Singapore' ''' #Geocode by address locator = Nominatim(user_agent="myGeocoder") # 1 - convenient function to delay between geocoding calls geocode = RateLimiter(locator.geocode, min_delay_seconds=1) # 2- - create location column dataframe['location'] = dataframe['address'].apply(geocode) print("step 2") # 3 - create longitude, laatitude and altitude from location column (returns tuple) dataframe['point'] = dataframe['location'].apply(lambda loc: tuple(loc.point) if loc else None) print("step 3") # 4 - split point column into latitude, longitude and altitude columns dataframe[['latitude', 'longitude', 'altitude']] = pd.DataFrame(dataframe['point'].tolist(), index=df.index) print("step 4") ''' #Geocode by town (Singapore is so small that geocoding by addresses might not make much difference compared to geocoding to town) town = [x for x in dataframe['town'].unique().tolist() if type(x) == str] latitude = [] longitude = [] for i in range(0, len(town)): # remove things that does not seem usefull here try: 
geolocator = Nominatim(user_agent="ny_explorer") loc = geolocator.geocode(town[i]) latitude.append(loc.latitude) longitude.append(loc.longitude) #print('The geographical coordinate of location are {}, {}.'.format(loc.latitude, loc.longitude)) except: # in the case the geolocator does not work, then add nan element to list # to keep the right size latitude.append(np.nan) longitude.append(np.nan) # create a dataframe with the locatio, latitude and longitude df_ = pd.DataFrame({ 'town': town, 'latitude': latitude, 'longitude': longitude }) # merge on Restaurant_Location with rest_df to get the column dataframe = dataframe.merge(df_, on='town', how='left') ### label encode the categorical values and convert them to numbers ''' le = LabelEncoder() dataframe['town']= le.fit_transform(dataframe['town'].astype(str)) dataframe['flat_type'] = le.fit_transform(dataframe['flat_type'].astype(str)) dataframe['street_name'] = le.fit_transform(dataframe['street_name'].astype(str)) #dataframe['storey_range'] = le.fit_transform(dataframe['storey_range'].astype(str)) dataframe['flat_model'] = le.fit_transform(dataframe['flat_model'].astype(str)) dataframe['block'] = le.fit_transform(dataframe['block'].astype(str)) dataframe['address'] = le.fit_transform(dataframe['address'].astype(str)) ''' townDict = { 'ANG MO KIO': 1, 'BEDOK': 2, 'BISHAN': 3, 'BUKIT BATOK': 4, 'BUKIT MERAH': 5, 'BUKIT PANJANG': 6, 'BUKIT TIMAH': 7, 'CENTRAL AREA': 8, 'CHOA CHU KANG': 9, 'CLEMENTI': 10, 'GEYLANG': 11, 'HOUGANG': 12, 'JURONG EAST': 13, 'JURONG WEST': 14, 'KALLANG/WHAMPOA': 15, 'MARINE PARADE': 16, 'PASIR RIS': 17, 'PUNGGOL': 18, 'QUEENSTOWN': 19, 'SEMBAWANG': 20, 'SENGKANG': 21, 'SERANGOON': 22, 'TAMPINES': 23, 'TOA PAYOH': 24, 'WOODLANDS': 25, 'YISHUN': 26, } flat_typeDict = { '1 ROOM': 1, '2 ROOM': 2, '3 ROOM': 3, '4 ROOM': 4, '5 ROOM': 5, 'EXECUTIVE': 6, 'MULTI-GENERATION': 7, } dataframe['town'] = dataframe['town'].replace(townDict, regex=True) dataframe['flat_type'] = 
dataframe['flat_type'].replace(flat_typeDict, regex=True) # drop some unnecessary columns dataframe = dataframe.drop('date', axis=1) dataframe = dataframe.drop('block', axis=1) #dataframe = dataframe.drop('lease_commence_date',axis=1) dataframe = dataframe.drop('month', axis=1) dataframe = dataframe.drop('street_name', axis=1) dataframe = dataframe.drop('address', axis=1) dataframe = dataframe.drop('flat_model', axis=1) #dataframe = dataframe.drop('town',axis=1) dataframe = dataframe.drop('year', axis=1) #dataframe = dataframe.drop('latitude',axis=1) dataframe = dataframe.drop('remaining_lease', axis=1) X = dataframe.drop('resale_price', axis=1) y = dataframe['resale_price'] X = X.values y = y.values #splitting Train and Test from sklearn.model_selection import train_test_split X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=101) #standardization scaler - fit&transform on train, fit only on test s_scaler = StandardScaler() X_train = s_scaler.fit_transform(X_train.astype(np.float)) X_test = s_scaler.transform(X_test.astype(np.float)) knn = KNeighborsRegressor(algorithm='brute') knn.fit(X_train, y_train) #save model filename = 'hdbknn.sav' scalername = 'scaler.sav' pickle.dump(knn, open(filename, 'wb')) pickle.dump(s_scaler, open(scalername, 'wb')) loaded_model = pickle.load(open(filename, 'rb')) result = loaded_model.score(X_test, y_test) print(result) return result
########## Data contest ########## # # ###### EE17B072 - Team name - Brainstorm ###### import numpy as np import os import pandas as pd from tqdm import tqdm import matplotlib.pyplot as plt from datetime import datetime import random import lightgbm as lgb from geopy.geocoders import Nominatim geolocator = Nominatim(user_agent="http") import statistics from statistics import mode from sklearn.cluster import KMeans import csv import xgboost as xgb # In[56]: # reading the csv files with open('bikers.csv', 'r') as f: bikers = pd.read_csv(f, dtype={ "biker_id": str, "language_id": str,
class PartyPhaseSpider(scrapy.Spider): name = "partyphase" allowed_domains = ["muenster.partyphase.net"] geolocator = Nominatim(user_agent='muenster-info-hub') def start_requests(self): if ('SCRAPE_START' in os.environ and 'SCRAPE_END' in os.environ): start = clean_date(os.environ['SCRAPE_START']) end = clean_date(os.environ['SCRAPE_END']) else: start = datetime.strftime(datetime.today(), '%Y-%m-%d') end = datetime.strftime(datetime.today() + timedelta(days=6), '%Y-%m-%d') start_urls = [ f'http://muenster.partyphase.net/veranstaltungskalender-muenster/?eme_scope_filter={start}--{end}&eme_submit_button=Submit&eme_eventAction=filter', ] self.log("------------ START PARAMETERS -------------- ") self.log(f"START: {start}") self.log(f"END: {end}") self.log("------------ ") for url in start_urls: yield scrapy.Request(url=url, callback=self.parse) def parse(self, response): # split events raw = response.xpath('//div[@class="kalenderbreit"]') # get event URLs from overview A = raw.xpath('//div[@class="veranstaltungsname"]/a') event_urls = A.xpath('@href').getall() # crawl events for event_url in event_urls: yield scrapy.Request(event_url, callback=self._parse_event) def _get_location_address(self, url): response = scrapy.Request(url=url) add = response.xpath('//div[@class="entry-content"]/text()').getall() return f'{add[1].strip()} {add[2].strip()}' def _parse_event(self, response): event = items.PartyItem() event['title'] = response.xpath( '//div/div[@class="eme_period"]/text()').get() wday, date, time = response.xpath( '//div/div[@class="beginn"]/text()').get().split(' | ') mday, month, year = date.split(' ') start_date = f'{mday} {MONTH[month]} {year} {time}' start_date = datetime.strptime(start_date, '%d. 
%B %Y %H:%M Uhr').isoformat() event['start_date'] = f'{start_date}+02:00' event['location_name'] = response.xpath( '//div/div[@class="ort"]/a/text()').get() event['link'] = response.url event['description'] = u' '.join( [s.strip() for s in response.xpath('//div/p/text()').getall()]) if any(tag in event['title'].lower() for tag in ['live', 'party', 'fest']): event['category'] = 'Party' event['source'] = 'muenster.partyphase.net' location_url = response.xpath('//div/div[@class="ort"]/a/@href').get() try: request = scrapy.Request(url=location_url, callback=self._parse_location, meta={'event': event}) except ValueError: return return request def _parse_location(self, response): add = u' '.join( map( str.strip, response.xpath( '//div[@class="entry-content"]/text()').getall())).strip() event = response.meta['event'] event['location_address'] = add loc = self.geolocator.geocode(add) event['geo'] = dict(lat=loc.latitude, lon=loc.longitude) return event
n=1000 filepath = 'location_data/location4.csv' filenumber = 4 offset = n * filenumber ################################################## #start the timing start = time.time() #read in the location file as casper casper = pd.read_csv('casper_with_location2.csv') #init the geolocator geolocator = Nominatim() #init the location, latitude, and longitude lists location_lst = [] lat_lst = [] long_lst = [] #run through the desired observations for i in range(n): try: x = geolocator.geocode(casper.city_state[i + offset], timeout=100) location_lst.append(x.address) lat_lst.append(x.latitude) long_lst.append(x.longitude) except: location_lst.append('None')
# Gets the required measuremnt from RIPE ATLAS and creates the initial dictionary file # info from https://ripe-atlas-cousteau.readthedocs.io/_/downloads/en/latest/pdf/ from ripe.atlas.cousteau import AtlasLatestRequest, Probe, Measurement # Sagans sole purpose is to make RIPE Atlas measurements manageable from within Python. # https://ripe-atlas-sagan.readthedocs.io/en/latest/use.html#how-to-use-this-library # Attributes and Methods at https://ripe-atlas-sagan.readthedocs.io/en/latest/types.html from ripe.atlas.sagan import Result, TracerouteResult # Opensource Geocoder from geopy.geocoders import Nominatim geolocator = Nominatim(user_agent="aswindow") # A Python library to gather IP address details (ASN, prefix, resource holder, reverse DNS) using the RIPEStat API, # with a basic cache to avoid flood of requests and to enhance performances. https://pypi.org/project/ipdetailscache/ from pierky.ipdetailscache import IPDetailsCache cache = IPDetailsCache() #cache.UseIXPs() #r = cache.GetIPInformation( "193.0.6.139" ) # example use #print (r) target_address = "90 Oxford Street, Randburg" # sample target address # Discover the geo cordinates of the target location location = geolocator.geocode(target_address) print(location) latitude = location.latitude longitude = location.longitude print("lat is ", location.latitude)
from geopy.geocoders import Nominatim import json import time geolocator = Nominatim() with open('countriesToCities.json', 'r') as f: data = json.load(f) count = 0 i = 0 while i != 10: try: i += 1 for key in data: for city in data[key]: if key == u'China': with open('newcountry.json', 'r') as ff: data1 = json.load(ff) if city in data1: print(city) continue try: citystring = city.encode() except UnicodeEncodeError: readed = json.load(open('errorcountry.json', 'r')) readed[city] = city json.dump(readed, open('errorcountry.json', 'w')) continue location = geolocator.geocode(citystring) if location == None: continue
def convertPostToCoord(self, postcode):
    """Geocode a postcode and return its (latitude, longitude) pair.

    Live Nominatim lookup; an unresolvable postcode yields None from
    geocode(), so the attribute access raises AttributeError, exactly
    like the original.
    """
    locator = Nominatim(user_agent='postcodeConverter')
    result = locator.geocode(postcode)
    return result.latitude, result.longitude
def get_geolocation(address):
    """Geocode *address*, progressively dropping the trailing
    comma-separated component until Nominatim returns a match.

    Returns (latitude, longitude), or None when nothing resolves.
    """
    location = geolocator.geocode(address)
    while not location and address:
        # Reduce precision: drop the last ","-separated component and retry.
        address = ','.join(address.split(',')[:-1])
        location = geolocator.geocode(address)
    # BUG FIX: test the geocoding result itself. The original checked
    # `address`, which could discard a hit obtained on the very last
    # truncation step (when address had just been reduced to "").
    if not location:
        return None
    return (location.latitude, location.longitude)


# Nominatim requires an explicit per-application user_agent.
geolocator = Nominatim(user_agent='lead_recommender')

# Geocode each *unique* address once, then map the cached results back
# onto the frame (avoids repeated network calls for duplicate rows).
locations = dict()
for address in tqdm(geo['address'].unique(), desc='Finding geolocations'):
    locations[address] = get_geolocation(address)
geo['lat'], geo['lon'] = zip(*geo['address'].map(locations))

# Write id/lat/lon out as a zipped CSV.
compression_opts = {
    'method': 'zip',
    'archive_name': 'geo.csv'
}
geo[['id', 'lat', 'lon']].to_csv(
    '../data/geo.zip',
    index=False,
    compression=compression_opts
)