def get_geo_cords(address):
    ctx = ssl.create_default_context(cafile=certifi.where())
    geopy.geocoders.options.default_ssl_context = ctx
    geolocation_api_key = settings.GEOLOCATION_API_KEY
    geolocator = OpenMapQuest(api_key=geolocation_api_key)
    output = geolocator.geocode(address)
    return {"latitude": output.latitude, "longitude": output.longitude}
def get_country(latitude, longitude):
    ctx = ssl.create_default_context(cafile=certifi.where())
    geopy.geocoders.options.default_ssl_context = ctx
    geolocation_api_key = settings.GEOLOCATION_API_KEY
    geolocator = OpenMapQuest(api_key=geolocation_api_key)
    output = geolocator.reverse("{latitude}, {longitude}".format(
        latitude=latitude, longitude=longitude))
    country = output.address.split(',')[-1]
    country = unidecode.unidecode(country)
    py_country = pycountry.countries.get(official_name=country.strip())
    if py_country is None:
        py_country = pycountry.countries.get(name=country.strip())
    return py_country
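# A minimal usage sketch for the two helpers above. It assumes the surrounding
# module already imports ssl, certifi, geopy, unidecode, pycountry and
# OpenMapQuest, and that settings.GEOLOCATION_API_KEY holds a valid key;
# the address below is only an illustration, not from the original project.
if __name__ == "__main__":
    coords = get_geo_cords("10 Downing Street, London")
    print(coords)  # e.g. {"latitude": 51.50..., "longitude": -0.12...}
    print(get_country(coords["latitude"], coords["longitude"]))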
def getaddress(lat, lon, index, apikey):
    try:
        loc = str(lat) + ", " + str(lon)
        locator = OpenMapQuest(api_key=apikey)
        location = locator.reverse(loc)
        parsed = location.raw
        addres = parsed['address']
        street = addres['road']
        point = address(street, lat, lon, index)
        return point
    except:
        print(
            "You have an incorrect API key or are out of credits. "
            "To configure a new API key, run 'python .\getApiKey.py'"
        )
        exit(1)
def get_longitude_latitude_info(data, api_key=None):
    if api_key is None:
        geolocator = Nominatim(user_agent='pyntxos')
    else:
        geolocator = OpenMapQuest(user_agent='pyntxos', api_key=api_key)

    failure_count = 0
    for index, pintxo in enumerate(data):
        location = geolocator.geocode(
            f'{pintxo["name"]}, {pintxo["post_code"]}, Bilbao, Spain')
        if location is None:
            location = geolocator.geocode(pintxo["address"])

        if location is not None:
            print(
                f'{index} - Success - {pintxo["name"]} - Getting Longitude and Latitude'
            )
            data[index]["geopy_address"] = location.address
            data[index]["longitude"] = location.longitude
            data[index]["latitude"] = location.latitude
        else:
            data[index]["longitude"] = None
            data[index]["latitude"] = None
            print(
                f'{index} - Failure - {pintxo["name"]} - Getting Longitude and Latitude'
            )
            failure_count += 1

    print(
        f'{failure_count / len(data) * 100:.0f}% failed for getting longitude and latitude.'
    )
    return data
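# Hypothetical call to get_longitude_latitude_info above. The sample record and
# its field names simply mirror the keys the function reads ("name",
# "post_code", "address"); the values are illustrative, not from the original
# data set.
if __name__ == "__main__":
    sample = [{"name": "Cafe Iruna", "post_code": "48009",
               "address": "Jardines de Albia 5, Bilbao, Spain"}]
    enriched = get_longitude_latitude_info(sample)  # api_key=None -> Nominatim
    print(enriched[0]["latitude"], enriched[0]["longitude"])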
def main():
    geolocator = OpenMapQuest()
    locations = {}
    firstlocation = "none"
    with open(sys.argv[1]) as file:
        count = 0
        for line in file:
            place = line.strip()
            if firstlocation == "none":
                firstlocation = place
            location = geolocator.geocode(place)
            coordinates = location[1:2]
            locations[place] = coordinates
    results = Search(locations, firstlocation)
    print(results[0])
    print(results[1])
def setUpClass(cls):
    # setUpClass is still called even if the test is skipped.
    # OpenMapQuest raises ConfigurationError when api_key is empty,
    # so don't try to create the instance when api_key is empty.
    if env.get('OPENMAPQUEST_APIKEY'):
        cls.geocoder = OpenMapQuest(scheme='http', timeout=3,
                                    api_key=env['OPENMAPQUEST_APIKEY'])
    cls.delta = 0.04
def get_coordinates(self):
    try:
        location_request = OpenMapQuest(
            api_key=OpenMapQuest_API_KEY).geocode(self.location_name)
        location_en = OpenMapQuest(api_key=OpenMapQuest_API_KEY).geocode(
            self.location_name, language='en')
    except Exception:
        print('Location request failed')
        return False
    if not location_request:
        print('No such location was found')
        return False
    location_latitude = str(round(location_request.latitude, 4))
    location_longitude = str(round(location_request.longitude, 4))
    self.coordinates = location_latitude + ',' + location_longitude
    self.location_en = str(location_en).split(',')[0]
    return True
def geocode(id):
    """
    Geocode an institution.

    Args:
        id (int): The institution id.
    """
    coder = OpenMapQuest(config['mapquest']['api_key'])
    inst = Institution.get(Institution.id == id)

    # Geocode.
    location = coder.geocode(inst.geocoding_query, timeout=10)

    if location:
        # Write the coordinate.
        inst.metadata['Latitude'] = location.latitude
        inst.metadata['Longitude'] = location.longitude
        inst.save()
def PlaceToMap(dataset):
    """Build dataframe with place and the list of documents which are located there"""
    # ----------------------- Group by location --------------------
    location = list(dataset["Place"])
    temp = []
    for loc in location:
        temp += loc
    location = list(set(temp))
    length = len(location)
    # data = {"Place": [0] * length, "Documents": [0] * length}
    data = {"Place": [], "Documents": []}
    for m in range(length):
        temp = []
        event = location[m]
        locs = event
        for j, i in zip(dataset["Name"], dataset["Place"]):
            if locs in i:
                temp.append(j)
        if not (locs in data["Place"]):
            data['Place'].append(locs)
            temp = list(set(temp))
            data["Documents"].append(temp)
    dataset = pd.DataFrame(data)

    # -------------------- Begin localization --------------------
    geolocator = OpenMapQuest(api_key='kNFyXsWRe50Q85tXM8szsWN0A3SS3X0T',
                              timeout=100)
    # geolocator = Here("Af9fc3JTNkg1N4IwwVEz", "3_R3z-sJU6D1BEFE9HWy7Q")
    # geolocator = GeoNames(username="******")
    length = dataset.shape[0]
    data = {
        "Place": dataset["Place"],
        "Documents": dataset["Documents"],
        "Coordinate": [0] * length,
        "Count": [0] * length
    }
    for i in range(length):
        place = dataset["Place"][i]
        try:
            data["Coordinate"][i] = [
                place, find_map_coordinates(place, geolocator)
            ]
        except GeocoderQuotaExceeded:
            continue
        except AttributeError:
            geolocator1 = GeoNames(username="******", timeout=100)
            # print(find_map_coordinates(place, geolocator1))
            data["Coordinate"][i] = [
                place, find_map_coordinates(place, geolocator1)
            ]
        data["Count"][i] = len(data["Documents"][i])
    return pd.DataFrame(data)
def PlaceToMap(dataset):
    """Build dataframe with place and the list of documents which are located there"""
    # ----------------------- Group by location --------------------
    location = ["paris rive gauche", "paris saint lazare", "paris saint-lazare",
                "paris st lazare", "paris gare du nord", "paris gare de l’est",
                "paris gare de lyon", "paris sud est", "paris austerlitz",
                "paca", "paris montparnasse"]
    loctaxo = locationTaxonomyNew()
    temploc = []
    keys = loctaxo.keys()
    # Change abbreviations to the real name of the place
    for code in keys:
        code = code.lower()
        if not (code in ["marseille", "st charles", "lille", "flandres"]):
            temploc.append(code)
    location1 = temploc + list(loctaxo.values())
    location = list(set(location + location1))
    length = len(location)
    # data = {"Place": [0] * length, "Documents": [0] * length}
    data = {"Place": [], "Documents": []}
    for m in range(length):
        temp = []
        event = location[m]
        if event in keys:
            locs = loctaxo[event]  # change the abbreviation to the real place
        else:
            locs = event  # keep the real name of the place
        for j, i in zip(dataset["Name"], dataset["Place"]):
            if locs in i:
                temp.append(j)
        if not (locs in data["Place"]):
            data['Place'].append(locs)
            data["Documents"].append(temp)
    dataset = pd.DataFrame(data)

    # -------------------- Begin localization --------------------
    geolocator = OpenMapQuest(api_key='kNFyXsWRe50Q85tXM8szsWN0A3SS3X0T')
    # geolocator = GeoNames(username="******")
    length = dataset.shape[0]
    data = {"Place": dataset["Place"], "Documents": dataset["Documents"],
            "Coordinate": [0] * length, "Count": [0] * length}
    for i in range(length):
        place = dataset["Place"][i]
        try:
            data["Coordinate"][i] = [place, find_map_coordinates(place, geolocator)]
        except GeocoderQuotaExceeded:
            continue
        except AttributeError:
            geolocator1 = GeoNames(username="******")
            # print(find_map_coordinates(place, geolocator1))
            data["Coordinate"][i] = [place, find_map_coordinates(place, geolocator1)]
        data["Count"][i] = len(data["Documents"][i])
    return pd.DataFrame(data)
def collectGeocoders():
    config = configparser.ConfigParser()
    conf = r'..\conf\config.ini'
    config.read(conf)
    keys = {
        'Here_app_id': config['DEFAULT']['Here_app_id'],
        'Here_app_code': config['DEFAULT']['Here_app_code'],
        'TomTom': config['DEFAULT']['TomTom_api_key'],
        'OpenMapQuest': config['DEFAULT']['OpenMapQuest_api_key'],
        'GoogleV3': config['DEFAULT']['GoogleV3_api_key']
    }
    locators = [{
        'locator': Nominatim(user_agent="afan"),
        'name': 'Nominatim',
        'type': 'Geopy'
    }, {
        'locator': GeoNames(username="******"),
        'name': 'GeoNames',
        'type': 'Geopy'
    }, {
        'locator': Here(app_id=keys['Here_app_id'],
                        app_code=keys['Here_app_code']),
        'name': 'Here',
        'type': 'Geopy'
    }, {
        'locator': TomTom(api_key=keys['TomTom']),
        'name': 'TomTom',
        'type': 'Geopy'
    }, {
        'locator': OpenMapQuest(api_key=keys['OpenMapQuest']),
        'name': 'OpenMapQuest',
        'type': 'Geopy'
    }, {
        'locator': Photon(),
        'name': 'Photon',
        'type': 'Geopy'
    }]
    # locators.append({'locator': GoogleV3(api_key=keys['GoogleV3']), 'name': 'GoogleV3', 'type': 'Geopy'})
    locators.append({
        'locator': revGeocodingbyIQ,
        'name': 'revGeocodingbyIQ',
        'type': 'Custom'
    })
    return locators
def mapper_init(self):
    # State boundaries
    with open('us-counties.json') as f:
        states = json.loads(f.read())
    self.state_polygons = [(shape(state['geometry']), state['id'])
                           for state in states['features']]

    # Keywords
    keywords = []
    with open('search_short.csv', 'rU') as f:
        lines = csv.reader(f)
        # Skip header
        lines.next()
        for line in lines:
            keywords.append(line[0])
    self.keywords = keywords

    # GeoMappers
    keys = []
    with open('dbparams.json') as f:
        for line in f:
            keys.append(json.loads(line))
    keys = keys[0]
    self.omq = OMQ(keys['geo']['OMQ'])
    self.gg = GoogleV3()
def make_geocoder(cls, **kwargs):
    return OpenMapQuest(api_key=env['OPENMAPQUEST_APIKEY'], timeout=3, **kwargs)
def get_band_map_score(bands, tries_to_geoservice):
    conn = sqlite3.connect('geocoder.db')
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    geolocator = OpenMapQuest()

    for i, band in enumerate(bands):
        for j, tour_date in enumerate(band.tour_dates):
            # US states are treated as countries in the table, so the country
            # label needs to be converted to USA instead of the state
            region_is_US_state = False
            if tour_date.region == "BROOKLYN":
                tour_date.region = "NY"
            if len(tour_date.region) == 2 and tour_date.region.isupper():
                region_is_US_state = True
            print(tour_date.city, tour_date.region, region_is_US_state)

            if region_is_US_state:
                c.execute(
                    "SELECT * from geolocations where city = '" + appos(tour_date.city) +
                    "' and region = '" + appos(tour_date.region) + "'")
            else:
                try:
                    c.execute(
                        "SELECT * from geolocations where city = '" + appos(tour_date.city) +
                        "' and country = '" + appos(abbr(tour_date.region)) + "'")
                except NameError:
                    print(tour_date.region + "***123***")
                    tour_date.dist_score = -1
                    continue

            data = c.fetchone()

            # No data was found, so look it up using the MapQuest online API
            if data is None:
                tour_date.dist_score = -1  # assume -1 means you can't find a distance score
                print("There is no location yet in: " +
                      ', '.join((tour_date.city, tour_date.region)))
                success = False
                unknown = False
                counter = 0
                while True:
                    try:
                        tour_date.location = geolocator.geocode(
                            ', '.join((tour_date.city, tour_date.region)))
                        time.sleep(10)  # wait 10 seconds
                        success = True
                    except geopy.exc.GeocoderTimedOut:
                        print("timed out. waiting...")
                        pass
                    except geopy.exc.GeocoderServiceError:
                        print("service error")
                        time.sleep(10)
                        unknown = True
                        success = True
                    if success:
                        break
                    else:
                        time.sleep(60)  # wait 1 minute
                        counter += 1
                        if counter >= tries_to_geoservice - 1:
                            unknown = True
                            break

                if unknown or tour_date.location is None:
                    print("unknown")
                    tour_date.dist_score = -1
                else:
                    dist = vincenty(constants.hometown,
                                    (tour_date.location.latitude,
                                     tour_date.location.longitude)).meters
                    print("distance: " + str(dist))
                    if dist <= 0:
                        tour_date.dist_score = 0
                    else:
                        tour_date.dist_score = 1 / dist
                    unknown = ""
                    if region_is_US_state:
                        unknown = "US"
                        c.execute(
                            '''INSERT INTO geolocations(city, region, country, latitude, longitude)
                               VALUES (?,?,?,?,?)''',
                            (tour_date.city, tour_date.region, unknown,
                             tour_date.location.latitude,
                             tour_date.location.longitude))
                    else:
                        unknown = "unknown"
                        c.execute(
                            '''INSERT INTO geolocations(city, region, country, latitude, longitude)
                               VALUES (?,?,?,?,?)''',
                            (tour_date.city, unknown, abbr(tour_date.region),
                             tour_date.location.latitude,
                             tour_date.location.longitude))
                    conn.commit()
            else:
                print('Location: ' + ', '.join((tour_date.city, tour_date.region)) +
                      ' already in table.')
                dist = vincenty(constants.hometown,
                                (data['latitude'], data['longitude'])).meters
                print("distance: " + str(dist))
                if dist <= 0:
                    tour_date.dist_score = 0
                else:
                    tour_date.dist_score = 1 / dist

    conn.commit()
    conn.close()
    return bands
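# A note on the distance call above: geopy.distance.vincenty was deprecated and
# later removed in geopy 2.0, with geodesic as the replacement. A minimal
# sketch of the same scoring arithmetic, assuming a (lat, lon) hometown tuple;
# the coordinates are illustrative, not taken from the original constants:
from geopy.distance import geodesic

hometown = (40.7128, -74.0060)   # example: New York City
venue = (41.8781, -87.6298)      # example: Chicago
dist_m = geodesic(hometown, venue).meters
dist_score = 0 if dist_m <= 0 else 1 / dist_m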
def test_raises_without_apikey(self):
    with pytest.raises(ConfigurationError):
        OpenMapQuest()
"WV": "West Virginia", "WI": "Wisconsin", "WY": "Wyoming" } """ begin! """ clear_terminal() print_logo() """ initialize geocoders (just using ArcGIS for now) """ arcgis = ArcGIS(timeout=100) nominatim = Nominatim(user_agent="WQTS", timeout=100) # opencage = OpenCage('your-API-key', timeout=100) openmapquest = OpenMapQuest('vooH8ziES69RZKTpR4LLUyuImVpaSY78') geolocator = Nominatim(user_agent="WQTS") geocoders = [openmapquest, arcgis, nominatim] """ Progress bar function courtesy of StackOverflow :) """ def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='█',
from geopy.geocoders import ArcGIS, Bing, Nominatim, OpenCage, GoogleV3, OpenMapQuest
import csv, sys
import pandas as pd
import keys
import logging

logging.basicConfig(filename="geocode_log.log", level=logging.DEBUG,
                    format="%(asctime)s:%(levelname)s:%(message)s")

in_file = 'gp_sites.txt'
out_file = str('gc_' + in_file)
timeout = 100

print('creating geocoding objects.')
logging.debug('creating geocoding objects.')

openmapquest = OpenMapQuest(api_key=keys.omq_api, timeout=timeout)

# choose and order your preference for geocoders here
geocoders = [openmapquest]


def gc(address):
    for gcoder in geocoders:
        location = gcoder.geocode(address)
        if location != None:
            print(f'geocoded record {address}')
            logging.info(f'SUCCESSFULLY geocoded record {address}')
            located = pd.Series({
                'lat': location.latitude,
                'lng': location.longitude,
                'time': pd.to_datetime('now')
class MongoKeywordCount(MRJob):
    # Job to count the number of tweets per keyword, date, location.
    # Input is a JSON file.
    INPUT_PROTOCOL = JSONValueProtocol
    # OUTPUT_PROTOCOL = JSONProtocol

    # mapper_init ~ __init__
    # Assign properties
    def mapper_init(self):
        # State boundaries
        with open('us-counties.json') as f:
            states = json.loads(f.read())
        self.state_polygons = [(shape(state['geometry']), state['id'])
                               for state in states['features']]

        # Keywords
        keywords = []
        with open('search_short.csv', 'rU') as f:
            lines = csv.reader(f)
            # Skip header
            lines.next()
            for line in lines:
                keywords.append(line[0])
        self.keywords = keywords

        # GeoMappers
        keys = []
        with open('dbparams.json') as f:
            for line in f:
                keys.append(json.loads(line))
        keys = keys[0]
        self.omq = OMQ(keys['geo']['OMQ'])
        self.gg = GoogleV3()

    # No key defined because of INPUT_PROTOCOL
    def mapper(self, _, tweet):
        # Datetime
        # dt = parser.parse(tweet['created_at'])
        # dt = dt.strftime('%Y/%m')
        result = None
        keyword = None
        match = False
        combined_key = None

        # Tweet
        text = tweet['text'].encode('utf8')
        # print text

        # Keyword match
        for k in self.keywords:
            # String match
            if k in text:
                keyword = k
                break

        # Location
        user_loc = tweet['user']['location'].encode('utf8')

        # If the tweet coordinate is embedded in the tweet
        if tweet['coordinates']:
            try:
                point = shape(tweet['coordinates'])
                match = True
            except:
                # print 'Shapely error'
                pass
        # If the tweet coordinate is not available, grab the user location.
        # This is an approximation but should still work.
        elif user_loc:
            # print 'Geocode user location: ', user_loc
            try:
                result = self.omq.geocode(user_loc)
            except:
                try:
                    # print 'error in open mapquest. try google api'
                    result = self.gg.geocode(user_loc)
                except:
                    # print 'google api not responding. skipping the entry'
                    pass
            # Convert into GeoJSON if there is a match
            if result:
                # print result
                coordinate = {'coordinates': [result[1][1], result[1][0]],
                              'type': 'Point'}
                point = shape(coordinate)
                # print point.xy
                match = True

        # Match state and keyword
        if match and keyword:
            # Match state
            try:
                for i, state in enumerate(self.state_polygons):
                    if state[0].contains(point):
                        # print 'Found polygon: {}'.format(state[1])
                        # print 'key :', k, dt, state[1]
                        # Increment the counter
                        combined_key = (k, state[1])
                        # Counter
                        self.increment_counter('mapper', 'processed tweets', 1)
                        # Break state loop
                        break
            except:
                pass
        else:
            self.increment_counter('mapper', 'skipped tweets', 1)

        # Generator
        yield combined_key, 1

    def reducer(self, combined_key, values):
        yield combined_key, sum(values)
from geopy.geocoders import Nominatim, ArcGIS, OpenCage, OpenMapQuest
from geopy.exc import GeocoderTimedOut, GeocoderQuotaExceeded, GeocoderInsufficientPrivileges
import os

nomatim = Nominatim(user_agent='locators')
arcgis = ArcGIS(timeout=10)
geocoders = [nomatim, arcgis]

if 'OPEN_CAGE_API_KEY' in os.environ:
    geocoders.append(OpenCage(os.environ['OPEN_CAGE_API_KEY']))
if 'OPEN_MAP_QUEST_API_KEY' in os.environ:
    geocoders.append(OpenMapQuest(os.environ['OPEN_MAP_QUEST_API_KEY']))

for i in range(len(geocoders)):
    geocoders[i] = geocoders[i].geocode


def address_to_lat_long(df):
    df['lat-long'] = df['address'].apply(convert)


def convert(address):
    i = 0
    if address is None:
        return None
    while i < len(geocoders):
        print(i, geocoders[i])
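# The convert() definition above is cut off. A minimal sketch of a fallback
# loop over the geocode callables it builds might look like this; the helper
# name convert_sketch and its exception handling are assumptions, not the
# original implementation.
from geopy.exc import GeocoderServiceError

def convert_sketch(address):
    if address is None:
        return None
    for geocode in geocoders:
        try:
            location = geocode(address)
        except (GeocoderTimedOut, GeocoderQuotaExceeded, GeocoderServiceError):
            continue  # try the next service in the list
        if location is not None:
            return (location.latitude, location.longitude)
    return None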
def setUpClass(cls):
    cls.geocoder = OpenMapQuest(scheme='http', timeout=3,
                                api_key=env.get('OPENMAPQUEST_APIKEY'))
    cls.delta = 0.04
def parse_run(debug, fitfiles):
    for fitfile_in in fitfiles:
        if debug:
            print('#' * 80)
            print('Debug mode active')
            print('#' * 80)

        ##########################################
        # Parse the fit file
        ##########################################
        try:
            fitfile_processor = StandardUnitsDataProcessor()
            fitfile = FitFile(fitfile_in, data_processor=fitfile_processor,
                              check_crc=False)
            fitfile.parse()
        except FitParseError as err:
            print('Error while parsing {}: {}'.format(fitfile_in.relpath(), err))
            sys.exit(1)

        # Build our api instances
        geocoder = OpenMapQuest(api_key=run_app.config['OPEN_MAPQUEST_KEY'],
                                scheme='http', timeout=100)
        tf = TimezoneFinder()
        # ureg = UnitRegistry()

        # Pull manufacturer data
        for record in fitfile.get_messages('file_id', with_definitions=False):
            manufacturer = record.get_value('manufacturer')
            product = record.get_value('garmin_product')
        for record in fitfile.get_messages('file_creator', with_definitions=False):
            pass
        if debug:
            print(f"device: {manufacturer} -- {product}")
            print()

        # Parse all events
        for record in fitfile.get_messages('event', with_definitions=False):
            event_group = record.get_value('event_group')
            timestamp = record.get_value('timestamp')
            if debug:
                print(f"event: {event_group} -- {timestamp}")
                for record_data in record:
                    print(f" * {record_data.name}: {record_data.value}")
                print()

        initial = True
        for record in fitfile.get_messages('record', with_definitions=False):
            # Parse all fields
            lat = record.get_value('position_lat')
            lng = record.get_value('position_long')
            if lat and lng:
                timezone = find_timezone(tf, lat, lng)
                location = geocoder.reverse([lat, lng]).raw
            else:
                print('skipping record w/o lat or long\n')
                continue

            utc_time = pendulum.instance(record.get_value('timestamp'))
            local_tz = pendulum.timezone(timezone)
            local_time = local_tz.convert(utc_time)

            distance = record.get_value('distance') * ureg.km
            elevation = record.get_value('enhanced_altitude') * ureg.meter
            speed = record.get_value('enhanced_speed') * ureg.kilometer_per_hour
            if speed.magnitude > 0:
                pace = 60 / speed.to(ureg.mile_per_hour).magnitude
            else:
                print('too fast for me!')
                continue

            if not debug:
                # Add to the database
                if initial:
                    print('Setting up initial city/state/country')
                    try:
                        cur_country = db.session.query(Country).filter(
                            Country.name == location['address']['country_code']).one()
                    except:
                        cur_country = Country(
                            name=location['address']['country_code'])
                    try:
                        cur_state = db.session.query(State).filter(
                            State.name == location['address']['state']).one()
                    except:
                        cur_state = State(name=location['address']['state'],
                                          country=cur_country)
                    try:
                        cur_city = db.session.query(City).filter(
                            City.name == location['address']['city']).one()
                    except:
                        cur_city = City(name=location['address']['city'],
                                        state=cur_state)
                    cur_run = Run(cur_city)
                    cur_leg = Leg(cur_run)
                    db.session.add_all(
                        [cur_country, cur_state, cur_city, cur_run, cur_leg])
                    initial = False

                point = Point(local_time, elevation.magnitude, lat, lng,
                              distance.to(ureg.meter).magnitude,
                              speed.magnitude, cur_leg, cur_run)
                print(point)
                print('Adding prev. point')
                db.session.add(point)

            output_str = []
            output_str.append(
                f" * datetime: {local_time.strftime('%Y-%m-%d %H:%M:%S')}")
            output_str.append(f" * timezone: {timezone}")
            output_str.append(f" * location: {lat},{lng}")
            if 'city' in location['address']:
                output_str.append(f" * city: {location['address']['city']}")
            else:
                output_str.append(f" * city: {None}")
            if 'state' in location['address']:
                output_str.append(f" * state: {location['address']['state']}")
            else:
                output_str.append(f" * state: {None}")
            if 'country_code' in location['address']:
                output_str.append(
                    f" * country: {location['address']['country_code']}")
            else:
                output_str.append(f" * country: {None}")
            output_str.append(f" * distance: {distance.to(ureg.mile):02.2~}")
            output_str.append(f" * elevation: {elevation.to(ureg.foot):.5~}")
            output_str.append(
                f" * speed: {speed.to(ureg.mile / ureg.hour):.3~}")
            output_str.append(
                f" * pace: {round(pace):02}:{round((pace % 1) * 60):02} min / mi")

            print(f"record: {local_time.strftime('%Y-%m-%d %H:%M:%S')}")
            print('\n'.join(output_str))
            print()

        if not debug:
            print('DB session committing')
            db.session.commit()
            print('DB session committed')
def test_user_agent_custom(self):
    geocoder = OpenMapQuest(user_agent='my_user_agent/1.0')
    self.assertEqual(geocoder.headers['User-Agent'], 'my_user_agent/1.0')
def test_user_agent_custom(self):
    geocoder = OpenMapQuest(api_key='DUMMYKEY1234',
                            user_agent='my_user_agent/1.0')
    self.assertEqual(geocoder.headers['User-Agent'], 'my_user_agent/1.0')
import pandas as pd
from geopy.geocoders import Nominatim
from geopy.geocoders import OpenMapQuest
import geopy
import glob
import time

interEDGE = '*****.csv'
interADDR = '*****.csv'
roadEDGE = '*****.csv'
roadADDR = '*****.csv'

geolocator = OpenMapQuest('*****')

table1 = pd.read_csv(roadADDR)
# table1 = pd.read_csv(roadEDGE)
add = [0] * len(table1)
lon = [0] * len(table1)
lat = [0] * len(table1)

for i in range(len(table1)):
    try:
        location = geolocator.geocode(str(table1["comname_n"][i]) + ' NORMAN,OK',
                                      timeout=4000)
        if type(location) == geopy.location.Location:
            add[i] = location.address
            lon[i] = location.longitude
            lat[i] = location.latitude
        else:
            add[i] = 0
            lon[i] = 0
            lat[i] = 0
if __name__ == '__main__':
    stats_df = pd.read_csv('data/player_locations_and_stat_totals.csv')
    stats_df['birthplace_country'] = stats_df['birthplace_country'].str.strip()

    country_codes_df = pd.read_csv('data/country_codes.csv', index_col=0)
    stats_df = stats_df.merge(country_codes_df, how='left',
                              left_on='birthplace_country',
                              right_on='code_3digit')

    # Instantiate the geopy OpenMapQuest geocoder with a rate limiter
    geolocator = OpenMapQuest(api_key='XqxsOEvVxEWTifxzkzeh1rWC9sJMZg0x')
    geocode = RateLimiter(geolocator.geocode, min_delay_seconds=.4)
    # print(geocode('Hightstown, NJ, United States of America'))

    stats_df['PER'] = stats_df.apply(calculate_per, axis=1)

    pandarallel.initialize(progress_bar=True)
    stats_df['miles_between_school_and_home'] = stats_df.parallel_apply(
        get_distance_between, axis=1)

    stats_df = stats_df.sort_values(by='PER', ascending=False).reset_index(drop=True)
    stats_df.to_csv('data/output_showing_miles_between.csv')
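# A minimal sketch of the rate-limited geocoding pattern used above, assuming a
# pandas DataFrame with a free-text address column; the function name
# add_coordinates and the column name 'birthplace' are illustrative only.
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import OpenMapQuest

def add_coordinates(df, api_key, column='birthplace'):
    geolocator = OpenMapQuest(api_key=api_key)
    # Throttle calls so the service's rate limit is respected.
    geocode = RateLimiter(geolocator.geocode, min_delay_seconds=0.4)
    locations = df[column].apply(geocode)
    df['latitude'] = locations.apply(lambda loc: loc.latitude if loc else None)
    df['longitude'] = locations.apply(lambda loc: loc.longitude if loc else None)
    return df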
parser.add_argument('-end', '-t', type=int, required=True)
parser.add_argument('-geo', '-g', required=True)

conn = sqlite3.connect("adresses_31.db")

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

geolocator = None
args = parser.parse_args()
if args.geo == 'google':
    geolocator = GoogleV3(api_key='Your API key', timeout=2)
elif args.geo == 'osm':
    geolocator = Nominatim(timeout=30)
elif args.geo == 'mapquest':
    geolocator = OpenMapQuest(api_key='Your Api key')

df = pd.read_csv('census.csv', sep=';')
adresses = df.to_dict()

# 72212 in total
for number in range(args.begin, args.end):
    try:
        time.sleep(1)
        logger.info("Trying to get data for number: {}".format(number))
        logger.info(adresses['adresse'][number])
        # try to geocode using a service
        location = geolocator.geocode("{}, Berlin".format(
            adresses['adresse'][number]))
        # if it returns a location
#!/usr/bin/env python
# Raymond Duncan
# Sina Aghli
# Assignment 6

# Import the sys and geopy modules
import sys
from geopy.geocoders import OpenMapQuest
from geopy.distance import vincenty

geolocator = OpenMapQuest()

# Assign variable name for terminal argument
# locations = 'cityList.txt'
locations = sys.argv[1]


def Greedy_Search(locations):
    # Open the file and read each line as a different location
    fo = open(locations, 'r')
    locationList = []
    for line in fo:
        line = line.strip('\n')
        line = line.strip('\r')
        locationList += [line]
    fo.close()
    # print locationList

    startLocation = locationList[0]
    # Remove start from list after creating related variable
    del locationList[0]
    # Create list for ordered locations, and ordered distances. Then variable for current location
def setUpClass(cls):
    cls.geocoder = OpenMapQuest(scheme='http', timeout=3)
    cls.delta = 0.04
def test_raises_without_apikey(self):
    with self.assertRaises(ConfigurationError):
        OpenMapQuest()
from matplotlib.collections import LineCollection
from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes
import shapefile
import pycountry
import haversine
from geolite2 import geolite2

import warnings
import matplotlib.cbook
warnings.filterwarnings("ignore", category=matplotlib.cbook.mplDeprecation)

arcgis = ArcGIS(timeout=100)
nominatim = Nominatim(timeout=100)
googlev3 = GoogleV3(timeout=100)
openmapquest = OpenMapQuest(timeout=100)

logloc = logging.getLogger('locator')
logloc.setLevel(logging.WARNING)
ch = logging.StreamHandler()
ch.setLevel(logging.WARNING)
logloc.addHandler(ch)

# choose and order your preference for geocoders here
geocoders = [nominatim, arcgis, openmapquest]


class Participant(object):

    def __init__(self, name=None, firstname=None,
from geopy.geocoders import OpenMapQuest
import csv

key = '%apikey%'
geolocator = OpenMapQuest(key)


def reverse_geocode(data, output_path='geocoded.csv'):
    """
    Get address from coordinates
    TODO: Convert csv I/O to pandas
    :param data: coordinate data from Toronto KSI
    :return:
    """
    d_out = []
    with open('KSI_CLEAN2.csv', mode='r') as fin:
        reader = csv.reader(fin)
        j = 0
        for row in reader:
            j += 1
            try:
                coordinates = [row[4], row[5]]
                result = geolocator.reverse(coordinates).address
                row.extend(result)
                d_out.append(row)
            except:
                print('Error')
    with open(output_path, 'w') as f_out:
        writer = csv.writer(f_out)
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import current_app
from itsdangerous import BadSignature, SignatureExpired
from geopy.geocoders import OpenMapQuest
from datetime import datetime

from config import Config
from .user import User
from .. import db

geolocator = OpenMapQuest(api_key=Config.OPENMAP_KEY)


def get_suburb_state(latitude, longitude):
    try:
        location = geolocator.reverse("{}, {}".format(latitude, longitude))
        return (location.raw['address']['suburb'],
                location.raw['address']['state'])
    except:
        return ("NA", "NA")


from sqlalchemy.inspection import inspect


class ModelSerializer(object):

    def serialize(self):
        return {c: getattr(self, c) for c in inspect(self).attrs.keys()}

    @staticmethod
    def serialize_list(l):
        return [m.serialize() for m in l]


def to_json(inst, cls):
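# A minimal usage sketch for get_suburb_state above; it assumes the module's
# geolocator was constructed with a valid Config.OPENMAP_KEY, and the
# coordinates are an arbitrary example (central Sydney), not values from the
# original project.
suburb, state = get_suburb_state(-33.8688, 151.2093)
print(suburb, state)  # falls back to ("NA", "NA") if the reverse lookup fails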
def setUpClass(cls):
    cls.geocoder = OpenMapQuest(api_key='my_api_key', scheme='http', timeout=3)
    cls.delta = 0.04