def get_location(address):
    """Geocode *address* with GeoNames and return the geopy location (None if not found)."""
    logger.info(f"Determining location of {address}")
    geocoder = GeoNames(username=CONFIG["geonames_username"])
    result = geocoder.geocode(address)
    if result is None:
        logger.error(f"Unable to find address \"{address}\"")
    return result
def locate(request, country, city):
    """Locate a user-supplied city and push a map marker to the client via Dajax."""
    dajax = Dajax()
    geocoder = GeoNames(None, "veitheller")
    query = str(city) + ", " + str(country)
    place, (lat, lon) = geocoder.geocode(query)
    dajax.add_data([lat, lon], 'addLocationMarker')
    return dajax.json()
def __init__(self, filename, geo_identity=None):
    """Initializes the class.

    Keyword arguments:
    filename -- string representation of the file containing the geodata
    geo_identity -- string representation of the identity in geonames
    """
    pygeoip.GeoIP.__init__(self, filename)
    self._setup_segments()
    # Only create a GeoNames client when an identity was supplied.
    if geo_identity:
        self.gnames = GeoNames(None, geo_identity)
def geopy_geolocate_by_address_using_geonames(query): from geopy.geocoders import GeoNames print 'Search by Geonames location, query: ', query geolocator = GeoNames(username=GEONAMES_USER_NAME) location = geolocator.geocode(query) if location != None: print 'Search by Geonames location, address: ', location.address print 'Search by Geonames location, coordinates:', location.latitude, location.longitude print 'Search by Geonames location, location raw:', location.raw else: print 'Search by Geonames location, location for query:', query, 'could not be found.' return location
def geopy_get_address_by_coordinates_using_geonames(coordinates): from geopy.geocoders import GeoNames print 'Search by Geonames coordinates, query: ', coordinates geolocator = GeoNames(username=GEONAMES_USER_NAME) address = geolocator.reverse(coordinates) if address != None: print 'Search by Geonames coordinates, address: ', address[0].address print 'Search by Geonames coordinates, coordinates:', address[0].latitude, address[0].longitude print 'Search by Geonames coordinates, location raw:', address[0].raw else: print 'Search by Geonames coordinates, address for coordinates:', coordinates, 'could not be found.' return address
def __init__(self, paese):
    """Set up geocoders, the shapefile driver and data paths for country *paese*."""
    self.paese = paese
    self.historical_table = "c:/data/tools/sparc/input_data/historical_data/floods - refine.csv"
    self.geolocator = Nominatim()
    self.geolocator_geonames = GeoNames(country_bias=self.paese,
                                        username='******', timeout=1)
    self.outDriver = ogr.GetDriverByName("ESRI Shapefile")
    # All data locations live under the current working directory.
    cwd = os.getcwd()
    self.countries_shp_location = cwd + '/input_data/countries'
    self.outShp = cwd + "/input_data/geocoded/shp/" + self.paese + ".shp"
    self.events_location = cwd + '/input_data/geocoded/shp/'
    self.risk_map_location = cwd + '/input_data/geocoded/risk_map/'
def extractLocation(self, location):
    """Geocode *location* and return {'latitude', 'longitude'}, or None when not found."""
    geolocator = GeoNames(username='******')
    geolocation = geolocator.geocode(location, timeout=10000)
    # "is None" instead of "== None" (PEP 8); stray semicolon removed.
    if geolocation is None:
        return None
    return {'latitude': geolocation.latitude,
            'longitude': geolocation.longitude}
def _compute_loc_sunset(input_str):
    """Geocode *input_str*, then build a one-year sunrise/sunset dataframe in local time.

    Returns (df, geoloc) where df has datetime/date/time/hour/daylight columns.
    """
    geolocator = Nominatim(user_agent='sunset_app', timeout=3)
    geoloc = geolocator.geocode(input_str)
    lat, lon = geoloc.latitude, geoloc.longitude
    loc = api.Topos('{0} N'.format(lat), '{0} E'.format(lon))
    t0 = ts.utc(2020, 7, 1)
    t1 = ts.utc(2021, 7, 1)
    t, y = almanac.find_discrete(t0, t1, almanac.sunrise_sunset(e, loc))
    df = pd.DataFrame({'datetime': t.utc_iso(), 'sun_down': y})
    df['datetime'] = pd.to_datetime(df['datetime'])
    # Resolve the local timezone for the geocoded point via GeoNames.
    tz = GeoNames(username='******').reverse_timezone(
        (geoloc.latitude, geoloc.longitude))
    try:
        df['datetime'] = df['datetime'].dt.tz_localize('utc').dt.tz_convert(
            tz.pytz_timezone)
    except TypeError:
        # Column was already tz-aware; just convert.
        df['datetime'] = df['datetime'].dt.tz_convert(tz.pytz_timezone)
    df['date'] = df['datetime'].dt.date
    df['time'] = df['datetime'].dt.time
    # Fractional hour of day from the HH:MM:SS string.
    df['hour'] = (df['time'].astype(str).str.split(':', expand=True)
                  .astype(int)
                  .apply(lambda row: row[0] + row[1] / 60. + row[2] / 3600.,
                         axis=1))
    df['hour_24'] = 240  # NOTE(review): 240 looks like a typo for 24.0 — confirm intent
    df['daylight'] = np.abs(df['hour'].diff().shift(-1))
    return df, geoloc
def __init__(self, paese):
    """Store the country name and initialise the two geocoders."""
    self.paese = paese
    self.historical_table = "C:/sparc/input_data/historical_data/floods - refine.csv"
    self.geolocator = Nominatim()
    self.geolocator_geonames = GeoNames(country_bias=self.paese,
                                        username='******', timeout=1)
def load(request):
    """ Loads locations. """
    dajax = Dajax()
    apis = Api.objects.filter(user=User.objects.get(username=request.user))
    api_objects = []
    # No APIs configured: warn the client and clear the marker list.
    if not apis:
        error = "'Could not load data. No API available.'"
        dajax.script("toastr.warning(" + error + ", 'API warning')")
        dajax.script("markers = []")
        return dajax.json()
    plugindir = os.listdir(preferences.BASE_DIR + "/datavis/plugins")
    for api in apis:
        if not api.api + ".py" in plugindir:
            error = "'Could not load API " + api.api + ". No such API.'"
            dajax.script("toastr.error(" + error + ", 'API error')")
        credentials = ApiKey.objects.filter(identification=api)
        if api.needs_credentials and not credentials:
            error = "'Could not load API " + api.api + ". No credentials.'"
            dajax.script("toastr.error(" + error + ", 'API error')")
            continue
        # Import the plugin module and wrap it in an APIInterface.
        impobj = getattr(__import__("datavis.plugins." + api.api,
                                    fromlist=[api.api]), api.api)
        if credentials:
            api_objects.append(APIInterface(api.api, impobj,
                                            credentials[0].authentication))
        else:
            api_objects.append(APIInterface(api.api, impobj))
    # Geocode every location of every plugin and emit a JS marker array.
    script = "markers = ["
    g = GeoNames(None, "veitheller")
    for api in api_objects:
        for entry in api.locations:
            entry_name = api.locations[entry][0] + ", " + entry
            try:
                place, (lat, lon) = g.geocode(entry_name)
            except (TypeError, exc.GeopyError):
                continue
            script += str([lat, lon, entry_name]) + ","
    # Replace the trailing comma with the closing bracket.
    script = script[:-1] + script[-1:].replace(",", "]")
    dajax.script(script)
    return dajax.json()
def __init__(self, paese):
    """Initialise geocoders, the shapefile driver and per-country data paths."""
    self.paese = paese
    self.historical_table = "c:/data/tools/sparc/input_data/historical_data/floods - refine.csv"
    self.geolocator = Nominatim()
    self.geolocator_geonames = GeoNames(country_bias=self.paese,
                                        username='******', timeout=1)
    self.outDriver = ogr.GetDriverByName("ESRI Shapefile")
    cwd = os.getcwd()
    self.countries_shp_location = cwd + '/input_data/countries'
    self.outShp = cwd + "/input_data/geocoded/shp/" + self.paese + ".shp"
    self.events_location = cwd + '/input_data/geocoded/shp/'
    self.risk_map_location = cwd + '/input_data/geocoded/risk_map/'
def test_unicode_name(self):
    """ GeoNames.geocode unicode """
    # work around ConfigurationError raised in GeoNames init
    self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
    # Query is the Chinese name of the Forbidden City (故宫).
    self.geocode_run(
        {"query": u"\u6545\u5bab"},
        {"latitude": 30.90097, "longitude": 118.49436},
    )
def test_unicode_name(self):
    """ GeoNames.geocode unicode """
    # work around ConfigurationError raised in GeoNames init
    self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
    self.geocode_run(
        {"query": "Mount Everest, Nepal"},
        {"latitude": 27.987, "longitude": 86.925},
    )
def test_reverse(self):
    """ GeoNames.reverse """
    # work around ConfigurationError raised in GeoNames init
    self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
    self.reverse_run(
        {"query": "40.75376406311989, -73.98489005863667"},
        {"latitude": 40.75376406311989, "longitude": -73.98489005863667},
    )
def load(request):
    """ Loads locations. """
    dajax = Dajax()
    apis = Api.objects.filter(user=User.objects.get(username=request.user))
    api_objects = []
    if not apis:
        # Nothing configured for this user: warn and reset the markers.
        error = "'Could not load data. No API available.'"
        dajax.script("toastr.warning(" + error + ", 'API warning')")
        dajax.script("markers = []")
        return dajax.json()
    plugindir = os.listdir(preferences.BASE_DIR + "/datavis/plugins")
    for api in apis:
        if not api.api + ".py" in plugindir:
            error = "'Could not load API " + api.api + ". No such API.'"
            dajax.script("toastr.error(" + error + ", 'API error')")
        credentials = ApiKey.objects.filter(identification=api)
        if api.needs_credentials and not credentials:
            error = "'Could not load API " + api.api + ". No credentials.'"
            dajax.script("toastr.error(" + error + ", 'API error')")
            continue
        impobj = getattr(
            __import__("datavis.plugins." + api.api, fromlist=[api.api]),
            api.api)
        if credentials:
            api_objects.append(
                APIInterface(api.api, impobj, credentials[0].authentication))
        else:
            api_objects.append(APIInterface(api.api, impobj))
    script = "markers = ["
    g = GeoNames(None, "veitheller")
    for api in api_objects:
        for entry in api.locations:
            entry_name = api.locations[entry][0] + ", " + entry
            try:
                place, (lat, lon) = g.geocode(entry_name)
            except (TypeError, exc.GeopyError):
                continue
            script += str([lat, lon, entry_name]) + ","
    # Swap the trailing comma for the closing bracket of the JS array.
    script = script[:-1] + script[-1:].replace(",", "]")
    dajax.script(script)
    return dajax.json()
def findLocations(user_name, locs, file_name_s):
    """Geocode each name in *locs* and write LOCATION/LAT/LON rows to *file_name_s*."""
    #TODO: delete before committing
    geolocator = GeoNames(username=user_name)
    g = geocoders.GeoNames(username=user_name)
    csv_writer = csv.writer(open(file_name_s, 'wb'))
    csv_writer.writerow(['LOCATION', 'LAT', 'LON'])
    for loc in locs:
        name = str(loc)
        coord = coordFromLocName(name, geolocator, g)
        csv_writer.writerow([name, coord[0], coord[1]])
def findTimezones(user_name, file_name, file_name_s): geolocator = GeoNames(username=user_name) g = geocoders.GeoNames(username=user_name) location_index = 0 lat_index = 1 lon_index = 2 res = [] data = [] HOUR = 60 * (60 + 4) utc = pytz.utc utc.zone dat = csv.reader(open(file_name)) w = tzwhere.tzwhere() i = 0 for row in dat: if i > 0: data.append([row[location_index], row[lat_index], row[lon_index]]) i = i + 1 csv_writer = csv.writer(open(file_name_s, 'wb')) #print "number of rows: ", len(data) csv_writer.writerow(HEADER2) for row in data: if (row[lat_index] <> '0' and row[lon_index] <> '0'): lat = float(row[lat_index]) lon = float(row[lon_index]) timezone = w.tzNameAt(lat, lon) print lat print lon print timezone try: country_info = reverceGeoCode([row[lat_index], row[lon_index]], g, geolocator, user_name) except GeocoderServiceError: print "hourly limit has been exceeded, time to wait for an hour..." time.sleep(HOUR) print "starting again..." country_info = reverceGeoCode([row[lat_index], row[lon_index]], g, geolocator, user_name) try: time_diff = timeDifference(utc, timezone) except AttributeError: time_diff = 0 print timezone temp = [ row[location_index], row[lat_index], row[lon_index], timezone, time_diff, country_info[2], country_info[3], country_info[4] ] else: temp = row + [0, 0, 0, 0, 0] res.append(temp) try: csv_writer.writerow(temp) except UnicodeEncodeError: csv_writer.writerow(row + [0, 0, 0, 0, 0]) return res
def __init__(self, area, schema, tabella_pesi, tabella_pop_stat, tabella_cicloni): self.dati_per_plot = {} self.dati_per_prob = {} self.geolocator = Nominatim() self.geolocator_geonames = GeoNames(country_bias=self.paese, username='******', timeout=1) self.outDriver = ogr.GetDriverByName("ESRI Shapefile") self.outShp = "classes/geocodifica/shp/" + self.paese + ".shp" self.area = area self.schema = schema self.tabella_pesi = tabella_pesi self.tabella_pop_stat = tabella_pop_stat self.tabella_cicloni = tabella_cicloni try: self.conn = psycopg2.connect("dbname=sparc user=postgres") except Exception as e: print e.message self.cur = self.conn.cursor()
def __init__(self):
    """Create the GeoNames geolocator and load the pickled caches if present."""
    self.geolocator = GeoNames(username=self.GEO_USER_NAME)
    self.geomappings = {}
    self.geomappings_blacklist = []
    if os.path.isfile(self.PICKLED_GEO):
        with open(self.PICKLED_GEO, 'rb') as f:
            self.geomappings = pickle.load(f)
    if os.path.isfile(self.PICKLED_GEO_BLACKLIST):
        with open(self.PICKLED_GEO_BLACKLIST, 'rb') as f:
            self.geomappings_blacklist = pickle.load(f)
    log_info("known geomappings size: %s " % len(self.geomappings))
    log_info("known geo blacklist size: %s " % len(self.geomappings_blacklist))
def extract_coord_list(listemon):
    """Extract the coordinates of the requested monuments.

    arg: list of chosen monuments
    Returns: list of (monument, latitude, longitude) tuples
    """
    geolocatorOSM = Nominatim()  # Open Street Maps
    geolocatorGN = GeoNames(username="******")
    # These monuments yield wrong coordinates with Open Street Maps,
    # so they are resolved through GeoNames instead.
    prob = ["Hôtel de Ville", "Pont Neuf", "Place de la Concorde"]
    mon_coord = []
    for monument in listemon:
        if monument not in prob:
            location = geolocatorOSM.geocode(monument)
        else:
            location = geolocatorGN.geocode(monument)
        mon_coord.append((monument, location.latitude, location.longitude))
    return mon_coord
def geocoding_cities():
    """Geocode every city in the city list and write 'name|lat|lon' lines to CSV."""
    geolocator = GeoNames(username='******')
    # output file
    fw = codecs.open('../city_coords_AllCategories.csv', 'w', 'utf-8')
    # read from the city list
    with codecs.open('../city_poi_by_type/AllCategory/city_list.csv', 'r', 'utf-8') as fr:
        for line in fr:
            this_city_name = line.strip()
            splitted = this_city_name.split(",")
            # Normalise to "City, CC" capitalisation before querying.
            this_city_name_new = splitted[0].title() + ", " + splitted[1].upper()
            location = geolocator.geocode(this_city_name_new)
            # "is not None" rather than "!= None" (PEP 8).
            if location is not None:
                print(this_city_name + ": " + str(location.latitude) + ", " + str(location.longitude))
                fw.write(this_city_name + "|" + str(location.latitude) + "|" + str(location.longitude) + "\n")
            else:
                print(this_city_name + ' not geocoded')
                fw.write(this_city_name + "||\n")
    fw.close()
def PlaceToMap(dataset):
    """Build dataframe with place and the list of documents which are located there"""
    #-----------------------Group by location--------------------
    location = list(dataset["Place"])
    temp = []
    for loc in location:
        temp += loc
    location = list(set(temp))
    length = len(location)
    data = {"Place": [], "Documents": []}
    for m in range(length):
        temp = []
        event = location[m]
        locs = event
        # Collect every document whose place list mentions this location.
        for j, i in zip(dataset["Name"], dataset["Place"]):
            if locs in i:
                temp.append(j)
        if not (locs in data["Place"]):
            data['Place'].append(locs)
            temp = list(set(temp))
            data["Documents"].append(temp)
    dataset = pd.DataFrame(data)
    #--------------------Beginning for locatalization--------------
    geolocator = OpenMapQuest(api_key='kNFyXsWRe50Q85tXM8szsWN0A3SS3X0T',
                              timeout=100)
    #geolocator=Here("Af9fc3JTNkg1N4IwwVEz","3_R3z-sJU6D1BEFE9HWy7Q")
    # geolocator=GeoNames(username="******")
    length = dataset.shape[0]
    data = {"Place": dataset["Place"],
            "Documents": dataset["Documents"],
            "Coordinate": [0] * length,
            "Count": [0] * length}
    for i in range(length):
        place = dataset["Place"][i]
        try:
            data["Coordinate"][i] = [place,
                                     find_map_coordinates(place, geolocator)]
        except GeocoderQuotaExceeded:
            continue
        except AttributeError:
            # Fall back to GeoNames when OpenMapQuest returns nothing usable.
            geolocator1 = GeoNames(username="******", timeout=100)
            data["Coordinate"][i] = [place,
                                     find_map_coordinates(place, geolocator1)]
        data["Count"][i] = len(data["Documents"][i])
    return pd.DataFrame(data)
def PlaceToMap(dataset):
    """Build dataframe with place and the list of documents which are located there"""
    #-----------------------Group by location--------------------
    location = ["paris rive gauche", "paris saint lazare", "paris saint-lazare",
                "paris st lazare", "paris gare du nord", "paris gare de l’est",
                "paris gare de lyon", "paris sud est", "paris austerlitz",
                "paca", "paris montparnasse"]
    loctaxo = locationTaxonomyNew()
    temploc = []
    keys = loctaxo.keys()
    # Change abbreviation to real name of the place
    for code in keys:
        code = code.lower()
        if not (code in ["marseille", "st charles", "lille", "flandres"]):
            temploc.append(code)
    location1 = temploc + list(loctaxo.values())
    location = list(set(location + location1))
    length = len(location)
    data = {"Place": [], "Documents": []}
    for m in range(length):
        temp = []
        event = location[m]
        if event in keys:
            locs = loctaxo[event]  # change the abbreviation to the real place
        else:
            locs = event  # Keep the real name of the place
        for j, i in zip(dataset["Name"], dataset["Place"]):
            if locs in i:
                temp.append(j)
        if not (locs in data["Place"]):
            data['Place'].append(locs)
            data["Documents"].append(temp)
    dataset = pd.DataFrame(data)
    #--------------------Beginning for locatalization--------------
    geolocator = OpenMapQuest(api_key='kNFyXsWRe50Q85tXM8szsWN0A3SS3X0T')
    # geolocator=GeoNames(username="******")
    length = dataset.shape[0]
    data = {"Place": dataset["Place"],
            "Documents": dataset["Documents"],
            "Coordinate": [0] * length,
            "Count": [0] * length}
    for i in range(length):
        place = dataset["Place"][i]
        try:
            data["Coordinate"][i] = [place,
                                     find_map_coordinates(place, geolocator)]
        except GeocoderQuotaExceeded:
            continue
        except AttributeError:
            # Fall back to GeoNames when OpenMapQuest returns nothing usable.
            geolocator1 = GeoNames(username="******")
            data["Coordinate"][i] = [place,
                                     find_map_coordinates(place, geolocator1)]
        data["Count"][i] = len(data["Documents"][i])
    return pd.DataFrame(data)
def geoResetter(GeoNamesAccounts, holder):
    """Rotate to a fresh GeoNames account; sleep 30 minutes when all are exhausted.

    NOTE(review): rebinding GeoNamesAccounts (= holder + ...) does not propagate
    to the caller's list — confirm callers rely only on the returned geolocator.
    """
    try:
        choice = random.choice(GeoNamesAccounts)
    except IndexError:
        # random.choice raises IndexError on an empty sequence; the former bare
        # "except:" would also have hidden unrelated errors.
        # GeoNamesAccounts is empty: all accounts are exhausted.
        print(
            "All accounts exhausted at this time, putting system into sleep mode for 30 minutes"
        )
        GeoNamesAccounts = holder + GeoNamesAccounts
        choice = random.choice(GeoNamesAccounts)
        time.sleep(1800)  # sleep for 30 minutes since the list is exhausted
        print("System sleep: Over")
    GeoNamesAccounts.remove(choice)
    geolocator = GeoNames(username=choice)
    return geolocator  # return new account
def __init__(self, paese, hazard):
    """Store country/hazard and initialise geocoders and bookkeeping counters."""
    self.paese = paese
    self.hazard = hazard
    self.geolocator = Nominatim()
    self.geolocator_geonames = GeoNames(country_bias=self.paese,
                                        username='******', timeout=1)
    # Geocoding statistics.
    self.totali = 0
    self.successo = 0
    self.insuccesso = 0
    self.poligono_controllo = []
    self.n = 0
async def get_loc_timezone(lat: float, lon: float) -> int:
    """
    :param lat: Latitude. min/max: -90 to +90
    :param lon: Longitude. min/max: -180 to +180
    :return: time zone offset in hours, or False when the lookup fails
    """
    async with GeoNames(GEONAMES_USERNAME,
                        adapter_factory=AioHTTPAdapter) as locator:
        try:
            timezone = await locator.reverse_timezone((lat, lon))
        except Exception as e:
            # The previous "return" inside "finally" overrode this path and
            # then raised NameError on the unbound "timezone" variable.
            print(e)
            return False
        return timezone.raw['rawOffset']
def collectGeocoders():
    """Read API keys from ..\\conf\\config.ini and assemble geocoder descriptors.

    Each descriptor is {'locator', 'name', 'type'} where type is 'Geopy' or 'Custom'.
    """
    config = configparser.ConfigParser()
    conf = r'..\conf\config.ini'
    config.read(conf)
    keys = {
        'Here_app_id': config['DEFAULT']['Here_app_id'],
        'Here_app_code': config['DEFAULT']['Here_app_code'],
        'TomTom': config['DEFAULT']['TomTom_api_key'],
        'OpenMapQuest': config['DEFAULT']['OpenMapQuest_api_key'],
        'GoogleV3': config['DEFAULT']['GoogleV3_api_key'],
    }
    locators = [
        {'locator': Nominatim(user_agent="afan"),
         'name': 'Nominatim', 'type': 'Geopy'},
        {'locator': GeoNames(username="******"),
         'name': 'GeoNames', 'type': 'Geopy'},
        {'locator': Here(app_id=keys['Here_app_id'],
                         app_code=keys['Here_app_code']),
         'name': 'Here', 'type': 'Geopy'},
        {'locator': TomTom(api_key=keys['TomTom']),
         'name': 'TomTom', 'type': 'Geopy'},
        {'locator': OpenMapQuest(api_key=keys['OpenMapQuest']),
         'name': 'OpenMapQuest', 'type': 'Geopy'},
        {'locator': Photon(), 'name': 'Photon', 'type': 'Geopy'},
    ]
    #locators.append({'locator':GoogleV3(api_key=keys['GoogleV3']),'name':'GoogleV3','type':'Geopy'})
    locators.append({'locator': revGeocodingbyIQ,
                     'name': 'revGeocodingbyIQ', 'type': 'Custom'})
    return locators
def __init__(self, area,schema,tabella_pesi,tabella_pop_stat,tabella_cicloni): self.dati_per_plot = {} self.dati_per_prob = {} self.geolocator = Nominatim() self.geolocator_geonames = GeoNames(country_bias=self.paese, username='******', timeout=1) self.outDriver = ogr.GetDriverByName("ESRI Shapefile") self.outShp = "classes/geocodifica/shp/" + self.paese + ".shp" self.area = area self.schema = schema self.tabella_pesi = tabella_pesi self.tabella_pop_stat = tabella_pop_stat self.tabella_cicloni = tabella_cicloni try: self.conn = psycopg2.connect("dbname=sparc user=postgres") except Exception as e: print e.message self.cur = self.conn.cursor()
def writeLocTZ(user_name, locs, file_name):
    """Append geocoded coordinates for each non-empty location in *locs* to *file_name*."""
    # coordinates of user's location are saved
    csv_writer = csv.writer(open(file_name, 'a'))
    HEADER = ['Unique_Locations', 'LAT', 'LON']
    csv_writer.writerow(HEADER)
    #geolocator = Nominatim()
    #g = geocoders.GoogleV3()
    geolocator = GeoNames(username=user_name)
    g = geocoders.GeoNames(username=user_name)
    rows = []  # new data
    for loc in locs:
        # Skip entries whose location field was not filled in.
        if loc[0] != '':
            coord = coordFromLocName(loc[0], geolocator, g)
            res = [loc[0], coord[0], coord[1]]
            csv_writer.writerow(res)
class GeoFinder(object):
    """Resolve free-form location strings to (lat, lon, country, numeric_code),
    caching results (and failures) in pickle files across runs."""

    PICKLED_GEO = "geomappings.pkl"
    PICKLED_GEO_BLACKLIST = "geomappings_blacklist.pkl"
    GEO_USER_NAME = "arssher"

    def __init__(self):
        self.geolocator = GeoNames(username=self.GEO_USER_NAME)
        self.geomappings = {}
        self.geomappings_blacklist = []
        # Restore previously cached lookups when the pickle files exist.
        if os.path.isfile(self.PICKLED_GEO):
            with open(self.PICKLED_GEO, 'rb') as f:
                self.geomappings = pickle.load(f)
        if os.path.isfile(self.PICKLED_GEO_BLACKLIST):
            with open(self.PICKLED_GEO_BLACKLIST, 'rb') as f:
                self.geomappings_blacklist = pickle.load(f)
        log_info("known geomappings size: %s " % len(self.geomappings))
        log_info("known geo blacklist size: %s " % len(self.geomappings_blacklist))

    def __del__(self):
        # Persist both caches when the finder is torn down.
        with open(self.PICKLED_GEO, 'wb') as f:
            pickle.dump(self.geomappings, f, pickle.HIGHEST_PROTOCOL)
        with open(self.PICKLED_GEO_BLACKLIST, 'wb') as f:
            pickle.dump(self.geomappings_blacklist, f, pickle.HIGHEST_PROTOCOL)

    def get_location(self, location_string):
        """Return (lat, lon, country_name, numeric_code); (0, 0, "", 0) on failure."""
        if location_string in self.geomappings:
            return self.geomappings[location_string]
        elif location_string in self.geomappings_blacklist:
            # Known-bad string: don't hit the service again.
            return (0, 0, "", 0)
        else:
            location = self.geolocator.geocode(location_string,
                                               exactly_one=True, timeout=60)
            if location and u'countryCode' in location.raw:
                cc_alphabet = location.raw[u'countryCode'].encode('utf_8')
                cc_numeric = int(countries.get(cc_alphabet).numeric)
                res = (location.latitude, location.longitude,
                       location.raw[u'countryName'].encode('utf_8'), cc_numeric)
                self.geomappings[location_string] = res
                if len(self.geomappings) % 200 == 0:
                    log_info("Geomappings size now %s" % len(self.geomappings))
                return res
            else:
                self.geomappings_blacklist.append(location_string)
                log_warn("Failed to get location for string %s"
                         % location_string.encode('utf_8'))
                return (0, 0, "", 0)
def extract_coord_dico(dicomon):
    """Extract the coordinates of the requested monuments.

    arg: dict mapping a space label to its list of monuments
    Returns: dict mapping each label to [(monument, latitude, longitude), ...]
    """
    # Same extraction method as extract_coord_list(listmon).
    geolocatorOSM = Nominatim()
    geolocatorGN = GeoNames(username="******")
    prob = ["Hôtel de Ville", "Pont Neuf", "Place de la Concorde"]
    dico_coord = {}
    for etiquette in dicomon:
        mon_coord = []
        for monument in dicomon[etiquette]:
            if monument not in prob:
                location = geolocatorOSM.geocode(monument)
                if location:
                    mon_coord.append(
                        (monument, location.latitude, location.longitude))
                else:
                    # Log monuments that could not be resolved.
                    print(monument)
        dico_coord[etiquette] = mon_coord
    return dico_coord
def main():
    """Render a folium demo map: population choropleth plus cubic-value markers."""
    geolocator = GeoNames(username='******')
    print('start')
    print('done')
    testmap = folium.Map(location=[49, 24], zoom_start=5)
    fg_pp = folium.FeatureGroup(name='Населення країн')
    # Colour countries by their 2005 population bucket.
    fg_pp.add_child(folium.GeoJson(
        data=open('world.json', 'r', encoding='utf-8-sig').read(),
        style_function=lambda x: {
            'fillColor': 'red' if x['properties']['POP2005'] < 10000000
            else '#404040' if x['properties']['POP2005'] < 50000000
            else 'red' if x['properties']['POP2005'] < 100000000
            else 'green' if x['properties']['POP2005'] < 500000000
            else 'yellow'}))
    i = 0
    for i in range(50):
        c = i * i * i
        testmap.add_child(folium.CircleMarker(
            location=[i, i], popup=str(c) + get_color(c), radius=10,
            fill_color=get_color(c), fill_opacity=1))
    testmap.add_child(fg_pp)
    testmap.save('Map1.html')
def create_city(user_input, country=""):
    """Build a City object from a free-form query using Nominatim and GeoNames.

    Nominatim supplies the 'importance' score; GeoNames supplies the canonical
    name, country code, coordinates and population.
    """
    geopy.geocoders.options.default_timeout = 7
    # Nominatim
    geolocator = Nominatim()
    location = geolocator.geocode(user_input)
    popularity = location.raw['importance']
    # GeoNames
    geolocator = GeoNames(username='******')
    location = geolocator.geocode(user_input)
    city_name = location.raw['toponymName']
    country = location.raw['countryCode']
    latitude = location.latitude
    longitude = location.longitude
    population = location.raw['population']
    user_city = City(city_name, country, latitude, longitude,
                     population, popularity)
    return user_city
def geocode(string):
    """Geocode *string* via the GeoNames web service and return the result."""
    locator = GeoNames(username='******')
    return locator.geocode(string)
def geoLocate(list_of_places, list_of_locations):
    """Geocode *list_of_places* with rotating GeoNames accounts and count hits.

    NOTE(review): reconstructed from whitespace-mangled source — the exact
    loop/break nesting should be confirmed against the original. Also note
    place[0] indexes the first character when place is a string — verify that
    callers pass tuples/lists rather than plain strings.
    """
    #Using Geopy for geolocations NOTE this works
    GeoNamesAccounts = [
        "semantic_1", "semantic_2", "semantic_3", "semantic_4", "semantic_5",
        "semantic_6"
    ]
    latin = '^[ -~]+$'
    holder = []
    holder = holder + GeoNamesAccounts
    geolocations = []
    choice = random.choice(GeoNamesAccounts)
    GeoNamesAccounts.remove(choice)
    geolocator = GeoNames(username=choice)
    # Removing duplicates to be sure; it should already be distinct.
    places = list(set(list_of_places))
    for place in places:
        if (len(place) == 1) and (place.lower() == "island"
                                  or place.lower() == "islands"):
            pass
        geo = None
        while geo == None:
            try:
                geo = geolocator.geocode(place[0], timeout=20)
                if geo != None:
                    if re.search(latin, geo.address):  # latin-letter location?
                        if 3 > len(geo.address):
                            geolocations.append(geo)
                            break
                        for location in list_of_locations:
                            # len check is for country names or state names
                            split_loc = geo.address.split(" ")
                            if location in geo.address or 0 < len(
                                    split_loc) < 3:
                                geolocations.append(geo)
                                break
                            elif not location in geo.address:
                                pass
                        break
                while True:  # continue till all locations are exhausted
                    if not "Montana" in list_of_locations and not "Washington" in list_of_locations:
                        break
                    # NOTE: avoids increasing the time complexity too much; as a
                    # Montana author most works feature Montana/Washington towns.
                    new_place = place[0] + " " + list_of_locations[0]
                    try:
                        geo = geolocator.geocode(new_place, timeout=20)
                        break
                    except Exception as e:
                        if "limit" in e:
                            # Lookup limit reached: switch account.
                            geoResetter(GeoNamesAccounts, holder)
                            continue
                if geo != None:
                    for location in list_of_locations:
                        if location in geo.address:
                            geolocations.append(geo)
                            break
                        elif not location in geo.address:
                            pass
                break
            except Exception as e:
                print(e)
                # Lookup limit reached: switch account and retry.
                geoResetter(GeoNamesAccounts, holder)
                continue
    geoplaces = []
    for geoloc in geolocations:
        geoplaces.append(geoloc.address + " (" + str(geoloc.latitude) + "," +
                         str(geoloc.longitude) + ")")
    geolocations = KeywordCounter(geoplaces)
    return geolocations
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 17 09:27:10 2019

@author: LokalAdm

Reads tab-separated rows from D:/sopron.txt (name, then numeric columns),
ASCII-normalises each field and collects the rows into `sopron`.
"""
from geopy.geocoders import GeoNames
import unicodedata
import codecs

file = codecs.open("D:/sopron.txt", encoding='utf-8')
geolocator = GeoNames(username='******')
#location = geolocator.geocode("Czenk (Kis-)")
#print((location.latitude, location.longitude))
sopron = []
for line in file:
    l = line.rstrip().split('\t')
    lp = []
    for i in range(len(l)):
        k = l[i]
        if i:
            # Numeric columns: strip diacritics, then parse as float.
            lp.append(
                float(
                    unicodedata.normalize('NFKD', k).encode('ascii',
                                                            'ignore')))
        else:
            # First column: the place name, with the b'...' wrapper trimmed.
            i = str(unicodedata.normalize('NFKD', k).encode('ascii', 'ignore'))
            lp.append(i[2:(len(i) - 1)])
    sopron.append(lp)
print(sopron)
latlong = []
class UtilitieSparc(ProjectFlood):
    """Flood-project utilities for one country.

    Covers GAUL admin-name lookups, geocoding of historical accidents,
    validation of the resulting coordinates against the country polygon,
    point-shapefile export, and PostgreSQL weight-table management.

    NOTE(review): relies on attributes that are set elsewhere (presumably by
    ProjectFlood): self.paese, self.admin, self.layer, self.historical_table
    and the class attribute proj_dir — confirm against the parent class.
    """

    # GAUL attribute-table field names (ADM0 = country, ADM2 = district).
    campo_nome_paese = "ADM0_NAME"
    campo_iso_paese = "ADM0_CODE"
    campo_nome_admin = "ADM2_NAME"
    campo_iso_admin = "ADM2_CODE"
    nome_paese = ""
    cod_paese = ""

    def __init__(self, area, schema, tabella_pesi, tabella_pop_stat,
                 tabella_cicloni):
        """Store table names, build the geocoders, open the DB connection.

        Keyword arguments:
        area -- working-area identifier
        schema -- PostgreSQL schema holding all the tables below
        tabella_pesi -- name of the weights table
        tabella_pop_stat -- name of the population-statistics table
        tabella_cicloni -- name of the cyclones table
        """
        self.dati_per_plot = {}
        self.dati_per_prob = {}
        self.geolocator = Nominatim()
        # GeoNames biased towards the current country to reduce false hits.
        self.geolocator_geonames = GeoNames(country_bias=self.paese,
                                            username='******', timeout=1)
        self.outDriver = ogr.GetDriverByName("ESRI Shapefile")
        self.outShp = "classes/geocodifica/shp/" + self.paese + ".shp"
        self.area = area
        self.schema = schema
        self.tabella_pesi = tabella_pesi
        self.tabella_pop_stat = tabella_pop_stat
        self.tabella_cicloni = tabella_cicloni
        try:
            self.conn = psycopg2.connect("dbname=sparc user=postgres")
        except Exception as e:
            # NOTE(review): if the connection fails, self.conn stays unset
            # and the cursor() call below raises AttributeError anyway.
            print e.message
        self.cur = self.conn.cursor()

    def lista_admin0(self):
        """Return the sorted, de-duplicated tuple of ADM0 (country) names
        found in self.layer."""
        numFeatures = self.layer.GetFeatureCount()
        lista_stati = []
        for featureNum in range(numFeatures):
            feature = self.layer.GetFeature(featureNum)
            nome_paese = feature.GetField(self.campo_nome_paese)
            lista_stati.append(nome_paese)
        # Order-preserving O(n) de-duplication idiom.
        seen = set()
        seen_add = seen.add
        lista_pulita = [x for x in lista_stati if not (x in seen or seen_add(x))]
        lista_admin0 = sorted(lista_pulita)
        return tuple(lista_admin0)

    def lista_admin2(self, country):
        """Collect the ADM2 names of *country* from self.layer.

        Returns (display_names, mapping) where mapping is
        {adm2_code: {'name_orig': display, 'name_clean': sanitized}} —
        the sanitized name has dashes/spaces/slashes/apostrophes removed
        and is ASCII-folded for use in file/table names.
        """
        country_capitalized = country.capitalize()
        self.layer.SetAttributeFilter(self.campo_nome_paese + " = '" +
                                      country_capitalized + "'")
        listone = {}
        lista_iso = []
        lista_clean = []
        lista_admin2 = []
        for feature in self.layer:
            cod_admin = feature.GetField(self.campo_iso_admin)
            nome_zozzo = feature.GetField(self.campo_nome_admin)
            unicode_zozzo = nome_zozzo.decode('utf-8')
            # Display form: only unicode-normalized.
            nome_per_combo = unicodedata.normalize('NFKD', unicode_zozzo)
            # Sanitized form: strip characters unsafe for identifiers/paths.
            no_dash = re.sub('-', '_', nome_zozzo)
            no_space = re.sub(' ', '', no_dash)
            no_slash = re.sub('/', '_', no_space)
            no_apice = re.sub('\'', '', no_slash)
            no_bad_char = re.sub(r'-/\([^)]*\)', '', no_apice)
            unicode_pulito = no_bad_char.decode('utf-8')
            nome_pulito = unicodedata.normalize('NFKD', unicode_pulito).encode(
                'ascii', 'ignore')
            lista_iso.append(cod_admin)
            lista_clean.append(nome_pulito)
            lista_admin2.append(nome_per_combo)
        for i in range(len(lista_iso)):
            listone[lista_iso[i]] = {'name_orig': lista_admin2[i],
                                     'name_clean': lista_clean[i]}
        return lista_admin2, listone

    def creazione_struttura(self, admin_global):
        """Create the per-country/per-admin project directory tree.

        Check in data structures exists and in case not create the directory
        named after the country and all the directories under
        UtilitieSparc.proj_dir. Ends with the cwd inside the admin directory.
        """
        os.chdir(UtilitieSparc.proj_dir)
        country_low = str(self.paese).lower()
        if os.path.exists(country_low):
            os.chdir(UtilitieSparc.proj_dir + country_low)
            admin_low = str(self.admin).lower()
            if os.path.exists(admin_low):
                pass
            else:
                os.mkdir(admin_low)
            os.chdir(UtilitieSparc.proj_dir + country_low + "/" +
                     admin_global + "/")
            os.mkdir("out")
        else:
            os.chdir(UtilitieSparc.proj_dir)
            os.mkdir(country_low)
            os.chdir(UtilitieSparc.proj_dir + country_low)
            admin_low = str(self.admin).lower()
            if os.path.exists(admin_low):
                pass
            else:
                os.mkdir(admin_low)
            os.chdir(UtilitieSparc.proj_dir + country_low + "/" +
                     admin_global + "/")
            os.mkdir("out")
        return "Project created......\n"

    def create_template(self, dir_template):
        """Create the dbf template table 'monthly_values' in *dir_template*
        and return its filename."""
        os.chdir(dir_template)
        tabella = dbf.Table('monthly_values', 'month C(10);mean N(5,1)')
        return tabella.filename

    def geolocate_accidents(self):
        """Geocode every accident location of self.paese from the
        historical CSV, writing hits and misses to per-country text files.

        CSV columns used: 2 = country, 3 = comma-separated location list,
        6 = killed, 7 = affected, 9 = event id. Does nothing when the
        success file already exists.
        """
        accidents = {}
        with open(self.historical_table, 'rb') as csvfile:
            luoghi_splittati = csv.reader(csvfile, delimiter=",",
                                          quotechar='"')
            for row in luoghi_splittati:
                if row[2] == self.paese:
                    id_incidente = str(row[9])
                    accidents[id_incidente] = {}
                    accidents[id_incidente]['paese'] = str(row[2])
                    accidents[id_incidente]['killed'] = str(row[6])
                    accidents[id_incidente]['affected'] = str(row[7])
                    accidents[id_incidente]['locations'] = {}
                    gruppo = str(row[3]).split(",")
                    quante_locations = len(gruppo)
                    for i in range(0, quante_locations):
                        accidents[id_incidente]['locations'][i] = gruppo[i].strip()
        totali = 0
        successo = 0
        insuccesso = 0
        geocoding_success_file = "classes/geocodifica/text/" + self.paese + ".txt"
        geocoding_fail_file = "classes/geocodifica/text/" + self.paese + "_fail.txt"
        # Control if accidents have been geocoded already
        if os.path.exists(geocoding_success_file):
            print "Geocoded already!!"
            pass
        else:
            geocoding_success = open(geocoding_success_file, "wb+")
            geocoding_fail = open(geocoding_fail_file, "wb+")
            geocoding_success.write("id,lat,lon\n")
            geocoding_fail.write("id,lat,lon\n")
            try:
                for incidente in accidents.iteritems():
                    for location_non_geocoded in incidente[1]['locations'].iteritems():
                        totali += 1
                        posto_attivo = location_non_geocoded[1]
                        if posto_attivo != 'NoData':
                            try:
                                print("Geocoding " + posto_attivo)
                                #location_geocoded = self.geolocator.geocode(posto_attivo, timeout=30)
                                location_geocoded = self.geolocator_geonames.geocode(posto_attivo, timeout=30)
                                if location_geocoded:
                                    # NOTE(review): writes lon,lat but the
                                    # header says id,lat,lon — confirm order.
                                    scrittura = posto_attivo + "," + str(location_geocoded.longitude) + "," + str(location_geocoded.latitude) + "\n"
                                    geocoding_success.write(scrittura)
                                    successo += 1
                                else:
                                    geocoding_fail.write(posto_attivo + "," + str(0) + "," + str(0) + "\n")
                                    insuccesso += 1
                            except ValueError as e:
                                print e.message
                print "Total of %s events with %s successful %s unsuccessful and %d NULL" % (
                    str(totali), str(successo), str(insuccesso),
                    (totali - successo - insuccesso))
                perc = float(successo) / float(totali) * 100.0
                print "Percentage %.2f of success" % perc
            except:
                # NOTE(review): bare except also hides programming errors.
                print "No response from geocoding server"
                pass

    def create_validated_coords(self):
        """Filter the geocoded points to those inside the country's bounding
        polygon and write them to the validated CSV."""

        def calc_poligono_controllo():
            # Build the control polygon from the shapefile bounding box.
            # NOTE(review): uses module-level globals to share state with
            # punti_dentro_poligono_di_controllo — not thread-safe.
            poligono = sf.bbox
            global poligono_controllo
            poligono_controllo = ((poligono[2], poligono[1]),
                                  (poligono[2], poligono[3]),
                                  (poligono[0], poligono[3]),
                                  (poligono[0], poligono[1]))
            global n
            n = len(poligono_controllo)

        def punti_dentro_poligono_di_controllo(x, y):
            # Classic ray-casting point-in-polygon test.
            inside = False
            p1x, p1y = poligono_controllo[0]
            for i in range(n + 1):
                p2x, p2y = poligono_controllo[i % n]
                if y > min(p1y, p2y):
                    if y <= max(p1y, p2y):
                        if x <= max(p1x, p2x):
                            if p1y != p2y:
                                xinters = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
                            if p1x == p2x or x <= xinters:
                                inside = not inside
                p1x, p1y = p2x, p2y
            return inside

        def extract_country_shp():
            # Extract the country's features from the GAUL master shapefile
            # into a per-country shapefile.
            # Get the input Layer
            inShapefile = "C:/data/input_data/gaul_2014_2008_2/gaul_wfp.shp"
            inDriver = ogr.GetDriverByName("ESRI Shapefile")
            inDataSource = inDriver.Open(inShapefile, 0)
            inLayer = inDataSource.GetLayer()
            inLayer.SetAttributeFilter("ADM0_NAME = '" + self.paese + "'")
            # Create the output LayerS
            outShapefile = "C:/data/input_data/countries/" + self.paese + ".shp"
            outDriver = ogr.GetDriverByName("ESRI Shapefile")
            # Remove output shapefile if it already exists
            if os.path.exists(outShapefile):
                outDriver.DeleteDataSource(outShapefile)
            # Create the output shapefile
            outDataSource = outDriver.CreateDataSource(outShapefile)
            out_lyr_name = os.path.splitext(os.path.split(outShapefile)[1])[0]
            outLayer = outDataSource.CreateLayer(out_lyr_name,
                                                geom_type=ogr.wkbMultiPolygon)
            # Add input Layer Fields to the output Layer if it is the one we want
            inLayerDefn = inLayer.GetLayerDefn()
            for i in range(0, inLayerDefn.GetFieldCount()):
                fieldDefn = inLayerDefn.GetFieldDefn(i)
                outLayer.CreateField(fieldDefn)
            # Get the output Layer's Feature Definition
            outLayerDefn = outLayer.GetLayerDefn()
            # Add features to the ouput Layer
            for inFeature in inLayer:
                # Create output Feature
                outFeature = ogr.Feature(outLayerDefn)
                # Add field values from input Layer
                for i in range(0, outLayerDefn.GetFieldCount()):
                    fieldDefn = outLayerDefn.GetFieldDefn(i)
                    outFeature.SetField(
                        outLayerDefn.GetFieldDefn(i).GetNameRef(),
                        inFeature.GetField(i))
                # Set geometry as centroid
                geom = inFeature.GetGeometryRef()
                outFeature.SetGeometry(geom.Clone())
                # Add new feature to output Layer
                outLayer.CreateFeature(outFeature)
            # Close DataSources
            inDataSource.Destroy()
            outDataSource.Destroy()

        dentro = 0
        fuori = 0
        coords_check_file_in = "classes/geocodifica/text/" + self.paese + ".txt"
        coords_validated_file_out = str('classes/geocodifica/csv/' +
                                        str(self.paese) + '.csv')
        # Reuse the country shapefile when present, otherwise extract it.
        if os.path.exists("C:/data/input_data/countries/" + self.paese + ".shp"):
            sf = shapefile.Reader("C:/data/input_data/countries/" +
                                  str(self.paese).lower() + ".shp")
            calc_poligono_controllo()
        else:
            extract_country_shp()
            sf = shapefile.Reader("C:/data/input_data/countries/" +
                                  str(self.paese).lower() + ".shp")
            calc_poligono_controllo()
        with open(coords_check_file_in) as csvfile_in:
            lettore_comma = csv.reader(csvfile_in, delimiter=",",
                                       quotechar='"')
            next(lettore_comma)  # skip the header row
            with open(coords_validated_file_out, 'wb') as csvfile_out:
                scrittore = csv.writer(csvfile_out, delimiter=',',
                                       quotechar='"',
                                       quoting=csv.QUOTE_MINIMAL)
                intestazioni = "id", "lat", "lon"
                scrittore.writerow(intestazioni)
                for row in lettore_comma:
                    if (punti_dentro_poligono_di_controllo(float(row[1]),
                                                           float(row[2]))):
                        stringa = str(row[0]), str(row[1]), str(row[2])
                        scrittore.writerow(stringa)
                        dentro += 1
                    else:
                        fuori += 1
                # NOTE: redundant — the with-blocks already close the files.
                csvfile_out.close()
            csvfile_in.close()
        #print "dentro %d" % dentro, "fuori %d" % fuori

    def creazione_file_shp(self):
        """Write the validated CSV points into the point shapefile
        self.outShp (fields: ID, location)."""
        # Remove output shapefile if it already exists
        if os.path.exists(self.outShp):
            self.outDriver.DeleteDataSource(self.outShp)
        #Set up blank lists for data
        x, y, nomeloc = [], [], []
        #read data from csv file and store in lists
        with open('classes/geocodifica/csv/' + str(self.paese) + '.csv',
                  'rb') as csvfile:
            # NOTE(review): the file is comma-separated but read with ';',
            # so each whole line lands in row[0] and is re-split below.
            r = csv.reader(csvfile, delimiter=';')
            for i, row in enumerate(r):
                if i > 0:  #skip header
                    divisa = row[0].split(",")
                    #print divisa[0]
                    nomeloc.append(divisa[0])
                    x.append(float(divisa[1]))
                    y.append(float(divisa[2]))
                    #date.append(''.join(row[1].split('-')))#formats the date correctly
                    #target.append(row[2])
        #Set up shapefile writer and create empty fields
        w = shapefile.Writer(shapefile.POINT)
        w.autoBalance = 1  #ensures gemoetry and attributes match
        w.field('ID', 'N')
        w.field('location', 'C', 50)
        # w.field('Date','D')
        # w.field('Target','C',50)
        # w.field('ID','N')
        #loop through the data and write the shapefile
        for j, k in enumerate(x):
            w.point(k, y[j])  #write the geometry
            # NOTE(review): stores the x coordinate in the numeric ID field;
            # looks like `j` was intended — confirm.
            w.record(k, nomeloc[j])  #write the attributes
        #Save shapefile
        w.save(self.outShp)

    def cancella_tabella(self):
        """DROP the weights table (CASCADE); return a status or the
        PostgreSQL error string."""
        comando_delete_table = "DROP TABLE " + self.schema + "." + self.tabella_pesi + " CASCADE;"
        try:
            self.cur.execute(comando_delete_table)
            return "Table deleted"
        except psycopg2.Error as delErrore:
            errore_delete_tabella = delErrore.pgerror
            return errore_delete_tabella

    def crea_tabella(self):
        """CREATE the weights table (id, month, weight); return a status
        string or (error_description, error_code) on failure."""
        try:
            comando = "CREATE TABLE " + self.schema + "." + self.tabella_pesi + " (id serial PRIMARY KEY,month integer,weight double precision);"
            print comando
            self.cur.execute(comando)
            #comando = "CREATE TABLE " + self.schema + "." + self.tabella_pesi + " (id serial PRIMARY KEY,month integer,weight double precision);"
            #self.cur.execute(comando)
            return "Table created"
        except psycopg2.Error as createErrore:
            descrizione_errore = createErrore.pgerror
            codice_errore = createErrore.pgcode
            return descrizione_errore, codice_errore
        #pass

    def updata_tabella(self):
        """Placeholder: intended to INSERT month/weight pairs (see the
        commented draft below). Not implemented yet."""
        pass
        # for chiave, valore in val_prec.items():
        #     inserimento = "INSERT INTO " + self.schema + "." + self.nome_tabella + " (month, weight) VALUES (" + str(chiave) + "," + str(valore) + ");"
        #     self.cur.execute(inserimento)

    def leggi_tabella(self):
        """Read all ogc_fid values from the cyclones table.

        NOTE(review): opens its own connection to the *sparc_old* database
        instead of using self.conn (dbname=sparc) — confirm intentional.
        """
        conn_locale = psycopg2.connect("dbname=sparc_old user=postgres")
        cur_locale = conn_locale.cursor()
        comando_leggi_table = "SELECT ogc_fid FROM " + self.schema + "." + self.tabella_cicloni + ";"
        try:
            cur_locale.execute(comando_leggi_table)
            records = cur_locale.fetchall()
            return records
        except psycopg2.Error as delErrore:
            errore_delete_tabella = delErrore.pgerror
            return errore_delete_tabella

    def salva_cambi(self):
        """Close the cursor, commit and close the connection; return a
        status string."""
        try:
            self.cur.close()
            self.conn.commit()
            self.conn.close()
            return "Changes saved"
        except:
            return "Problem in saving"
class GeoLocate(pygeoip.GeoIP):
    """pygeoip.GeoIP subclass bridged to geopy's GeoNames geocoder.

    Resolves the user's own public IP to a city/country through the GeoIP
    database and, when a GeoNames identity is supplied, turns place names
    into coordinates via geopy.
    """

    def __init__(self, filename, geo_identity=None):
        """Initialize the GeoIP database and, optionally, GeoNames.

        Keyword arguments:
        filename -- string representation of the file containing the geodata
        geo_identity -- string representation of the identity in geonames;
                        when given, self.gnames is created
        """
        pygeoip.GeoIP.__init__(self, filename)
        self._setup_segments()
        if geo_identity:
            self.gnames = GeoNames(None, geo_identity)

    def getOwnAddress(self):
        """Resolve own IP and store address, country, city, lat and lon."""
        raw = self._getIPAddress()
        # str() of the bytes gives "b'...'"; the slice strips that wrapper.
        self.address = str(raw)[2:-1]
        resolved = self.coordsFromAddr(*self._locateAddress())
        (self.country, self.city, self.lat, self.lon) = resolved

    def _getIPAddress(self):
        """Return own public IP address (bytes) fetched from a web service."""
        response = request.urlopen("http://bot.whatismyipaddress.com/")
        return response.read()

    def _locateAddress(self):
        """Return (city, country_name) for the stored own IP address."""
        city = self.record_by_addr(self.address)['city']
        country = self.record_by_addr(self.address)['country_name']
        return (city, country)

    def coordsFromAddr(self, cityname, countryname):
        """Geocode "city, country" through GeoNames.

        Returns [countryname, cityname, lat, lon]; lat and lon default to
        None when the lookup fails (TypeError or any geopy error).
        """
        query = str(cityname) + ", " + str(countryname)
        try:
            place, (lat, lon) = self.gnames.geocode(query)
        except (TypeError, exc.GeopyError):
            return [countryname, cityname, None, None]
        return [countryname, cityname, lat, lon]

    def ownCoordsFromAddr(self, cityname, countryname):
        """Geocode the own "city, country" and store everything on self.

        Returns True when coordinates were found; False otherwise, with
        self.lat/self.lon set to None.
        """
        self.city = cityname
        self.country = countryname
        query = str(cityname) + ", " + str(countryname)
        try:
            place, (lat, lon) = self.gnames.geocode(query)
        except (TypeError, exc.GeopyError):
            self.lat = None
            self.lon = None
            return False
        self.lat = lat
        self.lon = lon
        return True

    def lookForDup(self, location_list, location, treshold):
        """Test whether *location* lies farther than *treshold* miles from
        every entry in *location_list*.

        Returns (False, index) for the first entry within the threshold,
        else (True, None); distance failures also yield (True, None).
        Raises ValueError when no GeoNames client is configured.
        """
        if not self.gnames:
            raise ValueError
        try:
            here = Point(location[2], location[3])
            measure = distance.distance
            for idx, known in enumerate(location_list):
                there = Point(known[2], known[3])
                if float(measure(here, there).miles) < treshold:
                    return False, idx
            return True, None
        except ValueError:
            return True, None
class GeocodingEmDat(object):
    """Geocodes EM-DAT flood events for one country and turns the validated
    coordinates into a point shapefile and a kernel-density risk map.

    Pipeline: geolocate_accidents -> create_validated_coords ->
    creazione_file_shp -> add_prj -> create_heat_map (-> plot_mappa).
    """

    def __init__(self, paese):
        """paese -- country name, used to filter the CSV and name outputs."""
        self.paese = paese
        self.historical_table = "c:/data/tools/sparc/input_data/historical_data/floods - refine.csv"
        self.geolocator = Nominatim()
        # GeoNames biased towards the current country to reduce false hits.
        self.geolocator_geonames = GeoNames(country_bias=self.paese,
                                            username='******', timeout=1)
        self.outDriver = ogr.GetDriverByName("ESRI Shapefile")
        self.countries_shp_location = os.getcwd() + '/input_data/countries'
        self.outShp = os.getcwd() + "/input_data/geocoded/shp/" + self.paese + ".shp"
        self.events_location = os.getcwd() + '/input_data/geocoded/shp/'
        self.risk_map_location = os.getcwd() + '/input_data/geocoded/risk_map/'

    def geolocate_accidents(self):
        """Geocode every accident location of self.paese from the
        historical CSV, writing hits and misses to per-country text files.

        CSV columns used: 2 = country, 3 = comma-separated location list,
        6 = killed, 7 = affected, 9 = event id. Returns "Geocoded
        already!!" when the success file already exists.
        """
        accidents = {}
        with open(self.historical_table, 'rb') as csvfile:
            luoghi_splittati = csv.reader(csvfile, delimiter=",",
                                          quotechar='"')
            for row in luoghi_splittati:
                if row[2] == self.paese:
                    id_incidente = str(row[9])
                    accidents[id_incidente] = {}
                    accidents[id_incidente]['paese'] = str(row[2])
                    accidents[id_incidente]['killed'] = str(row[6])
                    accidents[id_incidente]['affected'] = str(row[7])
                    accidents[id_incidente]['locations'] = {}
                    gruppo = str(row[3]).split(",")
                    quante_locations = len(gruppo)
                    for i in range(0, quante_locations):
                        accidents[id_incidente]['locations'][i] = gruppo[i].strip()
        totali = 0
        successo = 0
        insuccesso = 0
        geocoding_success_file = "C:/data/tools/sparc/input_data/geocoded/text/" + self.paese + ".txt"
        geocoding_fail_file = "C:/data/tools/sparc/input_data/geocoded/text/" + self.paese + "_fail.txt"
        # Control if accidents have been geocoded already
        if os.path.exists(geocoding_success_file):
            return "Geocoded already!!"
            pass  # unreachable: follows the return
        else:
            geocoding_success = open(geocoding_success_file, "wb+")
            geocoding_fail = open(geocoding_fail_file, "wb+")
            geocoding_success.write("id,lat,lon\n")
            geocoding_fail.write("id,lat,lon\n")
            try:
                for incidente in accidents.iteritems():
                    for location_non_geocoded in incidente[1]['locations'].iteritems():
                        totali += 1
                        posto_attivo = location_non_geocoded[1]
                        if posto_attivo != 'NoData':
                            try:
                                print("Geocoding " + posto_attivo)
                                #location_geocoded = self.geolocator.geocode(posto_attivo, timeout=30)
                                location_geocoded = self.geolocator_geonames.geocode(posto_attivo, timeout=30)
                                if location_geocoded:
                                    # NOTE(review): writes lon,lat but the
                                    # header says id,lat,lon — confirm order.
                                    scrittura = posto_attivo + "," + str(location_geocoded.longitude) + "," + str(location_geocoded.latitude) + "\n"
                                    geocoding_success.write(scrittura)
                                    successo += 1
                                else:
                                    geocoding_fail.write(posto_attivo + "," + str(0) + "," + str(0) + "\n")
                                    insuccesso += 1
                            except ValueError as e:
                                print e.message
                print "Total of %s events with %s successful %s unsuccessful and %d NULL" % (
                    str(totali), str(successo), str(insuccesso),
                    (totali - successo - insuccesso))
                perc = float(successo) / float(totali) * 100.0
                print "Percentage %.2f of success" % perc
            except:
                # NOTE(review): bare except also hides programming errors.
                print "No response from geocoding server"
                pass

    def create_validated_coords(self):
        """Filter the geocoded points to those inside the country's bounding
        polygon and write them to the validated CSV."""

        def calc_poligono_controllo():
            # Build the control polygon from the shapefile bounding box.
            # NOTE(review): uses module-level globals to share state with
            # punti_dentro_poligono_di_controllo — not thread-safe.
            poligono = sf.bbox
            global poligono_controllo
            poligono_controllo = ((poligono[2], poligono[1]),
                                  (poligono[2], poligono[3]),
                                  (poligono[0], poligono[3]),
                                  (poligono[0], poligono[1]))
            global n
            n = len(poligono_controllo)

        def punti_dentro_poligono_di_controllo(x, y):
            # Classic ray-casting point-in-polygon test.
            inside = False
            p1x, p1y = poligono_controllo[0]
            for i in range(n + 1):
                p2x, p2y = poligono_controllo[i % n]
                if y > min(p1y, p2y):
                    if y <= max(p1y, p2y):
                        if x <= max(p1x, p2x):
                            if p1y != p2y:
                                xinters = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
                            if p1x == p2x or x <= xinters:
                                inside = not inside
                p1x, p1y = p2x, p2y
            return inside

        def extract_country_shp():
            # Extract the country's features from the GAUL master shapefile
            # into a per-country shapefile.
            # Get the input Layer
            inShapefile = "input_data/gaul/gaul_wfp.shp"
            inDriver = ogr.GetDriverByName("ESRI Shapefile")
            inDataSource = inDriver.Open(inShapefile, 0)
            inLayer = inDataSource.GetLayer()
            inLayer.SetAttributeFilter("ADM0_NAME = '" + self.paese + "'")
            # Create the output LayerS
            outShapefile = "input_data/countries/" + self.paese + ".shp"
            outDriver = ogr.GetDriverByName("ESRI Shapefile")
            # Remove output shapefile if it already exists
            if os.path.exists(outShapefile):
                outDriver.DeleteDataSource(outShapefile)
            # Create the output shapefile
            outDataSource = outDriver.CreateDataSource(outShapefile)
            out_lyr_name = os.path.splitext(os.path.split(outShapefile)[1])[0]
            outLayer = outDataSource.CreateLayer(out_lyr_name,
                                                geom_type=ogr.wkbMultiPolygon)
            # Add input Layer Fields to the output Layer if it is the one we want
            inLayerDefn = inLayer.GetLayerDefn()
            for i in range(0, inLayerDefn.GetFieldCount()):
                fieldDefn = inLayerDefn.GetFieldDefn(i)
                outLayer.CreateField(fieldDefn)
            # Get the output Layer's Feature Definition
            outLayerDefn = outLayer.GetLayerDefn()
            # Add features to the ouput Layer
            for inFeature in inLayer:
                # Create output Feature
                outFeature = ogr.Feature(outLayerDefn)
                # Add field values from input Layer
                for i in range(0, outLayerDefn.GetFieldCount()):
                    fieldDefn = outLayerDefn.GetFieldDefn(i)
                    outFeature.SetField(
                        outLayerDefn.GetFieldDefn(i).GetNameRef(),
                        inFeature.GetField(i))
                # Set geometry as centroid
                geom = inFeature.GetGeometryRef()
                outFeature.SetGeometry(geom.Clone())
                # Add new feature to output Layer
                outLayer.CreateFeature(outFeature)
            # Close DataSources
            inDataSource.Destroy()
            outDataSource.Destroy()

        dentro = 0
        fuori = 0
        coords_check_file_in = "input_data/geocoded/text/" + self.paese + ".txt"
        coords_validated_file_out = str('input_data/geocoded/csv/' +
                                        str(self.paese) + '.csv')
        # Reuse the country shapefile when present, otherwise extract it.
        if os.path.exists("input_data/countries/" + self.paese + ".shp"):
            sf = shapefile.Reader("input_data/countries/" +
                                  str(self.paese).lower() + ".shp")
            calc_poligono_controllo()
        else:
            extract_country_shp()
            sf = shapefile.Reader("input_data/countries/" +
                                  str(self.paese).lower() + ".shp")
            calc_poligono_controllo()
        with open(coords_check_file_in) as csvfile_in:
            lettore_comma = csv.reader(csvfile_in, delimiter=",",
                                       quotechar='"')
            next(lettore_comma)  # skip the header row
            with open(coords_validated_file_out, 'wb') as csvfile_out:
                scrittore = csv.writer(csvfile_out, delimiter=',',
                                       quotechar='"',
                                       quoting=csv.QUOTE_MINIMAL)
                intestazioni = "id", "lat", "lon"
                scrittore.writerow(intestazioni)
                for row in lettore_comma:
                    if (punti_dentro_poligono_di_controllo(float(row[1]),
                                                           float(row[2]))):
                        stringa = str(row[0]), str(row[1]), str(row[2])
                        scrittore.writerow(stringa)
                        dentro += 1
                    else:
                        fuori += 1
                # NOTE: redundant — the with-blocks already close the files.
                csvfile_out.close()
            csvfile_in.close()
        #print "dentro %d" % dentro, "fuori %d" % fuori

    def creazione_file_shp(self):
        """Write the validated CSV points into the point shapefile
        self.outShp (fields: ID, location)."""
        # Remove output shapefile if it already exists
        if os.path.exists(self.outShp):
            self.outDriver.DeleteDataSource(self.outShp)
        #Set up blank lists for data
        x, y, nomeloc = [], [], []
        #read data from csv file and store in lists
        with open('input_data/geocoded/csv/' + str(self.paese) + '.csv',
                  'rb') as csvfile:
            # NOTE(review): the file is comma-separated but read with ';',
            # so each whole line lands in row[0] and is re-split below.
            r = csv.reader(csvfile, delimiter=';')
            for i, row in enumerate(r):
                if i > 0:  #skip header
                    divisa = row[0].split(",")
                    #print divisa[0]
                    nomeloc.append(divisa[0])
                    x.append(float(divisa[1]))
                    y.append(float(divisa[2]))
                    #date.append(''.join(row[1].split('-')))#formats the date correctly
                    #target.append(row[2])
        #Set up shapefile writer and create empty fields
        w = shapefile.Writer(shapefile.POINT)
        w.autoBalance = 1  #ensures gemoetry and attributes match
        w.field('ID', 'N')
        w.field('location', 'C', 50)
        # w.field('Date','D')
        # w.field('Target','C',50)
        # w.field('ID','N')
        #loop through the data and write the shapefile
        for j, k in enumerate(x):
            w.point(k, y[j])  #write the geometry
            # NOTE(review): stores the x coordinate in the numeric ID field;
            # looks like `j` was intended — confirm.
            w.record(k, nomeloc[j])  #write the attributes
        #Save shapefile
        w.save(self.outShp)

    def plot_mappa(self):
        """Display the country's risk-map GeoTIFF on a Mercator basemap."""

        def GetExtent(gt, cols, rows):
            # Corner coordinates from the GDAL geotransform.
            ext = []
            xarr = [0, cols]
            yarr = [0, rows]
            for px in xarr:
                for py in yarr:
                    x = gt[0] + (px * gt[1]) + (py * gt[2])
                    y = gt[3] + (px * gt[4]) + (py * gt[5])
                    ext.append([x, y])
                    #print x,y
                yarr.reverse()
            return ext

        pathToRaster = "input_data/geocoded/risk_map/" + self.paese + ".tif"
        # Heavy plotting/GIS imports kept local to this method.
        from mpl_toolkits.basemap import Basemap
        import matplotlib.pyplot as plt
        import numpy as np
        from osgeo import gdal
        raster = gdal.Open(pathToRaster, gdal.GA_ReadOnly)
        array = raster.GetRasterBand(1).ReadAsArray()
        # Mask the nodata value so it is not drawn.
        msk_array = np.ma.masked_equal(array, value=65535)
        # print 'Raster Projection:\n', raster.GetProjection()
        geotransform = raster.GetGeoTransform()
        cols = raster.RasterXSize
        rows = raster.RasterYSize
        ext = GetExtent(geotransform, cols, rows)
        #print ext[1][0], ext[1][1]
        #print ext[3][0], ext[3][1]
        #map = Basemap(projection='merc',llcrnrlat=-80, urcrnrlat=80, llcrnrlon=-180,urcrnrlon=180,lat_ts=20,resolution='c')
        # NOTE: `map` shadows the builtin; kept as in the original.
        map = Basemap(projection='merc', llcrnrlat=ext[1][1],
                      urcrnrlat=ext[3][1], llcrnrlon=ext[1][0],
                      urcrnrlon=ext[3][0], lat_ts=20, resolution='c')
        # Add some additional info to the map
        map.drawcoastlines(linewidth=1.3, color='white')
        #map.drawrivers(linewidth=.4, color='white')
        map.drawcountries(linewidth=.75, color='white')
        #datain = np.flipud(msk_array)
        datain = np.flipud(msk_array)
        map.imshow(datain)#,origin='lower',extent=[ext[1][0], ext[3][0],ext[1][1],ext[3][1]])
        plt.show()

    def add_prj(self):
        """Define the WGS84 projection on the country's events shapefile
        via arcpy."""
        env.workspace = self.events_location
        inData = self.paese + ".shp"
        print "Proietto " + inData
        try:
            coordinateSystem = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
            arcpy.DefineProjection_management(inData, coordinateSystem)
        except arcpy.ExecuteError:
            print arcpy.GetMessages(2)
            arcpy.AddError(arcpy.GetMessages(2))
        except Exception as e:
            print e.args[0]
            arcpy.AddError(e.args[0])

    def create_heat_map(self):
        """Build the kernel-density risk map GeoTIFF from the events
        shapefile via arcpy's Spatial Analyst."""
        # Local variables:
        event_file_shp = self.events_location + self.paese + ".shp"
        krn_map_file = self.risk_map_location + self.paese + ".tif"
        try:
            # Process: Kernel Density
            arcpy.gp.KernelDensity_sa(event_file_shp, "NONE", krn_map_file,
                                      "0.02", "", "SQUARE_MAP_UNITS")
        except arcpy.ExecuteError:
            print "Errore" + self.paese
            print arcpy.GetMessages(2)
            arcpy.AddError(arcpy.GetMessages(2))
        except Exception as e:
            print "Exception " + self.paese
            print e.args[0]
            arcpy.AddError(e.args[0])
def get_sunset(address, from_grid=True):
    """Get sunset quality for *address* and format it into a message.

    Logs into the Sunburst API with the SUNBURST_EMAIL / SUNBURST_PW
    environment variables, averages the quality over a grid of coordinates
    around the address (or a single point when from_grid is False), converts
    today's sunset time into the location's timezone via GeoNames, and
    returns a human-readable summary. Error conditions return a message
    string instead of raising.
    """
    # Load Sunburst API credentials
    EMAIL = os.getenv("SUNBURST_EMAIL")
    PASSWORD = os.getenv("SUNBURST_PW")
    url = "https://sunburst.sunsetwx.com/v1/login"
    # Get Sunburst API token via POST
    res = requests.post(url, auth=(EMAIL, PASSWORD))
    # res = requests.post(url, data=payload)
    # BUG FIX: the character class was [0-9a-xA-Z-], silently truncating any
    # token at a lowercase 'y' or 'z'; it now covers the whole alphabet.
    result = re.findall(r'token\":\"[0-9a-zA-Z-]*', res.text)
    token = "Bearer " + result[0][8:]  # drop the token":" prefix
    # Get sunset quality via Sunburst GET
    headers = {"Authorization": token}
    url = "https://sunburst.sunsetwx.com/v1/quality"
    # Return if invalid coords (-1 is address_to_coord's failure sentinel)
    coords = address_to_coord(address)
    if coords == -1:
        return "Invalid location. Please enter valid address."
    total = 0
    # Get coordinates and quality at each coord
    coords_list = []
    # If calculate quality from grid, false if calculate from single coord
    if from_grid:
        coords_list = generate_grid(coords)
        if len(coords_list) == 0:
            coords_list = [str(coords[0]) + "," + str(coords[1])]
    else:
        coords_list = [str(coords[0]) + "," + str(coords[1])]
    for coord in coords_list:
        data = {"geo": coord}
        res = requests.get(url, headers=headers, params=data)
        try:
            quality_percent = re.findall(r'quality_percent\":\d*\.\d*',
                                         res.text)[0][17:]
        except IndexError:
            # BUG FIX: was a bare `except:` (swallows SystemExit and real
            # bugs); only the missing-match IndexError is expected here.
            return "Too many Sunburst requests. Try again later."
        total += float(quality_percent)
    # Average the quality over all sampled coordinates.
    quality_percent = total / float(len(coords_list))
    quality = ""
    if quality_percent < 25:
        quality = "Poor"
    elif quality_percent < 50:
        quality = "Fair"
    elif quality_percent < 75:
        quality = "Good"
    else:
        quality = "Great"
    # Get today's sunset in local time
    sun = Sun(coords[0], coords[1])
    today_ss = sun.get_sunset_time()
    # Convert time zone (GeoNames resolves coords -> IANA timezone name)
    GEO_USERNAME = os.getenv("GEONAMES_USERNAME")
    geolocator = GeoNames(username=GEO_USERNAME)
    timezone = geolocator.reverse_timezone(coords)
    from_zone = tz.gettz("UTC")
    to_zone = tz.gettz(str(timezone))
    today_ss = today_ss.replace(tzinfo=from_zone)
    sunset_time = today_ss.astimezone(to_zone)
    # Get day of week
    # NOTE(review): uses the server's local date, not the target location's;
    # near midnight these can differ — confirm acceptable.
    day_list = [
        "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday",
        "Sunday"
    ]
    day = day_list[datetime.datetime.today().weekday()]
    # Create message
    # NOTE(review): %H:%M is 24-hour format yet the text appends "pm".
    message = "Quality: " + quality + " " + str(round(
        quality_percent, 2)) + "%\nSunset at {}pm".format(
            sunset_time.strftime("%H:%M")) + "\n\n" + day + " at " + address
    return message
feature['type'] = "Feature" data = dict() geometry = dict() properties = dict() data['location'] = this_location data['type'] = "Point" data['coordinates'] = [this_location_details[1], this_location_details[0]] feature['geometry'] = data feature_list.append(feature) features['features'] = feature_list with open("locations_full2.geojson", 'w+') as outfile: json.dump(features, outfile) #Main code geolocator = GeoNames(username="******") #replace with name of location file, or add in file input as arg f = open("Locations4") fail_list = list() # for recording a list of any place names that do not return a result location_dict = dict() #output_list = list() for line in f: unicode_line = unicode(line, "utf-8") unicode_line = unicode_line [3:-3] try: location=geolocator.geocode(unicode_line ) coords = ((location.latitude, location.longitude)) location_dict[line] = coords print unicode_line print coords except: