def test_sdk(self):
    res = {
        'lastPosition': {
            'type': 'Feature',
            'geometry': {'type': 'Point', 'coordinates': [9.65457, 49.96119, 21]},
            'properties': {'updatedAt': '2021-03-29T05:16:10Z', 'heading': 126, 'type': 'Estimated'}},
        'preconditionning': {
            'airConditioning': {
                'updatedAt': '2021-04-01T16:17:01Z', 'status': 'Disabled',
                'programs': [
                    {'enabled': False, 'slot': 1, 'recurrence': 'Daily',
                     'start': 'PT21H40M', 'occurence': {'day': ['Sat']}}]}},
        'energy': [
            {'updatedAt': '2021-02-23T22:29:03Z', 'type': 'Fuel', 'level': 0},
            {'updatedAt': '2021-04-01T16:17:01Z', 'type': 'Electric', 'level': 70,
             'autonomy': 192,
             'charging': {'plugged': True, 'status': 'InProgress',
                          'remainingTime': 'PT0S', 'chargingRate': 20,
                          'chargingMode': 'Slow', 'nextDelayedTime': 'PT21H30M'}}],
        'createdAt': '2021-04-01T16:17:01Z',
        'battery': {'voltage': 99, 'current': 0, 'createdAt': '2021-04-01T16:17:01Z'},
        'kinetic': {'createdAt': '2021-03-29T05:16:10Z', 'moving': False},
        'privacy': {'createdAt': '2021-04-01T16:17:01Z', 'state': 'None'},
        'service': {'type': 'Electric', 'updatedAt': '2021-02-23T21:10:29Z'},
        '_links': {
            'self': {'href': 'https://api.groupe-psa.com/connectedcar/v4/user/vehicles/myid/status'},
            'vehicles': {'href': 'https://api.groupe-psa.com/connectedcar/v4/user/vehicles/myid'}},
        'timed.odometer': {'createdAt': None, 'mileage': 1107.1},
        'updatedAt': '2021-04-01T16:17:01Z'}
    api = ApiClient()
    status: psacc.models.status.Status = api._ApiClient__deserialize(res, "Status")
    # GeoJSON coordinates are (longitude, latitude, altitude); reverse the
    # first two to the (latitude, longitude) order reverse_geocode expects
    geocode_res = reverse_geocode.search([status.last_position.geometry.coordinates[:2][::-1]])[0]
    assert geocode_res["country_code"] == "DE"
    get_new_test_db()
    car = Car("XX", "vid", "Peugeot")
    car.status = status
    myp = MyPSACC.load_config(DATA_DIR + "config.json")
    myp.record_info(car)
    assert "features" in json.loads(Database.get_recorded_position())
    # electric should be first
    assert car.status.energy[0].type == 'Electric'
def metaNaming(self):
    """Use terminal commands to extract location from EXIF data and update
    destDir file names accordingly"""
    count = 0
    pic_file = pathlib.Path(self.destDir).glob('*.*')
    for file in pic_file:
        baseName, ext = os.path.splitext(file)
        if ext in FILETYPES:
            EXIFdata = self.getEXIF(file)
            # Get geolocation; EXIF fields arrive as strings, so convert
            # them to floats before the lookup
            location = ''
            lat = EXIFdata['latitude']
            lon = EXIFdata['longitude']
            if lon != '(null)' and lat != '(null)':
                result = reverse_geocode.search([(float(lat), float(lon))])
                location = '_' + result[0]['city'] + result[0]['country_code']
            # Rename file; glob() already yields paths under destDir, so
            # joining with destDir again would double the directory prefix
            newBaseName = f"{EXIFdata['date']}_{EXIFdata['time']}{location}"
            absPath = str(file)
            self.renameFile(absPath, newBaseName)
            count += 1
        else:
            print('file type not included: ', file)
    print(f"[*] {count} files renamed")
def _reverse_gcode(latitude, longitude):
    if latitude:
        cities = reverse_geocode.search([(latitude, longitude)])
        if cities:
            city = FacebookArchiveReader._sanitize(
                cities[0]['city'] + ", " + cities[0]['country'])
            return city
    return None
def getCountryFromCoord():
    latlon_country = []
    for i in range(len(df_pollution)):
        # search() takes a sequence of (lat, lon) pairs, so wrap the single
        # coordinate in a list
        coordinates = [(df_pollution['lat'][i], df_pollution['long'][i])]
        latlon_country.append(reverse_geocode.search(coordinates)[0]['country'])
    loading = False
    return latlon_country
def gps2country(gps):
    import reverse_geocode
    try:
        info = reverse_geocode.search([gps])
        return info[0]['country_code']
    except IndexError:
        return 'UNDEFINED'
def do_print(lat, lon):
    print(
        lat, lon,
        # search() expects a sequence of (lat, lon) pairs, not a flat
        # [lat, lon] list
        reverse_geocode.search([(lat, lon)])[0], ":",
        TestLocations.a.reverse(f"{lat},{lon}").address.split(', ')[-5:-4], ":",
        reverse_geocoder.search([(lat, lon)])[0])
def country(lat, lng):
    """ returns the country name of the given coordinates """
    coordinates = [(lat, lng)]
    info = reverse_geocode.search(coordinates)[0]
    return info["country"]
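# Minimal usage sketch for country(); the coordinates below are illustrative
# (central Paris) and the lookup runs offline against reverse_geocode's
# bundled city database.
print(country(48.8566, 2.3522))  # expected: 'France'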
def get_location():
    latitude = float(request.args.get('latitude'))
    longitude = float(request.args.get('longitude'))
    coordinates = [(latitude, longitude)]
    output = rg.search(coordinates)
    return '"' + output[0]['country'].lower() + '"'
def get_co2_per_kw(start: datetime, end: datetime, latitude, longitude):
    location = reverse_geocode.search([(latitude, longitude)])[0]
    country_code = location["country_code"]
    # todo implement other countries
    if country_code == 'FR':
        co2_per_kw = Ecomix.get_data_france(start, end)
    else:
        co2_per_kw = None
    return co2_per_kw
def main():
    args = parse_args()
    # newline='' is the Python 3 way to open a csv.writer target
    # (the original opened the file in 'wb', which is Python 2 style)
    with open(args.output, 'w', newline='') as csv_file:
        fieldnames = [
            'id', 'name', 'name:en', 'type', 'int_name', 'old_name',
            'old_name:en', 'country', 'city', 'lat', 'long', 'website',
            'date_added', 'description'
        ]
        csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        csv_writer.writeheader()
        num_rows = 0
        entry = create_entry()
        for event, elem in ET.iterparse(args.input, events=("start", "end")):
            if event == 'start':
                if elem.tag == 'node':
                    if 'id' in elem.attrib:
                        entry['id'] = elem.attrib['id']
                    if 'lat' in elem.attrib:
                        entry['lat'] = elem.attrib['lat']
                    if 'lon' in elem.attrib:
                        entry['long'] = elem.attrib['lon']
                    if 'timestamp' in elem.attrib:
                        entry['date_added'] = elem.attrib['timestamp']
                    coords = [(float(elem.attrib['lat']), float(elem.attrib['lon']))]
                    location = reverse_geocode.search(coords)[0]
                    entry['country'] = location['country']
                    entry['city'] = location['city']
            elif event == 'end':
                if elem.tag == 'tag':
                    if 'k' in elem.attrib and elem.attrib['k'] == 'name':
                        entry['name'] = elem.attrib['v']
                    if 'k' in elem.attrib and elem.attrib['k'] == 'name:en':
                        entry['name:en'] = elem.attrib['v']
                    if 'k' in elem.attrib and is_type_tag(elem.attrib['k']):
                        entry['type'] = elem.attrib['v']
                    if 'k' in elem.attrib and elem.attrib['k'] == 'int_name':
                        entry['int_name'] = elem.attrib['v']
                    if 'k' in elem.attrib and elem.attrib['k'] == 'old_name':
                        entry['old_name'] = elem.attrib['v']
                    if 'k' in elem.attrib and elem.attrib['k'] == 'old_name:en':
                        entry['old_name:en'] = elem.attrib['v']
                    # if 'k' in elem.attrib and elem.attrib['k'] == 'addr:country': entry['country'] = elem.attrib['v']
                    # if 'k' in elem.attrib and elem.attrib['k'] == 'addr:city': entry['city'] = elem.attrib['v']
                    if 'k' in elem.attrib and elem.attrib['k'] == 'website':
                        entry['website'] = elem.attrib['v']
                    if 'k' in elem.attrib and elem.attrib['k'] == 'description':
                        entry['description'] = elem.attrib['v']
                elif elem.tag == 'node':
                    # add to csv
                    csv_writer.writerow(entry)
                    num_rows += 1
                    entry = create_entry()
    print('wrote {} rows to {}'.format(num_rows, args.output))
def extract_data_from_cord(self):
    temp = []
    # iterate rows, not column labels (assuming a pandas-style frame)
    for _, x in self.aff.iterrows():
        if x["latitude"]:
            temp.append(reverse_geocode.search([(x["latitude"], x["longitude"])])[0])
        else:
            temp.append({})
    self.aff["Temp"] = temp
    # apply row-wise; the original omitted the axis=1 argument
    self.aff["Country"] = self.aff.apply(
        lambda x: x["Country"]
        if x["Country"] != "" and x["Country"] is not None
        else x["Temp"]["country"] if x["Temp"] != {} and 'country' in x["Temp"]
        else "", axis=1)
    # the original tested for 'country' here, but the value read is the city
    self.aff["City"] = self.aff.apply(
        lambda x: x["Temp"]["city"] if x["Temp"] != {} and 'city' in x["Temp"]
        else "", axis=1)
    self.aff = self.aff.remove_columns(["Temp"])
def set_user_location(request):
    lat = request.POST['lat']
    lon = request.POST['lon']
    # POST values are strings; search() needs numeric (lat, lon) pairs
    coordinates = [(float(lat), float(lon))]
    result = rg.search(coordinates)
    # the original recursively called set_user_location() here; a model
    # instance is what the following assignments expect (UserLocation is
    # an assumed name)
    user_loc = UserLocation()
    user_loc.lat = lat
    user_loc.lon = lon
    user_loc.city = result[0]['city']
    user_loc.country = result[0]['country']
    user_loc.date = date.today()
def update_df(cord, df, colum):
    result = reverse_geocode.search(cord)
    country = result[0]['country']
    ab = df.index[df['COUNTRY'] == country]
    if ab.size > 0:
        # print("Country name is", country, "index is", ab)
        df.at[ab[0], colum] += 1
    else:
        print("This location", country, "does not have a country name")
    return df
def importMonitoringSample(self, monitoringSampleFileName):
    # Monitoring samples file name
    fileName = self.rootPath + "inputFiles/BikeSensor/" + monitoringSampleFileName
    # Monitoring samples list
    monitoringSamples = list()
    # Max of possible variables in monitoring system
    maxMonitoringData = 5
    # Identification sample data number
    nSampleIDdata = 3
    with open(fileName) as infile:
        sampleData = csv.reader(infile, delimiter='\t')
        # Variable code
        dataTypes = list()
        nSample = 1
        # Gets device ID and date from filename
        deviceID = monitoringSampleFileName[0:monitoringSampleFileName.find("_")]
        date = monitoringSampleFileName[(monitoringSampleFileName.find("_") + 1):len(monitoringSampleFileName)]
        date = date.replace(".csv", "")
        date = date.replace("-", "/")
        for sample in sampleData:
            # First read, variables codes
            if len(dataTypes) == 0:
                dataTypes = sample.copy()
                dataTypes = [int(type) for type in dataTypes]
                print(" > Variables types: " + str(dataTypes))
            # Samples data
            else:
                # Gets city by GPS coordinate; the dummy (0, 0) pair pads the
                # query so search() always receives more than one coordinate
                coordinate = [float(sample[1]), float(sample[2])], [0, 0]
                reverseCoordinate = reverse_geocode.search(coordinate)[0]
                city = reverseCoordinate['city'] + "-" + reverseCoordinate['country_code']
                city = city.replace(' ', '_')
                # Creates a data list with each variable on its position code
                auxData = sample[nSampleIDdata:maxMonitoringData + nSampleIDdata]
                data = [None] * maxMonitoringData
                dataPosition = 0
                for index in dataTypes:
                    data[int(index) - 1] = float(auxData[dataPosition])
                    dataPosition += 1
                print(" > Sample " + str(nSample) + ": " + sample[0])
                print(" - Coordinate: " + str(coordinate[0]))
                print(" - City: " + city)
                print(" - Data: " + str(data))
                monitoringSamples.append(MonitoringSample(deviceID, date, sample[0], city, coordinate[0], data))
                nSample += 1
    return monitoringSamples
def on_handleLatLng(sid, data):
    # wrap the single (lat, lng) pair in the sequence search() expects
    coordinates = [(data.get('lat'), data.get('lng'))]
    location_info = reverse_geocode.search(coordinates)[0]
    country = location_info.get('country')
    try:
        country_info = CountryInfo(country).info()
    except Exception:
        country_info = 'Unknown Country'
    SIO.emit('country_info', {
        'country_info': country_info,
        'location_info': location_info
    }, room=sid)
def get_servers(country: str, city: str = None):
    servers = requests.request("GET", NORD_API_BASE + "/server")
    filtered = [
        srv for srv in servers.json()
        if srv['country'].lower() == country.lower()
    ]
    for srv in filtered:
        srv["city"] = reverse_geocode.search([
            (float(srv["location"]["lat"]), float(srv["location"]["long"]))
        ])[0]["city"]
    # filter in a second pass: the original removed items from the list
    # while iterating over it, which skips elements
    if city is not None:
        filtered = [srv for srv in filtered if srv["city"] == city]
    return filtered
def get_country(latitude, longitude, country_code_default):
    try:
        location = reverse_geocode.search([(latitude, longitude)])[0]
        country_code = location["country_code"]
        return country_code
    except (UnicodeDecodeError, IndexError):
        logger.error("Can't find country for %s %s", latitude, longitude)
        # return None
        country_code = country_code_default
        logger.warning(
            "Using country of origin : %s (wrong co2 when traveling abroad)",
            country_code)
        return country_code
def save(self, *args, **kwargs):
    user = get_current_user()
    if user and not user.pk:
        user = None
    # guard the profile lookup: the original dereferenced user.profile even
    # after setting user to None
    user_profile = user.profile if user else None
    self.admin = user_profile
    if not self.online:
        # geom stores (lon, lat); reverse_geocode expects (lat, lon)
        switch_coordinates = [(self.geom[1], self.geom[0])]
        rev_res = reverse_geocode.search(switch_coordinates)
        if rev_res:
            self.country = rev_res[0]['country_code']
    super(Event, self).save(*args, **kwargs)
    if user_profile:
        self.participants.add(user_profile)
def get_co2_per_kw(start: datetime, end: datetime, latitude, longitude):
    try:
        location = reverse_geocode.search([(latitude, longitude)])[0]
        country_code = location["country_code"]
    except UnicodeDecodeError:
        logger.error("Can't find country for %s %s", latitude, longitude)
        country_code = None
    except IndexError:
        country_code = None
    # todo implement other countries
    if country_code == 'FR':
        co2_per_kw = Ecomix.get_data_france(start, end)
    else:
        co2_per_kw = None
    return co2_per_kw
def set_name_by_closestcity(df_all_generators, colname="name"):
    """
    Function to set the name column equal to the name of the closest city
    """
    # get cities name
    list_cities = rg.search([g.coords[0] for g in df_all_generators.geometry])

    # replace name
    df_all_generators.loc[:, colname] = [
        loc["city"] + "_" + str(id) + " - " + c_code
        for (loc, c_code, id) in zip(
            list_cities, df_all_generators.country, df_all_generators.index)
    ]

    return df_all_generators
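# Hedged usage sketch for set_name_by_closestcity, assuming a pandas frame
# with a shapely-point "geometry" column. The function forwards each point's
# coords unchanged to rg.search, which wants (lat, lon), so the points below
# are built as Point(lat, lon); the printed names depend on the city database.
import pandas as pd
import reverse_geocode as rg
from shapely.geometry import Point

df = pd.DataFrame({
    "country": ["DE", "FR"],
    "geometry": [Point(52.52, 13.40), Point(48.86, 2.35)],  # Berlin, Paris
})
print(set_name_by_closestcity(df)["name"].tolist())
# e.g. ['Mitte_0 - DE', 'Paris_1 - FR']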
def get_country_name(lat, lon):
    # Input:
    #   -- lat: location latitude
    #   -- lon: location longitude
    # Output:
    #   -- country name derived from the iso alpha-2 code
    # Convert latitude and longitude to an iso alpha-2 code; the coordinate
    # is duplicated so search() receives more than one point
    country_code = reverse_geocode.search(
        ((lat, lon), (lat, lon)))[0].get('country_code')
    if country_code == 'IM':
        country_code = 'GB'
    # Return country name
    return alpha2_to_name(country_code)
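# Usage sketch (alpha2_to_name comes from the surrounding module); the Isle
# of Man coordinates below resolve to 'IM', which the function folds into
# 'GB' before mapping the code to a name.
print(get_country_name(54.15, -4.48))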
def convertC2A():
    import time
    import reverse_geocode as rg
    with open("container.txt", "r") as file:
        # google_key = "AIzaSyAhga--pb8JBMGs0zAdbIQtSiHAYNOLKE0"
        latitude = float(file.readline())
        longitude = float(file.readline())
    result = rg.search([(latitude, longitude)])
    city = result[0]["city"]
    country = result[0]["country"]
    with open("container.txt", "w") as f:
        f.write(city)
        f.write("\n")
        f.write(country)
def main():
    country_data_file = 'country_vs_names.log'
    city_data_file = 'city_vs_names.log'
    with open('../data/geo_entity_data.json') as f:
        data = json.load(f)
    name_frequency = collections.defaultdict(int)
    for name in data:
        name_frequency[name] += len(data[name])
    place_frequency = collections.defaultdict(int)
    for name, name_data in data.items():
        if name_data:
            for data_item in name_data:
                data_item[1] = data_item[1].strip()
                place_frequency[data_item[1]] += 1
    sorted_place_frequency = sorted(place_frequency.items(),
                                    key=lambda item: item[1], reverse=True)
    country_frequency = collections.defaultdict(int)
    city_frequency = collections.defaultdict(int)
    geolocator = Nominatim(user_agent="CS_492")
    # from geopy.extra.rate_limiter import RateLimiter
    # geocode = RateLimiter(geolocator.geocode, min_delay_seconds=1)
    for place in sorted_place_frequency:
        location = geolocator.geocode(place[0])
        print(location)
        if location:
            coordinates = [(location.latitude, location.longitude)]
            geo_data = reverse_geocode.search(coordinates)
            country_frequency[geo_data[0]['country']] += place[1]
            city_frequency[geo_data[0]['city']] += place[1]
        time.sleep(1)
    # sort each tally before printing (the original printed
    # sorted_city_frequency before it was defined)
    sorted_country_frequency = sorted(country_frequency.items(),
                                      key=lambda item: item[1], reverse=True)
    print(sorted_country_frequency)
    sorted_city_frequency = sorted(city_frequency.items(),
                                   key=lambda item: item[1], reverse=True)
    print(sorted_city_frequency)
def extract_wiki_cordinates(self):
    wiki_pages = self.aff[(self.aff["latitude"] == None) | (self.aff["latitude"] == "")][['AffiliationId', 'WikiPage']]
    cords = get_wikipedia_cordinates_parallel(wiki_pages['WikiPage'], self._max_workers)
    res_geo = [reverse_geocode.search([line])[0] if line != {} else {} for line in cords]
    cords = [[str(line[0]), str(line[1])] if line != {} else [] for line in cords]
    wiki_pages["geo"] = res_geo
    wiki_pages["geo2"] = cords
    self.aff = self.aff.join(wiki_pages)
    self.aff = self.aff.fillna('geo', {})
    # apply row-wise (axis=1, assuming a pandas-style frame; the original
    # omitted the axis argument)
    self.aff["Country"] = self.aff.apply(
        lambda x: x["Country"]
        if x["Country"] != "" and x["Country"] is not None
        else x["geo"]["country"] if x["geo"] != {} and 'country' in x["geo"]
        else "", axis=1)
    self.aff["City"] = self.aff.apply(
        lambda x: x["City"]
        if x["City"] != "" and x["City"] is not None
        else x["geo"]["city"] if x["geo"] != {} and 'city' in x["geo"]
        else "", axis=1)
    self.aff["latitude"] = self.aff.apply(
        lambda x: x["geo2"][0]
        if (x["latitude"] == "" or x["latitude"] is None) and x["geo2"]
        else x["latitude"], axis=1)
    self.aff["longitude"] = self.aff.apply(
        lambda x: x["geo2"][1]
        if (x["longitude"] == "" or x["longitude"] is None) and x["geo2"]
        else x["longitude"], axis=1)
    self.aff = self.aff.remove_columns(["geo", "geo2"])
def process_chunk(df, chunk):
    # Analyze chunk and count country occurrences
    # Convert lat and long columns to a tuple of tuples
    try:
        coords = tuple(zip(chunk['latitude'], chunk['longitude']))
    except KeyError:
        coords = tuple(
            zip(chunk['estimated_latitude'], chunk['estimated_longitude']))
    results_rg = rg.search(coords)
    codes = [x.get('country_code') for x in results_rg]
    # Insert codes into a new chunk column
    chunk['codes'] = codes
    # Count number of occurrences
    # (Series.iteritems was removed in pandas 2.0; items() is equivalent)
    counts = chunk.codes.value_counts()
    for (code, count) in counts.items():
        df.loc[df['Alpha-2'] == code, "Count"] += int(count)
def mapClustersToCities(kmeans_centers):
    '''
    Input: kmeans_centers - center points for all clusters
    Output: dictionary with cluster city data
    '''
    # To get reverse geocodes
    import reverse_geocode

    # Create an empty dictionary for the cities
    dict_cities = {}

    # Geocode the cluster centers to city and country names
    geocode_data = reverse_geocode.search(kmeans_centers)

    # Iterate over all centers
    for i, (center, (lat, lng)) in enumerate(zip(geocode_data, kmeans_centers)):
        # Get the data
        city = center['city'].replace('/', '_')
        code = center['country_code']
        country = center['country']
        # Create city-country name
        name = ', '.join([city, country])
        # Add the data to the dictionary
        dict_cities[i] = {
            'Name': name,
            'Code': code,
            'City': city,
            'Country': country,
            'Lat': lat,
            'Lng': lng,
            'Center': i
        }
    return dict_cities
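# Usage sketch: two cluster centers as (lat, lon) pairs; the exact city and
# country strings depend on reverse_geocode's bundled database.
kmeans_centers = [(40.7128, -74.0060), (51.5074, -0.1278)]
clusters = mapClustersToCities(kmeans_centers)
print(clusters[0]['Name'])  # e.g. 'New York City, United States'
print(clusters[1]['Name'])  # e.g. 'London, United Kingdom'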
def set_country(self):
    """
    Compute country (and associated alpha) from current internal position

    Warning: This function can take some time to find country info on a slow
    device like a 1st generation raspi (~15secs)
    """
    # get position
    position = self._get_config_field("position")
    if not position["latitude"] and not position["longitude"]:
        self.logger.debug(
            "Unable to set country from unspecified position (%s)", position)
        return

    # get country from position
    country = {"country": None, "alpha2": None}
    try:
        # search country
        coordinates = ((position["latitude"], position["longitude"]), )  # need a tuple
        geo = reverse_geocode.search(coordinates)
        self.logger.debug("Found country info from position %s: %s",
                          position, geo)
        if (geo and len(geo) > 0 and "country_code" in geo[0]
                and "country" in geo[0]):
            country["alpha2"] = geo[0]["country_code"]
            country["country"] = geo[0]["country"]

        # save new country
        if not self._set_config_field("country", country):
            raise CommandError("Unable to save country")

        # send event
        self.country_update_event.send(params=country)
    except CommandError:
        raise
    except Exception:
        self.logger.exception("Unable to find country for position %s:",
                              position)
def get_target_content(self, result_set):
    total_list = []
    for row in result_set:
        year, month = row[0].split('-')
        dt = datetime.datetime(int(year), int(month), 1, 0, 0, 0)
        timestamp = int((time.mktime(dt.timetuple())) * 1000)
        # the fixed (33.0, 65.0) pair pads the query so search() always
        # receives more than one coordinate
        coordinates = (row[5], row[6]), (33.0, 65.0)
        if row[3] == " ":
            country_code = reverse_geocode.search(
                coordinates)[0]["country_code"]
            dict_nation = {
                "target": country_code,
                "datapoints": [[row[1], timestamp]]
            }
        else:
            dict_nation = {
                "target": row[3].strip(),
                "datapoints": [[row[1], timestamp]]
            }
        total_list.append(dict_nation.copy())
    return total_list
def main(): with open("prova.csv", newline='') as input_row: df = list(csv.reader(input_row)) continents = { 'NA': 'America', 'SA': 'America', 'AS': 'Asia', 'OC': 'Oceania', 'AF': 'Africa', 'EU': 'Europa', 'AN': 'Antartide' } coordinates = (df[0][2], df[0][3]), (33.0, 65.0) country_code = reverse_geocode.search(coordinates)[0]["country_code"] continent_code = pc.country_alpha2_to_continent_code(country_code) df[0].append(continents[continent_code]) print(df) with open("output.csv", "w") as output_row: writer = csv.writer(output_row) writer.writerows(df)
def get_best_static_label(lat_long):
    """
    Get the best label we can from a static database.

    :param tuple[float, float] lat_long: a tuple of (latitude, longitude)
    :rtype: str
    :return: a city string
    """
    loc = reverse_geocode.search((lat_long, ))
    if not loc:
        return None
    loc = loc[0]
    if "city" in loc:
        return loc["city"]
    if "country" in loc:
        return loc["country"]
    return None
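# Usage sketch for get_best_static_label; it takes one (lat, lon) tuple and
# wraps it in the single-element sequence search() expects, falling back from
# city to country when a field is missing.
print(get_best_static_label((52.5200, 13.4050)))  # e.g. 'Berlin'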
def getCountry(self, filename):
    print("Start: " + filename)
    tweets = self.loadData(filename)
    print("Getting country for each tweet in file " + filename)
    out = open(self.output_path + filename, "w")
    data = []
    c = 0
    for i in range(len(tweets)):
        try:
            # coordinates arrive as a "lat,lng" string; search() needs floats
            lat, lng = (float(v) for v in tweets[i][1].split(","))
            result = reverse_geocode.search([(lat, lng)])[0]
            # Timestamp
            timeFormat = '%Y-%m-%d %H:%M:%S'
            ts = time.strftime(timeFormat,
                               time.strptime(tweets[i][2], '%a %b %d %H:%M:%S +0000 %Y'))
            timestamp = str(time.mktime(datetime.strptime(ts, timeFormat).timetuple()))[:-2]
            # the original's two branches wrote identical records, so merge
            # them: accept named countries plus the special-cased 'XK'
            # (Kosovo), whose country name can be empty
            if result['country'] != "" or result['country_code'] == "XK":
                data_str = "\t".join([tweets[i][0], timestamp, tweets[i][1],
                                      result['city'], result['country_code'],
                                      result['country'], tweets[i][3], "\n"])
                data.append(data_str)
            else:
                self.not_usable_files.write(str(tweets[i][0]) + "\n")
                print(result)
        except Exception:
            c += 1
            print(sys.exc_info(), "Lola: ", tweets[i][2])
            self.not_usable_files.write(filename + ": " + str(tweets[i]) + "\n")
    print("Errors in file:", c)
    for d in data:
        out.write(d)
    out.close()