def koordinatlamav3(df):  # RAW DATA >> CDS MATRIX >> COORDINATES >> CDS >> RAW DATA
    print(TOLERANCE, MAX, Aaa, Bbb, Ccc)
    cds = []
    for i in range(4):
        cds.append([])
    print('converting iso3 country codes in the data to iso2..')
    for i in tqdm(range(len(df))):
        for j in range(len(cd)):
            if df.at[i, 'Destination Country Key'] == cd.at[j, 'iso3']:
                df.at[i, 'Destination Country Key'] = cd.at[j, 'iso2']
            if df.at[i, 'Origin Country Key'] == cd.at[j, 'iso3']:
                df.at[i, 'Origin Country Key'] = cd.at[j, 'iso2']
        if df.at[i, 'Destination Country Key'] == 'UK':
            df.at[i, 'Destination Country Key'] = 'GB'
        if df.at[i, 'Origin Country Key'] == 'UK':
            df.at[i, 'Origin Country Key'] = 'GB'
    for i in range(len(df)):  # collecting postal code and country data
        if df.at[i, 'Origin Postal Code'] not in cds[0]:
            cds[0].append(df.at[i, 'Origin Postal Code'])
            cds[1].append(df.at[i, 'Origin Country Key'])
        if df.at[i, 'Destination Postal Code'] not in cds[0]:
            cds[0].append(df.at[i, 'Destination Postal Code'])
            cds[1].append(df.at[i, 'Destination Country Key'])
    print('looking up coordinates for the postal codes..')
    for i in tqdm(range(len(cds[0]))):
        if cds[1][i] != "GR":
            cds[2].append(pgeocode.Nominatim(cds[1][i]).query_postal_code(cds[0][i]).latitude)
            cds[3].append(pgeocode.Nominatim(cds[1][i]).query_postal_code(cds[0][i]).longitude)
        else:
            # Greek postal codes fall back to fixed coordinates
            cds[2].append(38.021332)
            cds[3].append(23.798630)
    print('adding coordinates to the data..')
    for i in tqdm(range(len(df))):
        for j in range(len(cds[0])):
            if df.at[i, 'Origin Postal Code'] == cds[0][j]:
                df.at[i, 'lat'] = cds[2][j]
                df.at[i, 'lon'] = cds[3][j]
            if df.at[i, 'Destination Postal Code'] == cds[0][j]:
                df.at[i, 'lat2'] = cds[2][j]
                df.at[i, 'lon2'] = cds[3][j]
    fals = []
    for i in range(len(df)):
        df.at[i, 'distance'] = distanceChecker(i, -1)
    # print('cleaning the data..')
    for i in range(len(df)):
        if abs((df.at[i, 'Delivery Date'] - df.at[i, 'Goods issue date']).total_seconds()) == 0:
            fals.append(i)
    df.drop(index=fals, inplace=True)
    df = df[df.lat.notnull()]
    df = df[df.lat2.notnull()]
    df.to_excel('./ayik.xlsx', index=False)
    # df = pd.read_excel(r'/ayik.xlsx')
    df = pd.read_excel(path2)
    os.remove(path2)
    df = df.sort_values('Goods issue date')
    return df
def get_lat_lon_by_postalcode_country(self, postal_code, country='US',
                                      return_result_object=False, db_file_dir=None):
    if country == "US":
        if db_file_dir:
            search = SearchEngine(simple_zipcode=True, db_file_dir=db_file_dir)
        else:
            search = SearchEngine(simple_zipcode=True)
        zipcode = search.by_zipcode(postal_code)
        if zipcode.lat is None or zipcode.lng is None:
            raise InvalidZipCodeError('Invalid ZIP Code')
        if return_result_object:
            return zipcode.lat, zipcode.lng, zipcode
        return zipcode.lat, zipcode.lng
    else:
        nomi = pgeocode.Nominatim(country)
        query_results = nomi.query_postal_code(postal_code)
        if math.isnan(query_results.latitude) or math.isnan(query_results.longitude):
            raise InvalidZipCodeError('Invalid ZIP Code')
        if return_result_object:
            return query_results.latitude, query_results.longitude, query_results
        return query_results.latitude, query_results.longitude
def get_data_from_json():
    directory = os.getcwd() + '/app/raw-data'
    cnt = 0
    for filename in os.listdir(directory):
        if filename.endswith(".json") and cnt == 0:
            with open(os.path.join(directory, filename)) as json_file:
                d_temp = json.load(json_file)
                data = d_temp
                cnt += 1
            continue
        elif filename.endswith(".json"):
            with open(os.path.join(directory, filename)) as json_file:
                d_temp = json.load(json_file)
                data = data + d_temp
                cnt += 1
            continue
        else:
            print('none')
    a = pd.io.json.json_normalize(data)
    # Unique zip codes
    # a['data.locator'].unique()
    nomi = pgeocode.Nominatim('ch')
    geo_loc_df = nomi.query_postal_code(a['data.locator'].tolist())
    df_out = a.join(geo_loc_df)
    json_out = df_out.to_json(orient="records")
    return json_out, df_out
def county(self, options):
    out = []
    county_votes = {}
    nomi = pgeocode.Nominatim('ES')
    # first pass: total votes per county
    for opt in options:
        for cp, votes in opt['votes'].items():
            county = nomi.query_postal_code(cp)['county_name']
            if county in county_votes:
                county_votes[county] = county_votes[county] + opt['votes'][cp]
            else:
                county_votes[county] = opt['votes'][cp]
    # second pass: sum of per-county percentages for each option
    for opt in options:
        result = 0
        for cp, votes in opt['votes'].items():
            county = nomi.query_postal_code(cp)['county_name']
            county_percent = round(votes / county_votes[county] * 100)
            result += county_percent
        out.append({
            **opt,
            'postproc': result,
        })
    out.sort(key=lambda x: -x['postproc'])
    return Response(out)
def get_context_data(self, **kwargs):
    context = super().get_context_data(**kwargs)
    vid = kwargs.get('voting_id', 0)
    try:
        r = mods.get('voting', params={'id': vid})
        # Casting numbers to string to manage in javascript with BigInt
        # and avoid problems with js and big number conversion
        for k, v in r[0]['pub_key'].items():
            r[0]['pub_key'][k] = str(v)
        context['voting'] = json.dumps(r[0])
    except:
        raise Http404
    context['KEYBITS'] = settings.KEYBITS
    # The voter json is supposed to come from the authentication module
    voter_json = '{"username": "******", "postal_code": "11368"}'
    voter = json.loads(voter_json)
    nomi = pgeocode.Nominatim('es')
    context['province'] = nomi.query_postal_code(voter['postal_code'])['county_name']
    return context
def equalityProvince(self, options):
    out = []
    county_votes = {}
    nomi = pgeocode.Nominatim('ES')
    mapping = self.get_map()
    try:
        for opt in options:
            # Check that the option carries the parameter we need
            if 'postal_code' in opt:
                votes = opt['votes']
                coef = float(0.01)
                position = float(mapping[nomi.query_postal_code(opt['postal_code'])['county_name']])
                votes = float(votes) + float(votes) * coef * position
                votes = int(votes)
                out.append({
                    **opt,
                    'postproc': votes,
                })
        out.sort(key=lambda x: -x['postproc'])
        if len(options) == 0:
            # Guard against empty input data
            print("An exception occurred with equality province method")
            out.append({'error': 'The Data is empty'})
    except:
        if len(options) > 0:
            print("An exception occurred with equality province method")
            out.append({
                'error': 'An exception occurred with equality province method'
            })
    return Response(out)
def main():
    print("Starting.")
    names = [
        'CREDIT_SCORE ', 'FIRST_PAYMENT_DATE', 'FIRST_TIME_HOMEBUYER_FLAG',
        'MATURITY_DATE', 'METROPOLITAN_STATISTICAL_AREA',
        'MORTGAGE_INSURANCE_PERCENTAGE', 'NUMBER_OF_UNITS', 'OCCUPANCY_STATUS',
        'ORIGINAL_COMBINED_LOAN-TO-VALUE', 'ORIGINAL_DEBT_TO_INCOME_RATIO',
        'ORIGINAL_UPB', 'ORIGINAL_LOAN-TO-VALUE', 'ORIGINAL_INTEREST_RATE',
        'CHANNEL', 'PREPAYMENT_PENALTY_MORTGAGE_(PPM)_FLAG', 'PRODUCT_TYPE',
        'PROPERTY_STATE', 'PROPERTY_TYPE', 'POSTAL_CODE', 'LOAN_SEQUENCE_NUMBER',
        'LOAN_PURPOSE', 'ORIGINAL_LOAN_TERM', 'NUMBER_OF_BORROWERS',
        'SELLER_NAME', 'SERVICER_NAME', 'SUPER_CONFORMING_FLAG',
        'Pre_HARP_LOAN_SEQUENCE_NUMBER'
    ]
    path = 'C:\\Users\\Abhilash\\Desktop\\scikit-learn\\SampleInputFiles\\sample_orig_*.txt'
    sep = '|'
    nomi = pgeocode.Nominatim('us')
    df = load_data(path, sep, names)
    # count loans per postal code
    post_code_series = df['POSTAL_CODE'].value_counts()
    df_post_code = pd.DataFrame()
    df_post_code['POSTAL_CODE'] = post_code_series.index
    df_post_code['COUNT'] = post_code_series.values
    df_longi_lati = add_longi_lati(df_post_code, nomi)
    df_longi_lati.to_csv('post_code_longi_lati.csv', index=False)
    print("Process Completed.")
def __init__(self, token, boss_id) -> None:
    self.timeout = {}
    self.gSym = {}
    self.gChatId = []
    self.userStatus = {}
    self.userThread = {}
    self.vaccine_log = {}
    self.symCachePath = {}
    self.threadIndex = 0
    self.boss_id = int(boss_id)
    self.queue = queue.Queue()
    self.nomi = pgeocode.Nominatim('us')
    self.updater = Updater(token=token, use_context=True)
    self.dispatcher = self.updater.dispatcher
    self.updater.start_polling()
    self.AddCommandHandler("start", self.CommandStart)
    self.AddCommandHandler("remove_from_watchlist", self.CommandRemoveFromWatchList)
    self.AddCommandHandler("show_latest_result", self.CommandShowWatchlist)
    self.AddCommandHandler("source_code", self.CommandSourceCode)
    self.AddCommandHandler("help", self.CommandHelp)
    self.AddMessageHandler(self.MessageUnknowText)
    self.AddCallbackQueryHandler(self.CallbackResult)
    self.initLogger()
    self.initCache()
def getinfo():
    # Open the job centre document that contains all the relevant data
    with open("JobCentres", "r") as f:
        for line in f:
            line = str(line)
            # the first five characters of each line are the centre id
            centreid = line[:5]
            centreid = centreid.strip()
            add = line.split("\t")
            string = ""
            for i in range(0, len(add)):
                if i != 0:
                    address = str(add[i])
                    # append each field of the line into one long address string
                    string = string + " " + address
            string = string.strip()
            # the last tab-separated field is the postcode
            postcode = add[len(add) - 1]
            postcode = str(postcode)
            postcode1 = postcode.strip()
            try:
                nomi = pgeocode.Nominatim('GB')
                info = nomi.query_postal_code(postcode1)  # all pgeocode data for the postcode
                lat = info[9]      # latitude
                longit = info[10]  # longitude
            except:
                lat = 0
                longit = 0
            # insert the data for this job centre into the database, one row per iteration
            handle = sql.connect("JobCentreData.db")
            cursor = handle.cursor()
            cursor.execute("INSERT INTO DATA VALUES(?,?,?,?,?)",
                           (centreid, longit, lat, postcode1, string))
            handle.commit()
            handle.close()
def zip_lookup_lat_long(self, zip_):
    # using a library called pgeocode
    nomi = pgeocode.Nominatim('US')
    geoinfo = nomi.query_postal_code(zip_)
    lat = geoinfo.loc['latitude']
    long = geoinfo.loc['longitude']
    return lat, long
def add_canada_lat_long(metadata, lat_longs):
    """ Use pgeocode to add lat and longs for canadian locations """
    # get all locations in lat long file to check which need added
    loc_lat_longs = set(lat_longs.query('geo_scale=="location"')['geo_loc'].unique())
    # get canada locs that are postcodes and not in lat long already
    canada = metadata.query('country=="Canada"')
    postcodes = canada.loc[canada['location'].str.match("^[a-zA-Z][0-9][a-zA-Z]$").fillna(False), 'location'].unique()
    postcodes = set(postcodes) - loc_lat_longs
    # get geocode database
    geocode = pgeocode.Nominatim('CA')
    postcode_lat_long = {'geo_scale': [], 'geo_loc': [], 'lat': [], 'long': []}
    for postcode in postcodes:
        geoloc_for_postcode = geocode.query_postal_code(postcode)
        postcode_lat_long['geo_scale'].append('location')
        postcode_lat_long['geo_loc'].append(postcode)
        postcode_lat_long['lat'].append(geoloc_for_postcode['latitude'])
        postcode_lat_long['long'].append(geoloc_for_postcode['longitude'])
    # update lat_longs with postcode locations
    updated_lat_longs = lat_longs.append(pd.DataFrame(postcode_lat_long))
    return updated_lat_longs
def kod():
    nomi = pgeocode.Nominatim('PL')
    output = nomi.query_postal_code(entry.get())
    # Polish postal codes are entered as "XX-XXX", i.e. six characters
    if len(entry.get()) != 6:
        messagebox.showwarning('', 'Wrong input length, please correct it.')
    else:
        label4.config(text=output[:-1])
        return output[:-1]
def __init__(self, mongodb_user: str, mongodb_password: str, mongodb_host: str,
             mongodb_port: str, my_turn_api_key: str):
    self.nomi = pgeocode.Nominatim('us')
    self.mongodb = pymongo.MongoClient(
        f'mongodb://{mongodb_user}:{mongodb_password}@{mongodb_host}:{mongodb_port}'
    )
    self.my_turn_ca = MyTurnCA(api_key=my_turn_api_key)
    self.logger = logging.getLogger(__name__)
def save(self, *args, **kwargs):
    if self.postcode:
        nomi = pgeocode.Nominatim('us')
        details = nomi.query_postal_code(str(self.postcode))
        self.longitude = details['longitude']
        self.latitude = details['latitude']
    super(Client_Register, self).save(*args, **kwargs)
def lat_long_convert(pincode):
    nomi = pgeocode.Nominatim('in')
    data_cap = nomi.query_postal_code(pincode)
    lat = data_cap.get('latitude')
    long_value = data_cap.get('longitude')
    print('lat: ', lat)
    print('long_value: ', long_value)
    return str(lat), str(long_value)
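# A minimal usage sketch for lat_long_convert() above, not taken from the
# original source; the pincode value is an illustrative assumption.
if __name__ == '__main__':
    lat, lon = lat_long_convert('110001')  # hypothetical Delhi pincode
    print(lat, lon)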
def getlatandlon(queryset_array):
    latitude = list()
    longitude = list()
    nomi = pgeocode.Nominatim('MY')
    for i in queryset_array:
        latitude.append(nomi.query_postal_code(i.pcode).latitude)
        longitude.append(nomi.query_postal_code(i.pcode).longitude)
    return latitude, longitude
def clean_country(self):
    cd = self.cleaned_data
    try:
        # constructing a Nominatim instance validates that pgeocode supports the country
        nomi = pgeocode.Nominatim(cd.get('country'))
    except:
        raise forms.ValidationError(
            "Your country is not supported currently")
    return cd.get('country')
def location(country, zip):
    '''
    Returns the latitude, longitude of the entered zipcode/country
    '''
    nomi = pgeocode.Nominatim(country)
    query_res = nomi.query_postal_code(zip)
    return (f"{str(query_res.latitude)},{str(query_res.longitude)}", query_res.place_name)
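# Hedged usage sketch for location() above; the country/ZIP inputs are assumed,
# and place_name may be NaN for a postal code pgeocode cannot resolve.
if __name__ == '__main__':
    coords, place = location('us', '90210')
    print(coords, place)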
def pcgraphs(request):
    print("pcgraphs")
    if request.method == 'POST':
        # Extract Form Data
        form = PlasticCracksForm(request.POST)
        zip_code = form.data['zip_code']
        concrete_temperature = form.data['concrete_temperature']
        # Calculate Latitude and Longitude from provided Zip Code
        nomi = pgeocode.Nominatim('us')
        nomi_dict = nomi.query_postal_code(zip_code)
        lat = nomi_dict['latitude']
        lon = nomi_dict['longitude']
        # Make GET request for 7-day forecast from OpenWeatherMap API
        base_url = 'https://api.openweathermap.org/data/2.5/onecall?'
        api_key = '04eb51a7a1f05c135efcddc8a13d23e8'
        full_url = base_url + 'lat=' + str(lat) + '&lon=' + str(lon) + '&units=imperial&appid=' + str(api_key)
        response = requests.get(full_url)
        dates = []
        temperatures = []
        relative_humidities = []
        wind_speeds = []
        evaporation_rates = []
        for item in response.json()['daily']:
            date = datetime.date.fromtimestamp(item['dt'])
            date.strftime("%d %b, %Y")
            dates.append(date)
            temperature = item['temp']['day']
            temperatures.append(temperature)
            relative_humidity = item['humidity']
            relative_humidities.append(relative_humidity)
            wind_speed = item['wind_speed']
            wind_speeds.append(wind_speed)
            # evaporation-rate estimate from concrete temperature, air temperature,
            # relative humidity and wind speed
            evap_1 = ((float(concrete_temperature) ** 2.5) -
                      ((float(relative_humidity) / 100) * (float(temperature) ** 2.5)))
            evap_2 = (1 + (0.4 * float(wind_speed))) * (10 ** -6)
            evaporation_rate = abs(evap_1 * evap_2)
            evaporation_rates.append(evaporation_rate)
            print('Temp: ' + str(temperature))
            print('Relative Humidity: ' + str(relative_humidity))
            print('Wind Speed: ' + str(wind_speed))
            print('Evaporation Rate: ' + str(evaporation_rate))
        return render(request, 'pccalc/pcgraphs.html', {
            'dates': dates,
            'risks': evaporation_rates,
            'form': form
        })
    else:
        print("pcgraphs2")
        return render(request, 'pccalc/index.html')
def extract_pincode(temp_line_text):
    address = {
        "postal_code": " ",
        "place_name": " ",
        "state_name": " ",
        "city_name": " ",
        "area_name": " ",
        "longitude": " ",
        "latitude": ""
    }
    pincode = ""
    # Indian pincodes: six digits, optionally written as "XXX XXX"
    if re.search(r"\b\d{3}\s{0,1}\d{3}\b", temp_line_text):
        ind_pincode = str(re.search(r"\b\d{3}\s{0,1}\d{3}\b", temp_line_text)[0])
        if int(ind_pincode[0]) > 0 and int(ind_pincode[0]) < 9:
            pincode = ind_pincode
    # US ZIP codes: five digits
    if re.search(r"\b\d{5}\b", temp_line_text):
        us_zipcode = re.search(r"\b\d{5}\b", temp_line_text)[0]
        pincode = us_zipcode
    if pincode:
        if len(str(pincode)) == 6:
            nomi = pgeocode.Nominatim('in')
            # print(nomi.query_postal_code(str(pincode))["place_name"])
            result = nomi.query_postal_code(str(pincode))
            address["postal_code"] = result["postal_code"]
            address["place_name"] = result["place_name"]
            address["state_name"] = result["state_name"]
            address["city_name"] = result["county_name"]
            address["area_name"] = result["community_name"]
            address["longitude"] = str(result["longitude"])
            address["latitude"] = str(result["latitude"])
    if address["postal_code"] == " ":
        # fall back to spaCy matcher when no pincode could be resolved
        location_extra = []
        # print(skill_text)
        try:
            nlp_new = nlp(temp_line_text.lower())
            matches = locationmatcher(nlp_new)
            for match_id, start, end in matches:
                span = nlp_new[start:end]
                location_extra.append(span.text)
                address["place_name"] = span.text
                address["city_name"] = span.text
        except:
            location_extra = []
    return address
def get_lat_lon(data):
    noim = pgeocode.Nominatim('IN')
    lat_lon = []
    for k, j in data.iterrows():
        # print(j.pincode)
        xv = noim.query_postal_code(str(int(j.pincode)))
        lat_lon.append([xv.latitude, xv.longitude])
    return lat_lon
def getLatitudeLongitude():
    print('starting lat and long process')
    nomi = pgeocode.Nominatim('GB')
    for p in filtered_data_set:
        result = nomi.query_postal_code(p)
        # keep only postcodes that resolve to coordinates
        if not np.isnan(result.latitude) or not np.isnan(result.longitude):
            latitude.append(result.latitude)
            longitude.append(result.longitude)
    print(longitude, latitude)
def is_zipcode_valid(zipcode: str, country_code) -> bool:
    try:
        data = pgeocode.Nominatim(country_code.upper())
        place_name = data.query_postal_code(zipcode).place_name
        # place_name is NaN (a float) when the postal code is unknown
        if isinstance(place_name, str):
            return True
        else:
            return False
    except Exception as e:
        return False
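# Illustrative calls to is_zipcode_valid() above; the sample ZIP codes are assumptions.
if __name__ == '__main__':
    print(is_zipcode_valid('90210', 'us'))  # True if pgeocode's US dataset resolves it
    print(is_zipcode_valid('00000', 'us'))  # expected False for an unknown ZIP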
def distance_calculator(zipcode):
    distance_list = []
    nomi = pgeocode.Nominatim('us')
    zip_loc = (nomi.query_postal_code(zipcode).latitude,
               nomi.query_postal_code(zipcode).longitude)
    for i in range(0, len(df_business)):
        business_loc = (df_business['latitude'][i], df_business['longitude'][i])
        distance_list.append(geodesic(zip_loc, business_loc).miles)
    df_business['distance'] = distance_list
def geo_location(self, postal_code):
    # pgeocode.Nominatim.query_postal_code only uses the first three characters
    # to do the lookup for Canadian postal codes
    postalcode = postal_code[:3]
    try:
        nomi = pgeocode.Nominatim('ca')
        location = nomi.query_postal_code(postalcode)
    except Exception as e:
        raise KijijiApiException(f'Error acquiring geo location data: {e}')
    else:
        return location
def get_data_from_json():
    directory = os.environ['APP_BASE_DIR'] + '/app/raw-data'
    cnt = 0
    for filename in os.listdir(directory):
        if filename.endswith(".json") and cnt == 0:
            with open(os.path.join(directory, filename)) as json_file:
                d_temp = json.load(json_file)
                data = d_temp
                cnt += 1
            continue
        elif filename.endswith(".json"):
            with open(os.path.join(directory, filename)) as json_file:
                d_temp = json.load(json_file)
                data = data + d_temp
                cnt += 1
            continue
        else:
            print('none')
    a = pd.io.json.json_normalize(data)
    # Unique zip codes
    # a['data.locator'].unique()
    nomi = pgeocode.Nominatim('ch')
    geo_loc_df = nomi.query_postal_code(a['data.locator'].tolist())
    # count each diagnostic value per unique locator
    locator = []
    diagnostic0 = []
    diagnostic1 = []
    diagnostic2 = []
    diagnostic3 = []
    diagnostic4 = []
    diagnostic5 = []
    for i in a['data.locator'].unique():
        res = a[a['data.locator'] == i]['data.diagnostic']
        locator.append(i)
        diagnostic0.append(res[res == 0].count())
        diagnostic1.append(res[res == 1].count())
        diagnostic2.append(res[res == 2].count())
        diagnostic3.append(res[res == 3].count())
        diagnostic4.append(res[res == 4].count())
        diagnostic5.append(res[res == 5].count())
    df = pd.DataFrame({'data.locator': locator,
                       'diagnostic0': diagnostic0,
                       'diagnostic1': diagnostic1,
                       'diagnostic2': diagnostic2,
                       'diagnostic3': diagnostic3,
                       'diagnostic4': diagnostic4,
                       'diagnostic5': diagnostic5})
    geo_loc_df_summary = nomi.query_postal_code(df['data.locator'].tolist())
    df_out_summary = df.join(geo_loc_df_summary)
    df_out = a.join(geo_loc_df)
    json_out = df_out.to_json(orient="records")
    json_out_summary = df_out_summary.to_json(orient="records")
    return json_out, df_out, json_out_summary, df_out_summary
def getCordinates(srcPincode, desPincode):
    latitudearr = []
    longitudearr = []
    nomi = pgeocode.Nominatim('in')
    res = nomi.query_postal_code([srcPincode, desPincode])
    latitudearr = res.loc[:, "latitude"]
    longitudearr = res.loc[:, "longitude"]
    return {
        'src': [latitudearr[0], longitudearr[0]],
        'des': [latitudearr[1], longitudearr[1]]
    }
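# Possible usage of getCordinates() above with two illustrative Indian pincodes
# (assumed values, not from the original source).
if __name__ == '__main__':
    coords = getCordinates('560001', '110001')
    print(coords['src'], coords['des'])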
def get_state_code_df(uniq_zip):
    nomi = pgeocode.Nominatim("us")
    mapping = []
    for zip in uniq_zip:
        mapping.append({
            "zip": zip,
            "state_code": nomi.query_postal_code(zip)["state_code"]
        })
    return pd.DataFrame(mapping)
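# Sketch of how get_state_code_df() above might be called; the ZIP list is assumed.
if __name__ == '__main__':
    print(get_state_code_df(['10001', '94105']))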
def dist():
    dist = pgeocode.GeoDistance('PL')
    nomi = pgeocode.Nominatim('PL')
    output3 = nomi.query_postal_code(entry2.get())
    output4 = nomi.query_postal_code(entry3.get())
    output2 = dist.query_postal_code(str(entry2.get()), str(entry3.get()))
    # Polish postal codes are entered as "XX-XXX", i.e. six characters
    if len(entry2.get()) != 6 or len(entry3.get()) != 6:
        messagebox.showwarning('', 'Wrong input length, please correct it.')
    else:
        label8.config(font=('Courier 13 bold'),
                      text=str(output3[:-1]) + '\n' + str(output4[:-1]) + '\n' + '\n' +
                      'The distance between these areas is ~' + str(round(output2, 2)) + ' km')
        return output2, output3[:-1], output4[:-1]
def request_user_loc():
    """
    Get the user_loc from the zip_code query parameter for the current request.
    Otherwise, return the GDC location.
    """
    if "zip_code" in request.args:
        nomi = pgeocode.Nominatim("us")
        user_loc = nomi.query_postal_code(request.args.get("zip_code"))
        user_loc = (user_loc["latitude"], user_loc["longitude"])
    else:
        user_loc = (30.286, -97.736)  # GDC
    return user_loc