def north_east_to_lat_lon(north, east):
    """Convert a single BNG northing/easting pair to a WGS84 (lat, lon) tuple."""
    converted = convert_lonlat([east], [north])
    # convert_lonlat yields ([lons], [lats]); flip to (lat, lon) for the caller.
    return converted[1][0], converted[0][0]
def convert_to_lonlat(easting, northing):
    """Convert one BNG easting/northing pair and return it as (lat, lon)."""
    lons, lats = convert_lonlat([easting], [northing])
    return lats[0], lons[0]
def convertToLatLong(areaId):
    """Derive pixel coordinates from a 40-column area-grid id, map them to
    BNG via pixelToEastNorth, and print the resulting lon/lat conversion.

    Returns None; output goes to stdout only.
    """
    # 50px per cell; row index = areaId // 40, column = remainder within the row.
    yCord = math.floor(areaId / 40) * 50
    xCord = (areaId - (yCord / 50) * 40) * 50
    easting, northing = pixelToEastNorth(xCord, yCord)
    print(easting, northing)
    print(convert_lonlat(easting, northing))
def add_lat_lon(gps):
    """Add 'lon'/'lat' columns to *gps*, converted from its BNG
    'Easting'/'Northing' columns. Mutates and returns the same frame."""
    lon, lat = convert_lonlat(gps['Easting'].to_numpy(),
                              gps['Northing'].to_numpy())
    gps['lon'] = lon
    gps['lat'] = lat
    return gps
def _coords_from_shp(self, shp_file, coord_system, feature_number, getRecords=True):
    """Load one feature of a British National Grid shapefile, appending its
    points to self.lon/self.lat (WGS84) and, optionally, its record fields
    to self.records. No-op for any coord_system other than 'BNG'."""
    if coord_system != 'BNG':
        return
    sf = shapefile.Reader(shp_file)
    # Convert every BNG point of the requested feature to lon/lat.
    for pair in sf.shapes()[feature_number].points:
        lola = convert_lonlat(pair[0], pair[1])
        self.lon.append(float(lola[0][0]))
        self.lat.append(float(lola[1][0]))
    if getRecords:
        # Field 0 is the shapefile deletion flag, hence the offset of one.
        feature_record = sf.records()[feature_number]
        for field_idx in range(1, len(sf.fields)):
            self.records[sf.fields[field_idx][0]] = feature_record[field_idx - 1]
def convert_to_lat_long(df):
    """Convert BNG 'Easting'/'Northing' columns to WGS84 'Longitude'/'Latitude'.

    Uses a single vectorised convert_lonlat call, so it scales to 100,000+
    rows.

    Arguments:
        df {pandas.DataFrame} -- frame with numeric 'Easting' and 'Northing'
            columns.

    Returns:
        pandas.DataFrame -- frame with 'Longitude'/'Latitude' added and the
        now-redundant 'Easting'/'Northing' columns dropped.
    """
    easting_np = np.array(df.Easting)
    northing_np = np.array(df.Northing)
    lons, lats = convert_lonlat(easting_np, northing_np)
    df['Longitude'], df['Latitude'] = lons, lats
    # drop the easting and northing columns, now we are done with them
    # (axis=1 was redundant alongside columns= and has been removed).
    return df.drop(columns=['Easting', 'Northing'])
def get_place_coordinates(self):
    """Return a (num_places, 2) float32 array of (lat, lon) per global place id.

    Non-home activities are converted from OS grid (BNG) coordinates; homes
    get coordinates sampled from buildings via get_coordinates_from_buildings.
    """
    place_coordinates = np.zeros((self.num_places, 2), dtype=np.float32)
    # Homes are handled separately below, so exclude them here.
    non_home_activities = list(
        filter(lambda activity: activity != "Home", self.activity_names))
    for activity_index, activity_name in enumerate(non_home_activities):
        activity_locations_df = self.locations[activity_name]
        # rename OS grid coordinate columns
        activity_locations_df = activity_locations_df.rename(
            columns={
                "bng_e": "Easting",
                "bng_n": "Northing"
            })
        # Convert OS grid coordinates (eastings and northings) to latitude and longitude
        if 'Easting' in activity_locations_df.columns and 'Northing' in activity_locations_df.columns:
            local_ids = activity_locations_df.loc[:, "ID"]
            eastings = activity_locations_df.loc[:, "Easting"]
            northings = activity_locations_df.loc[:, "Northing"]
            for local_place_id, easting, northing in tqdm(
                    zip(local_ids, eastings, northings),
                    desc=f"Processing coordinate data for {activity_name}"):
                global_place_id = self.get_global_place_id(
                    activity_name, local_place_id)
                # convert_lonlat returns ([lons], [lats]); rows are stored (lat, lon).
                long_lat = convert_lonlat([easting], [northing])
                long = long_lat[0][0]
                lat = long_lat[1][0]
                place_coordinates[global_place_id] = np.array([lat, long])
    # for homes: assign coordinates of random building inside MSOA area
    home_locations_df = self.locations["Home"]
    lats, lons = self.get_coordinates_from_buildings(home_locations_df)
    local_ids = home_locations_df.loc[:, "ID"]
    for local_place_id, lat, lon in tqdm(
            zip(local_ids, lats, lons),
            desc=f"Storing coordinates for homes"):
        global_place_id = self.get_global_place_id("Home", local_place_id)
        place_coordinates[global_place_id] = np.array([lat, lon])
    return place_coordinates
def shp_as_ROI_list(shp_file, coord_system='BNG', getRecords=True):
    """ Returns a list of ROI objects for a shapefile - does this much
    faster than creating individual objects.

    Keyword Arguments:
    shp_file -- single .shp, .shx or .dbf file
    coord_system -- only supports BNG at present; any other value returns None
    getRecords -- imports records as dictionary on each ROI
    """
    if coord_system == 'BNG':
        sf = shapefile.Reader(shp_file)
        # get record entries and append to dictionary
        if getRecords == True:
            fields = sf.fields
            records = sf.records()
        roiList = []
        shapes = sf.shapes()
        for i in range(0, len(shapes)):
            roi = sentinelROI()
            BNGcoordinates = shapes[i].points
            # split coords, convert and append to roi.lat and roi.lon
            for pair in BNGcoordinates:
                lola = convert_lonlat(pair[0], pair[1])
                roi.lon.append(lola[0][0])
                roi.lat.append(lola[1][0])
            # run all init functions (order matters: leaflet coords feed the
            # grid conversion, which feeds the bounding box)
            # make coordinates attribute
            roi._leaflet_coords()
            # extract the ROI lat/lon
            roi._convert_to_grid()
            # calculate bouding box coordinates
            roi._boxROI_utm()
            if getRecords == True:
                # field 0 is the shapefile deletion flag, hence the offset
                for j in range(1, len(sf.fields)):
                    roi.records[fields[j][0]] = records[i][j-1]
            # append to roiList
            roiList.append(roi)
        return(roiList)
# ============================================================================= route = pd.read_csv('../Data/BusSequences.csv') route = route[route.Run == 1] drop_attr = [ 'Run', 'Stop_Code_LBSL', 'Bus_Stop_Code', 'Naptan_Atco', 'Heading', 'Virtual_Bus_Stop' ] for i in drop_attr: route.pop(i) # Convert easting and northings to longitude and latitude easting = route['Location_Easting'].tolist() northing = route['Location_Northing'].tolist() coordinate = convert_lonlat(easting, northing) route['Long'] = coordinate[0] route['Lat'] = coordinate[1] route['Point'] = "" bus_routes = np.unique(route['Route'].tolist()) coord_points = [] # Loop to generates point for each sequence in a route for i in bus_routes: bus = route[route['Route'] == i] for j in bus.Sequence.tolist(): x = bus['Long'].loc[bus['Sequence'] == j]
def testConvertBNG(self):
    """Check the multithreaded BNG --> (lon, lat) conversion on two known points."""
    outcome = convert_lonlat([516276, 398915], [173141, 521545])
    self.assertEqual(([-0.32822654, -2.01831267], [51.44533145, 54.58910534]),
                     outcome)
# NOTE(review): this chunk starts mid-script — convert_dict, n_e_dict and the
# first open 'input' file handle are defined above this view.
for row in csv.reader(input):
    convert_dict[row[1]] = int(row[0])
input.close()

# Map each known STANOX code to its [easting, northing] pair.
input = open('RailReferences.csv', 'r')
for row in csv.reader(input):
    if row[0] in convert_dict:
        n_e_dict[convert_dict[row[0]]] = [int(row[1]), int(row[2])]
input.close()

# Spot-check one station before converting everything.
check_stanox = 78215
print(n_e_dict[check_stanox])
print(
    convert_lonlat(float(n_e_dict[check_stanox][0]),
                   float(n_e_dict[check_stanox][1])))

lat_lon_dict = {}
for i in n_e_dict:
    # FIX: convert once per station instead of calling convert_lonlat twice
    # on identical input; the result is ([lon], [lat]).
    lons, lats = convert_lonlat(float(n_e_dict[i][0]), float(n_e_dict[i][1]))
    lat_lon_dict[i] = (lats[0], lons[0])
print(lat_lon_dict[check_stanox])

with open('stanox_to_lonlat.json', 'w') as fp:
    json.dump(lat_lon_dict, fp)
def en2lola(eastings: list, northings: list):
    """Convert parallel lists of BNG eastings/northings; returns ([lons], [lats])."""
    return convert_lonlat(np.array(eastings), np.array(northings))
def lats(easting, northing):
    """Return only the latitude component of a BNG -> WGS84 conversion."""
    # convert_lonlat yields (longitudes, latitudes); keep the second element.
    return convert_lonlat(easting, northing)[1]
def convert_linestring(linestring):
    """Re-project a BNG LineString into a WGS84 lon/lat LineString."""
    # Split the Nx2 coordinate array into parallel easting/northing rows.
    eastings, northings = numpy.array(linestring.coords).transpose()
    lonlat = convert_lonlat(eastings, northings)
    return LineString(numpy.array(lonlat).transpose())
def render_map(self, easting, northing, mintemp, maxtemp, meantemp, rainfall, date):
    """Build an HTML/JS Google-Maps fragment with one traffic-prediction
    marker per location near the requested point.

    Args:
        easting, northing: sequences of BNG coordinates; element 0 centres the map.
        mintemp, maxtemp, meantemp, rainfall: weather features fed to the model.
        date: 'YYYY-mm-dd' string supplying the year/month/day model features.

    Returns:
        str: HTML containing the map div, marker script, and the Maps API loader.
    """
    # BUG FIX: format was '%Y-%M-%d' — %M parses minutes, so date.month was
    # always 1 regardless of input; %m parses the month as intended.
    date = datetime.datetime.strptime(date, '%Y-%m-%d')
    markers = ''
    # Map centre as ([lon], [lat]) from the first BNG pair.
    centre = convert_lonlat([int(easting[0])], [int(northing[0])])
    it = 0
    for point in self.get_locations(easting, northing, 5000):
        it += 1
        #print("POINT:", point)
        bng = self.traffic_locations[point]
        #print("BNG:", bng)
        coords = convert_lonlat([int(bng[0])], [int(bng[1])])
        # Rows matching this BNG point in the traffic and category tables.
        target = self.traffic.loc[(self.traffic['S Ref E'] == bng[0]) & (
            self.traffic['S Ref N'] == bng[1])].iloc[0]
        mapping = self.category_map.loc[
            (self.category_map['S Ref E'] == bng[0])
            & (self.category_map['S Ref N'] == bng[1])].iloc[0]
        # Feature frame: one-hot road/category columns plus date and weather.
        the_frame = self.generic_frame.copy()
        the_frame[target.loc['Road']] = 1
        the_frame[mapping.loc['RCat']] = 1
        the_frame['year'] = date.year
        the_frame['month'] = date.month
        the_frame['day'] = date.day
        the_frame['max_temp'] = maxtemp
        the_frame['min_temp'] = mintemp
        the_frame['mean_temp'] = meantemp
        the_frame['rainfall'] = rainfall
        prediction = int(self.model.predict(the_frame)[0])
        #print("COORDS:", coords)
        # Emit the JS for this marker, its info window, and the click handler.
        markers += 'var marker{it} = new google.maps.Marker({{ position: {{ lat: {lat}, lng: {lng} }}, map: map, icon: {icon} }});'.format(
            lat=coords[1][0], lng=coords[0][0], it=it,
            icon=self.get_marker(prediction))
        markers += 'var content{it} = \'<div id="content{it}"> <H1 class="infoTitle"> {name} </H1> <div class="bodyContent"> <p><b>Car Count: </b> {cars}</p> </div> </div>\';'.format(
            it=it, name=target.loc['Road'], cars=prediction)
        #markers += 'var content{it} = \'stufff herer \';'.format(it=it)
        markers += 'var infowindow{it} = new google.maps.InfoWindow({{content: content{it}}});'.format(
            it=it)
        markers += 'marker{it}.addListener("click",function(){{infowindow{it}.open(map,marker{it});}});'.format(
            it=it)
    # NOTE(review): template kept on one line to preserve the exact string
    # bytes; original line structure was lost in the source formatting.
    google_map = ''' <h3>Traffic Points</h3> <!--The div element for the map --> <div id="map"></div> <script> // Initialize and add the map function initMap() {{ // The map, centered at target location var map = new google.maps.Map( document.getElementById('map'), {{zoom: {zoom}, center: {{ lat: {centre_lat}, lng: {centre_lon} }} }}); {markers} }} </script> <script async defer src="https://maps.googleapis.com/maps/api/js?callback=initMap"> </script> '''.format(
        height='900px', width='100%', zoom=9, centre_lon=centre[0][0],
        centre_lat=centre[1][0], markers=markers)
    return google_map
''' Converting location coordinates from (eastings, northings) to (lat, long) '''
import pandas as pd
import numpy as np
from convertbng.util import convert_lonlat

# Row indices of the reduced subset of locations written out separately.
idx = [
    0, 3, 6, 9, 10, 12, 13, 14, 15, 16, 22, 28, 35, 36, 37, 41, 45, 46, 48,
    49, 50, 51, 53, 55, 58, 59, 63, 64, 65, 66, 67, 69, 70, 75, 76, 77, 78,
    80, 82, 85, 86, 88, 92, 93, 94, 96, 98, 100, 102, 104, 105, 106, 107,
    108, 109, 110
]

df_coord = pd.read_csv("Data/Model Data - Coordinates.csv")
eastings = df_coord["Average of Easting"]
northings = df_coord["Average of Northing"]

# Pair up the converted longitudes and latitudes row by row.
lons, lats = convert_lonlat(eastings, northings)
list_lon_lat = np.array(list(zip(lons, lats)))

np.savetxt("Data/coordinates.csv", list_lon_lat, delimiter=',')
np.savetxt(f"Data/coordinates-{len(idx)}loc.csv", list_lon_lat[idx],
           delimiter=',')
[386800, 811210], [394200, 808100], [385000, 801100], [393100, 808000], [392640, 804000], [394000, 808500], [392400, 807600], [391102, 806220], [393800, 806000], [383400, 800930], [393170, 800230], [394740, 812500], [382860, 812325], [384070, 806365], [394220, 804670], [394510, 805770], [391050, 806950], [392440, 807260], [391610, 807330], [391100, 809200], [392800, 803800], [394220, 801500], [393020, 803350], [389000, 802770], [389500, 806670], [389100, 809800], [388800, 813900], [394000, 805580], [394610, 806300], [389000, 806640], [390000, 806680], [392000, 806800], [393000, 806910], [394000, 810990], [391722, 810505], [392880, 804000], [393640, 807320], [394240, 806750], [392000, 808000], [393050, 806000], [394350, 805030], [386000, 806180]] eastings, northings = list(zip(*coords)) res_list_en = convert_lonlat(eastings, northings) with open("lat_lng.csv", "a") as outfile: for x in range(0, 57): outfile.write( str(res_list_en[0][x]) + "," + str(res_list_en[1][x]) + "\n")
# NOTE(review): chunk starts mid-script — list_ and df are defined above this
# view, and this append is likely the tail of a per-file loading loop.
list_.append(df)
frame = pd.concat(list_)
frame.to_csv('cyclingdataframe.csv')
# Keep only the columns needed for the cycling analysis.
frame2 = frame[[
    'Accident Ref.', 'Borough', 'Easting', 'Northing', 'Vehicle Type'
]].copy()
df = frame2
# Restrict to pedal-cycle incidents.
onlypedals = df[df['Vehicle Type'].str.contains('Pedal')]
onlypedals.to_csv('/users/charlotte1/documents/cfgproject/datanew.csv')
eastings = onlypedals['Easting']
northings = onlypedals['Northing']
# converted_coords is (lons, lats).
converted_coords = convert_lonlat(eastings, northings)
# NOTE(review): rows are labelled Y=longitudes, X=latitudes — the labels look
# swapped relative to the usual X=lon/Y=lat convention; verify downstream use.
df = pd.DataFrame(list(converted_coords), index=['Y', 'X'])
df2 = df.T
df2.to_csv('convertedcoords.csv')
# Re-read and join the raw and converted data side by side.
a = pd.read_csv('/users/charlotte1/documents/cfgproject/datanew.csv')
b = pd.read_csv('/users/charlotte1/documents/cfgproject/convertedcoords.csv')
data2 = pd.concat([a, b], axis=1)
data2.to_csv('finaldata.csv')
# Base map centred on London.
cyclemap = folium.Map(location=[51.5074, -0.1278],
                      tiles='CartoDB positron',
                      zoom_start=11,
                      width=600,
                      height=400)