def extract_locations(self):
    """Preprocess the location data."""
    default_pos_columns = common_cfg.coord_col_names
    if set(default_pos_columns).issubset(set(self._raw_data.columns)):
        print('Location data found')

        # check and drop units outside the provided city boundary
        geometry = [shapely.geometry.Point(xy) for xy in zip(
            self._raw_data[default_pos_columns[0]],   # Long
            self._raw_data[default_pos_columns[1]])]  # Lat
        b_within_boundary = np.array(list(map(
            lambda p: p.within(self.model_city.convhull), geometry)))

        if not all(b_within_boundary):
            print('%s -- dropping %i units outside city.' % (
                self.servicetype, sum(np.bitwise_not(b_within_boundary))))
            self._raw_data = self._raw_data.iloc[
                b_within_boundary, :].reset_index()

        # store geolocations as geopy Points (note the lat, long order)
        locations = [geopy.Point(yx) for yx in zip(
            self._raw_data[default_pos_columns[1]],   # Lat
            self._raw_data[default_pos_columns[0]])]  # Long

        property_data = self._raw_data.drop(default_pos_columns, axis=1)
    else:
        raise NotImplementedError('Locations not found - not implemented!')

    return property_data, locations
import copy

import geopy
import geopy.distance


def addTriangle(gwlat, gwlon, bearing, distance, features):
    """Append a narrow triangular GeoJSON polygon (0.5 degree beamwidth)
    reaching from the gateway along `bearing` for `distance` metres."""
    if distance < 1:
        return
    origin = geopy.Point(gwlat, gwlon)
    # one edge of the beam: half the 0.5 degree beamwidth to the left
    destination = geopy.distance.distance(
        kilometers=distance / 1000.0).destination(
            origin, (360 + bearing - (0.5 / 2.0)) % 360)
    lat, lon = destination.latitude, destination.longitude
    # the other edge: half the beamwidth to the right
    destination = geopy.distance.distance(
        kilometers=distance / 1000.0).destination(
            origin, (360 + bearing + (0.5 / 2.0)) % 360)
    lat2, lon2 = destination.latitude, destination.longitude

    feature = {}
    feature["type"] = "Feature"
    feature["geometry"] = {}
    feature["geometry"]["type"] = "Polygon"
    # GeoJSON coordinates are (lon, lat); the ring closes back at the origin
    feature["geometry"]["coordinates"] = [[[gwlon, gwlat],
                                           [round(lon, 6), round(lat, 6)],
                                           [round(lon2, 6), round(lat2, 6)],
                                           [gwlon, gwlat]]]
    features.append(copy.deepcopy(feature))
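# A minimal usage sketch for addTriangle, assuming made-up gateway
# coordinates and bearings: collect the triangles into a GeoJSON
# FeatureCollection and write them to a file.
import json

features = []
addTriangle(52.5200, 13.4050, bearing=90.0, distance=250.0, features=features)
addTriangle(52.5200, 13.4050, bearing=180.0, distance=120.0, features=features)
with open('beams.geojson', 'w') as f:
    json.dump({"type": "FeatureCollection", "features": features}, f)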
import fiona
from itertools import chain

import geopy.distance

# NOTE: geopy.distance.vincenty is deprecated in recent geopy releases;
# geopy.distance.geodesic is the recommended replacement.
distance = geopy.distance.vincenty

# Only the last assignment takes effect; earlier paths kept for reference.
#baseFile = 'data/alameda-tracts-2000/alameda-tracts-2000'
#baseFile = 'data/Oakland_parcels/parcels'
#baseFile = 'data/Oakland_parcels_subset2/parcels'  # radius of 1e-3
baseFile = 'data/Oakland_parcels_subset/parcels'  # radius of 1e-2

# compute map bounds
center = (37.8058428, -122.2399758)  # (lat, long), Armenian Church
radius = 1e-3  # should be 0.001

# --- reference recipe: offset a point by d km along bearing b ---
# origin = geopy.Point(lat1, lon1)
# destination = VincentyDistance(kilometers=d).destination(origin, b)
# lat2, lon2 = destination.latitude, destination.longitude
# ---

ll = (center[1] - radius * 10, center[0] - radius)
ur = (center[1] + radius * 10, center[0] + radius)
#shp = fiona.open(baseFile + '.shp')
#bds = shp.bounds
#shp.close()
extra = 0.001
#ll = (bds[0], bds[1])
#ur = (bds[2], bds[3])
coords = list(chain(ll, ur))
w, h = coords[2] - coords[0], coords[3] - coords[1]
import time
from datetime import datetime

from pymongo import MongoClient
import geopy
from geopy.distance import VincentyDistance

client = MongoClient()
db = client.testDB
recordsFutian = []

# Bounding box: 2 km in each cardinal direction from the Futian center.
futianCenter = geopy.Point(22.543, 114.055)
northLat = VincentyDistance(kilometers=2).destination(futianCenter, 0).latitude
southLat = VincentyDistance(kilometers=2).destination(futianCenter, 180).latitude
eastLon = VincentyDistance(kilometers=2).destination(futianCenter, 90).longitude
westLon = VincentyDistance(kilometers=2).destination(futianCenter, 270).longitude
print(northLat, southLat, eastLon, westLon)

with open('./confidentialData/GPS_2016_01_02', 'rb') as file:
    while True:
        entry = file.readline().decode('utf-8')
        if entry == "":
            break
        else:
            infoArray = entry.split(',')
            # 01. license plate
            # TODO: write regular expressions to check the data validity
            try:
                licensePlate = infoArray[0]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys

import geopy
import geopy.distance

# Accept a comma as the decimal separator in the command-line coordinates.
pt1 = geopy.Point(sys.argv[1].replace(",", "."), sys.argv[2].replace(",", "."))
pt2 = geopy.Point(sys.argv[3].replace(",", "."), sys.argv[4].replace(",", "."))
dist = geopy.distance.distance(pt1, pt2).km
print(dist)
def test_centroid(igroup):
    assert igroup.centroid('abc') == geopy.Point(1, 1)
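# A minimal sketch of the kind of centroid helper the test above assumes.
# The `igroup` fixture and its grouping scheme are not shown in this snippet,
# so the class below is hypothetical:
import geopy


class ItemGroup:
    def __init__(self, groups):
        # groups maps a key to a list of geopy.Point objects
        self.groups = groups

    def centroid(self, key):
        pts = self.groups[key]
        # arithmetic mean of lat/lon; adequate for small, non-polar clusters
        lat = sum(p.latitude for p in pts) / len(pts)
        lon = sum(p.longitude for p in pts) / len(pts)
        return geopy.Point(lat, lon)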
vehicle.simple_goto(targetLocation, groundspeed=1)
while vehicle.mode.name == "GUIDED":  # stop action if we are no longer in guided mode
    currentPoint = geopy.Point(vehicle.location.global_relative_frame.lat,
                               vehicle.location.global_relative_frame.lon)
    remainingDistance = VincentyDistance(currentPoint, targetPoint).meters
    print("Distance to target: ", remainingDistance)
    if remainingDistance <= 1.0:  # near enough
        print("Reached target")
        break
    time.sleep(2)

print("home heading: ", vehicle.heading)
homeHeading = vehicle.heading
homePoint = geopy.Point(vehicle.location.global_relative_frame.lat,
                        vehicle.location.global_relative_frame.lon)

# determine the marker locations
frontRightMarker = VincentyDistance(
    meters=int(5 / sin(radians(45)))).destination(
        homePoint, (homeHeading + 45) % 360)
frontLeftMarker = VincentyDistance(meters=10).destination(
    frontRightMarker, (homeHeading - 90) % 360)
backLeftMarker = VincentyDistance(meters=10).destination(
    frontLeftMarker, homeHeading)
backRightMarker = VincentyDistance(meters=10).destination(
    backLeftMarker, (homeHeading + 90) % 360)

targetAltitude = 3
arm_and_takeoff(targetAltitude)
car_distance = 0.5  # km
gpx_tag_attribute = {}
directory_path = "/Users/TommyChang/Desktop/So Simple Mobility Simulation/src/examples/SSMS/exp/trajectories/"  # will be appended with _0.gpx, _1.gpx, ...
first_car_id = 10000  # used as the file name; the second car will be first_car_id + 1, etc.
# User input end -------------------------------------------------

gpx_list = []
bearing = calculate_bearing(
    (direction_coord["lat"], direction_coord["lng"]),
    (first_car_start_coord["lat"], first_car_start_coord["lng"]))

for i in range(0, car_count):
    # place car i at car_distance * i km from the first car along the bearing
    dist = geopy.distance.distance(kilometers=car_distance * i)
    coord = dist.destination(point=geopy.Point(first_car_start_coord["lat"],
                                               first_car_start_coord["lng"]),
                             bearing=bearing)
    print(coord)
    start_coord = {"lat": coord[0], "lng": coord[1]}
    t = Track(
        start_time=start_time,
        end_time=end_time,
        start_coord=start_coord,
        direction_coord=direction_coord,
        trkpt_time_interval=trkpt_time_interval,  # seconds
        moving_speed=moving_speed,  # km/h
        bearing=None)
    gpx_list.append(t.create_gpx(gpx_tag_attribute=gpx_tag_attribute))
seis = read(stalist[s], format='PICKLE')
if clean:
    for tr in seis.select(channel='BX*'):
        seis.remove(tr)
        print('delete channel ' + tr.stats['channel'])
else:
    for tr in seis.select(channel='BX90*'):
        seis.remove(tr)
        print('delete channel ' + tr.stats['channel'])

# While we are at it: there is a mistake in data_processing_2 where the stats
# of seis[0] (vertical component) get overwritten by those of a horizontal
# component. Fixing this here.
seis[0].stats['channel'] = 'BHZ'
# print(seis[0].stats['channel'])

# Calculate receiver at 90 degrees distance
origin = geopy.Point(srcdict['latitude'], srcdict['longitude'])
destination = VincentyDistance(
    kilometers=90 * (6371 * np.pi / 180.)).destination(
        origin, seis[0].stats['az'])
receiver = instaseis.Receiver(latitude=destination.latitude,
                              longitude=destination.longitude,
                              network=seis[0].stats['network'],
                              station=seis[0].stats['station'])

eventtime = seis[0].stats['eventtime']
starttime = seis[0].stats['starttime']
endtime = seis[0].stats['endtime']
st = db.get_seismograms(source=source,
                        receiver=receiver,
                        kind='displacement',
                        dt=0.1)
def index(self, document): """ The method that actually performs the indexing. :param document: The document as a memory file. """ from django.contrib.gis.geos import ( Point, LineString, MultiLineString) from obspy import read_events # Collect all indices in a list. Each index has to be a dictionary. indices = [] inv = read_events(document, format="quakeml") for event in inv: if event.origins: org = event.preferred_origin() or event.origins[0] else: org = None if event.magnitudes: mag = event.preferred_magnitude() or event.magnitudes[0] else: mag = None has_focal_mechanism = False has_moment_tensor = False if event.focal_mechanisms: has_focal_mechanism = True if any(mt for mt in event.focal_mechanisms): has_moment_tensor = True # Parse attributes in the baynet namespace. # The public attribute defaults to None, it can only be set to # True by utilizing the baynet namespace as of now. extra = event.get("extra", {}) if "public" in extra: public = extra["public"]["value"] if public.lower() in ["false", "f"]: public = False elif public.lower() in ["true", "t"]: public = True else: public = None else: public = None if "evaluationMode" in extra: evaluation_mode = extra["evaluationMode"]["value"] else: evaluation_mode = None # parse horizontal uncertainties if org and org.origin_uncertainty: org_unc = org.origin_uncertainty if org_unc.preferred_description == 'horizontal uncertainty': horizontal_uncertainty_max = org_unc.horizontal_uncertainty horizontal_uncertainty_min = org_unc.horizontal_uncertainty horizontal_uncertainty_max_azimuth = 0 elif org_unc.preferred_description == 'uncertainty ellipse': horizontal_uncertainty_max = \ org_unc.max_horizontal_uncertainty horizontal_uncertainty_min = \ org_unc.min_horizontal_uncertainty horizontal_uncertainty_max_azimuth = \ org_unc.azimuth_max_horizontal_uncertainty else: horizontal_uncertainty_max = None horizontal_uncertainty_min = None horizontal_uncertainty_max_azimuth = None else: horizontal_uncertainty_max = None horizontal_uncertainty_min = None horizontal_uncertainty_max_azimuth = None geometry = None if org: geometry = [Point(org.longitude, org.latitude)] if all(value is not None for value in ( horizontal_uncertainty_max, horizontal_uncertainty_min, horizontal_uncertainty_max_azimuth)): import geopy import geopy.distance start = geopy.Point(latitude=org.latitude, longitude=org.longitude) lines = [] for distance, azimuth in ( (horizontal_uncertainty_max, horizontal_uncertainty_max_azimuth), (horizontal_uncertainty_min, horizontal_uncertainty_max_azimuth + 90)): azimuth = azimuth % 180 distance = geopy.distance.VincentyDistance( kilometers=distance / 1e3) end1 = distance.destination( point=start, bearing=azimuth) end2 = distance.destination( point=start, bearing=azimuth + 180) line = LineString((end1.longitude, end1.latitude), (org.longitude, org.latitude), (end2.longitude, end2.latitude)) lines.append(line) geometry.append(MultiLineString(lines)) else: geometry.append(MultiLineString([])) # phase counts used_phase_count = None used_p = None used_s = None if org: if org.quality: used_phase_count = org.quality.used_phase_count if org.quality and org.quality.get('extra'): extra = org.quality.get('extra', {}) used_p = extra.get( 'usedPhaseCountP', {}).get('value', None) used_s = extra.get( 'usedPhaseCountS', {}).get('value', None) if used_p is not None: used_p = int(used_p) if used_s is not None: used_s = int(used_s) # set first/last pick times first_pick_time = None last_pick_time = None if event.picks: pick_times = [ pick.time for pick in 
event.picks if pick.time is not None] if pick_times: first_pick_time = str(min(pick_times)) last_pick_time = str(max(pick_times)) indices.append({ "quakeml_id": str(event.resource_id), "latitude": org.latitude if org else None, "longitude": org.longitude if org else None, "depth_in_m": org.depth if org else None, "origin_time": str(org.time) if org else None, "first_pick_time": first_pick_time, "last_pick_time": last_pick_time, "used_phase_count": used_phase_count, "used_p": used_p, "used_s": used_s, "magnitude": mag.mag if mag else None, "magnitude_type": mag.magnitude_type if mag else None, "agency": event.creation_info and event.creation_info.agency_id or None, "author": event.creation_info and event.creation_info.author or None, "public": public, "evaluation_mode": evaluation_mode, "event_type": event.event_type, "has_focal_mechanism": has_focal_mechanism, "has_moment_tensor": has_moment_tensor, # The special key geometry can be used to store geographic # information about the indexes geometry. Useful for very # fast queries using PostGIS. "geometry": geometry, "horizontal_uncertainty_max": horizontal_uncertainty_max, "horizontal_uncertainty_min": horizontal_uncertainty_min, "horizontal_uncertainty_max_azimuth": horizontal_uncertainty_max_azimuth, }) return indices
#import utm
import math

import geopy
import geopy.distance
from geographiclib.geodesic import Geodesic

#FRASCA_WSG86_500POINT_LAT = 64.93166666666667
#FRASCA_WSG86_500POINT_LON = 25.359166666666667
FRASCA_WSG86_500POINT_LAT = 64.931388
FRASCA_WSG86_500POINT_LON = 25.375800

point0 = geopy.Point(FRASCA_WSG86_500POINT_LAT, FRASCA_WSG86_500POINT_LON)


def from_frasca(x, y, force_point0=False):
    """Convert Frasca grid coordinates (nautical miles, origin at (500, 500))
    to a WGS84 geopy.Point."""
    global point0
    dx = x - 500.0
    dy = y - 500.0
    dc = math.sqrt(pow(dy, 2) + pow(dx, 2))
    alpha = 0 if dy == 0.0 else math.atan(abs(dx) / abs(dy))
    # quadrant-aware bearing in radians, measured clockwise from north
    heading = (alpha if dy > 0 and dx >= 0 else
               (math.pi - alpha if dy <= 0 and dx > 0 else
                (alpha + math.pi if dy < 0 and dx <= 0 else
                 (2 * math.pi - alpha if dy >= 0 and dx < 0 else -1))))
    dist = geopy.distance.geodesic(nautical=dc)
    point1 = dist.destination(point=(force_point0 if force_point0 else point0),
                              bearing=heading * (180.0 / math.pi))
    return point1
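# A brief usage sketch with made-up Frasca grid coordinates: a point 10 nm
# east and 20 nm north of the (500, 500) reference point.
pt = from_frasca(510.0, 520.0)
print(pt.latitude, pt.longitude)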
def create_heatmap(list_of_lat_longs): ''' This function takes a list of tuples as a paremeter, I think thats what its called we don't particularly care here about the points themselves, that is handled by the placemarks as long as we keep them in pairs, we don't need to preserve any other information about them ''' #1. Create the square lats = [] longs = [] for p in list_of_lat_longs: lats.append(p[0]) longs.append(p[1]) lats.sort() longs.sort() max_lat = lats[-1] min_lat = lats[0] max_long = longs[-1] min_long = longs[0] #Start in SW corner (-,-) STEP_AMOUNT = 1#length of each square steps_right = 0 current_lat = float(min_lat) current_long = float(min_long) dist_xy = geopy.distance.geodesic(miles = STEP_AMOUNT) dist_z = geopy.distance.geodesic(miles = (math.sqrt(2)*STEP_AMOUNT)) dist_c = geopy.distance.geodesic(miles = (math.sqrt(2)*STEP_AMOUNT)/2)#sqrt(2n)/2 grids = [] # Find all the grid points while current_long < max_long and current_long >= min_long: #Since we always go to min_long I need = here if current_lat < max_lat: t_grid = [0,0,0,0] start = geopy.Point(current_lat, current_long) #Starting point at current_lat and lon dest_up = dist_xy.destination(start,0) # move upwards dest_right = dist_xy.destination(start,90) #move right dest_corner = dist_z.destination(start,45) #move diagonally t_grid[0] = (current_lat, current_long) t_grid[1] = (dest_up.latitude,dest_up.longitude) t_grid[2] = (dest_corner.latitude,dest_corner.longitude) t_grid[3] = (dest_right.latitude,dest_right.longitude) current_lat = dest_up.latitude grids.append(t_grid) elif current_lat > max_lat: steps_right +=1 current_lat = min_lat origin = geopy.Point(min_lat, min_long) dist_x = geopy.distance.geodesic(miles = STEP_AMOUNT*steps_right) dest_right = dist_x.destination(origin,90) current_long = dest_right.longitude center_points = [] #find all centers for grid in grids: corner = geopy.Point(grid[0][0], grid[0][1]) center = dist_c.destination(corner,45) center_points.append((center.latitude,center.longitude)) #print(center_points) #calculate a_values for each center R_FACTOR = 6 a_vals = [] for center in center_points: a_val = 0 for point in list_of_lat_longs: distance = geodesic(center,point).miles a_point_val = math.exp(-1*(distance**2/(R_FACTOR*STEP_AMOUNT))) a_val += a_point_val a_vals.append(a_val) print(a_val) #a_vals_ordered = a_vals #a_vals_ordered.sort() #max_a_val = a_vals_ordered[-1] #for x in range(len(a_vals)): # a_vals[x] = a_vals[x]/max_a_val #get a value between 0 and 1 export_file = open('test.kml','w') export_file.write('<?xml version="1.0" encoding="UTF-8"?>\n') export_file.write('<kml xmlns="http://earth.google.com/kml/2.0">\n') export_file.write(' <Document>\n') export_file.write(' <Style id="red">\n') export_file.write(' <PolyStyle>\n') export_file.write(' <color>6f0000ff</color>\n') export_file.write(' <outline>0</outline>\n') export_file.write(' </PolyStyle>\n') export_file.write(' </Style>\n') export_file.write(' <Style id="yellow">\n') export_file.write(' <PolyStyle>\n') export_file.write(' <color>6f00ffff</color>\n') export_file.write(' <outline>0</outline>\n') export_file.write(' </PolyStyle>\n') export_file.write(' </Style>\n') export_file.write(' <Style id="green">\n') export_file.write(' <PolyStyle>\n') export_file.write(' <color>6f00ff00</color>\n') export_file.write(' <outline>0</outline>\n') export_file.write(' </PolyStyle>\n') export_file.write(' </Style>\n') export_file.write(' <Style id="blue">\n') export_file.write(' <PolyStyle>\n') export_file.write(' 
<color>6fff0000</color>\n') export_file.write(' <outline>0</outline>\n') export_file.write(' </PolyStyle>\n') export_file.write(' </Style>\n') export_file.write(' <Style id="black">\n') export_file.write(' <PolyStyle>\n') export_file.write(' <color>00000000</color>\n') export_file.write(' <outline>0</outline>\n') export_file.write(' </PolyStyle>\n') export_file.write(' </Style>\n') counter = 0 for g in grids: #print(g) corners = g bot_left_s = '' top_left_s = '' top_right_s = '' bot_right_s = '' for i in reversed(corners[0]): bot_left_s = bot_left_s+str(i)+',' for i in reversed(corners[1]): top_left_s = top_left_s+str(i)+',' for i in reversed(corners[2]): top_right_s = top_right_s+str(i)+',' for i in reversed(corners[3]): bot_right_s = bot_right_s+str(i)+',' bot_left = ' '+bot_left_s+'0\n' top_left = ' '+top_left_s+'0\n' top_right = ' '+top_right_s+'0\n' bot_right = ' '+bot_right_s+'0\n' i = random.randint(1,5) export_file.write(' <Placemark>\n') if a_vals[counter] <= .1: export_file.write(' <styleUrl>#black</styleUrl>\n') elif a_vals[counter] <= .8: export_file.write(' <styleUrl>#blue</styleUrl>\n') elif a_vals[counter] <= 1.2: export_file.write(' <styleUrl>#green</styleUrl>\n') elif a_vals[counter] <= 1.4: export_file.write(' <styleUrl>#yellow</styleUrl>\n') else: export_file.write(' <styleUrl>#red</styleUrl>\n') export_file.write(' <Polygon> <outerBoundaryIs> <LinearRing> \n') export_file.write(' <coordinates>\n') export_file.write(bot_left) export_file.write(bot_right) export_file.write(top_right) export_file.write(top_left) export_file.write(bot_left) export_file.write(' </coordinates>\n') export_file.write(' </LinearRing> </outerBoundaryIs> </Polygon>\n') export_file.write(' </Placemark>\n') counter+=1 export_file.write(' </Document>\n') export_file.write('</kml>') export_file.close()
def subset(self, query): # Ensure we have an output folder that will be cleaned by tmpreaper if not os.path.isdir("/tmp/subset"): os.makedirs("/tmp/subset") working_dir = "/tmp/subset/" entire_globe = True # subset the globe? if 'min_range' in query: # Area explicitly specified entire_globe = False # Bounding box extents bottom_left = [float(x) for x in query.get('min_range').split(',')] top_right = [float(x) for x in query.get('max_range').split(',')] # Time range try: # Time is an index into timestamps array time_range = [int(x) for x in query.get('time').split(',')] except ValueError: # Time is in ISO 8601 format and we need the dataset quantum quantum = query.get('quantum') if quantum == 'day' or quantum == 'hour': def find_time_index(isoDate: datetime.datetime): for idx, date in enumerate(self.timestamps): # Only compare year, month, day. # Some daily/hourly average datasets have an # hour and minute offset that messes up # the index search. if date.date() == isoDate.date(): return idx else: def find_time_index(isoDate: datetime.datetime): for idx, date in enumerate(self.timestamps): # Only compare year and month if date.date().year == isoDate.date().year and \ date.date().month == isoDate.date().month: return idx time_range = [ dateutil.parser.parse(x) for x in query.get('time').split(',') ] time_range = [find_time_index(x) for x in time_range] apply_time_range = False if time_range[0] != time_range[1]: apply_time_range = True # Finds a variable in a dictionary given a substring containing common characters. # Don't use regex here since compiling a new pattern every call WILL add huge overhead. # This is guaranteed to be the fastest method. def find_variable(substring: str, variables: list): for key in variables: if substring in key: return key return None variable_list = list(self._dataset.variables.keys()) # Get lat/lon variable names from dataset (since they all differ >.>) lat_var = find_variable("lat", variable_list) lon_var = find_variable("lon", variable_list) depth_var = find_variable("depth", variable_list) if not entire_globe: # Find closest indices in dataset corresponding to each calculated point ymin_index, xmin_index, _ = find_nearest_grid_point( bottom_left[0], bottom_left[1], self._dataset, self._dataset.variables[lat_var], self._dataset.variables[lon_var]) ymax_index, xmax_index, _ = find_nearest_grid_point( top_right[0], top_right[1], self._dataset, self._dataset.variables[lat_var], self._dataset.variables[lon_var]) # Compute min/max for each slice in case the values are flipped # the netCDF4 module does not support unordered slices y_slice = slice(min(ymin_index, ymax_index), max(ymin_index, ymax_index)) x_slice = slice(min(xmin_index, xmax_index), max(xmin_index, xmax_index)) # Get nicely formatted bearings p0 = geopy.Point(bottom_left) p1 = geopy.Point(top_right) else: y_slice = slice(self._dataset.variables[lat_var].size) x_slice = slice(self._dataset.variables[lon_var].size) p0 = geopy.Point([-85.0, -180.0]) p1 = geopy.Point([85.0, 180.0]) # Get timestamp time_var = find_variable("time", variable_list) timestamp = str( format_date( pandas.to_datetime( np.float64(self._dataset[time_var][time_range[0]].values)), "yyyyMMdd")) endtimestamp = "" if apply_time_range: endtimestamp = "-" + str( format_date( pandas.to_datetime( np.float64( self._dataset[time_var][time_range[1]].values)), "yyyyMMdd")) dataset_name = query.get('dataset_name') # Figure out coordinate dimension names if "riops" in dataset_name: lon_coord = "xc" lat_coord = "yc" elif dataset_name == 
"giops_forecast": lon_coord = "longitude" lat_coord = "latitude" else: lon_coord = "x" lat_coord = "y" # Do subset along coordinates subset = self._dataset.isel(**{lat_coord: y_slice, lon_coord: x_slice}) # Select requested time (time range if applicable) if apply_time_range: subset = subset.isel(**{ time_var: slice(int(time_range[0]), int(time_range[1]) + 1) }) # slice doesn't include the last element else: subset = subset.isel( ** {time_var: slice(int(time_range[0]), int(time_range[0]) + 1)}) # Filter out unwanted variables output_vars = query.get('variables').split(',') output_vars.extend([depth_var, time_var, lat_var, lon_var]) # Keep the coordinate variables for variable in subset.data_vars: if variable not in output_vars: subset = subset.drop(variable) output_format = query.get('output_format') filename = dataset_name + "_" + "%dN%dW-%dN%dW" % (p0.latitude, p0.longitude, p1.latitude, p1.longitude) \ + "_" + timestamp + endtimestamp + "_" + output_format # "Special" output if output_format == "NETCDF3_NC": # Regrids an input data array according to it's input grid definition # to the output definition def regrid(data: np.ndarray, input_def: pyresample.geometry.SwathDefinition, output_def: pyresample.geometry.SwathDefinition): data = np.rollaxis(data, 0, 4) # Roll time axis backward data = np.rollaxis(data, 0, 4) # Roll depth axis backward data = data.reshape([data.shape[0], data.shape[1], -1]) # Merge time + depth axis together # Perform regridding using nearest neighbour weighting regridded = pyresample.kd_tree.resample_nearest( input_def, data, output_def, 50000, fill_value=None, nprocs=8) return np.moveaxis(regridded, -1, 0) # Move merged axis back to front GRID_RESOLUTION = 50 # Check lat/lon wrapping lon_vals, lat_vals = pyresample.utils.check_and_wrap( lons=subset[lon_var].values, lats=subset[lat_var].values) # Generate our lat/lon grid of 50x50 resolution min_lon, max_lon = np.amin(lon_vals), np.amax(lon_vals) min_lat, max_lat = np.amin(lat_vals), np.amax(lat_vals) XI = np.linspace(min_lon, max_lon, num=GRID_RESOLUTION, dtype=lon_vals.dtype) YI = np.linspace(min_lat, max_lat, num=GRID_RESOLUTION, dtype=lat_vals.dtype) XI_mg, YI_mg = np.meshgrid(XI, YI) # Define input/output grid definitions input_def = pyresample.geometry.SwathDefinition(lons=lon_vals, lats=lat_vals) output_def = pyresample.geometry.SwathDefinition(lons=XI_mg, lats=YI_mg) # Find correct variable names in subset temp_var = find_variable('temp', subset.variables) saline_var = find_variable('salin', subset.variables) x_vel_var = find_variable('crtx', subset.variables) y_vel_var = find_variable('crty', subset.variables) # Create file time_range = len(subset[time_var][:]) - 1 filename = dataset_name.upper() + "_" + \ datetime.date.today().strftime("%Y%m%d") +"_d0" + \ (("-"+str(time_range)) if time_range > 0 else "") + "_" + \ str(np.round(top_right[0]).astype(int)) + "N" + str(np.abs(np.round(bottom_left[1]).astype(int))).zfill(3) + "W" + \ str(np.round(bottom_left[0]).astype(int)) + "N" + str(np.abs(np.round(top_right[1])).astype(int)).zfill(3) + "W" + \ "_" + output_format ds = netCDF4.Dataset(working_dir + filename + ".nc", 'w', format="NETCDF3_CLASSIC") ds.description = "Converted " + dataset_name ds.history = "Created: " + str(datetime.datetime.now()) ds.source = "www.navigator.oceansdata.ca" # Create the netcdf dimensions ds.createDimension('lat', GRID_RESOLUTION) ds.createDimension('lon', GRID_RESOLUTION) ds.createDimension('time', len(subset[time_var][:])) # Create the netcdf variables and assign the values 
latitudes = ds.createVariable('lat', 'd', ('lat', )) longitudes = ds.createVariable('lon', 'd', ('lon', )) latitudes[:] = YI longitudes[:] = XI # Variable Attributes latitudes.long_name = "Latitude" latitudes.units = "degrees_north" latitudes.NAVO_code = 1 longitudes.long_name = "Longitude" longitudes.units = "degrees_east" longitudes.NAVO_code = 2 # LOL I had CreateDimension vs createDimension here >.< Stumped Clyde too hehe :P ds.createDimension('depth', len(subset[depth_var][:])) levels = ds.createVariable('depth', 'i', ('depth', )) levels[:] = subset[depth_var][:] levels.long_name = "Depth" levels.units = "meter" levels.positive = "down" levels.NAVO_code = 5 if temp_var is not None: origshape = subset[temp_var].shape temp_data = regrid(subset[temp_var].values, input_def, output_def) temp_data = np.reshape(temp_data, (origshape[0], origshape[1], GRID_RESOLUTION, GRID_RESOLUTION)) temp = ds.createVariable('water_temp', 'd', ('time', 'depth', 'lat', 'lon'), fill_value=-30000.0) # Convert from Kelvin to Celcius for i in range(0, len(subset[depth_var][:])): temp[:, i, :, :] = temp_data[:, i, :, :] - 273.15 temp.valid_min = -100.0 temp.valid_max = 100.0 temp.long_name = "Water Temperature" temp.units = "degC" temp.NAVO_code = 15 if saline_var is not None: salinity = ds.createVariable('salinity', 'd', ('time', 'depth', 'lat', 'lon'), fill_value=-30000.0) salinity[:] = regrid( subset[saline_var].values, input_def, output_def )[:] # Note the automatic reshaping by numpy here ^.^ salinity.long_name = "Salinity" salinity.units = "psu" salinity.valid_min = 0.0 salinity.valid_max = 45.0 salinity.NAVO_code = 16 if x_vel_var is not None: x_velo = ds.createVariable('water_u', 'd', ('time', 'depth', 'lat', 'lon'), fill_value=-30000.0) x_velo[:] = regrid(subset[x_vel_var].values, input_def, output_def)[:] x_velo.long_name = "Eastward Water Velocity" x_velo.units = "meter/sec" x_velo.NAVO_code = 17 if y_vel_var is not None: y_velo = ds.createVariable('water_v', 'd', ('time', 'depth', 'lat', 'lon'), fill_value=-30000.0) y_velo[:] = regrid(subset[y_vel_var].values, input_def, output_def)[:] y_velo.long_name = "Northward Water Velocity" y_velo.units = "meter/sec" y_velo.NAVO_code = 18 temp_file_name = working_dir + str(uuid.uuid4()) + ".nc" subset.to_netcdf(temp_file_name) subset.close() # Reopen using netCDF4 to get non-encoded time values subset = netCDF4.Dataset(temp_file_name, 'r') times = ds.createVariable('time', 'i', ('time', )) # Convert time from seconds to hours for i in range(0, len(subset[time_var])): times[i] = subset[time_var][i] / 3600 times.long_name = "Validity time" times.units = "hours since 1950-01-01 00:00:00" times.time_origin = "1950-01-01 00:00:00" ds.close() subset.close() else: # Save subset normally subset.to_netcdf(working_dir + filename + ".nc", format=output_format) if int(query.get('should_zip')) == 1: myzip = zipfile.ZipFile('%s%s.zip' % (working_dir, filename), mode='w') myzip.write('%s%s.nc' % (working_dir, filename), os.path.basename('%s%s.nc' % (working_dir, filename))) myzip.comment = b"Generated from www.navigator.oceansdata.ca" myzip.close() # Must be called to actually create zip return working_dir, filename + ".zip" return working_dir, filename + ".nc"
def run():
    content = ""
    with open('tmp/op.json', 'r') as content_file:
        content = content_file.read()
    content = content.replace("var somepoints = ", "")
    json_data = json.loads(content)

    for way in json_data:
        curr = None
        prev = None
        for latlongs in way['latlongs']:
            if not prev:
                prev = geopy.Point(latlongs["lat"], latlongs["lng"])
                continue
            curr = geopy.Point(latlongs["lat"], latlongs["lng"])
            distance = calculateDistance(prev, curr)
            bearing = bearing_tuple(prev, curr)
            prev = curr
    print("++++++++++++++++++++++++")

    new_json_data = []
    for way in json_data:
        payload = {}
        payload['id'] = way['id']
        payload['latlongs'] = []
        curr = None
        prev = None
        for latlongs in way['latlongs']:
            if not prev:
                prev = geopy.Point(latlongs["lat"], latlongs["lng"])
                payload['latlongs'].append(latlongs)
                continue
            curr = geopy.Point(latlongs["lat"], latlongs["lng"])
            bearing = bearing_tuple(prev, curr)
            distance = int(calculateDistance(prev, curr))
            if distance > min_distance_in_m:
                # densify the segment with intermediate points every
                # min_distance_in_m metres
                np = getNextPoint(prev, bearing)
                payload['latlongs'].append({"lat": np.latitude,
                                            "lng": np.longitude})
                for i in range(int(distance / min_distance_in_m - 1)):
                    np = getNextPoint(np, bearing)
                    payload['latlongs'].append({"lat": np.latitude,
                                                "lng": np.longitude})
            payload['latlongs'].append({"lat": curr.latitude,
                                        "lng": curr.longitude})
            prev = curr
        new_json_data.append(payload)

    with open("tmp/all_road_10m_min_distance.json", "w") as f:
        f.write("var all_road_10m_min_distance = " + json.dumps(new_json_data))
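# `getNextPoint` is not shown in this snippet; a plausible implementation
# under the same assumptions (step min_distance_in_m metres along `bearing`)
# might look like this:
def getNextPoint(pt, bearing):
    return geopy.distance.distance(
        meters=min_distance_in_m).destination(pt, bearing)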
country = 'malawi'
clustering = np.load(os.path.join(out_dir, country, 'clustering.npy'))
counting = np.load(os.path.join(out_dir, country, 'counting.npy'))
nlats = np.load("../data/output/LSMS/malawi/nlats.npy")
nlons = np.load("../data/output/LSMS/malawi/nlons.npy")

nlats_min = np.min(nlats)
nlats_max = np.max(nlats)
nlons_min = np.min(nlons)
nlons_max = np.max(nlons)
lats_dif = nlats_max - nlats_min
lons_dif = nlons_max - nlons_min

# Degrees of latitude/longitude per kilometre at the midpoint of the area,
# used to size a 1 km x 1 km grid.
mid_point = geopy.Point(((nlats_max + nlats_min) / 2),
                        ((nlons_max + nlons_min) / 2))
d = geopy.distance.great_circle(kilometers=1)
onek_lats = mid_point.latitude - d.destination(point=mid_point,
                                               bearing=180).latitude
onek_lons = mid_point.longitude - d.destination(point=mid_point,
                                                bearing=270).longitude
size_y = math.ceil(lats_dif / onek_lats)
size_x = math.ceil(lons_dif / onek_lons)

df = pd.read_csv(os.path.join(out_dir, country, 'data.csv'), index_col='id')
cdl = pd.read_csv(os.path.join(out_dir, country, 'candidate_download_locs.csv'),
                  index_col='name')
startTime = datetime.now()

# Enter the lat/long of the location at the center of the area. We choose a
# square secondary-network area of side Side_secon_net km; lat_S and long_S
# are the latitude and longitude of the chosen city.
Side_secon_net = 70
Side_of_cell = 10
diag_secon_net = ((Side_secon_net**2 + Side_secon_net**2)**0.5) / 2
lat_S = 39.9612
long_S = -82.9988
d = [diag_secon_net] * 4
dis = [400] * 4
num_of_cells = (Side_secon_net**2) / (Side_of_cell**2)

# given: lat1, lon1, b = bearing in degrees, d = distance in kilometers
origin = geopy.Point(lat_S, long_S)

# With the north line across the city, find the bearings of the diagonal
# corners (lower left and upper right) of the square around the city.
bearing_deg = [45, 135, 225, 315]
des = []
des_big = []
for index in range(len(bearing_deg)):
    destination = VincentyDistance(kilometers=d[index]).destination(
        origin, bearing_deg[index])
    destination_big = VincentyDistance(kilometers=dis[index]).destination(
        origin, bearing_deg[index])
    lat2 = destination.latitude
    lon2 = destination.longitude
    latBig = destination_big.latitude
    lonBig = destination_big.longitude
# Radial interpolation of atmospheric variables.
# Repeated for other variables and different radius distances (not all shown
# here). This example is for geopotential heights (z500) within 1,500 km of
# the location.
z500_1500km = np.empty([14880, 1081])
for k in range(8):
    lon = lons_8[k]
    lat = lats_8[k]
    latvec = []
    lonvec = []
    step = steps[k]
    # calculate interpolation grid: 30 radii x 36 azimuths (+ origin) = 1081
    for a in np.arange(50, 1550, 50):  # 50 km radial increments (rho's)
        for b in np.arange(10, 370, 10):  # 10 degree azimuth increments (theta's)
            start = geopy.Point(lat, lon)  # origin
            circ = geopy.distance.distance(kilometers=a)  # great circle distance
            dest = circ.destination(point=start, bearing=b)  # transect along great circle
            latvec.append(dest[0])
            lonvec.append(dest[1])
    latvec = np.hstack([lat, latvec])  # add origin lat
    lonvec = np.hstack([lon, lonvec])  # add origin lon
    # interpolate underlying values using the radial grid
    interp_vals = scipy.interpolate.interpn((merra_lons, merra_lats), z500_ja,
                                            (lonvec, latvec))
    del latvec, lonvec
    interp_vals = np.squeeze(interp_vals)
    z500_1500km[step, :] = interp_vals.T  # interpolated values from r = 1,500 km grid
if len(close_clusters_indices) == 0:
    # create a new cluster
    new_cluster = Cluster(cid=len(clusters), nb_points=1,
                          last_seen=point.timestamp, lat=point.lat,
                          lon=point.lon, angle=point.angle)
    clusters.append(new_cluster)
    roadnet.add_node(new_cluster.cid)
    current_cluster = new_cluster.cid
    # recompute the cluster index
    update_cluster_index = True
else:
    # add the point to the nearest of the close clusters
    pt = geopy.Point(point.get_coordinates())
    close_clusters_distances = [
        geopy.distance.distance(
            pt, geopy.Point(clusters[clu_index].get_coordinates())).meters
        for clu_index in close_clusters_indices
    ]
    closest_cluster_indx = close_clusters_indices[
        close_clusters_distances.index(min(close_clusters_distances))]
    clusters[closest_cluster_indx].add(point)
    current_cluster = closest_cluster_indx

# Adding the edge:
if prev_cluster == -1:
    prev_cluster = current_cluster
    continue
    lon1 = radians(coord1[1])
    lat2 = asin(sin(lat1) * cos(distance / R) +
                cos(lat1) * sin(distance / R) * cos(bearing))
    lon2 = lon1 + atan2(
        sin(bearing) * sin(distance / R) * cos(lat1),
        cos(distance / R) - sin(lat1) * sin(lat2))
    lat2 = degrees(lat2)
    lon2 = degrees(lon2)
    return lat2, lon2


import geopy
import geopy.distance

# Define starting point.
start = geopy.Point(48.853, 2.349)

# Define a general distance object, initialized with a distance of 1 km.
d = geopy.distance.VincentyDistance(kilometers=1)

# Use the `destination` method with a bearing of 0 degrees (which is north)
# in order to go from point `start` 1 km to the north.
print(d.destination(point=start, bearing=0))
print(compute_offset(1, 0, (48.853, 2.349)))

chi = [41.850033, -87.650055]
nyc = [40.714268, -74.005974]
print(haversine(chi, nyc))
# https://www.movable-type.co.uk/scripts/latlong.html
plot_point_ex = plt.gcf()
plot_point_ex.savefig("plot_point_ex.png")

# Clear the current matplotlib plot
plt.clf()
#plt.figure(figsize=(40,20))
#plt.show()


def get_grid_cell_indices(given_lat, given_lon, lat_grid, lon_grid):
    # Euclidean distance in degrees is fine for picking the nearest grid cell.
    distances = np.sqrt((given_lon - lon_grid)**2 + (given_lat - lat_grid)**2)
    #return lat_grid[np.where(distances == np.amin(distances))][0], lon_grid[np.where(distances == np.amin(distances))][0]
    return (np.where(distances == np.amin(distances))[0][0],
            np.where(distances == np.amin(distances))[1][0])


# Define starting point.
start = geopy.Point(storm_lat, storm_lon)

# Define a general distance object, initialized with a distance of 800 km.
radius = 800
d = geopy.distance.VincentyDistance(kilometers=radius)

# Use the `destination` method with the four cardinal bearings to get the
# bounding box `radius` km around the storm center.
north_point = d.destination(point=start, bearing=0)
south_point = d.destination(point=start, bearing=180)
east_point = d.destination(point=start, bearing=90)
west_point = d.destination(point=start, bearing=270)

urcrnrlat = north_point.latitude
urcrnrlon = east_point.longitude
def main(): """Runs the program.""" #ask the user if a test is being run test = is_test() #if it is a test, get any extra constraints from the user if test: type_of_test = decide_test() #will store extra constraints if a test is being run extra_con = [] #read in the databases (each database contains the city name and its #longitude/latitude coordinate). canada = read_files("canada", "Canada Cities.csv") america = read_files("america", "US Cities.csv") # create a list for canadian and american cities canada_cities = [] america_cities = [] for entry in canada: canada_cities.append(entry["city"].lower()) for entry in america: america_cities.append(entry["city"].lower()) #get the raw location from the user and clarify any duplicates to get the #starting and ending city (the countries will of course remain the same) raw_location = raw_location_input(canada_cities, america_cities) start_city, end_city = clarify_duplicates(canada, america, raw_location) start_country = raw_location["starting country"] end_country = raw_location["ending country"] is_urgent = get_urgency() #calculate the total distance between the starting and ending city start_coord = (start_city["latitude"], start_city["longitude"]) end_coord = (end_city["latitude"], end_city["longitude"]) total_dist = calc_distance(start_coord, end_coord) print(str(start_coord) + " " + str(end_coord)) #tell the user the total number of km print("A trip from " + start_city["city"] + ", " + start_city["province/state"] + " to " + end_city["city"] + ", " + end_city["province/state"] + " is " + str(total_dist) + " km long.") #calculate 1/tenth of the distance from the start to the end #the user will be given 10 choices of evenly spaced cities to stop at along the way #they can stop at 0, 1, or multiple; their choice next_dist = total_dist / 10 geodesic = pyproj.Geod(ellps='WGS84') #calculates the initial bearing (fwd_azimuth) and the final bearing fwd_azimuth, back_azimuth, distance = geodesic.inv(start_city["longitude"], start_city["latitude"], end_city["longitude"], end_city["latitude"]) final_bearing = back_azimuth - 180 #Define the starting and ending points. temp_start = geopy.Point(start_city["latitude"], start_city["longitude"]) end = geopy.Point(end_city["latitude"], end_city["longitude"]) start = temp_start #Define a general distance object, initialized with a distance of the stop distance (in km). d = geopy.distance.distance(kilometers=next_dist) #lists that will hold all the stops and the stops that the user chooses, respectively all_stops = [] chosen_stops = [] #define the geolocator geolocator = Nominatim(user_agent="Bing") #loop 10 times (for 10 stops) for i in range(10): # Use the destination method with our starting point and initial bearing # in order to go from our starting point to the next city in the line of stops. 
#finds the next point from the starting point given the bearing #if we are closer to the start, use our initial bearing; otherwise, use the final bearing if (i < 5): final = d.destination(point=temp_start, bearing=fwd_azimuth) else: final = d.destination(point=temp_start, bearing=final_bearing) #finds the location location = geolocator.reverse(str(final)) print(str(i) + ": " + str(location)) #add it to the list of all stops all_stops.append({"location": str(location), "coord": final}) #reset the next starting point temp_start = final #add the starting location to the chosen stops chosen_stops.append({"location": start_city["city"], "coord": start}) user_input = -2 #initizalize #get the user input for the stops they would like and store it in chosen_stops print( "Please enter which stops you would like to take along the way." + "If you are done entering stops, please enter '-1'. If you don't want to take any stops," + " enter -1 right away.") while (user_input != -1): user_input = int(input("Enter your next stop: ")) if (user_input < -1 or user_input > 9): print("Wrong input! Please try again!") else: if (user_input != -1): chosen_stops.append(all_stops[user_input]) #add the ending location to the chosen stops #chosen_stops is now a list of all stops including the start and end chosen_stops.append({"location": end_city["city"], "coord": end}) for i in range(len(chosen_stops) - 1): #calculate the distance between each stop distance = calc_distance(chosen_stops[i]["coord"], chosen_stops[i + 1]["coord"]) print("The distance between " + str(chosen_stops[i]["location"]) + " and " + str(chosen_stops[i + 1]["location"]) + " is " + str(distance) + " km. ") dict_string = str(chosen_stops[i]["location"]) + " to " + str( chosen_stops[i + 1]["location"]) #set up the dictionary and append it to the list entry = {"location": dict_string, "distance": distance} stop_info.append(entry) #loop through every stop for i in range(len(stop_info)): #now that we know the distance, we can calculate the time needed to travel #between each stop with each mode of transportation distance = stop_info[i]["distance"] drive_time = calc_time(distance, "drive") transit_time = calc_time(distance, "transit") plane_time = calc_time(distance, "plane") travel = determine_travel_modes(drive_time, transit_time, plane_time) for mode in travel: print(mode + " from " + stop_info[i]["location"] + ":" + str(travel[mode]) + " hours.") all_modes = [] urgent = {} #determine the FASTEST mode of travel if travel != {}: if "drive" in travel.keys(): all_modes.append(travel["drive"]) if "transit" in travel.keys(): all_modes.append(travel["transit"]) if "plane" in travel.keys(): all_modes.append(travel["plane"]) fastest = min(all_modes) for mode in travel: if travel[mode] <= fastest: urgent[mode] = travel[mode] #add a new key, the dictionary of available travel modes, to the list stop_info[i]["travel"] = travel #do the same with the urgent travel mode stop_info[i]["urgent"] = urgent #reset the travel modes travel = {} urgent = {} #determine if the travel is international or not and set the appropriate constraint border = get_international(start_country, end_country) #add constraints for the appropriate test, if it is a test if test: if type_of_test == "w": extra_con = test_weather(stop_info) elif type_of_test == "a": extra_con = test_affordability() elif type_of_test == "t": extra_con = test_travel() #solve! solve(border, is_urgent, test, extra_con)
def google_earth_button(): ''' When map button is pressed ''' database_wb = openpyxl.load_workbook(DATABASE_FILE) database_sheet = database_wb[DATABASE_SHEET_NAME] projects = get_project_types() years = get_years() #list of years to select clients = get_client_types() #list of client types #check if location is checked if loc_spec_var.get(): #if it is get the location to select by coords = get_location() radius = int(loc_radius_var.get()) export_file = open('RACEGIS.kml', 'w') #open a text file of the date + rest export_file.write(kml_header_1) export_file.write(kml_header) if loc_spec_var.get(): #write radius long_r = radius / 52.28 lat_r = radius / 69.01 export_file.write(kml_circle) dist = geopy.distance.distance(miles=radius) for theta in range(361): pt = dist.destination(point=geopy.Point(coords), bearing=theta) point = str(pt[1]) + ',' + str(pt[0]) + ',0\n' export_file.write('\t\t' + point) export_file.write(kml_circle_end) for i in range(2, database_sheet.max_row): #FILTER proj_id = database_sheet["A" + str(i)].value proj_year = str(proj_id)[:4] if int(proj_year) in years: #check if meets year criteria if database_sheet["D" + str( i )].value in projects: #check if meets project type criteria if database_sheet["F" + str( i )].value in clients: #check if meets client type criteria if not loc_spec_var.get( ): #if we do NOT need to sort by location name = str(database_sheet["B" + str(i)].value) if "&" in name: name = name.replace("&", "/") proj_id = str(database_sheet["A" + str(i)].value) if "&" in proj_id: proj_id = proj_id.replace("&", "/") location = str(database_sheet["C" + str(i)].value) if "&" in location: location = location.replace("&", "/") proj_type = str(database_sheet["D" + str(i)].value) if "&" in proj_type: proj_type = proj_type.replace("&", "/") client = str(database_sheet["E" + str(i)].value) if "&" in client: client = client.replace("&", "/") client_type = str(database_sheet["F" + str(i)].value) if "&" in client_type: client_type = client_type.replace("&", "/") long_lat_str = str( database_sheet["H" + str(i)].value) + "," + str( database_sheet["G" + str(i)].value) export_file.write(' <Placemark>\n') export_file.write(' <name>' + name + '</name>\n') export_file.write( ' <styleUrl>#project-style</styleUrl>\n') export_file.write(' <ExtendedData>\n') export_file.write(' <Data name="ID">\n') export_file.write(' <value>' + proj_id + '</value>\n') export_file.write(' </Data>\n') export_file.write(' <Data name="Address">\n') export_file.write(' <value>' + location + '</value>\n') export_file.write(' </Data>\n') export_file.write(' <Data name="projectType">\n') export_file.write(' <value>' + proj_type + '</value>\n') export_file.write(' </Data>\n') export_file.write(' <Data name="clientType">\n') export_file.write(' <value>' + client_type + '</value>\n') export_file.write(' </Data>\n') export_file.write(' <Data name="clientName">\n') export_file.write(' <value>' + client + '</value>\n') export_file.write(' </Data>\n') export_file.write(' </ExtendedData>\n') export_file.write(' <Point>\n') export_file.write(' <coordinates>' + long_lat_str + ',0</coordinates>\n') export_file.write(' </Point>\n') export_file.write(' </Placemark>\n') else: if database_sheet["G" + str(i)].value != None: t_coords = (float(database_sheet["G" + str(i)].value), float(database_sheet["H" + str(i)].value)) distance = geodesic(coords, t_coords).miles statusbar_text.set(distance) if distance < radius: name = str(database_sheet["B" + str(i)].value) if "&" in name: name = name.replace("&", "/") proj_id = 
str(database_sheet["A" + str(i)].value) if "&" in proj_id: proj_id = proj_id.replace("&", "/") location = str(database_sheet["C" + str(i)].value) if "&" in location: location = location.replace("&", "/") proj_type = str(database_sheet["D" + str(i)].value) if "&" in proj_type: proj_type = proj_type.replace("&", "/") client = str(database_sheet["E" + str(i)].value) if "&" in client: client = client.replace("&", "/") client_type = str(database_sheet["F" + str(i)].value) if "&" in client_type: client_type = client_type.replace("&", "/") long_lat_str = str(database_sheet[ "H" + str(i)].value) + "," + str( database_sheet["G" + str(i)].value) export_file.write(' <Placemark>\n') export_file.write(' <name>' + name + '</name>\n') export_file.write( ' <styleUrl>#project-style</styleUrl>\n' ) export_file.write(' <ExtendedData>\n') export_file.write(' <Data name="ID">\n') export_file.write(' <value>' + proj_id + '</value>\n') export_file.write(' </Data>\n') export_file.write( ' <Data name="Address">\n') export_file.write(' <value>' + location + '</value>\n') export_file.write(' </Data>\n') export_file.write( ' <Data name="projectType">\n') export_file.write(' <value>' + proj_type + '</value>\n') export_file.write(' </Data>\n') export_file.write( ' <Data name="clientType">\n') export_file.write(' <value>' + client_type + '</value>\n') export_file.write(' </Data>\n') export_file.write( ' <Data name="clientName">\n') export_file.write(' <value>' + client + '</value>\n') export_file.write(' </Data>\n') export_file.write(' </ExtendedData>\n') export_file.write(' <Point>\n') export_file.write(' <coordinates>' + long_lat_str + ',0</coordinates>\n') export_file.write(' </Point>\n') export_file.write(' </Placemark>\n') export_file.write(kml_footer) export_file.close() os.startfile("RACEGIS.kml")
def jitterLocation(location=None, maxMeters=10):
    """Return a copy of (lat, lon, alt) randomly offset by up to maxMeters."""
    origin = geopy.Point(location[0], location[1])
    b = random.randint(0, 360)
    # sqrt keeps the jittered points uniformly distributed over the disc
    d = math.sqrt(random.random()) * (float(maxMeters) / 1000)
    destination = geopy.distance.distance(kilometers=d).destination(origin, b)
    return (destination.latitude, destination.longitude, location[2])
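# Usage sketch (coordinates made up): jitter a (lat, lon, alt) tuple by up
# to 25 metres.
jittered = jitterLocation((37.7749, -122.4194, 10.0), maxMeters=25)
print(jittered)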
def getPath(pointToReturn):
    global px, py
    return geopy.Point(px[pointToReturn], py[pointToReturn])
poly_overall = fr.values.reshape((len(dfa), 180, 2))
center = []
Cell_coord = []
TV_Tower_Dist = []
TV_Tower_Loc = []
TV_HAAT_Data = []
TV_ERP_Data = []
TV_Chan = []
TV_Receiver_Dist = []
TV_Receiver_Loc = []
bear_degree = [45, 135, 225, 315]
out_channels = {}

for cell in range(0, len(Data)):
    CENTER = (Data[str(cell)]['location'][1], Data[str(cell)]['location'][0])
    # half-diagonal of a square cell of side Side_of_cell
    dis = ((2 * (Side_of_cell**2))**0.5) / 2
    orig = geopy.Point(CENTER[0], CENTER[1])
    val = []
    for b in range(0, len(bear_degree)):
        destination = VincentyDistance(kilometers=dis).destination(
            orig, bear_degree[b])
        lat = destination.latitude
        lon = destination.longitude
        ext = (lat, lon)
        val.extend([ext])
    Cell_coord.append(val)
    chan_avl = (Data[str(cell)]['Available_channel'])
    TV_chan = []
    TV_TX_Dist = []
    TV_TX_loc = []
    TV_haat = []
    TV_erp = []
def getGPSLocation():
    global locationIndex
    currentLocation = gps.getPositionData()
    return geopy.Point(currentLocation.fLatitude, currentLocation.fLongitude)
# Upper left corner
lat_a = max(ys)
lon_a = min(xs)
# Lower left corner
lat_b = min(ys)
lon_b = min(xs)
# Upper right corner
lat_c = max(ys)
lon_c = max(xs)

coords_a = (lat_a, lon_a)
coords_b = (lat_b, lon_b)
coords_c = (lat_c, lon_c)

# Rectangle profile
start = geopy.Point(lat_a, lon_a)
h = geopy.distance.vincenty(coords_a, coords_b).m
w = geopy.distance.vincenty(coords_a, coords_c).m
print(str(h) + ' Meters (Height)')
print(str(w) + ' Meters (Width)')

# The number of crop units placed horizontally and vertically
units_v = h / crop_y
units_h = w / crop_x

# Distance to move per step
EAST = 90
SOUTH = 180
d = geopy.distance.VincentyDistance(meters=crop_center[1])
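# A minimal sketch of how the objects above might be used to walk the crop
# grid row by row. The snippet ends before the loop, so using crop_x/crop_y
# as the east/south step distances is an assumption:
step_e = geopy.distance.VincentyDistance(meters=crop_x)
step_s = geopy.distance.VincentyDistance(meters=crop_y)
row_start = start
for _ in range(int(units_v)):
    cell = row_start
    for _ in range(int(units_h)):
        print(cell.latitude, cell.longitude)
        cell = step_e.destination(point=cell, bearing=EAST)
    row_start = step_s.destination(point=row_start, bearing=SOUTH)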
#!/usr/bin/python
import json
import math
import random

import geopy
from geopy.distance import VincentyDistance

# given: lat1, lon1, b = bearing in degrees, d = distance in kilometers

# FIDI Manhattan
origin = geopy.Point(40.705150, -74.0085300)
orig_bearing = 139.3

# Crown Heights
#origin = geopy.Point(40.673512, -73.958879)
#orig_bearing = 195

with open('/home2/greenim9/data/data.json') as data_file:
    data_loaded = json.load(data_file)

stroke_weight = float(data_loaded['DATA']['PWR'])
destination = VincentyDistance(kilometers=15).destination(
    origin, orig_bearing + float(data_loaded['DATA']['DOA']))
lat2, lon2 = destination.latitude, destination.longitude

# CGI response header
print("Content-type: application/json")
print()
def __init__(self, data: dict) -> None:
    self.code = data["LocationCode"]
    self.point = geopy.Point(data["Latitude"], data["Longitude"])
    self.facility_owned = bool(data["FacilityOwnedByCarvana"])
    self.trips = []
    self.visited = False
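# Constructing an instance from a record shaped like the fields read above.
# The enclosing class is not shown in this snippet, so `Location` and the
# sample values are assumptions:
loc = Location({
    "LocationCode": "ATL-01",
    "Latitude": 33.7490,
    "Longitude": -84.3880,
    "FacilityOwnedByCarvana": True,
})
print(loc.point.latitude, loc.point.longitude)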
        return requests.post(url, data=json.dumps(param), headers=headers).json()

    @classmethod
    def request(cls, urls):
        """Process the URLs in batches of 20 per round."""
        out_list = []
        obj = cls()
        for chunck in chunks(urls, 20):
            out_list.extend(obj.__batch_by_amap(chunck))
        return out_list


if __name__ == "__main__":
    # Single-point road binding
    print("Single-point road binding: ",
          BindRoad.one(geopy.Point(39.9849500000, 116.3077580000)))

    # Batch road-binding service test
    SIZE = 30
    origins = [
        x for x in repeat(geopy.Point(39.9849500000, 116.3077580000), SIZE)
    ]
    result = [x for x in BindRoad.batch(origins)]
    print("Batch road binding: ", SIZE)
    assert len(result) == SIZE

    # Nearby search and naming
    result = Around.one(geopy.Point(39.9849500000, 116.3077580000))
    print("Single-point naming: ", result[0]['name'], result[0]['type'],
          result[0]['distance'])

    # Batch calls to the AMap (Gaode) API
    SIZE = 30