def __init__(self, vessel):
    self.name = vessel['Ship name']
    self.lat = dms2dec(vessel['Latitude'])
    self.lon = dms2dec(vessel['Longitude'])
    # self.lat = vessel['Latitude']
    # self.lon = vessel['Longitude']
    self.velocity = 20
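# A minimal usage sketch, assuming the PyPI "dms2dec" package; the class name
# "Vessel" and the AIS-style record below are illustrative, not from the source.
from dms2dec.ddd_dms2dec import dms2dec

class Vessel:
    def __init__(self, vessel):
        self.name = vessel['Ship name']
        self.lat = dms2dec(vessel['Latitude'])
        self.lon = dms2dec(vessel['Longitude'])
        self.velocity = 20  # fixed default speed

record = {'Ship name': 'MV Example',
          'Latitude': '''37°58'46"N''',
          'Longitude': '''23°42'58"E'''}
v = Vessel(record)
print(v.name, v.lat, v.lon)  # name plus decimal-degree coordinates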
def check(long, lat):
    """Return the key of the sea area containing the given DMS coordinates, or None."""
    dec_long = dms2dec(long)
    dec_lat = dms2dec(lat)
    for k, v in sea_area.items():
        if v['min_long'] < dec_long < v['max_long'] and \
           v['min_lat'] < dec_lat < v['max_lat']:
            return k
    return None
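# A usage sketch for check(); the module-level "sea_area" bounding boxes are
# assumed to look like the hypothetical entry below.
sea_area = {
    'Aegean Sea': {'min_long': 23.0, 'max_long': 28.0,
                   'min_lat': 35.0, 'max_lat': 41.0},
}
print(check('''25°06'32"E''', '''38°21'55"N'''))  # -> 'Aegean Sea'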
def cleanup_and_get_location_data_for(news):
    keys = news.keys()
    if "content" in keys:
        for i, content in enumerate(news['content']):
            # Strip citation markers such as [1] or [12], then transliterate
            # non-ASCII characters.
            content = re.sub(r"\[\d+\]", "", content)
            content = unidecode(content)
            news['content'][i] = content
    if 'geo_dec' in keys:
        coords = news['geo_dec'].split(' ')
        for i, coord in enumerate(coords):
            # Drop the trailing degree sign and hemisphere letter; make
            # southern/western coordinates negative.
            if coord[-1] in ("S", "W"):
                coords[i] = "-" + coord[:-2]
            else:
                coords[i] = coord[:-2]
        news['coords'] = {'lat': coords[0], 'lon': coords[1]}
    elif 'geo_dms' in keys:
        coords = news['geo_dms'].split(' ')
        for i, coord in enumerate(coords):
            coords[i] = dms2dec(coord.strip())
        news['coords'] = {'lat': coords[0], 'lon': coords[1]}
    elif 'location_string' in keys:
        news['coords'] = location_help_from_here_api_with(
            news['location_string'])
    elif 'ptod' in keys:
        news['coords'] = location_help_from_here_api_with(news['ptod'])
    else:
        news['coords'] = location_help_from_here_api_with(news['title'])
    return news
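# A sketch of the expected input: a dict scraped from an article page, here
# with illustrative values exercising the 'geo_dms' branch.
news = {'title': 'Example story',
        'content': ['Some text with a citation.[3]'],
        'geo_dms': '''48°51'24"N 2°21'03"E'''}
news = cleanup_and_get_location_data_for(news)
print(news['coords'])  # {'lat': 48.8566..., 'lon': 2.3508...}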
def _get_XYZ(session, station_id):
    """POSTs and parses XYZ (longitude, latitude, elevation) coordinates."""
    # Request URL to get the data
    url = 'http://meteosearch.meteo.gr/FormProc.asp'
    # POST request body
    req_body = {'stationID': station_id, 'SelectYear': 2020, 'SelectMonth': 12}
    response = session.post(url, req_body)
    # Check for code 2xx.
    if response.ok:
        station_data = response.text
        # The coordinates usually sit on the fourth line (index 3),
        # but sometimes on the third (index 2).
        xyz_line = station_data.splitlines()[3]
        if 'LAT:' in xyz_line:
            z = xyz_line.split('ELEV:')[1].split('m')[0].strip()
            x_dms = xyz_line.split('LONG:')[1].split('E')[0].strip()
            y_dms = xyz_line.split('LAT:')[1].split('N')[0].strip()
            # Check if line contains coordinates
            if x_dms and y_dms:
                # Convert DMS to DD
                x = dms2dec(x_dms + "E")
                y = dms2dec(y_dms + "N")
                return x, y, z
        else:
            xyz_line = station_data.splitlines()[2]
            if 'LAT:' in xyz_line:
                z = xyz_line.split('ELEV:')[1].split('m')[0].strip()
                x_dms = xyz_line.split('LONG:')[1].strip()
                y_dms = xyz_line.split('LAT:')[1].split('LONG')[0].strip()
                # Check if line contains coordinates
                if x_dms and y_dms:
                    # Normalise "deg"/"min" notation and append zero seconds
                    # plus the hemisphere letter before converting DMS to DD.
                    x = dms2dec(x_dms.replace("deg", "°").replace("min", "'")
                                + ' 00"E')
                    y = dms2dec(y_dms.replace("deg", "°").replace("min", "'")
                                + ' 00"N')
                    return x, y, z
    # If no data is found
    return None, None, None
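# A usage sketch; requests is the natural choice for the session object, and
# the station ID below is illustrative.
import requests

with requests.Session() as session:
    x, y, z = _get_XYZ(session, station_id=1)
    print(x, y, z)  # decimal longitude, decimal latitude, elevation (m)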
def find_horizon(sat_objs, sec, location):
    """
    Get satellite altitude and azimuth lists for a specific observer and time

    :param sat_objs: List of satellite objects
    :param sec: Time in seconds
    :param location: Observer location
    :return: Azimuth list, altitude list, shell list (different shells have different colors)
    """
    lat_dms = decimalDegrees2DMS(location[0])
    long_dms = decimalDegrees2DMS(location[1])
    observer = ephem.Observer()
    observer.lon, observer.lat = long_dms, lat_dms
    shifted_epoch = (pd.to_datetime(EPOCH)
                     + pd.to_timedelta(sec, unit='s')).strftime(format='%Y/%m/%d %H:%M:%S')
    observer.date = shifted_epoch
    alt_list = []
    azim_list = []
    shell_list = []
    for shell_cntr in range(0, NUM_SHELLS):
        print(len(sat_objs[shell_cntr]))
        for id in range(len(sat_objs[shell_cntr])):
            sat = sat_objs[shell_cntr][id]
            sat.compute(observer)
            # The string round-trip through dms2dec drops the sign,
            # so re-apply it from the ephem angle.
            angle_alt = dms2dec(str(sat.alt))
            if sat.alt < 0:
                angle_alt = 0 - angle_alt
            angle_az = dms2dec(str(sat.az))
            if sat.az < 0:
                angle_az = 0 - angle_az
            print("%d %f %f" % (id, angle_alt, angle_az))
            # if angle_alt > MIN_DEG_ELEVATION:
            if angle_alt > 0.0:
                # orb_id = math.floor(id / NUM_ORBS)
                print("%d %f %f" % (id, angle_alt, angle_az))
                alt_list.append(angle_alt)
                azim_list.append(angle_az)
                shell_list.append(shell_cntr)
    X = np.array(azim_list)
    Y = np.array(alt_list)
    S = np.array(shell_list)
    return X, Y, S
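# A setup sketch: EPOCH and NUM_SHELLS are module-level constants in the
# original code, and read_tles() is a hypothetical helper yielding
# (name, line1, line2) tuples of real two-line-element sets for ephem.readtle.
EPOCH = '2020/01/01 00:00:00'
NUM_SHELLS = 1
sat_objs = [[ephem.readtle(name, l1, l2) for name, l1, l2 in read_tles('tles.txt')]]
X, Y, S = find_horizon(sat_objs, sec=60, location=(40.0, -105.0))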
def plotly_plot(button, long_d, long_m, long_s, lat_d, lat_m, lat_s):
    # "button" is the callback trigger input and is not used directly.
    # Convert DMS to DD coordinates: longitude gets the "E" suffix and
    # latitude the "N" suffix.
    dd_long = dms2dec(f'''{long_d}°{long_m}'{long_s}"E''')
    dd_lat = dms2dec(f'''{lat_d}°{lat_m}'{lat_s}"N''')
    # Convert DD to Lambert 72
    x, y = transformer.transform(dd_long, dd_lat)
    # Create a 60 m x 60 m bounding box around the point
    x_left = x - 30
    x_right = x + 30
    y_top = y + 30
    y_bottom = y - 30
    # Read in the bounding box
    rst = img.read(1, window=from_bounds(x_left, y_bottom, x_right, y_top, img.transform))
    # Create a geopandas dataframe out of the bounding box
    rst_gdf = gpd.GeoDataFrame(rst)
    rst_gdf = rst_gdf[::-1]
    # Make a 3D plot out of the dataframe
    fig = go.Figure(data=[go.Surface(z=rst_gdf.values)])
    fig.update_layout(autosize=True)
    return fig
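# A setup sketch for the module-level objects the function relies on.
# EPSG:31370 is Belgian Lambert 72; always_xy=True keeps (lon, lat) argument
# order; the raster file name is hypothetical.
import rasterio
import geopandas as gpd
import plotly.graph_objects as go
from rasterio.windows import from_bounds
from pyproj import Transformer
from dms2dec.ddd_dms2dec import dms2dec

transformer = Transformer.from_crs("EPSG:4326", "EPSG:31370", always_xy=True)
img = rasterio.open("dem_lambert72.tif")  # hypothetical elevation raster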
def getData(formdata):
    """Build datum point(s) and probability-map grid data from the search form input."""
    LKP_time = float(formdata['LKP_time'])
    LKP_Altitude = float(formdata['LKP_Altitude'])
    Distress_time = float(formdata['Distress_time'])
    Distress_Altitude = float(formdata['Distress_Altitude'])
    brng = float(formdata['brng'])
    GSpeed = float(formdata['GSpeed'])
    VSpeed = float(formdata['VSpeed'])
    Glide_TAS = float(formdata['Glide_TAS'])
    Glide_Ratio = float(formdata['Glide_Ratio'])
    Daylight_hours = 8
    Search_Speed = 150

    def parse_position(value):
        """Parse a 'DD MM SS,DD MM SS' lat,lon pair into decimal degrees.

        Consolidates the three identical parsing blocks of the original.
        """
        if ',' not in value:
            print("improper format")
            return value
        parts = value.split(',')
        for i in range(2):
            a = parts[i].split()
            hemisphere = 'N' if i == 0 else 'E'
            parts[i] = round(dms2dec(f'''{a[0]}°{a[1]}'{a[2]}"{hemisphere}'''), 5)
        return parts

    # Input these as DD MM SS,DD MM SS
    destination = parse_position(formdata['destination'])
    Distress_position = parse_position(formdata['Distress_position'])
    LKP_position = parse_position(formdata['LKP_position'])

    Communication_Interval = 0.5
    Terrain_Altitude = 0
    Altitude_Loss = Distress_Altitude - Terrain_Altitude
    Comm_Mode = 'GPS'
    Craft_Type = 'DualEngine'
    Search_Altitude = '600'
    Search_obj = 'SmallAircraft'
    visibility = '6'
    vegetation = 'Moderate'
    search_effort, Corr_Sweep_Width, Search_Endurance = availableSearchEffort.getSearchEffort(
        Search_Speed, Daylight_hours, Search_Altitude, Search_obj, visibility, vegetation)
    Descent_Rate = Glide_TAS * 101 / Glide_Ratio
    Datum_point = []

    if Distress_time and Distress_position:
        E = positionError.getE(Comm_Mode, Craft_Type, GSpeed, VSpeed, Distress_Altitude,
                               Terrain_Altitude, LKP_time, Distress_time)
        Altitude_Loss = Distress_Altitude - Terrain_Altitude
        Descent_Time = Altitude_Loss / Descent_Rate / 60
        Glide_Distance = ((Glide_TAS * Descent_Time) / 60) * 1.852
        Datum_point = Datum.Estimated_position(Distress_position, Glide_Distance, brng)
        Seach_area_width = effortAllocation.getSeachAreaWidth(
            search_effort, E, Corr_Sweep_Width, None, Search_Speed, Search_Endurance)
        PMap_Type, Cell_Width, No_of_cells = Pmap.getPmapType(E, Seach_area_width)
        Grid = HeatmapPointData.getGrid(PMap_Type)
        return Datum_point, Grid, Cell_Width

    if Distress_position is None and Distress_time:
        E = positionError.getE(Comm_Mode, Craft_Type, GSpeed, VSpeed, LKP_Altitude,
                               Terrain_Altitude, LKP_time, Distress_time)
        Distance = ((Distress_time - LKP_time) * GSpeed) / 0.621371
        Est_Distress_position = Datum.Estimated_position(LKP_position, Distance, brng)
        Altitude_Loss = LKP_Altitude - Terrain_Altitude
        Descent_Time = Altitude_Loss / Descent_Rate / 60
        Glide_Distance = ((Glide_TAS * Descent_Time) / 60) * 1.852
        Datum_point = Datum.Estimated_position(Est_Distress_position, Glide_Distance, brng)
        Seach_area_width = effortAllocation.getSeachAreaWidth(
            search_effort, E, Corr_Sweep_Width, None, Search_Speed, Search_Endurance)
        PMap_Type, Cell_Width, No_of_cells = Pmap.getPmapType(E, Seach_area_width)
        Grid = HeatmapPointData.getGrid(PMap_Type)
        return Datum_point, Grid, Cell_Width

    if Distress_position is None and Distress_time is None:
        # No distress report: sample positions along the track at 1/12-hour
        # (5-minute) steps over the communication interval.
        Sample_len = 1 / 12
        samples = []
        i = 0
        PMap_Type = []
        Cell_Width = []
        No_of_cells = []
        Datum_point = []
        Grid = []
        while i <= Communication_Interval:
            samples.append(LKP_time + i)
            i = i + Sample_len
        for i in range(len(samples)):
            Distance = ((samples[i] - LKP_time) * GSpeed) / 0.621371
            E = positionError.getE(Comm_Mode, Craft_Type, GSpeed, VSpeed, LKP_Altitude,
                                   Terrain_Altitude, LKP_time, samples[i])
            Est_Distress_position = Datum.Estimated_position(LKP_position, Distance, brng)
            Altitude_Loss = LKP_Altitude - Terrain_Altitude
            Descent_Time = Altitude_Loss / Descent_Rate / 60
            Glide_Distance = ((Glide_TAS * Descent_Time) / 60) * 1.852
            Datum_point.append(Datum.Estimated_position(Est_Distress_position,
                                                        Glide_Distance, brng))
            Seach_area_width = effortAllocation.getSeachAreaWidth(
                search_effort, E, Corr_Sweep_Width, None, Search_Speed, Search_Endurance)
            pmap_type, cell_width, no_of_cells = Pmap.getPmapType(E, Seach_area_width)
            PMap_Type.append(pmap_type)
            Cell_Width.append(cell_width)
            No_of_cells.append(no_of_cells)
            # The original passed an undefined "typeMap" here; the map type
            # just computed for this sample is assumed instead.
            Grid.append(HeatmapPointData.getGrid(pmap_type))
        Datum_line = [Est_Distress_position, destination]
        Datum_length = Calc_distance(Est_Distress_position, destination)
        E = positionError.getE(Comm_Mode, Craft_Type, GSpeed, VSpeed, LKP_Altitude,
                               Terrain_Altitude, LKP_time, samples[-1])
        Seach_area_width = effortAllocation.getSeachAreaWidth(
            search_effort, E, Corr_Sweep_Width, Datum_length, Search_Speed, Search_Endurance)
        pmap_type, cell_width, no_of_cells = PMap_Line.getPmapType(E, Seach_area_width)
        PMap_Type.append(pmap_type)
        Cell_Width.append(cell_width)
        No_of_cells.append(no_of_cells)
        Grid.append(HeatmapLineData.getGrid(pmap_type))
        return Datum_point, Datum_line, Grid, Cell_Width
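# A minimal sketch of the formdata dict getData() expects; the values are
# illustrative and positions follow the 'DD MM SS,DD MM SS' convention.
formdata = {
    'LKP_time': '10.5', 'LKP_Altitude': '8000',
    'Distress_time': '11.0', 'Distress_Altitude': '6000',
    'brng': '270', 'GSpeed': '120', 'VSpeed': '500',
    'Glide_TAS': '90', 'Glide_Ratio': '9',
    'destination': '28 35 20,77 12 30',
    'Distress_position': '28 30 00,77 05 00',
    'LKP_position': '28 25 00,77 00 00',
}
Datum_point, Grid, Cell_Width = getData(formdata)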
def coordinates_convert(loc_degree):
    """Convert a degrees-minutes-seconds coordinate to decimal latitude or longitude."""
    loc_decimal = dms2dec(loc_degree)
    # 'O' (French "Ouest", i.e. West) marks a negative longitude.
    if loc_degree[-1] == 'O':
        loc_decimal = -loc_decimal
    return loc_decimal
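# Usage sketch: dms2dec itself treats only S/W suffixes as negative, so the
# wrapper handles the French 'O' suffix. Values illustrative.
print(coordinates_convert('''4°50'16"O'''))   # ≈ -4.8378
print(coordinates_convert('''45°45'35"N'''))  # ≈ 45.7597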
# Columns holding per-location data; the list's opening line is missing from
# this excerpt, so "location_columns" is a reconstructed, hypothetical name.
location_columns = [
    'Location {}: Latitude',
    'Location {}: Longitude',
    'Location {}: Facility name',
    'Location {}: Target country',
    'Location {}: Location description',
    'Location {}: Comment on location'
]

deal_coords = {}
for _, row in deals_df.iterrows():
    id = row['Deal ID']
    deal_coords[id] = []
    for i in range(1, 22):
        lat = str(row['Location {}: Latitude'.format(i)]).replace(',', '.')
        lng = str(row['Location {}: Longitude'.format(i)]).replace(',', '.')
        try:
            lat = float(lat)
        except ValueError:
            # probably in degree minute seconds (DMS) format
            lat = dms2dec(lat)
        try:
            lng = float(lng)
        except ValueError:
            # probably in degree minute seconds (DMS) format
            lng = dms2dec(lng)
        country = row['Location {}: Target country'.format(i)]
        accuracy = row['Location {}: Spatial accuracy level'.format(i)]
        if np.isnan(lat):
            break
        deal_coords[id].append({
            'coords': (lat, lng),
            'country': country,
            'accuracy': accuracy,
            'agriculture': row['Agriculture'],
            'size': row['Size'],
        })
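# A quick illustration of the float-then-dms2dec fallback used above
# (values illustrative):
for raw in ('48.8566', '''48°51'24"N'''):
    try:
        val = float(raw)
    except ValueError:  # not plain decimal, so assume DMS
        val = dms2dec(raw)
    print(val)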
def scrap_park_data():
    """
    Extracts National Parks data from
    https://en.wikipedia.org/wiki/List_of_national_parks_of_the_United_States?oldformat=true

    Returns a dataframe containing:
        - Park name
        - State
        - Latitude and Longitude
        - Surface (acres, km2)
        - Number of visitors
        - Description
    """
    ## load article, turn into soup and get the <table>s.
    page = urllib.request.urlopen(NP_LINK)
    ## create soup
    soup = BeautifulSoup(page, 'html.parser')
    ## find all sortable tables; the first one lists the parks
    np_table = soup.find_all('table', class_='sortable')[0]
    parks = []
    data = {}
    ## find the table rows (tr) inside the body
    tbody = np_table.find_all("tbody")[0]
    trs = tbody.find_all("tr")
    ## skip the first tr since it contains the headers
    for i in range(1, len(trs)):
        ## extract row content
        tr = trs[i]
        ## find and clean the park name
        park_name = tr.find_all("a")[0].get('title')
        park_name = park_name.replace('ʻ', '')
        park_name = park_name.replace('ā', 'a')
        park_name = park_name.replace('–', '-')
        parks.append(park_name)
        data[park_name] = {}
        ## find park data
        tds = tr.find_all("td")
        ## first td: image. skip.
        ## state + coordinates: offset by one if the first cell is a td
        if len(tds) == 6:
            offset = 1
        else:
            offset = 2
        ## extract state, latitude, and longitude
        data[park_name]["state"] = tds[offset].find("a").get('title')
        lat = tds[offset].find("span", {"class": "latitude"}).text
        long = tds[offset].find("span", {"class": "longitude"}).text
        data[park_name]["latitude"] = dms2dec(lat)
        data[park_name]["longitude"] = dms2dec(long)
        ## date
        data[park_name]["date"] = tds[offset + 1].find("span").text
        ## area
        surface = re.findall(r'(\d*\,?\d*\,?\d+\.?\d*)\s', tds[offset + 2].text)
        data[park_name]["surface_acres"] = float(surface[0].replace(',', ""))
        data[park_name]["surface_km2"] = float(surface[1].replace(',', ""))
        ## visitors
        data[park_name]["visitors"] = float(
            re.search(r'(\d*\,?\d*\,?\d+\.?\d*)',
                      tds[offset + 3].text)[0].replace(",", ""))
        ## description (strip footnote markers)
        description = re.sub(r'\[?\(?\d+\)?\]?', '', tds[offset + 4].text.strip())
        data[park_name]["description"] = description
    ## create dataframe
    parks = pd.DataFrame(data).T.reset_index()
    ## read park units
    units = pd.read_csv("../scrapper/data/Parks.csv")
    ## merge
    parks = pd.merge(left=parks, right=units, left_on='index', right_on='parkname')
    del parks['parkname']
    ## read park websites
    websites = pd.read_csv('../scrapper/data/park_websites.csv')
    ## merge
    parks = pd.merge(left=parks, right=websites, on='parkunit')
    ## count photos
    parks['photo_count'] = parks['parkunit'].apply(get_photo_count)
    ## rename columns
    parks = parks.rename(columns={"index": "parkname"})
    ## get topo
    parks['boundaries'] = parks['parkunit'].apply(lambda x: get_geojson(x))
    parks['bbox'] = parks['parkunit'].apply(lambda x: get_bbox(x))
    return parks
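# Usage sketch, assuming NP_LINK and the helpers used above (get_photo_count,
# get_geojson, get_bbox) are defined elsewhere in the scraper; the output
# path is hypothetical.
parks = scrap_park_data()
parks.to_csv('../scrapper/data/national_parks.csv', index=False)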