def load_latest_bixi(stations=None):
    """Download the live BIXI station feed, archive the raw JSON to disk,
    and merge the snapshot into *stations*.

    :param stations: DataFrame indexed by station code with columns
        name/new/moved/lat/lon/num_bikes/num_docks/last_update/ll.
        A fresh empty frame is built when omitted.
    :returns: tuple ``(stations, fn)`` where *fn* is the path of the
        archived JSON snapshot.
    """
    if stations is None:
        # NOTE: the original used a mutable DataFrame default argument,
        # which is created once and then shared (and mutated) across calls.
        # Build a fresh frame instead, and pass the columns as a list —
        # a set literal has no stable column order.
        stations = pd.DataFrame(columns=[
            'name', 'new', 'moved', 'lat', 'lon',
            'num_bikes', 'num_docks', 'last_update', 'll'])
    print("Using station", stations)
    url = "https://secure.bixi.com/data/stations.json"
    response = urllib2.urlopen(url)
    data = json.load(response)
    n = datetime.now()
    sdir = 'station_logs'
    if not os.path.isdir(sdir):
        os.mkdir(sdir)
    fn = os.path.join(sdir, 'bixi_%04d%02d%02d_%02d%02d.json' %
                      (n.year, n.month, n.day, n.hour, n.minute))
    # archive the raw feed; use a context manager instead of leaking the handle
    with open(fn, 'w') as fh:
        json.dump(data, fh)
    existing_station_codes = set(stations.index)  # O(1) membership tests
    for station in data['stations']:
        station_code = station['n']
        lat = station['la']
        lon = station['lo']
        num_bikes = station['ba']
        num_docks = station['da']
        last_update = station['lu']
        if station_code in existing_station_codes:
            cols = ['new', 'num_bikes', 'num_docks', 'last_update']
            vals = [False, num_bikes, num_docks, last_update]
            # flag a move when the reported position changed
            if not ((stations.loc[station_code, 'lat'] == lat) and
                    (stations.loc[station_code, 'lon'] == lon)):
                ll = LatLon.LatLon(LatLon.Latitude(lat), LatLon.Longitude(lon))
                stations.loc[station_code, ['moved', 'lat', 'lon', 'll']] = \
                    [True, lat, lon, ll]
            else:
                stations.loc[station_code, 'moved'] = False
        else:
            # first sighting of this station code — create a full row
            ll = LatLon.LatLon(LatLon.Latitude(lat), LatLon.Longitude(lon))
            cols = ['name', 'new', 'moved', 'lon', 'lat',
                    'num_bikes', 'num_docks', 'last_update', 'll']
            vals = [unidecode(station['s']).encode('ascii'), True, True,
                    lon, lat, num_bikes, num_docks, last_update, ll]
        stations.loc[station_code, cols] = vals
    return stations, fn
def deg_min_sec(value):
    """ Usage::

        {{ form.origin_point.value|deg_min_sec }}

    Formats a WKT point (e.g. 'POINT(-95.3385 29.7245)') as a
    degrees/minutes/seconds 'Lat/Lon ...' string, or returns None on
    any parse/format failure.
    """
    #GEOSGeometry('POINT(-95.3385 29.7245)')
    try:
        point = GEOSGeometry(value)
        x = point.get_x()
        y = point.get_y()
        c = LatLon.LatLon(LatLon.Longitude(x), LatLon.Latitude(y))
        latlon = c.to_string('d% %m% %S% %H')
        lon = latlon[0].split(' ')
        lat = latlon[1].split(' ')
        # round the seconds component to 1 dp; float() replaces the
        # original eval(), which is unnecessary and unsafe on text input
        lon[2] = str(round(float(lon[2]), 1))
        lat[2] = str(round(float(lat[2]), 1))
        # Degrees Minutes Seconds Hemisphere
        lat_str = lat[0] + u'\N{DEGREE SIGN} ' + lat[1].zfill(2) + '\' ' + lat[2].zfill(4) + '\" ' + lat[3]
        lon_str = lon[0] + u'\N{DEGREE SIGN} ' + lon[1].zfill(2) + '\' ' + lon[2].zfill(4) + '\" ' + lon[3]
        return 'Lat/Lon ' + lat_str + ', ' + lon_str
    except Exception:
        # deliberate best-effort: any failure renders as "no value"
        return None
def find_distance_to_stations(ulat, ulon, lspd):
    """Return the distance from (*ulat*, *ulon*) to every station.

    :param ulat: user latitude in decimal degrees.
    :param ulon: user longitude in decimal degrees.
    :param lspd: station DataFrame whose 'll' column holds LatLon objects.
    :returns: list of distances, one per row of *lspd*, in the units
        produced by ``LatLon.distance`` (great-circle km).
    """
    uu = LatLon.LatLon(LatLon.Latitude(ulat), LatLon.Longitude(ulon))
    # comprehension replaces the original append-loop; same order, same values
    return [uu.distance(ll) for ll in lspd.loc[:, 'll']]
def get_coordinate(self, type='latitude'):
    """Get latitude or longitude of photo from EXIF

    :param str type: Type of coordinate to get. Either "latitude" or
        "longitude".
    :returns: float or None if not present in EXIF or a non-photo file
    """
    if (not self.is_valid()):
        return None
    # default to the latitude EXIF tag; switch to longitude when requested
    key = self.exif_map['latitude']
    if (type == 'longitude'):
        key = self.exif_map['longitude']
    exif = self.get_exif()
    if (key not in exif):
        return None
    try:
        # this is a hack to get the proper direction by negating the
        # values for S and W
        latdir = 1
        if (type == 'latitude' and str(
                exif[self.exif_map['latitude_ref']].value) == 'S'):  # noqa
            latdir = -1
        londir = 1
        if (type == 'longitude' and
                str(exif[self.exif_map['longitude_ref']].value) == 'W'):  # noqa
            londir = -1
        # assumes coords is a (degrees, minutes, seconds) triple — the
        # standard EXIF GPS encoding; TODO(review) confirm for this library
        coords = exif[key].value
        if (type == 'latitude'):
            lat_val = LatLon.Latitude(degree=coords[0], minute=coords[1],
                                      second=coords[2])
            # LatLon renders as signed decimal degrees; apply hemisphere sign
            return float(str(lat_val)) * latdir
        else:
            lon_val = LatLon.Longitude(degree=coords[0], minute=coords[1],
                                       second=coords[2])
            return float(str(lon_val)) * londir
    except KeyError:
        # a *_ref tag was missing from EXIF — treat as "no coordinate"
        return None
def origin_geo(self):
    """Return the origin point formatted as a
    'Lat/Lon D\N{DEGREE SIGN} MM' SS.S" H, ...' string, or None when no
    origin point is set.
    """
    if not self.origin_point:
        return None
    c = LatLon.LatLon(LatLon.Longitude(self.origin_point.get_x()),
                      LatLon.Latitude(self.origin_point.get_y()))
    latlon = c.to_string('d% %m% %S% %H')
    lon = latlon[0].split(' ')
    lat = latlon[1].split(' ')
    # round the seconds component to 1 dp; float() replaces the original
    # eval(), which is unnecessary and unsafe on formatted numeric text
    lon[2] = str(round(float(lon[2]), 1))
    lat[2] = str(round(float(lat[2]), 1))
    # Degrees Minutes Seconds Hemisphere
    lat_str = lat[0] + u'\N{DEGREE SIGN} ' + lat[1].zfill(
        2) + '\' ' + lat[2].zfill(4) + '\" ' + lat[3]
    lon_str = lon[0] + u'\N{DEGREE SIGN} ' + lon[1].zfill(
        2) + '\' ' + lon[2].zfill(4) + '\" ' + lon[3]
    return 'Lat/Lon ' + lat_str + ', ' + lon_str
def field_value(field_name, bushfire=None, request=None, url_type="auto", is_upper=None, external_email=False):
    """ Return the value of model field to dispay in the email

    :param field_name: logical field name to resolve against *bushfire*.
    :param bushfire: Bushfire instance; when falsy, "-" is returned.
    :param request: current request, used to build absolute URLs.
    :param url_type: forwarded to ``utils.get_bushfire_url``.
    :param is_upper: when exactly True, upper-case region/district names.
    :param external_email: suppress hyperlink markup for external recipients.
    :returns: a display string (or degree/minute/second number for the
        latitude_*/longitude_* pseudo-fields); "-" on any failure.
    """
    if not bushfire:
        return "-"
    try:
        if field_name == "origin_point_geo":
            return bushfire.origin_geo
        elif field_name == "region":
            name = bushfire.region.name
            return name.upper() if is_upper == True else name
        elif field_name == "district":
            name = bushfire.district.name
            return name.upper() if is_upper == True else name
        elif field_name == "fire_number":
            if request and not external_email:
                return mark_safe("<a href='{}'>{}</a>".format(
                    utils.get_bushfire_url(request, bushfire, url_type),
                    bushfire.fire_number))
            return bushfire.fire_number
        elif field_name == "url_link":
            return mark_safe("<a href='{0}'>{0}</a>".format(
                utils.get_bushfire_url(request, bushfire, url_type)))
        elif field_name == "url":
            return utils.get_bushfire_url(request, bushfire, url_type)
        elif field_name == "report_status":
            return bushfire.report_status_name
        elif field_name in ("latitude_degree", "latitude_minute", "latitude_second"):
            # one LatLon construction serves all three attribute variants
            lat = LatLon.Latitude(bushfire.origin_point.get_y())
            return getattr(lat, field_name.split("_")[1])
        elif field_name in ("longitude_degree", "longitude_minute", "longitude_second"):
            lon = LatLon.Longitude(bushfire.origin_point.get_x())
            return getattr(lon, field_name.split("_")[1])
        value = getattr(bushfire, FIELD_MAPPING.get(field_name) or field_name)
        if field_name == "dfes_incident_no":
            return value or "Not available"
        elif value is None:
            return "-"
        elif isinstance(value, bool):
            # was `type(value) == type(True)`; isinstance is the idiom
            return "Yes" if value else "No"
        elif field_name == "dispatch_pw":
            return "Yes" if value == 1 else "No"
        elif isinstance(value, datetime.datetime):
            return value.astimezone(tz.gettz(settings.TIME_ZONE)).strftime('%Y-%m-%d %H:%M')
        else:
            value = str(value).strip()
            return value or "-"
    except Exception:
        # deliberate best-effort: any lookup/format failure renders as "-"
        return "-"
# Render the server config with the origin coordinates, then lay out one
# LatLon location per worker on a hexagonal ring grid centred on the origin.
output_fh.write(server_template.format(lat=args.lat, lon=args.lon))
print("Generating raw coordinates to {}".format(args.output_raw))
# NOTE(review): file() is Python-2-only (open() in py3) — confirm target runtime
coords_fh = file(args.output_raw, 'wb')
w_worker = (2 * steps - 1) * r_hex  #convert the step limit of the worker into the r radius of the hexagon in meters?
d = 2.0 * w_worker / 1000.0  #convert that into a diameter and convert to gps scale
d_s = d  # saved copies of diameter/bearing for the per-ring state machine
brng_s = 0.0
brng = 0.0
# bearing correction between adjacent cells; 1.732 ~ sqrt(3) for hex geometry
mod = math.degrees(math.atan(1.732 / (6 * (steps - 1) + 3)))
total_workers = (((rings * (rings - 1)) * 3) + 1)  # this mathamtically calculates the total number of workers
locations = [LatLon.LatLon(LatLon.Latitude(0), LatLon.Longitude(0))] * total_workers  #this initialises the list
locations[0] = LatLon.LatLon(LatLon.Latitude(args.lat), LatLon.Longitude(args.lon))  #set the latlon for worker 0 from cli args
turns = 0  # number of turns made in this ring (0 to 6)
turn_steps = 0  # number of cells required to complete one turn of the ring
turn_steps_so_far = 0  # current cell number in this side of the current ring
# walk the spiral: each completed ring (6 turns) widens the next ring by one cell
for i in range(1, total_workers):
    if turns == 6 or turn_steps == 0:  # we have completed a ring (or are starting the very first ring)
        turns = 0
        turn_steps += 1
        turn_steps_so_far = 0
# Lay out one LatLon location per worker on a hexagonal ring grid centred
# on the CLI-supplied origin (duplicate of the block above, minus the
# server-template write).
# NOTE(review): file() is Python-2-only (open() in py3) — confirm target runtime
coords_fh = file(args.output_raw, 'wb')
w_worker = (
    2 * steps - 1
) * r_hex  #convert the step limit of the worker into the r radius of the hexagon in meters?
d = 2.0 * w_worker / 1000.0  #convert that into a diameter and convert to gps scale
d_s = d  # saved copies of diameter/bearing for the per-ring state machine
brng_s = 0.0
brng = 0.0
# bearing correction between adjacent cells; 1.732 ~ sqrt(3) for hex geometry
mod = math.degrees(math.atan(1.732 / (6 * (steps - 1) + 3)))
total_workers = (((rings * (rings - 1)) * 3) + 1
                 )  # this mathamtically calculates the total number of workers
locations = [LatLon.LatLon(LatLon.Latitude(0), LatLon.Longitude(0))
             ] * total_workers  #this initialises the list
locations[0] = LatLon.LatLon(
    LatLon.Latitude(args.lat),
    LatLon.Longitude(args.lon))  #set the latlon for worker 0 from cli args
turns = 0  # number of turns made in this ring (0 to 6)
turn_steps = 0  # number of cells required to complete one turn of the ring
turn_steps_so_far = 0  # current cell number in this side of the current ring
# walk the spiral: each completed ring (6 turns) widens the next ring by one cell
for i in range(1, total_workers):
    if turns == 6 or turn_steps == 0:  # we have completed a ring (or are starting the very first ring)
        turns = 0
        turn_steps += 1
        turn_steps_so_far = 0
def load_stats(station_location_files=None, sfile='stations.csv',
               shp_file='../geo/limadmin-shp/LIMADMIN.shp'):
    """Load the station-information table from *sfile*, or build and cache it.

    Building merges the given location files, computes per-station distance
    to downtown Montreal, elevation (via geocoder), ASCII-safe names, and
    the containing borough from the LIMADMIN shapefile.

    :param station_location_files: paths consumed by ``load_loc`` when the
        cache is absent (default: none).
    :param sfile: csv cache path; read when present, written when built.
    :param shp_file: borough-boundary shapefile path.
    :returns: the station DataFrame.
    """
    if os.path.exists(sfile):
        print("Loading station information from saved file")
        return pd.read_csv(sfile)
    print("Creating station information file with relevant information")
    # heavy geo dependencies are only needed on the build path
    import geocoder
    import geopandas as gpd
    import LatLon
    from unidecode import unidecode
    from shapely.geometry import Point
    # mutable-default fix: [] as a default argument is shared across calls
    loclist = [load_loc(ll) for ll in (station_location_files or [])]
    blocs = pd.concat(loclist)
    blocs.drop_duplicates(subset=['code'], keep='last', inplace=True)
    downtown = LatLon.LatLon(LatLon.Latitude(45.504045),
                             LatLon.Longitude(-73.569101))
    # BUG FIX: longitudes were previously wrapped in LatLon.Latitude
    stat_loc = [
        LatLon.LatLon(LatLon.Latitude(blocs.loc[stat, 'latitude']),
                      LatLon.Longitude(blocs.loc[stat, 'longitude']))
        for stat in blocs.index
    ]
    print("finding elevation for each station")
    ggs = [
        geocoder.elevation("%s, %s" % (blocs.loc[stat, 'latitude'],
                                       blocs.loc[stat, 'longitude']))
        for stat in blocs.index
    ]
    blocs['distance_to_downtown'] = [downtown.distance(sl) for sl in stat_loc]
    blocs['LatLon'] = stat_loc
    blocs['elev'] = [g.meters for g in ggs]
    # transliterate names to plain ASCII so they write cleanly to csv
    # (the original computed encode('ascii') but discarded the result)
    blocs['name fmt'] = [unidecode(bl).encode('ascii') for bl in blocs['name']]
    # remove names which are not easily written to file
    del blocs['name']
    blocs.index = blocs['code']
    # read shape file of region and assign each station to the borough
    # polygon that contains it
    mtlbr = gpd.read_file(shp_file)
    pps = [Point(pt) for pt in zip(blocs['longitude'], blocs['latitude'])]
    nns = [np.argmax(mtlbr['geometry'].contains(pp)) for pp in pps]
    blocs['neighborhood code'] = nns
    blocs['neighborhood'] = np.array(
        mtlbr.loc[nns, 'NOM'].map(lambda x: unidecode(x).encode('ascii')))
    blocs.to_csv(sfile)
    return blocs