def make_indexes():
    """Create the query indexes for the branches collection; failures are logged, not raised."""
    try:
        db.branches.create_index([("t", ASCENDING)])
        db.branches.create_index("session")
        db.branches.create_index("sensor")
    except Exception as e:
        log.error(log.exc(e))
def run(self):
    """Consume items from the data queue forever, dispatching each to the handler."""
    while True:
        try:
            item = self.data.get()
            self.message_handler(item)
        except Exception as e:
            log.error(log.exc(e))
def parse(request):
    """Parse a SPOT beacon XML POST into a flat feature dict.

    Returns the data dict on success, or (None, reason) when the body
    could not be parsed at all.
    """
    log.info("beacon_spot.parse")
    content = ingest_xml_body(request)
    if content is None:
        return content, "Could not parse"
    data = {'FeatureType': "beacon"}
    try:
        content = content['messageList']['message']
        # a single message arrives as an OrderedDict; a list means take the first one
        if type(content) is not OrderedDict:
            content = content[0]
        data['latitude'] = float(content['latitude'])
        data['longitude'] = float(content['longitude'])
        data['t_utc'] = int(content['timeInGMTSecond'])
        data['Satellite'] = content['esnName']
        data['ESN'] = content['esn']
        data['ID'] = content['id']
        data['MessageType'] = content['messageType']
        data['MessageDetail'] = content['messageDetail']
        # optional fields default to None
        data['batteryState'] = content.get('batteryState')
        data['mode'] = content.get('mode')
    except Exception as e:
        log.error(log.exc(e))
        log.error(content)
    return data
def parse(request):
    """Parse an Ambit geo JSON sample into a normalized data dict.

    Converts radian lat/lon to degrees, ISO UTC time to a timestamp, and
    keeps all remaining string fields as numeric where possible.
    Returns the dict, or (None, reason) if the body could not be parsed.
    """
    log.info("ambit_geo.parse")
    sample = ingest_json_body(request)
    if sample is None:
        return sample, "Could not parse"
    data = {}
    for key, value in sample.items():
        if key == "UTC":
            dt = util.parse_date(sample['UTC'])  # these are marked UTC in the data
            t = util.timestamp(dt)
            data['t_utc'] = t
            continue
        if key == "Longitude":
            data['longitude'] = math.degrees(float(sample['Longitude']))
            continue
        if key == "Latitude":
            data['latitude'] = math.degrees(float(sample['Latitude']))
            continue
        if key == "GPSAltitude":
            data['altitude'] = float(sample['GPSAltitude'])
            continue
        # idiom fix: isinstance instead of type comparison
        if not isinstance(value, str):
            continue
        data[key] = strings.as_numeric(value)
    try:
        log.debug("%s %s %s" % (data['longitude'], data['latitude'], data['altitude']))
    except KeyError:  # was a bare except: only a missing geo key is expected here
        log.error("MISSING GEO")
    return data
def ingest_geo_feature(path, kind):
    """Ingest a CSV of timestamped geo sightings, inserting each row as a geojson Point feature."""
    log.info("ingest_geo_feature %s" % path)
    t_protect = model.get_protect(kind)
    headings = {}
    with open(path) as f:
        for r, row in enumerate(csv.reader(f)):
            if r == 0:
                # header row maps column names to indexes
                headings = {item: i for i, item in enumerate(row)}
                continue
            try:
                dt = util.parse_date("%s %s" % (row[headings['Date']], row[headings['Time']]), tz=config['local_tz'], dayfirst=True)
                t = util.timestamp(dt)
                if t <= t_protect:
                    log.warning("Protected t, skipping...")
                    continue
                try:
                    coordinates = (strings.as_numeric(row[headings['Longitude']]),
                                   strings.as_numeric(row[headings['Latitude']]),
                                   strings.as_numeric(row[headings['Altitude']]))
                except Exception:
                    log.error("Missing coordinates! Skipping...")
                    continue
                properties = {'DateTime': dt.strftime("%Y-%m-%dT%H:%M:%S%z"), 't_utc': t, 'ContentType': kind}
                # everything that isn't a positional/temporal column becomes a property
                for heading in headings:
                    if heading in ('Date', 'Time', 'Latitude', 'Longitude', 'Altitude'):
                        continue
                    try:
                        properties[heading] = strings.as_numeric(row[headings[heading]])
                    except IndexError:
                        pass
                feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
                model.insert_feature(kind, t, geojson.dumps(feature))
            except Exception as e:
                log.error("Row failed: " + log.exc(e))
                continue
def message_handler(response):
    """Record one xbee sample batch: append RMS magnitude, persist, and buffer per-sensor."""
    try:
        t = util.timestamp(ms=True)
        sensor = response['sensor']
        samples = response['samples']
        if type(samples[0]) is not list:
            samples = [samples]  # normalize a single sample to a batch of one
        for sample in samples:
            x, y, z = sample
            sample.append(math.sqrt(x ** 2 + y ** 2 + z ** 2))  # RMS magnitude
        rssi = response['rssi']
        if current_session is not None:
            # NOTE: persists the last sample of the batch, matching prior behavior
            db.branches.insert({'t': t, 'sensor': sensor, 'sample': sample,
                                'rssi': rssi, 'session': str(current_session)})
        if sensor not in sensor_data:
            sensor_data[sensor] = deque()
            sensor_rssi[sensor] = None
        sensor_data[sensor].appendleft((t, sample))
        sensor_rssi[sensor] = t, rssi
        if len(sensor_data[sensor]) == 1000:
            sensor_data[sensor].pop()  # cap the per-sensor buffer
    except Exception as e:
        log.error(log.exc(e))
def get_tide(entry):
    """Annotate entry with the tide height at the nearest known NY station.

    Finds the closest station to the entry's lon/lat, queries wunderground,
    and updates the entry in place; on any failure the entry is returned unchanged.
    """
    try:
        stations = {
            (40.7033, -73.9883): "Brooklyn",
            (40.8133, -73.935): "Bronx",
            (41.0783, -73.87): "Tarrytown",
        }
        closest_miles = 10000
        closest_city = None
        for location, city in stations.items():
            miles = geo.distance((entry['longitude'], entry['latitude']), (location[1], location[0]))
            if miles < closest_miles:
                closest_miles = miles
                closest_city = city
        response = requests.get("http://api.wunderground.com/api/%s/rawtide/q/NY/%s.json" % (config['weather'], closest_city))
        data = response.json()
        t_utc, height = data['rawtide']['rawTideObs'][0]['epoch'], data['rawtide']['rawTideObs'][0]['height']
        # BUG FIX: was tagging with `city` (whichever station the loop visited
        # last), not the actual closest station
        entry.update({'tide_station': closest_city, 'tide_height_ft': height})
    except Exception as e:
        log.error(log.exc(e))
    return entry
def message_handler(response):
    """Handle an incoming xbee packet: derive RMS per sample, store it, and update buffers."""
    try:
        t = util.timestamp(ms=True)
        sensor = response['sensor']
        samples = response['samples']
        # a lone sample arrives as a flat list; wrap it so both shapes iterate the same
        if type(samples[0]) is not list:
            samples = [samples]
        for sample in samples:
            x, y, z = sample
            rms = math.sqrt(x ** 2 + y ** 2 + z ** 2)
            sample.append(rms)
        rssi = response['rssi']
        if current_session is not None:
            record = {
                't': t,
                'sensor': sensor,
                'sample': sample,
                'rssi': rssi,
                'session': str(current_session),
            }
            db.branches.insert(record)
        if sensor not in sensor_data:
            sensor_data[sensor] = deque()
            sensor_rssi[sensor] = None
        sensor_data[sensor].appendleft((t, sample))
        sensor_rssi[sensor] = t, rssi
        # keep at most 1000 buffered samples per sensor
        if len(sensor_data[sensor]) == 1000:
            sensor_data[sensor].pop()
    except Exception as e:
        log.error(log.exc(e))
def get_video_times(gpx_filename):
    """Derive (start_t, end_t) UTC timestamps for a video from its GPX track.

    Subtracts the first embedded media time from the first GPS fix time to
    recover the precise recording start. The end is the last GPS point —
    not necessarily the end of the movie.
    """
    try:
        with open(gpx_filename) as gpx:  # was left open; close deterministically
            xml = ElementTree.fromstring(gpx.read())
    except Exception as e:
        # BUG FIX: referenced undefined `tcx_filename`, raising NameError
        # inside the handler and masking the real error
        log.error("XML error (%s): %s" % (gpx_filename, e))
        exit()
    # look at the first GPS time and the corresponding media time, and
    # subtract to get the precise start time
    ns = "{http://www.topografix.com/GPX/1/1}"
    timestamps = xml.findall("%strk/%strkseg/%strkpt/%stime" % tuple([ns] * 4))
    first_timestamp = timestamps[0].text
    first_timestamp_dt = datetime.datetime.strptime(first_timestamp, "%Y-%m-%dT%H:%M:%S")
    first_timestamp_t = calendar.timegm(first_timestamp_dt.timetuple())
    media_times = xml.findall("%strk/%strkseg/%strkpt/%sextensions/%smediatime" % tuple([ns] * 5))
    first_media_time = media_times[0].text
    first_media_time_dt = time.strptime(first_media_time, "%H:%M:%S.%f")
    first_media_time_seconds = first_media_time_dt.tm_min * 60 + first_media_time_dt.tm_sec
    start_timestamp = first_timestamp_t - first_media_time_seconds
    end_timestamp = timestamps[-1].text
    video_start_dt = datetime.datetime.utcfromtimestamp(start_timestamp)
    video_start_t = float(calendar.timegm(video_start_dt.timetuple()))
    video_end_dt = datetime.datetime.strptime(end_timestamp, "%Y-%m-%dT%H:%M:%S")
    video_end_t = float(calendar.timegm(video_end_dt.timetuple()))
    log.info("VIDEO START REAL TIME %s UTC" % datetime.datetime.utcfromtimestamp(video_start_t).strftime("%Y-%m-%d %H:%M:%S"))
    log.info("VIDEO END REAL TIME %s UTC" % datetime.datetime.utcfromtimestamp(video_end_t).strftime("%Y-%m-%d %H:%M:%S"))
    # note! end is just the last GPS point, not necessarily the end of the movie!
    return video_start_t, video_end_t
def insert_sequence(db, walk_id, sequence):
    """Insert (t, foot) step rows for a walk; logs and returns None on database errors."""
    try:
        for step in sequence:
            params = (walk_id, int(step[0]), step[1])
            db.execute("INSERT INTO sequence (walk_id, t, foot) VALUES (?, ?, ?)", params)
    except Exception as e:
        log.error(log.exc(e))
        return None
def temporal_filter(features, resolution):
    """Downsample time-ordered features to at most one per `resolution` seconds.

    Slots are aligned to UTC midnight of the first feature's day; for each
    slot, the first feature falling inside it is kept.
    """
    try:
        log.info("--> starting temporal_filter")
        first_t = features[0]['properties']['t_utc']
        day_start = datetime.datetime.utcfromtimestamp(first_t)
        day_start = day_start.replace(hour=0, minute=0, second=0, microsecond=0)
        start_t = util.timestamp(day_start)
        log.debug("start_date %s" % util.datestring(start_t))
        log.debug("stop_date %s" % util.datestring(features[-1]['properties']['t_utc']))
        log.debug("start_t %s" % start_t)
        log.debug("step %s" % resolution)
        results = []
        slot_t = start_t
        i = 0
        while True:
            # advance to the first feature at or after the current slot start
            while i < len(features) and features[i]['properties']['t_utc'] < slot_t:
                i += 1
            if i == len(features):
                break
            # keep it only if it falls within this slot's window
            if features[i]['properties']['t_utc'] <= slot_t + resolution:
                results.append(features[i])
            slot_t += resolution
        log.info("--> done temporal_filter")
        return results
    except Exception as e:
        log.error(log.exc(e))
def make_indexes():
    """Build indexes on the entries collection; failures are logged, not raised."""
    try:
        db.entries.create_index("type")
        # NOTE(review): Mongo can traverse a single-field index in either
        # direction, so the DESCENDING index likely duplicates the ASCENDING
        # one — kept as-is to preserve behavior
        db.entries.create_index([("t_utc", ASCENDING)])
        db.entries.create_index([("t_utc", DESCENDING)])
    except Exception as e:
        log.error(log.exc(e))
def parse(request):
    """Parse a video upload: load its JSON sidecar and record the uploaded video path."""
    log.info("video.parse")
    paths = save_files(request)
    if not len(paths):
        return None, "No files"
    # locate and load the json sidecar
    data = None
    for path in paths:
        if path[-4:] != "json":
            continue
        try:
            with open(path) as f:
                data = json.loads(f.read())
        except Exception as e:
            log.error(log.exc(e))
            return None, "Could not parse"
        break
    if data is None:
        return None, "No data"
    # the first non-json path is taken to be the video file
    for path in paths:
        if path[-4:] != "json":
            break
    if 'TeamMember' in data:
        data['Member'] = data['TeamMember']
        del data['TeamMember']
    data['Title'] = strings.titlecase(data['Title'])
    data['UploadPath'] = path.split('/')[-1]
    data['YouTubeURL'] = None
    return data
def __init__(self, device_name=None, baud=9600, message_handler=None, blocking=False):
    """Start an xbee serial receiver thread.

    device_name: serial device path; if None, the first /dev/tty.usbserial-* found is used.
    blocking: if True, sleep-loop in the caller's thread until interrupted, then close the port.
    """
    threading.Thread.__init__(self)
    self.daemon = True
    self.verbose = False
    self.message_handler = message_handler
    if device_name is None:
        for dn in os.listdir("/dev"):
            if "tty.usbserial-" in dn:
                device_name = os.path.join("/dev", dn)
                break
    if device_name is None:
        log.info("No devices available")
        exit()
    log.info("Receiving xbee messages on %s" % device_name)
    try:
        self.connection = serial.Serial(device_name, baud)
        self.xbee = XB(self.connection)
    except Exception as e:
        # BUG FIX: Exception.message does not exist in Python 3 (AttributeError
        # in the handler); compare the exception's string form instead.
        # An already-open port is tolerated and we proceed to start().
        if str(e) != "Port is already open.":
            log.error(log.exc(e))
            return
    self.start()
    if blocking:
        try:
            while True:
                time.sleep(5)
        except (KeyboardInterrupt, SystemExit):
            self.connection.close()
def on_message(response):
    """Handle one decoded sensor packet: append RMS magnitude, persist, and buffer per-sensor."""
    try:
        t = util.timestamp(ms=True)
        sensor = config['sensors'][response['id']]
        sample = response['data']
        x, y, z = response['data']
        sample.append(math.sqrt(x ** 2 + y ** 2 + z ** 2))  # RMS magnitude
        rssi = response['rssi']
        if current_session is not None:
            entry = {
                't': t,
                'sensor': sensor,
                'sample': sample,
                'rssi': rssi,
                'session': str(current_session),
            }
            db.branches.insert(entry)
        if sensor not in sensor_data:
            sensor_data[sensor] = deque()
            sensor_rssi[sensor] = None
        sensor_data[sensor].appendleft((t, sample))
        sensor_rssi[sensor] = t, rssi
        # cap the per-sensor buffer at 1000 entries
        if len(sensor_data[sensor]) == 1000:
            sensor_data[sensor].pop()
    except Exception as e:
        log.error(log.exc(e))
def on_message(cls, ip, address, data):
    """Mark the syncbox identified by data[0]/ip as alive and record its reported status."""
    try:
        box = cls.find(data[0], ip)
        box.alive = True
        box.status = data[1]
    except Exception as e:
        log.error(log.exc(e))
def ingest_image_api(path):
    """Ingest one uploaded image: timestamp it from its filename, geolocate it, and store a feature."""
    log.info("ingest_image %s" % path)
    file_name = path.split('/')[-1].split('.')[0]
    front = 'img'
    if '_' in file_name:
        front = file_name.split('_')[0]
        date_string = file_name.split('_')[1]
    else:
        date_string = file_name
    log.info("ingest_image %s" % date_string)
    # filename encodes a local time like 060814154100 (ddmmyyHHMMSS)
    dt = datetime.datetime.strptime(date_string.split('_')[0], "%d%m%y%H%M%S")
    log.info("datetime %s" % dt)
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    log.info("timestamp %s" % t)
    try:
        image = Image.open(path)
        width, height = image.size
    except Exception as e:
        log.error(log.exc(e))
        width, height = None, None
    coords = model.get_coords_by_time(t)
    feature = geojson.Feature(geometry=coords,
                              properties={'utc_t': t,
                                          'ContentType': "image",
                                          'url': "/static/data/images/%s_%s.jpg" % (front, t),
                                          'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z"),
                                          'size': [width, height]})
    feature_id = model.insert_feature('image', t, geojson.dumps(feature))
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "images", "%s_%s.jpg" % (front, t))
    shutil.copy(path, new_path)
def run(self):
    """Open the first available serial device and forward its JSON lines to the data sender."""
    try:
        device_name = None
        for dn in os.listdir("/dev"):
            if "tty.usbmodem" in dn:
                device_name = os.path.join("/dev", dn)
                break
            if "ttyACM0" in dn:
                device_name = os.path.join("/dev", dn)
                break
        if device_name is None:
            log.info("No devices available")
            exit()
        connection = serial.Serial(device_name, 9600)
        log.info("Receiving xbee messages on %s" % device_name)
    except Exception as e:
        log.error(log.exc(e))
    else:
        while True:
            result = None
            try:
                result = connection.readline().decode('utf-8').strip()
                data = json.loads(result)
                data.update({'source': SOURCE})
                log.info(json.dumps(data, indent=4))
                if self.data_sender is not None:
                    self.data_sender.queue.put(data)
                # make another entry carrying just the GPS fields
                gps = {key: value for (key, value) in data.items()
                       if key in ('latitude', 'longitude', 'altitude_m', 'satellites')}
                gps.update({'source': "gps"})
                if self.data_sender is not None:
                    self.data_sender.queue.put(gps)
            except Exception as e:
                log.error(log.exc(e))
                log.info(result)
def process(self, t):
    """Analyze the recorded wav for timestamp t: queue it for upload when it
    contains enough above-threshold audio content, otherwise delete it.
    """
    log.info("process %s" % t)
    try:
        filename = "%s/%s.wav" % (AUDIO_TMP, t)
        sample_rate, signal = wavfile.read(filename)
        signal = np.array(signal).astype('float') / (2 ** 16 * 0.5)  # assuming 16-bit PCM, -1 - 1
        signal = abs(signal)  # magnitude
        # perf: vectorized count replaces the old per-sample Python loop over
        # the entire file; same result for mono 1-D signals
        content_samples = int(np.count_nonzero(signal > config['noise_threshold']))
        total_content_time = float(content_samples) / sample_rate
        log.info("--> %s total_content_time %s" % (t, total_content_time))
        if total_content_time > config['time_threshold']:
            self.out_queue.put((t, filename))
            log.info("--> %s added to upload queue" % t)
        else:
            os.remove(filename)
            log.info("--> %s deleted" % t)
    except Exception as e:
        log.error(log.exc(e))
def plot(walk_id, xs, ys, zs, ds, peaks, total_samples, fs):
    """Render a debug chart of accelerometer axes, derived signals, and detected step peaks.

    Output is written to charts/steps_<walk_id>_<time>.png. If housepy's
    drawing module is unavailable the function logs and returns.
    """
    try:
        from housepy import drawing
    except ImportError:  # was a bare except: only the import failure is expected here
        log.error("Can't draw")
        return
    ctx = drawing.Context(5000, 600, relative=True, flip=True)
    # reference segment
    ctx.line(200.0 / total_samples, 0.5, 350.0 / total_samples, 0.5, thickness=10.0)
    # raw axes in red / green / blue
    ctx.line([(float(i) / total_samples, x) for (i, x) in enumerate(xs)], stroke=(1., 0., 0., 1.0))
    ctx.line([(float(i) / total_samples, y) for (i, y) in enumerate(ys)], stroke=(0., 1., 0., 1.0))
    ctx.line([(float(i) / total_samples, z) for (i, z) in enumerate(zs)], stroke=(0., 0., 1., 1.0))
    # derived signals
    ctx.line([(float(i) / total_samples, d) for (i, d) in enumerate(ds)], stroke=(0., 0., 0.), thickness=3.0)
    ctx.line([(float(i) / total_samples, f) for (i, f) in enumerate(fs)], stroke=(1., 0., 1.), thickness=5.0)
    # mark each detected peak
    for peak in peaks:
        x, y = peak
        x = float(x) / total_samples
        ctx.arc(x, y, (10.0 / ctx.width), (10.0 / ctx.height), fill=(1., 0., 0.), thickness=0.0)
    ctx.output("charts/steps_%s_%s.png" % (walk_id, int(time.time())))
def mark_clip(db, t):
    """Flag the clip at timestamp t as posted; database errors are logged and swallowed."""
    log.info("Marking clip %s" % t)
    try:
        db.execute("UPDATE clips SET posted=1 WHERE t=?", (t,))
    except Exception as e:
        log.error(log.exc(e))
        return
def init(db):
    """Ensure the clips table and its unique timestamp index exist."""
    try:
        db.execute("CREATE TABLE IF NOT EXISTS clips (t INTEGER, hit_id TEXT, posted INTEGER)")
        db.execute("CREATE UNIQUE INDEX IF NOT EXISTS clips_t ON clips(t)")
    except Exception as e:
        log.error(log.exc(e))
        return
def get(self, source=None, start=None, end=None, page=None):
    """Query endpoint: with a source, return filtered JSON results; otherwise render the index page."""
    self.set_header("Access-Control-Allow-Origin", "*")
    if len(source):
        page = None if page is None or not len(page) else strings.as_numeric(page)
        if not len(start):
            start = "*"
        if not len(end):
            end = "*"
        try:
            # every remaining query argument becomes a filter
            filters = {key: strings.as_numeric(value[0]) for (key, value) in self.request.arguments.items()}
            results, start_t, end_t, count, page = actions.retrieve(self.db, source, start, end, filters, page)
            data = {'query': {'sources': source,
                              'start': util.datestring(start_t, tz=config['tz']),
                              'end': util.datestring(end_t, tz=config['tz']),
                              'filters': filters}}
            data['results'] = results
            data['results_total'] = count
            data['results_returned'] = len(results)
            data['page'] = page
            data['pages'] = math.ceil(count / 100)  # page size is 100
            return self.json(data)
        except Exception as e:
            log.error(log.exc(e))
            return self.error("Request malformed: %s" % e)
    # no source given: render the README index page
    readme = "README failed to load"
    try:
        with open(os.path.abspath(os.path.join(os.path.dirname(__file__), "README.md"))) as f:
            readme = markdown.markdown(f.read())
    except Exception as e:
        log.error(log.exc(e))
    sources = self.db.entries.find().distinct('source')
    return self.render("index.html", readme=readme, sources=sources)
def send(cls, user_id, message):
    """Push a message down the websocket registered for user_id; errors are logged."""
    sock = WebSocket.sockets[user_id]
    log.info("--> sending [%s] to %s" % (message, user_id))
    try:
        sock.write_message(message)
    except Exception as e:
        log.error(log.exc(e))
def add_clip(db, t, hit_id):
    """Insert a new unposted clip row; bail (with a log) on database errors."""
    try:
        db.execute("INSERT INTO clips (t, hit_id, posted) VALUES (?, ?, 0)", (t, hit_id))
    except Exception as e:
        log.error(log.exc(e))
        return
    log.info("Added clip %s %s" % (t, hit_id))
def ingest_data(feature_type, feature):
    """Normalize and store an incoming feature; returns (ok, feature_id_or_reason).

    Note that this operates on (mutates) the original datastructure.
    """
    log.info("ingest_data")
    try:
        db = Application.instance.db
    except AttributeError:
        from mongo import db
    feature = verify_geojson(feature)
    if not feature:
        return False, "Could not format as geojson"
    # default the FeatureType without clobbering an existing one
    if 'FeatureType' not in feature['properties']:
        feature['properties']['FeatureType'] = feature_type
    feature = verify_t(feature)
    if not feature:
        return False, "Missing t_utc"
    feature = verify_expedition(feature)
    feature = tag_team(feature)
    feature = verify_geometry(feature)
    if feature['geometry'] is None:
        feature = estimate_geometry(feature, db)
    feature['properties'].update({'t_created': util.timestamp(ms=True)})
    try:
        feature_id = db.features.insert_one(feature).inserted_id
    except Exception as e:
        log.error(log.exc(e))
        return False, "Database error"
    log.info("--> success (%s)" % feature_id)
    return True, feature_id
def save_files(request):
    """Save uploaded files (expanding zip archives) into ../uploads; return the saved paths."""
    log.info("ingest.save_files")
    paths = []
    try:
        for key, fileinfo in request.files.items():
            fileinfo = fileinfo[0]
            upload_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads"))
            path = os.path.join(upload_dir, "%s_%s" % (util.timestamp(), fileinfo['filename']))
            with open(path, 'wb') as f:
                f.write(fileinfo['body'])
            log.info("--> saved %s" % path)
            if zipfile.is_zipfile(path) is True:
                log.info("Examining zip file...")
                with zipfile.ZipFile(path, 'r') as archive:
                    for filename in archive.namelist():
                        # zipped files cannot start with . (invisible), _ (system), or contain longer paths
                        if filename[0] == '.' or filename[0] == '_' or '/' in filename:
                            continue
                        temp_path = archive.extract(filename, upload_dir)
                        path = os.path.join(upload_dir, "%s_%s" % (util.timestamp(), filename))
                        shutil.move(temp_path, path)
                        log.info("--> saved %s" % path)
                        paths.append(path)
                log.info("--> zip file extracted")
            else:
                paths.append(path)
    except Exception as e:
        log.error(log.exc(e))
    return paths
def mark_clip(db, t):
    """Set the posted flag on the clip whose timestamp is t."""
    log.info("Marking clip %s" % t)
    try:
        params = (t,)
        db.execute("UPDATE clips SET posted=1 WHERE t=?", params)
    except Exception as e:
        log.error(log.exc(e))
        return
def verify_expedition(data):
    """Verify we have an Expedition and Member property"""
    try:
        db = Application.instance.db
    except AttributeError:
        from mongo import db
    properties = data['properties']
    # fold known alternate spellings into Member (first one wins)
    for wrong in ['TeamMember', 'teamMember', 'Person', 'person', 'member', 'Collectors', 'Collector', 'collectors', 'collector']:
        if wrong in properties:
            if 'Member' not in properties:
                properties['Member'] = properties[wrong]
            del properties[wrong]
    for wrong in ['expedition']:
        if wrong in properties:
            properties['Expedition'] = properties[wrong]
            del properties[wrong]
    if 'Member' not in properties:
        properties['Member'] = None
    member = properties['Member']
    if member is not None:
        # treat textual null / empty as no member; otherwise keep the first name only
        if member.lower() == "null" or member.lower() == "none" or len(member.strip()) == 0:
            properties['Member'] = None
        else:
            properties['Member'] = member.strip().split(' ')[0]
    if properties['Member'] is not None:
        member = strings.depunctuate(properties['Member'])[:15]
        member = member.title() if len(member) > 2 else member.upper()
        properties['Member'] = member.replace('\u00f6', 'oe')  # sorry Goetz
        try:
            # register previously unseen members
            if not db.members.find({'Name': properties['Member']}).count():
                db.members.insert({'Name': properties['Member'], 'Team': None, 'Core': False, 't_utc': properties['t_utc']})
        except Exception as e:
            log.error(log.exc(e))
    if 'Expedition' not in properties:
        properties['Expedition'] = config['expedition']
    return data
def tag_team(data):
    """Attach a Team property, looked up from the member (or satellite) roster as of time t."""
    try:
        db = Application.instance.db
    except AttributeError:
        from mongo import db
    try:
        member = data['properties']['Member']
        t = data['properties']['t_utc']
        team = None
        if member is None:
            if 'Satellite' in data['properties']:
                satellite = data['properties']['Satellite']
                try:
                    # most recent satellite->team assignment at or before t
                    team = list(db.satellites.find({'Name': satellite, 't_utc': {'$lte': t}}).sort('t_utc', -1).limit(1))[0]['Team']
                    log.info("--> team is %s" % team)
                except IndexError:
                    log.info("--> no team entry at time %s" % t)
            else:
                log.info("--> no info for team")
        else:
            try:
                # most recent member->team assignment at or before t
                team = list(db.members.find({'Name': member, 't_utc': {'$lte': t}}).sort('t_utc', -1).limit(1))[0]['Team']
                log.info("--> team is %s" % team)
            except (IndexError, KeyError):
                log.info("--> no team entry at time %s" % t)
        data['properties']['Team'] = team
        return data
    except Exception as e:
        log.error(log.exc(e))
        return data
def __init__(self, device_name=None, baud=9600, message_handler=None, blocking=False, verbose=False):
    """Start an xbee serial receiver thread.

    device_name: serial device path; if None, the first /dev/tty.usbserial-* or
    /dev/ttyUSB* found is used.
    blocking: if True, sleep-loop in the caller's thread until interrupted, then close the port.
    """
    threading.Thread.__init__(self)
    self.daemon = True
    self.verbose = verbose
    self.message_handler = message_handler
    if device_name is None:
        for dn in os.listdir("/dev"):
            if "tty.usbserial-" in dn:
                device_name = os.path.join("/dev", dn)
                break
            if "ttyUSB" in dn:
                device_name = os.path.join("/dev", dn)
                break
    if device_name is None:
        log.info("No devices available")
        exit()
    log.info("Receiving xbee messages on %s" % device_name)
    try:
        self.connection = serial.Serial(device_name, baud)
        self.xbee = XB(self.connection)
    except Exception as e:
        # BUG FIX: Exception.message does not exist in Python 3 (AttributeError
        # in the handler); compare the exception's string form instead.
        # An already-open port is tolerated and we proceed to start().
        if str(e) != "Port is already open.":
            log.error(log.exc(e))
            return
    self.start()
    if blocking:
        try:
            while True:
                time.sleep(5)
        except (KeyboardInterrupt, SystemExit):
            self.connection.close()
def verify_geometry(data):
    """Verify or reformat geometry data"""
    lon, lat, alt = None, None, None
    properties = data['properties']
    delete = []
    try:
        # pull coordinate-like properties out, whatever their capitalization
        for p, value in properties.items():
            key = p.lower().strip()
            if key in ('longitude', 'lon', 'lng', 'long'):
                lon = value
                delete.append(p)
            elif key in ('latitude', 'lat'):
                lat = value
                delete.append(p)
            elif key in ('altitude', 'alt'):
                alt = value
                delete.append(p)
        if lon is not None and lat is not None:
            if data['geometry'] is None:  # this retains geometry if it exists, is that ok?
                data['geometry'] = {'type': "Point",
                                    'coordinates': [float(lon), float(lat), float(alt) if alt is not None else None]}
            for p in delete:
                del properties[p]
            data['properties'] = properties
    except Exception as e:
        log.error("Error parsing coordinates: %s" % log.exc(e))
    return data
def run(self):
    """Receive UDP datagrams forever and queue their decoded payloads."""
    while True:
        try:
            packet, sender = self.socket.recvfrom(1024)
            self.messages.put(packet.decode())
        except Exception as e:
            log.error(log.exc(e))
def run(self):
    """Read xbee frames forever, decode known fields into a response dict, and dispatch it."""
    while True:
        try:
            frame = self.xbee.wait_read_frame()
            if self.verbose:
                log.debug(frame)
            response = {}
            if 'source_addr' in frame:
                response['sensor'] = int(frame['source_addr'][1])
            if 'frame_id' in frame:
                response['frame'] = str(frame['frame_id'], 'ascii')
            if 'parameter' in frame:
                response['parameter'] = int.from_bytes(frame['parameter'], 'little')
            if 'rssi' in frame:
                response['rssi'] = int.from_bytes(frame['rssi'], 'little')
            if 'samples' in frame:
                response['samples'] = []
                for each in frame['samples']:
                    # order the channel readings by channel name
                    pairs = sorted(each.items(), key=lambda item: item[0])
                    response['samples'].append([value for (_, value) in pairs])
                # a single sample is flattened
                if len(response['samples']) == 1:
                    response['samples'] = response['samples'][0]
            if self.message_handler is not None:
                self.message_handler(response)
        except Exception as e:
            log.error(log.exc(e))
def main():
    """Email a position report built from the last beacon of each configured satellite."""
    log.info("beacon_sender...")
    if config['geo_emails'] is None or not len(config['geo_emails']):
        log.info("--> no emails")
        return
    text = []
    for satellite in config['satellites']:
        try:
            last_beacon = list(db.features.find({'properties.FeatureType': "beacon", 'properties.Satellite': {'$eq': satellite}}).sort('properties.t_utc', -1).limit(1))[0]
            datetime = last_beacon['properties']['DateTime']  # NOTE: shadows any datetime module locally
            lon, lat = last_beacon['geometry']['coordinates']
            satellite = last_beacon['properties']['Satellite']
            team = list(db.satellites.find({'Name': satellite}).sort('t_utc', -1).limit(1))[0]['Team']
            if team is None:
                continue
            google = "https://www.google.com/maps/place/%s,%s" % (lat, lon)
            text.append("%s: %s\n%s\n%f,%f\n%s" % (satellite, team, datetime, lat, lon, google))
            log.info("--> last reported beacon (%s: \"%s\" on %s) at: %f,%f" % (satellite, team, datetime, lat, lon))
        except Exception as e:
            log.error("Could not get update: %s" % log.exc(e))
    try:
        log.info("Emailing to %s..." % EMAILS)
        emailer.send(EMAILS, "OWP beacon report", "\n\n".join(text))
    except Exception as e:
        log.error("Could not email: %s" % log.exc(e))
def run(self):
    """Drain the message queue forever, handing each message to the handler."""
    while True:
        try:
            self.message_handler(self.messages.get())
        except Exception as e:
            log.error(log.exc(e))
def ingest_audio(path, i, t_protect):
    """Convert an uploaded AMR-in-.mp3 recording to wav via ffmpeg and register it as a feature."""
    log.info("ingest_audio %s" % path)
    dt = datetime.datetime.strptime(path.split('/')[-1], "audio %d%m%Y_%H%M.mp3")
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    # if t <= t_protect:
    #     log.warning("Protected t, skipping...")
    #     return
    fixed_path = path.replace(".mp3", ".amr")  # the upload is actually AMR data
    shutil.move(path, fixed_path)
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.wav" % (t, i))
    log.debug("CONVERTING SOUND.")
    try:
        log.debug("--> converting [%s] to [%s]" % (fixed_path, new_path))
        log.debug("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)))
        subprocess.check_call("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)), shell=True)
    except Exception as e:
        log.error(log.exc(e))
        return
    log.debug("DONE CONVERTING SOUND.")
    feature = geojson.Feature(properties={'utc_t': t,
                                          'ContentType': "audio",
                                          'url': "/static/data/audio/%s-%s.wav" % (t, i),
                                          'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")})
    feature_id = model.insert_feature('audio', t, geojson.dumps(feature))
def run(self):
    """Send queued (message, address) pairs over the UDP socket forever."""
    while True:
        try:
            message, address = self.messages.get()
            payload = message.encode('ascii')
            self.socket.sendto(payload, address)
        except Exception as e:
            log.error(log.exc(e))
def init():
    """Create the clips table and its unique timestamp index, then commit."""
    try:
        db.execute("CREATE TABLE IF NOT EXISTS clips (t INTEGER, hit_id TEXT, posted INTEGER)")
        db.execute("CREATE UNIQUE INDEX IF NOT EXISTS clips_t ON clips(t)")
    except Exception as e:
        log.error(log.exc(e))
        return
    connection.commit()
def is_valid(self, response=None):
    """Convenience function to figure out if the last request we made was valid."""
    target = self.response if response is None else response
    try:
        request_element = self.get_response_element("Request", response=target)
        return request_element["IsValid"] == "True"
    except Exception:
        log.error(target)
        return False
def make_indexes():
    """Rebuild the entries indexes from scratch; failures are logged, not raised."""
    try:
        db.entries.drop_indexes()
        db.entries.create_index("collar_id")
        db.entries.create_index("session")
        db.entries.create_index([("t", ASCENDING)])
        db.entries.create_index([("t", DESCENDING)])
    except Exception as e:
        log.error(log.exc(e))
def run(self):
    """Post queued messages to twitter forever, truncated to the 140-character limit."""
    while True:
        message = self.queue.get()[:140]  # tweet length limit
        log.info("SENDING TWEET: %s" % message)
        try:
            self.sender.statuses.update(status=message)
        except Exception as e:
            log.error(log.exc(e))
        else:
            log.info("--> sent")
def run(self):
    """POST queued payloads to the configured server forever, logging each status code."""
    while True:
        try:
            payload = self.queue.get()
            response = requests.post(config['server'], json=payload, timeout=5)
            log.info(response.status_code)
        except Exception as e:
            log.error(log.exc(e))
def make_indexes():
    """Drop and recreate the entries indexes, including geospatial and uniqueness constraints."""
    db.entries.drop_indexes()
    try:
        db.entries.create_index("t")
        db.entries.create_index("user_id")
        db.entries.create_index([("location", GEOSPHERE)])
        db.entries.create_index([("location", GEOSPHERE), ("user_id", ASCENDING)])
        # one entry per user per timestamp
        db.entries.create_index([("t", ASCENDING), ("user_id", ASCENDING)], unique=True)
    except Exception as e:
        log.error(log.exc(e))
def upload(self, t, filename):
    """Push a recording to S3, ping the server with its timestamp, then delete the local file."""
    log.info("upload %s" % filename)
    try:
        s3.upload(filename)
        log.info("--> uploaded. Pinging server...")
        payload = json.dumps({'t': t}).encode('utf-8')
        response = net.read("http://%s:%s" % (config['server']['host'], config['server']['port']), payload)
        log.info(response)
        os.remove(filename)
    except Exception as e:
        log.error(log.exc(e))
def delete(path):
    """Delete an object from the configured S3 bucket; returns True on success, False on failure."""
    log.info("s3.delete")
    conn = S3lib.AWSAuthConnection(config['aws']['access_key_id'], config['aws']['secret_access_key'])
    log.info("--> deleting %s/%s" % (config['aws']['bucket'], path))
    try:
        response = conn.delete(config['aws']['bucket'], path)
    except Exception as e:
        log.error("--> failed: %s" % log.exc(e))
        return False
    log.info("--> %s" % response.message)
    return True
def fetch_walks(db, hidden=False, desc=False):
    """Fetch walk rows, optionally including hidden ones, ordered by start time."""
    try:
        log.debug(hidden)
        where_clause = "" if hidden else "WHERE hidden=0"
        direction = "DESC" if desc else ""
        query = "SELECT * FROM walks %s ORDER BY start_time %s" % (where_clause, direction)
        log.debug(query)
        db.execute(query)
        rows = [dict(gd) for gd in db.fetchall()]
    except Exception as e:
        log.error(log.exc(e))
        rows = []
    return rows
def geocode(self):
    """Reverse-geocode self.lat/self.lon into a short address via the Google Maps API.

    Sets self.address on success; on failure, logs the error and whatever
    response (if any) was received.
    """
    # BUG FIX: `result` was referenced in the except handler before assignment,
    # raising NameError when the HTTP request itself failed
    result = None
    try:
        url = "https://maps.googleapis.com/maps/api/geocode/json?latlng=%s,%s" % (
            self.lat, self.lon)
        result = requests.get(url).json()
        self.address = result['results'][0]['formatted_address']
        # trim down to the street-level portion
        self.address = self.address.split(", NY ")[0].replace(
            ", New York", "")
    except Exception as e:
        log.error(log.exc(e))
        log.debug(json.dumps(result, indent=4))
def run(self):
    """Listen for UDP packets; queue "/a"-tagged payloads, log everything else."""
    while True:
        try:
            message, address = self.socket.recvfrom(1024)
            fields = message.decode('utf-8').split(',')
            if fields[0] == "/a":
                self.queue.put(fields[1:])
            else:
                log.info(fields)
        except Exception as e:
            log.error(log.exc(e))
def insert_walk(db, walk):
    """Insert a walk and its geo/accel samples; return the new walk_id, or None on error."""
    try:
        db.execute("INSERT INTO walks (start_time, duration, ref_id, hidden) VALUES (?, ?, ?, ?)",
                   (walk['start_time'], walk['duration'], walk['ref_id'], False))
        walk_id = db.lastrowid
        for t, lat, lng in walk['geo_data']:
            db.execute("INSERT INTO geo_data (walk_id, t, lat, lng) VALUES (?, ?, ?, ?)",
                       (walk_id, t, lat, lng))
        for t, x, y, z in walk['accel_data']:
            db.execute("INSERT INTO accel_data (walk_id, t, x, y, z) VALUES (?, ?, ?, ?, ?)",
                       (walk_id, t, x, y, z))
    except Exception as e:
        log.error(log.exc(e))
        return None
    return walk_id
def list_contents():
    """List object names in the configured S3 bucket; returns False on failure."""
    log.info("s3.list")
    connection = boto.connect_s3(config['aws']['access_key_id'], config['aws']['secret_access_key'])
    log.info("--> listing %s" % (config['aws']['bucket']))
    try:
        bucket = connection.get_bucket(config['aws']['bucket'])
        contents = [key.name.encode('utf-8') for key in bucket.list()]
    except Exception as e:
        log.error("--> failed: %s" % log.exc(e))
        return False
    log.info("--> %s" % contents)
    return contents
def post(self, nop1=None, nop2=None, nop3=None, nop4=None):
    """Ingest a batch of collar sensor readings POSTed as a ';'-delimited body.

    Each record is 'collar_id,rssi,bat,t_ms,mag' and must be exactly 31
    characters; records whose first 8 characters are doubled have the
    duplicate prefix dropped. Parsed entries are time-sorted, assigned a
    session number (bumped when time runs backwards relative to the newest
    stored entry, i.e. the device clock restarted), and bulk-inserted.
    The nop* parameters absorb unused URL path groups.
    """
    log.info("POST")
    raw = str(self.request.body, encoding="utf-8")
    entries = []
    for data in raw.split(';'):
        if not len(data):
            continue
        try:
            if data[0:8] == data[8:16]:  # who knows: device sometimes doubles the leading 8 chars
                data = data[8:]
            assert len(data) == 31  # fixed-width record (minus the ';')
            fields = data.split(',')
            response = {
                'collar_id': int(fields[0]),
                'rssi': int(fields[1]),
                'bat': int(float(fields[2])),
                't': (float(fields[3]) / 1000.0),  # device reports milliseconds
                'mag': float(fields[4]),
            }
            log.info("[ID %s] [RSSI %02d] [T %.3f] [BAT %02d] [MAG %.3f]" % (response['collar_id'], response['rssi'], response['t'], response['bat'], response['mag']))
            entries.append(response)
        except AssertionError:
            log.error(data)
            log.error("Length is %d" % len(data))
        except Exception as e:
            log.error(log.exc(e))
            log.error(data)
    log.info("--> received %d entries" % len(entries))
    if not entries:
        # Fix: an empty or all-garbage batch previously crashed on
        # entries[-1] below; acknowledge it instead.
        return self.text("OK")
    entries.sort(key=lambda entry: entry['t'])
    max_t = entries[-1]['t']
    result = list(self.db.entries.find().limit(1).sort([('t', DESCENDING)]))
    if len(result):
        final_t = result[0]['t']
        session = result[0]['session']
    else:
        final_t = 0
        session = 1
    if max_t < final_t:
        # batch times are older than the newest stored entry: new session
        session += 1
    for entry in entries:
        entry['session'] = session
    try:
        self.db.entries.insert_many(entries)
    except Exception as e:
        log.error(log.exc(e))
    return self.text("OK")
def __init__(self, handlers):
    # Tornado application bootstrap: resolves template/static paths relative
    # to the launched script (__main__), then conditionally wires up every
    # backend named in the global config: MySQL or mongo, redis, memcache,
    # beanstalk jobs, and an OAuth1 server. Each backend import happens lazily
    # so unused dependencies are never loaded.
    settings = {
        'template_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "templates")),
        'static_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "static"))
    }
    if 'tornado' in config:
        # config['tornado'] entries override/extend the defaults above
        tornado_settings = config['tornado']
        for key in tornado_settings.keys():
            settings[key] = tornado_settings[key]
    tornado.web.Application.__init__(self, handlers, **settings)
    if 'mysql' in config:
        log.info("--> tornado initializing mysql")
        import database
        try:
            self.db = database.Connection()
        except Exception as e:
            log.error("Could not connect to MySQL: %s" % log.exc(e))
    elif 'mongo' in config:
        # mysql and mongo are mutually exclusive: mysql wins if both present
        log.info("--> tornado initializing mongo")
        try:
            mongo = config['mongo']
            import pymongo
            # NOTE(review): pymongo.Connection was removed in pymongo 3.x
            # (replaced by MongoClient) -- confirm the pinned pymongo version.
            connection = pymongo.Connection(mongo['host'])
            self.db = connection[mongo['database']]
        except Exception as e:
            log.error("Could not connect to mongo: %s" % log.exc(e))
    if 'redis' in config:
        log.info("--> tornado initializing redis")
        import redis
        # default host/port; config['redis'] values are not read here
        self.redis = redis.StrictRedis()
    if 'memcache' in config:
        # "torando" typo is in the runtime log string; left untouched
        log.info("--> torando initializing memcache")
        import memcache
        self.cache = memcache.Client([config['memcache']['address'] + ":" + str(config['memcache']['port'])])
    # self.jobs is always defined so callers can test it for None
    self.jobs = None
    if 'beanstalk' in config:
        log.info("--> tornado initializing beanstalk")
        import jobs
        self.jobs = jobs.Jobs()
    # intialize oauth server; ImportError here presumably means the oauth2
    # package is absent, in which case OAuth is disabled -- confirm that
    # oauth2 is imported lazily enough for this catch to fire
    try:
        self.oauth_server = oauth2.Server(signature_methods={'HMAC-SHA1': oauth2.SignatureMethod_HMAC_SHA1()})
    except ImportError:
        self.oauth_server = None
    # class-level handle so other modules can reach the running application
    Application.instance = self
def upload(source, dest=None, num_processes=2, split=50, force=True, reduced_redundancy=False, verbose=False):
    """Upload a local file to the configured S3 bucket, in parallel parts.

    Files smaller than 5MB go up in a single PUT; larger files use a
    multipart upload fanned out over a process pool (do_part_upload).
    split is the part size in MB (floored at S3's 5MB minimum). Returns
    True/False for multipart uploads, None for the small-file path.
    Raises ValueError if dest exists and force is False.
    """
    if dest is None:
        dest = source
    log.info("s3.upload %s to %s/%s..." % (source, config['aws']['bucket'], dest))
    # NOTE(review): opened in text mode; an upload of arbitrary (binary)
    # content likely needs open(source, "rb") -- confirm intended inputs.
    src = open(source)
    s3 = boto.connect_s3(config['aws']['access_key_id'], config['aws']['secret_access_key'])
    bucket = s3.lookup(config['aws']['bucket'])
    key = bucket.get_key(dest)
    if key is not None:
        if not force:
            raise ValueError("--> '%s' already exists" % dest)
    # S3 requires every part except the last to be at least 5MB
    part_size = max(5 * 1024 * 1024, 1024 * 1024 * split)
    # seek to end to measure the file size
    src.seek(0,2)
    size = src.tell()
    num_parts = int(math.ceil(size / part_size))
    if size < 5 * 1024 * 1024:
        # small file: single PUT, no multipart machinery
        src.seek(0)
        t1 = time.time()
        k = boto.s3.key.Key(bucket, dest)
        k.set_contents_from_file(src)
        t2 = time.time() - t1
        s = size/1024./1024.
        log.info("--> finished uploading %0.2fM in %0.2fs (%0.2fMbps)" % (s, t2, s/t2))
        return
    mpu = bucket.initiate_multipart_upload(dest, reduced_redundancy=reduced_redundancy)
    log.info("--> initialized upload: %s" % mpu.id)
    def gen_args(num_parts, fold_last):
        # Yields one argument tuple per part for do_part_upload. When
        # fold_last is True the undersized tail is folded into the
        # previous part (yielded with double size, then stop).
        # NOTE(review): range(num_parts+1) appears to yield one extra part
        # when fold_last is False -- possible off-by-one; confirm against
        # do_part_upload's handling of an out-of-range part_start.
        for i in range(num_parts+1):
            part_start = part_size*i
            if i == (num_parts-1) and fold_last is True:
                yield (bucket.name, mpu.id, src.name, i, part_start, part_size*2)
                break
            else:
                yield (bucket.name, mpu.id, src.name, i, part_start, part_size)
    # fold the last part into its predecessor if it would be under 5MB
    fold_last = ((size % part_size) < 5*1024*1024)
    try:
        pool = Pool(processes=num_processes)
        t1 = time.time()
        # workers re-open the file by name; .get(9999999) keeps the wait interruptible
        pool.map_async(do_part_upload, gen_args(num_parts, fold_last)).get(9999999)
        t2 = time.time() - t1
        s = size/1024./1024.
        src.close()
        mpu.complete_upload()
        log.info("--> finished uploading %0.2fM in %0.2fs (%0.2fMbps)" % (s, t2, s/t2))
        return True
    except Exception as err:
        # abort the multipart upload so S3 doesn't keep charging for parts
        log.error("--> encountered an error, canceling upload")
        log.error(log.exc(err))
        mpu.cancel_upload()
        return False