def get(self, source=None, start=None, end=None, page=None):
    """Tornado GET handler.

    With a source in the URL: query entries (time range + arbitrary
    query-string filters) and return a paginated JSON result.
    Without one: render the index page with the README and the list of
    known sources.
    """
    self.set_header("Access-Control-Allow-Origin", "*")
    if len(source):
        # NOTE(review): source/start/end/page appear to be URL path groups;
        # assumes tornado supplies "" (not None) for empty groups — confirm route regex
        page = None if page is None or not len(page) else strings.as_numeric(page)
        if not len(start):
            start = "*"  # "*" = unbounded start of range
        if not len(end):
            end = "*"  # "*" = unbounded end of range
        try:
            # every query-string argument becomes a filter; values arrive as lists, take the first
            filters = {key: strings.as_numeric(value[0]) for (key, value) in self.request.arguments.items()}
            results, start_t, end_t, count, page = actions.retrieve(self.db, source, start, end, filters, page)
            data = {'query': {'sources': source, 'start': util.datestring(start_t, tz=config['tz']), 'end': util.datestring(end_t, tz=config['tz']), 'filters': filters}}
            # log.info(data)
            data['results'] = results
            data['results_total'] = count
            data['results_returned'] = len(results)
            data['page'] = page
            data['pages'] = math.ceil(count / 100)  # page size of 100 hard-coded here and (presumably) in actions.retrieve
            return self.json(data)
        except Exception as e:
            log.error(log.exc(e))
            return self.error("Request malformed: %s" % e)
    # no source given: serve the landing page
    readme = "README failed to load"  # fallback shown if the file is missing/unreadable
    try:
        with open(os.path.abspath(os.path.join(os.path.dirname(__file__), "README.md"))) as f:
            text = f.read()
            readme = markdown.markdown(text)
    except Exception as e:
        log.error(log.exc(e))
    sources = self.db.entries.find().distinct('source')
    return self.render("index.html", readme=readme, sources=sources)
def run(self):
    """Thread body: find a serial device, then forward newline-delimited
    JSON readings (plus a derived GPS-only entry) to the data sender queue."""
    try:
        device_name = None
        # pick the first matching device: usbmodem (macOS) or ttyACM0 (Linux)
        for dn in os.listdir("/dev"):
            if "tty.usbmodem" in dn:
                device_name = os.path.join("/dev", dn)
                break
            if "ttyACM0" in dn:
                device_name = os.path.join("/dev", dn)
                break
        if device_name is None:
            log.info("No devices available")
            exit()  # NOTE(review): exit() in a thread raises SystemExit in this thread only
        connection = serial.Serial(device_name, 9600)
        log.info("Receiving xbee messages on %s" % device_name)
    except Exception as e:
        log.error(log.exc(e))
    else:
        # only reached if the device opened cleanly
        while True:
            result = None
            try:
                result = connection.readline().decode('utf-8').strip()
                data = json.loads(result)
                data.update({'source': SOURCE})
                log.info(json.dumps(data, indent=4))
                if self.data_sender is not None:
                    self.data_sender.queue.put(data)
                # make another entry for GPS
                data = {key: value for (key, value) in data.items() if key in ['latitude', 'longitude', 'altitude_m', 'satellites']}
                data.update({'source': "gps"})
                if self.data_sender is not None:
                    self.data_sender.queue.put(data)
            except Exception as e:
                # log the raw line that failed to parse
                log.error(log.exc(e))
                log.info(result)
def main():
    """Collect the last reported beacon position for each configured
    satellite and email a summary report to config['geo_emails']."""
    log.info("beacon_sender...")
    if config['geo_emails'] is None or not len(config['geo_emails']):
        log.info("--> no emails")
        return
    text = []
    for satellite in config['satellites']:
        try:
            last_beacon = list(db.features.find({'properties.FeatureType': "beacon", 'properties.Satellite': {'$eq': satellite}}).sort('properties.t_utc', -1).limit(1))[0]
            # NOTE(review): `datetime` shadows the datetime module within this function
            datetime = last_beacon['properties']['DateTime']
            lon, lat = last_beacon['geometry']['coordinates']
            satellite = last_beacon['properties']['Satellite']  # canonical name from the record (also rebinds the loop var)
            team = list(db.satellites.find({'Name': satellite}).sort('t_utc', -1).limit(1))[0]['Team']
            if team is None:
                continue  # unassigned satellites are omitted from the report
            google = "https://www.google.com/maps/place/%s,%s" % (lat, lon)
            text.append("%s: %s\n%s\n%f,%f\n%s" % (satellite, team, datetime, lat, lon, google))
            log.info("--> last reported beacon (%s: \"%s\" on %s) at: %f,%f" % (satellite, team, datetime, lat, lon))
        except Exception as e:
            log.error("Could not get update: %s" % log.exc(e))
    try:
        # EMAILS is presumably the module-level recipient list (see config['geo_emails']) — confirm
        log.info("Emailing to %s..." % EMAILS)
        text = "\n\n".join(text)
        emailer.send(EMAILS, "OWP beacon report", text)
    except Exception as e:
        log.error("Could not email: %s" % log.exc(e))
def __init__(self, handlers):
    """Tornado Application bootstrap.

    Builds template/static settings (overridable via config['tornado']) and
    attaches whichever optional backends are present in config:
    mysql, mongo, redis, memcache, and beanstalk. Also installs an OAuth
    server when available, and registers this instance globally.
    """
    settings = {
        "template_path": os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "templates")),
        "static_path": os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "static")),
    }
    if "tornado" in config:
        # config['tornado'] entries override the defaults above
        tornado_settings = config["tornado"]
        for key in tornado_settings.keys():
            settings[key] = tornado_settings[key]
    tornado.web.Application.__init__(self, handlers, **settings)
    if "mysql" in config:
        log.info("--> tornado initializing mysql")
        import database
        try:
            self.db = database.Connection()
        except Exception as e:
            log.error("Could not connect to MySQL: %s" % log.exc(e))
    elif "mongo" in config:
        log.info("--> tornado initializing mongo")
        try:
            mongo = config["mongo"]
            import pymongo
            # NOTE(review): pymongo.Connection was removed in pymongo 3.x;
            # MongoClient is the modern equivalent — confirm installed version
            connection = pymongo.Connection(mongo["host"])
            self.db = connection[mongo["database"]]
        except Exception as e:
            log.error("Could not connect to mongo: %s" % log.exc(e))
    if "redis" in config:
        log.info("--> tornado initializing redis")
        import redis
        self.redis = redis.StrictRedis()
    if "memcache" in config:
        log.info("--> tornado initializing memcache")  # fix: was "torando"
        import memcache
        self.cache = memcache.Client([config["memcache"]["address"] + ":" + str(config["memcache"]["port"])])
    self.jobs = None
    if "beanstalk" in config:
        log.info("--> tornado initializing beanstalk")
        import jobs
        self.jobs = jobs.Jobs()
    # initialize oauth server
    try:
        self.oauth_server = oauth2.Server(signature_methods={"HMAC-SHA1": oauth2.SignatureMethod_HMAC_SHA1()})
    except ImportError:
        self.oauth_server = None
    Application.instance = self
def main():
    ## called via instagram_grabber.py
    """Pull recent photos for the configured hashtag, filter to known
    accounts, download/ingest each image, then ingest the instagram post
    and link it to the image feature."""
    try:
        response = requests.get("https://api.instagram.com/v1/tags/%s/media/recent?client_id=%s" % (settings['hashtag'], settings['client_id']))
        photos = response.json()['data']
    except Exception as e:
        log.error(log.exc(e))
        return  # fix: `photos` was unbound here, causing a NameError in the loop below
    for photo in photos:
        image_id = None
        try:
            username = photo['user']['username']
            if username not in ACCOUNTS:
                log.info("Skipping photo by %s" % username)
                continue
            data = {}
            data['Url'] = photo['link']
            dup = db.features.find_one({'properties.FeatureType': 'instagram', 'properties.Url': data['Url']})
            if dup is not None:
                log.info("--> skipping duplicate")
                continue
            data['Member'] = MEMBERS[ACCOUNTS.index(username)]
            data['Caption'] = photo['caption']['text']
            data['Tags'] = photo['tags']
            data['Filter'] = photo['filter']
            data['t_utc'] = int(photo['created_time'])
            data['InstagramPhotoURL'] = photo['images']['standard_resolution']['url']
            try:
                # download the image and ingest it as its own feature
                path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads", "%s_%s.jpg" % (util.timestamp(), data['Url'].split("/")[-2])))
                net.grab(data['InstagramPhotoURL'], path)
                image_data = process_image(path, data['Member'], data['t_utc'])
                if image_data is None:
                    log.info("--> no image data")
                else:
                    # make a second request for the image featuretype
                    success, image_id = ingest_data("image", image_data.copy())
                    if not success:
                        log.error(image_id)
                    image_data['ImageUrl'] = image_data['Url']
                    del image_data['Url']
                    data.update(image_data)
            except Exception as e:
                log.error(log.exc(e))
        except Exception as e:
            log.error(log.exc(e))
            return
        success, post_id = ingest_data("instagram", data)
        if not success:
            log.error("--> failed: %s" % post_id)
        else:
            log.info("--> %s" % post_id)
            try:
                # link the image feature back to the instagram post
                db.features.update({'_id': image_id}, {'$set': {'properties.InstagramID': post_id}})
            except Exception as e:
                log.error(log.exc(e))
def main():
    ## called via tweet_grabber.py
    """Walk each configured Twitter account's timeline and ingest tweets
    (all tweets from the official account, hashtag-matching tweets from
    the rest), including any attached photos."""
    twitter = Twython(AUTH['app_key'], AUTH['app_secret'], AUTH['oauth_token'], AUTH['oauth_token_secret'])
    twitter.verify_credentials()  ## what does this do if it fails?
    for a, account in enumerate(ACCOUNTS):
        log.info("Checking %s..." % account)
        try:
            timeline = twitter.get_user_timeline(screen_name=account)
        except TwythonError as e:
            log.error(log.exc(e))
            continue
        log.info("--> %s has %s total tweets" % (account, len(timeline)))
        for t, tweet in enumerate(timeline):
            # log.debug(json.dumps(tweet, indent=4, default=lambda x: str(x)))
            log.info("Tweet %s:%s" % (a, t))
            text = tweet.get('text')
            if a == 0 or HASHTAG.lower() in text.lower():
                # the first entry in the accounts is the official account -- all tweets are processed
                try:
                    data = {}
                    # Twitter's created_at is fixed-format UTC
                    dt = datetime.datetime.strptime(tweet.get('created_at'), '%a %b %d %H:%M:%S +0000 %Y')
                    data['t_utc'] = util.timestamp(dt)
                    data['Member'] = MEMBERS[a]
                    data['Handle'] = account
                    data['Text'] = text
                    data['Retweet'] = text[:2] == "RT"
                    data['Url'] = "https://twitter.com/%s/status/%s" % (account, tweet.get('id'))
                    data['TweetID'] = tweet.get('id')
                    data['Images'] = []
                    dup = db.features.find_one({'properties.FeatureType': 'tweet', 'properties.TweetID': data['TweetID']})
                    if dup is not None:
                        log.info("--> skipping duplicate tweet")
                        continue
                    try:
                        # extended_entities is absent when the tweet has no media (KeyError is expected)
                        for image in tweet['extended_entities']['media']:
                            if image['type'] != "photo":
                                continue
                            data['Images'].append({'Url': image['media_url']})
                            log.info("--> added image %s" % image['media_url'])
                    except KeyError as e:
                        pass
                    log.info("--> %s (RT: %s): %s" % (account, data['Retweet'], data['Text']))
                    success, value = ingest_data("tweet", data)
                    if not success:
                        log.error("--> failed: %s" % value)
                    else:
                        log.info("--> %s" % value)
                except Exception as e:
                    log.error(log.exc(e))
                    continue
            else:
                log.info("--> skipping unrelated tweet")
def post(self, nop1=None, nop2=None, nop3=None, nop4=None):
    """Parse a semicolon-delimited batch of collar readings from the request
    body, assign a session number (bumped when the device clock restarts),
    and bulk-insert the entries.

    Record format (after optional de-duplicated 8-char prefix):
    31 chars of "collar_id,rssi,bat,t_ms,mag".
    """
    log.info("POST")
    raw = str(self.request.body, encoding="utf-8")
    batch = raw.split(';')
    d = 0
    entries = []
    for data in batch:
        if not len(data):
            continue
        try:
            if data[0:8] == data[8:16]:
                # who knows -- firmware sometimes doubles the first 8 chars; drop the duplicate
                data = data[8:]
            assert len(data) == 31 # minus ;
            fields = data.split(',')
            response = {
                'collar_id': int(fields[0]),
                'rssi': int(fields[1]),
                'bat': int(float(fields[2])),
                't': (float(fields[3]) / 1000.0),  # device millis -> seconds
                'mag': float(fields[4])
            }
            log.info("[ID %s] [RSSI %02d] [T %.3f] [BAT %02d] [MAG %.3f]" % (response['collar_id'], response['rssi'], response['t'], response['bat'], response['mag']))
            entries.append(response)
            d += 1
        except AssertionError as e:
            log.error(data)
            log.error("Length is %d" % len(data))
        except Exception as e:
            log.error(log.exc(e))
            log.error(data)
    log.info("--> received %d entries" % len(entries))
    if not entries:
        # fix: entries[-1] below raised IndexError when every record failed to parse
        return self.text("OK")
    entries.sort(key=lambda entry: entry['t'])
    max_t = entries[-1]['t']
    # compare against the newest stored entry; if device time went backwards,
    # the device rebooted, so start a new session
    result = list(self.db.entries.find().limit(1).sort([('t', DESCENDING)]))
    if len(result):
        final_t = result[0]['t']
        session = result[0]['session']
    else:
        final_t = 0
        session = 1
    if max_t < final_t:
        session += 1
    for entry in entries:
        entry['session'] = session
    try:
        self.db.entries.insert_many(entries)
    except Exception as e:
        log.error(log.exc(e))
    return self.text("OK")
def __init__(self, handlers):
    """Tornado Application bootstrap (single-quote variant of the shared
    housekeeping __init__).

    Builds template/static settings (overridable via config['tornado']) and
    attaches whichever optional backends are present in config:
    mysql, mongo, redis, memcache, and beanstalk. Also installs an OAuth
    server when available, and registers this instance globally.
    """
    settings = {
        'template_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "templates")),
        'static_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "static"))
    }
    if 'tornado' in config:
        # config['tornado'] entries override the defaults above
        tornado_settings = config['tornado']
        for key in tornado_settings.keys():
            settings[key] = tornado_settings[key]
    tornado.web.Application.__init__(self, handlers, **settings)
    if 'mysql' in config:
        log.info("--> tornado initializing mysql")
        import database
        try:
            self.db = database.Connection()
        except Exception as e:
            log.error("Could not connect to MySQL: %s" % log.exc(e))
    elif 'mongo' in config:
        log.info("--> tornado initializing mongo")
        try:
            mongo = config['mongo']
            import pymongo
            # NOTE(review): pymongo.Connection was removed in pymongo 3.x;
            # MongoClient is the modern equivalent — confirm installed version
            connection = pymongo.Connection(mongo['host'])
            self.db = connection[mongo['database']]
        except Exception as e:
            log.error("Could not connect to mongo: %s" % log.exc(e))
    if 'redis' in config:
        log.info("--> tornado initializing redis")
        import redis
        self.redis = redis.StrictRedis()
    if 'memcache' in config:
        log.info("--> tornado initializing memcache")  # fix: was "torando"
        import memcache
        self.cache = memcache.Client([config['memcache']['address'] + ":" + str(config['memcache']['port'])])
    self.jobs = None
    if 'beanstalk' in config:
        log.info("--> tornado initializing beanstalk")
        import jobs
        self.jobs = jobs.Jobs()
    # initialize oauth server
    try:
        self.oauth_server = oauth2.Server(signature_methods={'HMAC-SHA1': oauth2.SignatureMethod_HMAC_SHA1()})
    except ImportError:
        self.oauth_server = None
    Application.instance = self
def post(self, nop=None, nop2=None, nop3=None, nop4=None):
    """Accept a JSON body, insert it as an entry, and reply with the new id.

    Responds with an error page if the body is not valid JSON or the
    insert fails; the nop* parameters absorb unused URL path groups.
    """
    log.info("POST")
    self.set_header("Access-Control-Allow-Origin", "*")
    body = str(self.request.body, encoding='utf-8')
    try:
        payload = json.loads(body)
    except Exception as err:
        log.error(log.exc(err))
        return self.error()
    try:
        entry_id = actions.insert(self.db, payload)
    except Exception as err:
        log.error(log.exc(err))
        return self.error("ERROR: %s" % err)
    return self.text(str(entry_id))
def run(self):
    """Thread body: forever pull items off self.data and hand each one to
    the message handler, logging (not raising) any failure."""
    while True:
        try:
            item = self.data.get()
            self.message_handler(item)
        except Exception as err:
            log.error(log.exc(err))
def process(self, t):
    """Inspect the temp WAV for timestamp `t`: if it contains enough
    above-noise-floor signal, queue it for upload; otherwise delete it.

    Assumes mono 16-bit PCM (the original per-sample loop would also have
    failed on multi-channel input) — TODO confirm recorder settings.
    """
    log.info("process %s" % t)
    try:
        filename = "%s/%s.wav" % (AUDIO_TMP, t)
        sample_rate, signal = wavfile.read(filename)
        # log.debug("samples %s" % len(signal))
        # log.debug("sample_rate %s" % sample_rate)
        duration = float(len(signal)) / sample_rate
        # log.debug("duration %ss" % strings.format_time(duration))
        signal = (np.array(signal).astype('float') / (2**16 * 0.5))    # assuming 16-bit PCM, -1 - 1
        signal = abs(signal)    # magnitude
        # log.debug("found magnitude")
        # count samples above the noise floor (vectorized; was a Python-level loop)
        content_samples = int(np.count_nonzero(signal > config['noise_threshold']))
        total_content_time = float(content_samples) / sample_rate
        log.info("--> %s total_content_time %s" % (t, total_content_time))
        if total_content_time > config['time_threshold']:
            self.out_queue.put((t, filename))
            log.info("--> %s added to upload queue" % t)
        else:
            os.remove(filename)
            log.info("--> %s deleted" % t)
    except Exception as e:
        log.error(log.exc(e))
def make_indexes():
    """Create the indexes used by branch queries: time, session, and sensor."""
    index_keys = ([("t", ASCENDING)], "session", "sensor")
    try:
        for keys in index_keys:
            db.branches.create_index(keys)
    except Exception as err:
        log.error(log.exc(err))
def parse(request):
    """Parse an uploaded video submission: a JSON metadata file plus the
    video file itself. Returns the normalized metadata dict, or a
    (None, reason) tuple on failure.

    NOTE(review): error paths return 2-tuples but success returns a bare
    dict — confirm callers handle both shapes.
    """
    log.info("video.parse")
    paths = save_files(request)
    if not len(paths):
        return None, "No files"
    # process the json
    data = None
    for path in paths:
        if path[-4:] == "json":
            try:
                with open(path) as f:
                    data = json.loads(f.read())
            except Exception as e:
                log.error(log.exc(e))
                return None, "Could not parse"
            break
    if data is None:
        return None, "No data"
    # process the video: leave `path` pointing at the first non-json file
    # (relies on the loop variable leaking out of the loop)
    for path in paths:
        if path[-4:] != "json":
            break
    if 'TeamMember' in data:
        # normalize legacy key name
        data['Member'] = data['TeamMember']
        del data['TeamMember']
    data['Title'] = strings.titlecase(data['Title'])
    data['UploadPath'] = path.split('/')[-1]
    data['YouTubeURL'] = None  # filled in later by the upload pipeline, presumably — confirm
    return data
def on_message(response):
    """Handle one sensor reading: compute RMS magnitude, persist it when a
    session is active, and keep a rolling in-memory window per sensor.

    Expects response = {'id': ..., 'data': [x, y, z], 'rssi': ...};
    schema inferred from usage — confirm against the sender.
    """
    log.info("process %s" % t) if False else None  # (no-op guard removed)
    try:
        # print(response['sensor'], response['samples'], response['rssi'])
        t = util.timestamp(ms=True)
        sensor = config['sensors'][response['id']]  # map hardware id -> sensor name
        sample = response['data']
        x, y, z = response['data']
        rms = math.sqrt(x**2 + y**2 + z**2)
        sample.append(rms)  # NOTE: mutates response['data'] in place; sample is now [x, y, z, rms]
        rssi = response['rssi']
        if current_session is not None:
            data = {
                't': t,
                'sensor': sensor,
                'sample': sample,
                'rssi': rssi,
                'session': str(current_session)
            }
            # print(json.dumps(data, indent=4))
            db.branches.insert(data)
        if sensor not in sensor_data:
            # first reading for this sensor: initialize its rolling buffers
            sensor_data[sensor] = deque()
            sensor_rssi[sensor] = None
        sensor_data[sensor].appendleft((t, sample))
        sensor_rssi[sensor] = t, rssi
        if len(sensor_data[sensor]) == 1000:
            # cap the rolling window at 1000 readings
            sensor_data[sensor].pop()
    except Exception as e:
        log.error(log.exc(e))
def run(self):
    """Thread body: forever pull messages off the queue and dispatch each
    to the message handler, logging (not raising) any failure."""
    while True:
        try:
            incoming = self.messages.get()
            self.message_handler(incoming)
        except Exception as err:
            log.error(log.exc(err))
def insert_sequence(db, walk_id, sequence):
    """Insert one row per step of `sequence` (pairs of (t, foot)) into the
    sequence table for the given walk; returns None on failure."""
    sql = "INSERT INTO sequence (walk_id, t, foot) VALUES (?, ?, ?)"
    try:
        for step in sequence:
            params = (walk_id, int(step[0]), step[1])
            db.execute(sql, params)
    except Exception as err:
        log.error(log.exc(err))
        return None
def main():
    """Replay a recorded conversation as OSC note on/off events.

    Loads notes (each [t, voice, on]) from an optional file argument,
    normalizes timestamps to start at zero, then plays them in real time
    on pins 2 ('A') and 3 (other). On any failure, silences both pins.
    """
    sender = None  # fix: ensure defined for the except-path cleanup below
    try:
        filename = sys.argv[1] if len(sys.argv) > 1 else None
        notes = retrieve_convo(filename)
        # normalize: shift all note times so the first starts at 0
        min_t = notes[0][0]
        for note in notes:
            note[0] -= min_t
        sender = osc.Sender(config['oscpin'], 23232)
        for pin in (2, 3):
            sender.send("/noteoff", pin)
        time.sleep(1)
        start_t = time.time()
        i = 0
        while True:
            # busy-wait (10ms granularity) until the next note is due
            while time.time() - start_t < notes[i][0]:
                time.sleep(0.01)
            sender.send("/noteon" if notes[i][2] else "/noteoff", 2 if notes[i][1] == 'A' else 3)
            log.info("%s %s" % (notes[i][1], "ON " if notes[i][2] else "OFF"))
            i += 1
            if i == len(notes):
                break
    except Exception as e:
        log.info(log.exc(e))  # fix: was log.exc(3), which discarded the exception
        if sender is not None:
            # best-effort silence on the way out
            for pin in (2, 3):
                sender.send("/noteoff", pin)
def parse(request):
    """Parse a SPOT satellite beacon callback (XML body) into a flat
    beacon-feature dict. Returns (None, reason) if the body is unparseable;
    note the asymmetric success return (a bare dict)."""
    log.info("beacon_spot.parse")
    content = ingest_xml_body(request)
    if content is None:
        return content, "Could not parse"
    # print(request.headers)
    data = {'FeatureType': "beacon"}
    try:
        content = content['messageList']['message']
        if type(content) is not OrderedDict:
            # multiple messages arrive as a list; keep only the first
            content = content[0]
        data['latitude'] = float(content['latitude'])
        data['longitude'] = float(content['longitude'])
        data['t_utc'] = int(content['timeInGMTSecond'])
        data['Satellite'] = content['esnName']
        data['ESN'] = content['esn']
        data['ID'] = content['id']
        data['MessageType'] = content['messageType']
        data['MessageDetail'] = content['messageDetail']
        # optional fields: default to None when the device omits them
        data['batteryState'] = None if 'batteryState' not in content else content['batteryState']
        data['mode'] = None if 'mode' not in content else content['mode']
    except Exception as e:
        log.error(log.exc(e))
        log.error(content)
    return data
def temporal_filter(features, resolution):
    """Downsample time-ordered features to at most one per `resolution`
    seconds, starting from midnight (UTC) of the first feature's day.

    Assumes `features` is sorted ascending by properties.t_utc — confirm
    at the call site. Returns the filtered list, or None on error
    (the except path falls through without a return).
    """
    try:
        log.info("--> starting temporal_filter")
        first_t = features[0]['properties']['t_utc']
        dt = datetime.datetime.utcfromtimestamp(first_t)
        # snap the window origin to midnight of the first day
        dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
        start_t = util.timestamp(dt)
        log.debug("start_date %s" % util.datestring(start_t))
        log.debug("stop_date %s" % util.datestring(features[-1]['properties']['t_utc']))
        log.debug("start_t %s" % start_t)
        log.debug("step %s" % resolution)
        results = []
        index_t = start_t
        index = 0
        while True:
            # log.debug("Checking %s..." % util.datestring(index_t))
            # advance to the first feature at or after the current window start
            while index < len(features) and features[index]['properties']['t_utc'] < index_t:
                index += 1
            if index == len(features):
                break
            # keep the feature only if it falls inside this window
            if not (features[index]['properties']['t_utc'] > index_t + resolution):
                # log.debug("--> %s %s %s" % (index, features[index]['id'], util.datestring(features[index]['properties']['t_utc'])))
                results.append(features[index])
            index_t += resolution
        log.info("--> done temporal_filter")
        return results
    except Exception as e:
        log.error(log.exc(e))
def mark_clip(db, t):
    """Flag the clip recorded at timestamp `t` as posted."""
    log.info("Marking clip %s" % t)
    params = (t,)
    try:
        db.execute("UPDATE clips SET posted=1 WHERE t=?", params)
    except Exception as err:
        log.error(log.exc(err))
        return
def get_tide(entry):
    """Annotate `entry` (expects 'latitude'/'longitude' keys) with the tide
    station nearest to it and the current tide height from the Weather
    Underground rawtide API. Returns the entry, unmodified on failure."""
    try:
        # (lat, lon) -> station name
        stations = {
            (40.7033, -73.9883): "Brooklyn",
            (40.8133, -73.935): "Bronx",
            (41.0783, -73.87): "Tarrytown"
        }
        closest_miles = 10000
        closest_city = None
        for location, city in stations.items():
            miles = geo.distance((entry['longitude'], entry['latitude']), (location[1], location[0]))
            if miles < closest_miles:
                closest_miles = miles
                closest_city = city
        response = requests.get("http://api.wunderground.com/api/%s/rawtide/q/NY/%s.json" % (config['weather'], closest_city))
        data = response.json()
        t_utc, height = data['rawtide']['rawTideObs'][0]['epoch'], data['rawtide']['rawTideObs'][0]['height']
        # fix: was `city`, the leaked loop variable (always the last station
        # iterated), not the nearest one actually queried above
        entry.update({'tide_station': closest_city, 'tide_height_ft': height})
    except Exception as e:
        log.error(log.exc(e))
    return entry
def send(cls, user_id, message):
    """Push `message` to the websocket registered for `user_id`,
    logging (not raising) delivery failures."""
    target = WebSocket.sockets[user_id]  # renamed from `socket` to avoid shadowing the stdlib module
    log.info("--> sending [%s] to %s" % (message, user_id))
    try:
        target.write_message(message)
    except Exception as err:
        log.error(log.exc(err))
def __init__(self, device_name=None, baud=9600, message_handler=None, blocking=False, verbose=False):
    """Open an XBee serial link on `device_name` (auto-detected from /dev
    when None: usbserial on macOS, ttyUSB on Linux) and start the reader
    thread. With blocking=True, park the calling thread until Ctrl-C."""
    threading.Thread.__init__(self)
    self.daemon = True
    self.verbose = verbose
    self.message_handler = message_handler
    if device_name is None:
        for dn in os.listdir("/dev"):
            if "tty.usbserial-" in dn:
                device_name = os.path.join("/dev", dn)
                break
            if "ttyUSB" in dn:
                device_name = os.path.join("/dev", dn)
                break
    if device_name is None:
        log.info("No devices available")
        exit()
    log.info("Receiving xbee messages on %s" % device_name)
    try:
        self.connection = serial.Serial(device_name, baud)
        self.xbee = XB(self.connection)
    except Exception as e:
        # fix: Exception.message does not exist in Python 3 (raised
        # AttributeError here); compare against the stringified exception
        if str(e) != "Port is already open.":
            log.error(log.exc(e))
        return
    self.start()
    if blocking:
        try:
            while True:
                time.sleep(5)
        except (KeyboardInterrupt, SystemExit):
            self.connection.close()
            pass
def verify_geometry(data):
    """Verify or reformat geometry data

    Scans data['properties'] for longitude/latitude/altitude keys (in any
    common spelling), builds a GeoJSON Point from them when the feature has
    no geometry yet, and removes the consumed properties.
    """
    lon, lat, alt = None, None, None
    properties = data['properties']
    delete = []  # property keys consumed into geometry, removed below
    try:
        for p, value in properties.items():
            if p.lower().strip() == 'longitude' or p.lower().strip() == 'lon' or p.lower().strip() == 'lng' or p.lower().strip() == 'long':
                lon = value
                delete.append(p)
            elif p.lower().strip() == 'latitude' or p.lower().strip() == 'lat':
                lat = value
                delete.append(p)
            elif p.lower().strip() == 'altitude' or p.lower().strip() == 'alt':
                alt = value
                delete.append(p)
        if lon is not None and lat is not None:
            if data['geometry'] is None:    ## this retains geometry if it exists, is that ok?
                data['geometry'] = {'type': "Point", 'coordinates': [float(lon), float(lat), float(alt) if alt is not None else None]}
            for p in delete:
                del properties[p]
            data['properties'] = properties
        # ### temporarily ditch altitude prior to mongo 3.2.0
        # ##### now running 3.2.5
        # if 'geometry' in data and data['geometry'] is not None:
        #     if len(data['geometry']['coordinates']) == 3:
        #         data['properties']['Altitude'] = data['geometry']['coordinates'][2]
        #         data['geometry']['coordinates'] = data['geometry']['coordinates'][:2]
    except Exception as e:
        log.error("Error parsing coordinates: %s" % log.exc(e))
    return data
def ingest_data(feature_type, feature):
    # note that this operates on the original datastructure
    """Normalize and store one feature.

    Pipeline: geojson-format check -> FeatureType tag -> timestamp check ->
    expedition/team tagging -> geometry verification (estimated from other
    data if absent) -> insert. Returns (True, inserted_id) on success,
    (False, reason_string) on any validation or database failure.
    """
    log.info("ingest_data")
    try:
        db = Application.instance.db
    except AttributeError:
        # no running Application (e.g. script context): use the module-level connection
        from mongo import db
    feature = verify_geojson(feature)
    if not feature:
        return False, "Could not format as geojson"
    # keep an explicit FeatureType already present on the feature, else tag with feature_type
    feature['properties'].update({'FeatureType': feature_type if 'FeatureType' not in feature['properties'] else feature['properties']['FeatureType']})
    feature = verify_t(feature)
    if not feature:
        return False, "Missing t_utc"
    feature = verify_expedition(feature)
    feature = tag_team(feature)
    feature = verify_geometry(feature)
    if feature['geometry'] is None:
        feature = estimate_geometry(feature, db)
    feature['properties'].update({'t_created': util.timestamp(ms=True)})
    try:
        feature_id = db.features.insert_one(feature).inserted_id
    except Exception as e:
        log.error(log.exc(e))
        return False, "Database error"
    log.info("--> success (%s)" % feature_id)
    return True, feature_id
def tag_team(data):
    """Set data['properties']['Team'] from the most recent team assignment
    (by Member, or by Satellite when Member is absent) effective at the
    feature's timestamp. Team is None when no assignment is found."""
    try:
        db = Application.instance.db
    except AttributeError:
        # no running Application (e.g. script context): use the module-level connection
        from mongo import db
    try:
        member = data['properties']['Member']
        t = data['properties']['t_utc']
        if member is None:
            if 'Satellite' in data['properties']:
                satellite = data['properties']['Satellite']
                try:
                    # latest assignment at or before the feature time
                    team = list(db.satellites.find({'Name': satellite, 't_utc': {'$lte': t}}).sort('t_utc', -1).limit(1))[0]['Team']
                    log.info("--> team is %s" % team)
                except IndexError:
                    log.info("--> no team entry at time %s" % t)
                    team = None
            else:
                log.info("--> no info for team")
                team = None
        else:
            try:
                # latest assignment at or before the feature time
                team = list(db.members.find({'Name': member, 't_utc': {'$lte': t}}).sort('t_utc', -1).limit(1))[0]['Team']
                log.info("--> team is %s" % team)
            except (IndexError, KeyError):
                log.info("--> no team entry at time %s" % t)
                team = None
        data['properties']['Team'] = team
        return data
    except Exception as e:
        log.error(log.exc(e))
        return data
def save_files(request):
    """Persist every uploaded file into the uploads directory with a
    timestamped name; zip archives are expanded and their (safe, top-level)
    members saved individually. Returns the list of saved paths."""
    log.info("ingest.save_files")
    paths = []
    try:
        for key, fileinfo in request.files.items():
            fileinfo = fileinfo[0]  # tornado supplies a list per form field; take the first
            path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads", "%s_%s" % (util.timestamp(), fileinfo['filename'])))
            with open(path, 'wb') as f:
                f.write(fileinfo['body'])
            log.info("--> saved %s" % path)
            if zipfile.is_zipfile(path) is True:
                log.info("Examining zip file...")
                with zipfile.ZipFile(path, 'r') as archive:
                    filenames = archive.namelist()
                    for filename in filenames:
                        if filename[0] == '.' or filename[0] == '_' or '/' in filename:
                            # zipped files cannot start with . (invisible), _ (system), or contain longer paths
                            continue
                        dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads"))
                        temp_path = archive.extract(filename, dir_path)
                        # re-save the extracted member under a timestamped name
                        path = os.path.abspath(os.path.join(dir_path, "%s_%s" % (util.timestamp(), filename)))
                        shutil.move(temp_path, path)
                        log.info("--> saved %s" % path)
                        paths.append(path)
                log.info("--> zip file extracted")
                # NOTE: the archive itself is saved on disk but not added to paths
            else:
                paths.append(path)
    except Exception as e:
        log.error(log.exc(e))
    return paths
def mark_clip(db, t):
    """Set posted=1 on the clip whose timestamp is `t`."""
    log.info("Marking clip %s" % t)
    try:
        db.execute("UPDATE clips SET posted=1 WHERE t=?", (t,))
    except Exception as err:
        log.error(log.exc(err))
        return
def verify_expedition(data):
    """Verify we have an Expedition and Member property

    Normalizes legacy key names onto 'Member'/'Expedition', cleans the
    member name (first word, depunctuated, max 15 chars, case-normalized),
    registers unknown members in the members collection, and defaults the
    Expedition from config.
    """
    try:
        db = Application.instance.db
    except AttributeError:
        # no running Application (e.g. script context): use the module-level connection
        from mongo import db
    # fold any legacy member-ish key into 'Member' (first one wins), then drop it
    for wrong in ['TeamMember', 'teamMember', 'Person', 'person', 'member', 'Collectors', 'Collector', 'collectors', 'collector']:
        if wrong in data['properties']:
            if 'Member' not in data['properties']:
                data['properties']['Member'] = data['properties'][wrong]
            del data['properties'][wrong]
    for wrong in ['expedition']:
        if wrong in data['properties']:
            data['properties']['Expedition'] = data['properties'][wrong]
            del data['properties'][wrong]
    if 'Member' not in data['properties']:
        data['properties']['Member'] = None
    if data['properties']['Member'] is not None:
        # treat "null"/"none"/blank as no member; otherwise keep first name only
        if data['properties']['Member'].lower() == "null" or data['properties']['Member'].lower() == "none" or len(data['properties']['Member'].strip()) == 0:
            data['properties']['Member'] = None
        else:
            data['properties']['Member'] = data['properties']['Member'].strip().split(' ')[0]
    if data['properties']['Member'] is not None:
        data['properties']['Member'] = strings.depunctuate(data['properties']['Member'])[:15]
        # short names (initials) are uppercased, normal names title-cased
        data['properties']['Member'] = data['properties']['Member'].title() if len(data['properties']['Member']) > 2 else data['properties']['Member'].upper()
        data['properties']['Member'] = data['properties']['Member'].replace('\u00f6', 'oe')   # sorry Goetz
        try:
            # first sighting of this member: create a placeholder record
            if not db.members.find({'Name': data['properties']['Member']}).count():
                db.members.insert({'Name': data['properties']['Member'], 'Team': None, 'Core': False, 't_utc': data['properties']['t_utc']})
        except Exception as e:
            log.error(log.exc(e))
    if 'Expedition' not in data['properties']:
        data['properties']['Expedition'] = config['expedition']
    return data
def __init__(self, device_name=None, baud=9600, message_handler=None, blocking=False):
    """Open an XBee serial link on `device_name` (auto-detected from /dev
    usbserial entries when None) and start the reader thread. With
    blocking=True, park the calling thread until Ctrl-C."""
    threading.Thread.__init__(self)
    self.daemon = True
    self.verbose = False
    self.message_handler = message_handler
    if device_name is None:
        for dn in os.listdir("/dev"):
            if "tty.usbserial-" in dn:
                device_name = os.path.join("/dev", dn)
                break
    if device_name is None:
        log.info("No devices available")
        exit()
    log.info("Receiving xbee messages on %s" % device_name)
    try:
        self.connection = serial.Serial(device_name, baud)
        self.xbee = XB(self.connection)
    except Exception as e:
        # fix: Exception.message does not exist in Python 3 (raised
        # AttributeError here); compare against the stringified exception
        if str(e) != "Port is already open.":
            log.error(log.exc(e))
        return
    self.start()
    if blocking:
        try:
            while True:
                time.sleep(5)
        except (KeyboardInterrupt, SystemExit):
            self.connection.close()
            pass
def run(self):
    """Thread body: receive UDP datagrams forever and enqueue their
    decoded text payloads, logging (not raising) any failure."""
    while True:
        try:
            packet, sender = self.socket.recvfrom(1024)
            self.messages.put(packet.decode())
        except Exception as err:
            log.error(log.exc(err))
def make_indexes():
    """Create the entry indexes: type, plus t_utc in both sort directions."""
    try:
        db.entries.create_index("type")
        for direction in (ASCENDING, DESCENDING):
            db.entries.create_index([("t_utc", direction)])
    except Exception as err:
        log.error(log.exc(err))
def run(self):
    """Thread body: read XBee API frames forever, decode the fields of
    interest into a plain dict, and pass it to the message handler."""
    while True:
        try:
            data = self.xbee.wait_read_frame()
            if self.verbose:
                log.debug(data)
            response = {}
            if 'source_addr' in data:
                # second byte of the 16-bit source address identifies the sensor
                response['sensor'] = int(data['source_addr'][1])
            if 'frame_id' in data:
                response['frame'] = str(data['frame_id'], 'ascii')
            if 'parameter' in data:
                response['parameter'] = int.from_bytes(data['parameter'], 'little')
            if 'rssi' in data:
                response['rssi'] = int.from_bytes(data['rssi'], 'little')
            if 'samples' in data:
                response['samples'] = []
                for each in data['samples']:
                    # order each sample set by channel name for a stable layout
                    samples = list(each.items())
                    samples.sort(key=lambda item: item[0])
                    response['samples'].append([s[1] for s in samples])
                if len(response['samples']) == 1:
                    # single sample set: flatten to a single list
                    response['samples'] = response['samples'][0]
            if self.message_handler is not None:
                self.message_handler(response)
        except Exception as e:
            log.error(log.exc(e))
def init(db):
    """Create the clips table and its unique timestamp index if absent."""
    ddl = (
        "CREATE TABLE IF NOT EXISTS clips (t INTEGER, hit_id TEXT, posted INTEGER)",
        "CREATE UNIQUE INDEX IF NOT EXISTS clips_t ON clips(t)",
    )
    try:
        for statement in ddl:
            db.execute(statement)
    except Exception as err:
        log.error(log.exc(err))
    return
def add_clip(db, t, hit_id):
    """Insert a new, not-yet-posted clip row (posted=0)."""
    row = (t, hit_id)
    try:
        db.execute("INSERT INTO clips (t, hit_id, posted) VALUES (?, ?, 0)", row)
    except Exception as err:
        log.error(log.exc(err))
        return
    log.info("Added clip %s %s" % (t, hit_id))
def ingest_geo_feature(path, kind):
    """Ingest a CSV of timestamped geo sightings as GeoJSON point features.

    The first row supplies column headings; Date/Time/Latitude/Longitude/
    Altitude are consumed into the geometry and timestamp, every other
    column becomes a feature property. Rows at or before the protected
    timestamp for `kind` are skipped.
    """
    log.info("ingest_geo_feature %s" % path)
    t_protect = model.get_protect(kind)
    sightings = []  # NOTE(review): appears unused in this function
    headings = {}  # column name -> column index, from the header row
    with open(path) as f:
        rows = csv.reader(f)
        for r, row in enumerate(rows):
            if r == 0:
                for i, item in enumerate(row):
                    headings[item] = i
                continue
            try:
                dt = util.parse_date("%s %s" % (row[headings['Date']], row[headings['Time']]), tz=config['local_tz'], dayfirst=True)
                t = util.timestamp(dt)
                if t <= t_protect:
                    log.warning("Protected t, skipping...")
                    continue
                try:
                    # (lon, lat, alt) per GeoJSON ordering
                    coordinates = strings.as_numeric(row[headings['Longitude']]), strings.as_numeric(row[headings['Latitude']]), strings.as_numeric(row[headings['Altitude']])
                except Exception as e:
                    log.error("Missing coordinates! Skipping...")
                    continue
                properties = {'DateTime': dt.strftime("%Y-%m-%dT%H:%M:%S%z"), 't_utc': t, 'ContentType': kind}
                # all remaining columns become properties (numeric where possible)
                for heading in headings:
                    if heading not in ['Date', 'Time', 'Latitude', 'Longitude', 'Altitude']:
                        try:
                            properties[heading] = strings.as_numeric(row[headings[heading]])
                        except IndexError:
                            pass  # short row: tolerate missing trailing columns
                feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
                model.insert_feature(kind, t, geojson.dumps(feature))
            except Exception as e:
                log.error("Row failed: " + log.exc(e))
                continue
def ingest_audio(path, i, t_protect):
    """Ingest one phone audio recording: derive its timestamp from the
    filename, convert the (mislabeled .mp3, actually AMR) file to WAV via
    ffmpeg, and store an audio feature pointing at the converted file.

    `t_protect` is currently unused (the protection check is disabled below).
    """
    log.info("ingest_audio %s" % path)
    # filename encodes the local recording time, e.g. "audio 31122015_2359.mp3"
    dt = datetime.datetime.strptime(path.split('/')[-1], "audio %d%m%Y_%H%M.mp3")
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    # if t <= t_protect:
    #     log.warning("Protected t, skipping...")
    #     return
    # the phone labels AMR audio as .mp3; rename so ffmpeg detects it properly
    fixed_path = path.replace(".mp3", ".amr")
    shutil.move(path, fixed_path)
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.wav" % (t, i))
    log.debug("CONVERTING SOUND.")
    try:
        log.debug("--> converting [%s] to [%s]" % (fixed_path, new_path))
        # fix: build an argument list instead of an interpolated shell string --
        # paths containing quotes/metacharacters broke (or could exploit) the
        # previous shell=True invocation
        args = [config['ffmpeg'], "-y", "-i", os.path.abspath(fixed_path), os.path.abspath(new_path)]
        log.debug(" ".join(args))
        subprocess.check_call(args)
    except Exception as e:
        log.error(log.exc(e))
        return
    log.debug("DONE CONVERTING SOUND.")
    feature = geojson.Feature(properties={'utc_t': t, 'ContentType': "audio", 'url': "/static/data/audio/%s-%s.wav" % (t, i), 'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")})
    feature_id = model.insert_feature('audio', t, geojson.dumps(feature))
def message_handler(response):
    # log.info(response)
    """Handle one sensor frame: append an RMS magnitude to each sample,
    persist when a session is active, and keep a rolling window per sensor.

    Expects response with 'sensor', 'samples' ([x,y,z] or a list of them),
    and 'rssi' — shape inferred from usage, confirm against the xbee reader.
    """
    try:
        # print(response['sensor'], response['samples'], response['rssi'])
        t = util.timestamp(ms=True)
        sensor = response['sensor']
        samples = response['samples']
        if type(samples[0]) is not list:
            # single reading: normalize to a list of sample triples
            samples = [samples]
        for sample in samples:
            x, y, z = sample
            rms = math.sqrt(x**2 + y**2 + z**2)
            sample.append(rms)  # sample becomes [x, y, z, rms], mutated in place
        # NOTE(review): below, `sample` is the last triple from the loop --
        # multi-sample frames only record their final sample. Confirm intended.
        rssi = response['rssi']
        if current_session is not None:
            data = {
                't': t,
                'sensor': sensor,
                'sample': sample,
                'rssi': rssi,
                'session': str(current_session)
            }
            # print(json.dumps(data, indent=4))
            db.branches.insert(data)
        if sensor not in sensor_data:
            # first reading for this sensor: initialize its rolling buffers
            sensor_data[sensor] = deque()
            sensor_rssi[sensor] = None
        sensor_data[sensor].appendleft((t, sample))
        sensor_rssi[sensor] = t, rssi
        if len(sensor_data[sensor]) == 1000:
            # cap the rolling window at 1000 readings
            sensor_data[sensor].pop()
    except Exception as e:
        log.error(log.exc(e))
def run(self):
    """Thread body: drain the outgoing queue and send each (message,
    address) pair over the UDP socket as ASCII."""
    while True:
        try:
            outgoing, destination = self.messages.get()
            # log.info("SENDING [%s] to %s:%s" % (message, address[0], address[1]))
            self.socket.sendto(outgoing.encode('ascii'), destination)
        except Exception as err:
            log.error(log.exc(err))
def init():
    """Create the clips table and unique timestamp index if absent,
    committing on the module-level connection."""
    ddl = (
        "CREATE TABLE IF NOT EXISTS clips (t INTEGER, hit_id TEXT, posted INTEGER)",
        "CREATE UNIQUE INDEX IF NOT EXISTS clips_t ON clips(t)",
    )
    try:
        for statement in ddl:
            db.execute(statement)
    except Exception as err:
        log.error(log.exc(err))
        return
    connection.commit()
def make_indexes():
    """Rebuild the entry indexes: collar_id, session, and t (both directions)."""
    try:
        db.entries.drop_indexes()
        for keys in ("collar_id", "session", [("t", ASCENDING)], [("t", DESCENDING)]):
            db.entries.create_index(keys)
    except Exception as err:
        log.error(log.exc(err))
def run(self):
    """Thread body: drain the queue and post each message as a tweet,
    truncated to Twitter's 140-character limit."""
    while True:
        tweet = self.queue.get()[:140]
        log.info("SENDING TWEET: %s" % tweet)
        try:
            self.sender.statuses.update(status=tweet)
        except Exception as err:
            log.error(log.exc(err))
        else:
            log.info("--> sent")
def make_indexes():
    """Rebuild the indexes on db.entries from scratch.

    drop_indexes() is inside the try so a failure there is logged like any
    other index error, matching the sibling make_indexes() elsewhere in the
    project, instead of propagating uncaught.
    """
    try:
        db.entries.drop_indexes()
        db.entries.create_index("t")
        db.entries.create_index("user_id")
        db.entries.create_index([("location", GEOSPHERE)])
        db.entries.create_index([("location", GEOSPHERE), ("user_id", ASCENDING)])
        # one entry per (t, user_id) pair
        db.entries.create_index([("t", ASCENDING), ("user_id", ASCENDING)], unique=True)
    except Exception as e:
        log.error(log.exc(e))
def run(self):
    """Forward queued data dicts to the configured server via HTTP POST, forever."""
    while True:
        try:
            payload = self.queue.get()
            status = requests.post(config['server'], json=payload, timeout=5).status_code
            log.info(status)
        except Exception as e:
            log.error(log.exc(e))
def run(self):
    """Receive semicolon-delimited sensor batches over UDP and enqueue parsed readings."""
    while True:
        try:
            # 1024-byte recv buffer caps how large one batch can be
            message, address = self.socket.recvfrom(1024)
            ip, port = address
            for entry in message.decode('utf-8').split(';'):
                if not entry:
                    continue
                try:
                    fields = entry.split(',')
                    parsed = {
                        'id': int(fields[0]),
                        'rssi': int(fields[1]),
                        'bat': int(float(fields[2])),
                        'ip': ip,
                        't_utc': timeutil.timestamp(ms=True),
                        't': float(fields[3]) / 1000.0,  # device millis -> seconds
                        'mag': float(fields[4]),
                    }
                    self.messages.put(parsed)
                except Exception as e:
                    # one malformed entry shouldn't sink the rest of the batch
                    log.error(log.exc(e))
        except Exception as e:
            log.error(log.exc(e))
def run(self):
    """Receive sensor readings over UDP, enqueue them, and maintain a per-device
    events-per-second rate over a rolling ~1s window.

    self.events maps esp_id -> list of event markers in the current window;
    self.rates maps esp_id -> last computed integer rate.
    """
    t_start = time.time()  # start of the current rate window
    while True:
        try:
            # 1024-byte recv buffer caps how large one datagram can be
            message, address = self.socket.recvfrom(1024)
            ip, port = address
            data = message.decode('utf-8')
            if not len(data):
                continue
            try:
                data = data.split(',')
                esp_id = int(data[0])
                # lazily initialize tracking state for a new device
                if esp_id not in self.events:
                    self.events[esp_id] = []
                if esp_id not in self.rates:
                    self.rates[esp_id] = 0
                self.events[esp_id].append(1)  # one event toward this window's rate
                data = {
                    'id': esp_id,
                    'rssi': int(data[1]),
                    'bat': int(float(data[2])),
                    'rate': self.rates[esp_id],  # rate from the PREVIOUS window
                    'ip': ip,
                    't_utc': timeutil.timestamp(ms=True),
                    't': float(data[3]) / 1000.0,  # device millis -> seconds
                    'mag': float(data[4])
                }
                self.messages.put(data)
            except Exception as e:
                # malformed packet: log and skip the window bookkeeping below
                log.error(log.exc(e))
                continue
            # close out the window once at least a second has elapsed
            elapsed_t = time.time() - t_start
            if elapsed_t >= 1:
                for esp_id in self.events:
                    events = len(self.events[esp_id])
                    rate = math.floor(events / elapsed_t)
                    self.rates[esp_id] = rate
                    self.events[esp_id] = []
                t_start = time.time()
        except Exception as e:
            log.error(log.exc(e))
def fetch_walks(db, hidden=False, desc=False):
    """Fetch walk rows ordered by start_time.

    db -- a DB-API cursor-like object
    hidden -- include rows flagged hidden when True
    desc -- newest first when True
    Returns a list of dicts; empty list on error.
    """
    rows = []
    try:
        log.debug(hidden)
        where_clause = "WHERE hidden=0" if not hidden else ""
        order = "DESC" if desc else ""
        query = "SELECT * FROM walks %s ORDER BY start_time %s" % (where_clause, order)
        log.debug(query)
        db.execute(query)
        rows = [dict(row) for row in db.fetchall()]
    except Exception as e:
        log.error(log.exc(e))
        rows = []
    return rows
def upload(self, t, filename):
    """Upload a file to S3, ping the server with its timestamp, then delete the local copy."""
    log.info("upload %s" % filename)
    try:
        s3.upload(filename)
        log.info("--> uploaded. Pinging server...")
        endpoint = "http://%s:%s" % (config['server']['host'], config['server']['port'])
        payload = json.dumps({'t': t}).encode('utf-8')
        response = net.read(endpoint, payload)
        log.info(response)
        # only remove the local file once the server has been notified
        os.remove(filename)
    except Exception as e:
        log.error(log.exc(e))
def geocode(self):
    """Reverse-geocode self.lat/self.lon via the Google Maps API and set
    self.address, stripped of ", NY <zip>" / ", New York" suffixes.

    On failure, logs the error and dumps the API response for debugging --
    but only if a response was actually received: previously `result` could
    be unbound here (e.g. the HTTP request itself raised), turning the
    handler into a NameError that masked the original exception.
    """
    result = None
    try:
        url = "https://maps.googleapis.com/maps/api/geocode/json?latlng=%s,%s" % (
            self.lat, self.lon)
        result = requests.get(url).json()
        self.address = result['results'][0]['formatted_address']
        self.address = self.address.split(", NY ")[0].replace(
            ", New York", "")
    except Exception as e:
        log.error(log.exc(e))
        if result is not None:
            log.debug(json.dumps(result, indent=4))
def run(self):
    """Listen for UDP packets forever; enqueue "/a"-tagged payloads, log anything else."""
    while True:
        try:
            packet, sender = self.socket.recvfrom(1024)
            fields = packet.decode('utf-8').split(',')
            if fields[0] != "/a":
                # unrecognized tag: just record it
                log.info(fields)
                continue
            self.queue.put(fields[1:])
        except Exception as e:
            log.error(log.exc(e))
def delete(path):
    """Delete an object from the configured S3 bucket.

    Returns True on success, False on failure.
    """
    log.info("s3.delete")
    conn = S3lib.AWSAuthConnection(config['aws']['access_key_id'], config['aws']['secret_access_key'])
    bucket = config['aws']['bucket']
    log.info("--> deleting %s/%s" % (bucket, path))
    try:
        response = conn.delete(bucket, path)
    except Exception as e:
        log.error("--> failed: %s" % log.exc(e))
        return False
    log.info("--> %s" % response.message)
    return True
def insert_walk(db, walk):
    """Insert a walk record plus its geo and accel samples.

    db -- a DB-API cursor-like object
    walk -- dict with 'start_time', 'duration', 'ref_id', 'geo_data'
            (rows of (t, lat, lng)) and 'accel_data' (rows of (t, x, y, z))
    Returns the new walk id, or None if anything failed (rows inserted
    before the failure are NOT rolled back here).
    """
    try:
        db.execute(
            "INSERT INTO walks (start_time, duration, ref_id, hidden) VALUES (?, ?, ?, ?)",
            (walk['start_time'], walk['duration'], walk['ref_id'], False),
        )
        walk_id = db.lastrowid
        for point in walk['geo_data']:
            db.execute(
                "INSERT INTO geo_data (walk_id, t, lat, lng) VALUES (?, ?, ?, ?)",
                (walk_id, point[0], point[1], point[2]),
            )
        for reading in walk['accel_data']:
            db.execute(
                "INSERT INTO accel_data (walk_id, t, x, y, z) VALUES (?, ?, ?, ?, ?)",
                (walk_id, reading[0], reading[1], reading[2], reading[3]),
            )
    except Exception as e:
        log.error(log.exc(e))
        return None
    return walk_id
def post(self, nop=None, nop2=None):
    """Accept a walk payload (base64-encoded, raw-deflate-compressed JSON),
    store it, and kick off processing. Responds "OK" on success."""
    log.info("Home.post")
    try:
        raw = self.get_argument('walk_data')
        # -15 window bits: raw deflate stream, no zlib header
        decoded = zlib.decompress(base64.b64decode(raw), -15)
        data = json.loads(decoded.decode())
        # log.debug(data)
    except Exception as e:
        return self.error(log.exc(e))
    if not len(data['accel_data']):
        return self.error("NO DATA")
    walk_id = model.insert_walk(data)
    log.info("Processing data...")
    try:
        process_walk(walk_id)
    except Exception as e:
        return self.error("Could not process: %s" % log.exc(e))
    log.info("--> done")
    return self.text("OK")
def list_contents():
    """List every key name in the configured S3 bucket.

    Returns a list of UTF-8 encoded key names, or False on failure.
    """
    log.info("s3.list")
    connection = boto.connect_s3(config['aws']['access_key_id'], config['aws']['secret_access_key'])
    log.info("--> listing %s" % (config['aws']['bucket']))
    try:
        bucket = connection.get_bucket(config['aws']['bucket'])
        contents = [entry.name.encode('utf-8') for entry in bucket.list()]
    except Exception as e:
        log.error("--> failed: %s" % log.exc(e))
        return False
    log.info("--> %s" % contents)
    return contents