def ingest_geo_feature(path, kind):
    log.info("ingest_geo_feature %s" % path)
    t_protect = model.get_protect(kind)
    sightings = []
    headings = {}
    with open(path) as f:
        rows = csv.reader(f)
        for r, row in enumerate(rows):
            if r == 0:
                for i, item in enumerate(row):
                    headings[item] = i
                continue
            try:
                dt = util.parse_date("%s %s" % (row[headings['Date']], row[headings['Time']]), tz=config['local_tz'], dayfirst=True)
                t = util.timestamp(dt)
                if t <= t_protect:
                    log.warning("Protected t, skipping...")
                    continue
                try:
                    coordinates = strings.as_numeric(row[headings['Longitude']]), strings.as_numeric(row[headings['Latitude']]), strings.as_numeric(row[headings['Altitude']])
                except Exception as e:
                    log.error("Missing coordinates! Skipping...")
                    continue
                properties = {'DateTime': dt.strftime("%Y-%m-%dT%H:%M:%S%z"), 't_utc': t, 'ContentType': kind}
                for heading in headings:
                    if heading not in ['Date', 'Time', 'Latitude', 'Longitude', 'Altitude']:
                        try:
                            properties[heading] = strings.as_numeric(row[headings[heading]])
                        except IndexError:
                            pass
                feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
                model.insert_feature(kind, t, geojson.dumps(feature))
            except Exception as e:
                log.error("Row failed: " + log.exc(e))
                continue
def main():
    try:
        filename = sys.argv[1] if len(sys.argv) > 1 else None
        notes = retrieve_convo(filename)
        # normalize
        min_t = notes[0][0]
        for note in notes:
            note[0] -= min_t
        sender = osc.Sender(config['oscpin'], 23232)
        for pin in (2, 3):
            sender.send("/noteoff", pin)
        time.sleep(1)
        start_t = time.time()
        i = 0
        while True:
            while time.time() - start_t < notes[i][0]:
                time.sleep(0.01)
            sender.send("/noteon" if notes[i][2] else "/noteoff", 2 if notes[i][1] == 'A' else 3)
            log.info("%s %s" % (notes[i][1], "ON " if notes[i][2] else "OFF"))
            i += 1
            if i == len(notes):
                break
    except Exception as e:
        log.info(log.exc(e))
        for pin in (2, 3):
            sender.send("/noteoff", pin)
def __init__(self, points, paths, places, cities):
    self.points = points
    self.paths = paths
    self.places = places
    self.cities = cities
    self.current_point = points[-1]
    years = {}
    for p, point in enumerate(reversed(points)):    # start with the most recent point
        if point.year not in years:
            years[point.year] = [[[None] * 1440 for d in xrange(7)] for w in xrange(52)]    # every minute in every day in every week
        year = years[point.year]
        w = point.week
        d = point.weekday
        m = point.minute
        while year[w][d][m] is None:
            year[w][d][m] = point.place if point.place is not None else -1     # flag for moving. might replace this with a speed / transportation mode indicator.
            if p == 0:  # don't do it for the last point
                break
            m += 1
            if m == len(year[w][d]):    # end of the day
                m = 0
                d += 1
                if d == len(year[w]):   # end of the week
                    d = 0
                    w += 1
                    if w == len(year):  # end of the year
                        log.info("HIT END")
                        break
    dict.__init__(self, years)
def parse(request): log.info("ambit_geo.parse") sample = ingest_json_body(request) if sample is None: return sample, "Could not parse" data = {} for key, value in sample.items(): if key == "UTC": dt = util.parse_date(sample['UTC']) # these are marked UTC in the data t = util.timestamp(dt) data['t_utc'] = t continue if key == "Longitude": data['longitude'] = math.degrees(float(sample['Longitude'])) continue if key == "Latitude": data['latitude'] = math.degrees(float(sample['Latitude'])) continue if key == "GPSAltitude": data['altitude'] = float(sample['GPSAltitude']) continue if type(value) != str: continue data[key] = strings.as_numeric(value) try: log.debug("%s %s %s" % (data['longitude'], data['latitude'], data['altitude'])) except: log.error("MISSING GEO") return data
def add_clip(db, t, hit_id):
    try:
        db.execute("INSERT INTO clips (t, hit_id, posted) VALUES (?, ?, 0)", (t, hit_id))
    except Exception as e:
        log.error(log.exc(e))
        return
    log.info("Added clip %s %s" % (t, hit_id))
def strips(points, user_id=None):
    log.info("Drawing strips for user %s..." % user_id)
    lines = []
    q = 0
    for p, point in enumerate(points):
        prev = points[p - 1] if p > 0 else None
        if prev is not None and point.period < prev.period:
            q += 1
        color = colors[point.location % len(colors)]
        lines.append([(point.period / PERIODS) * 1000, q, ((point.period + point.duration) / PERIODS) * 1000, q, color, 8.0])
        if point.period + point.duration > PERIODS:
            overflow = (point.period + point.duration) - PERIODS
            lines.append([0, q + 1, (overflow / PERIODS) * 1000, q + 1, color, 8.0])
    ctx = drawing.Context(1000, ((q + 2) * 10) + 2, relative=False, flip=False, hsv=False, background=(0., 0., 0., 1.))
    for line in lines:
        line[1] = line[3] = (((line[1] / (q + 2))) * ((q + 2) * 10)) + 6
        ctx.line(*line)
    ctx.output("images/%s_strips.png" % user_id, False)
def get_video_times(gpx_filename):
    # get GPX file
    gpx = open(gpx_filename)
    try:
        xml = ElementTree.fromstring(gpx.read())
    except Exception as e:
        log.error("XML error (%s): %s" % (gpx_filename, e))
        exit()
    # load GPX data
    # basically, look at the first GPS time and the corresponding media time, and subtract to get the precise start time
    ns = "{http://www.topografix.com/GPX/1/1}"
    timestamps = xml.findall("%strk/%strkseg/%strkpt/%stime" % tuple([ns] * 4))
    first_timestamp = timestamps[0].text
    first_timestamp_dt = datetime.datetime.strptime(first_timestamp, "%Y-%m-%dT%H:%M:%S")
    first_timestamp_t = calendar.timegm(first_timestamp_dt.timetuple())
    media_times = xml.findall("%strk/%strkseg/%strkpt/%sextensions/%smediatime" % tuple([ns] * 5))
    first_media_time = media_times[0].text
    first_media_time_dt = time.strptime(first_media_time, "%H:%M:%S.%f")
    first_media_time_seconds = first_media_time_dt.tm_min * 60 + first_media_time_dt.tm_sec
    start_timestamp = first_timestamp_t - first_media_time_seconds
    end_timestamp = timestamps[-1].text
    video_start_dt = datetime.datetime.utcfromtimestamp(start_timestamp)
    video_start_t = float(calendar.timegm(video_start_dt.timetuple()))
    video_end_dt = datetime.datetime.strptime(end_timestamp, "%Y-%m-%dT%H:%M:%S")
    video_end_t = float(calendar.timegm(video_end_dt.timetuple()))
    log.info("VIDEO START REAL TIME %s UTC" % datetime.datetime.utcfromtimestamp(video_start_t).strftime("%Y-%m-%d %H:%M:%S"))
    log.info("VIDEO END REAL TIME %s UTC" % datetime.datetime.utcfromtimestamp(video_end_t).strftime("%Y-%m-%d %H:%M:%S"))
    return video_start_t, video_end_t   ## note! this is just the last GPS point, not necessarily the end of the movie!
def render(self, template_name, template_values=None, **kwargs):
    # using jinja2 templates instead of tornado's to keep consistency
    if type(template_values) == dict:
        template_values.update(kwargs)
    else:
        template_values = kwargs
    template_values['uri'] = self.request.uri
    if 'log_templates' in config['tornado'] and config['tornado']['log_templates']:
        log.info("TEMPLATE %s: %s" % (template_name, template_values))
    else:
        log.info("TEMPLATE %s" % template_name)
    for key in config:
        if type(config[key]) is dict:
            for param in config[key]:
                template_values[key + "_" + param] = str(config[key][param])
        else:
            template_values[key] = config[key]
    template_values['template_name'] = template_name
    if 'user' not in template_values:
        template_values['user'] = self.user
    template_dir = os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "templates"))
    renderer = render_jinja(template_dir)
    renderer._lookup.filters.update(filters.filters)
    output = (renderer[template_name](template_values)).encode('utf-8')
    suffix = strings.suffix('.', template_name)
    if suffix == "html":
        self.html(output)
    else:
        self.text(output)
def run(self):
    while True:
        notes = self.queue.get()
        log.info("Playing...")
        self.playing = True
        note_ons = [(note.on * DURATION, int(note.channel), int(note.pitch), int(127 * ((note.velocity * MIN_VELOCITY) + MIN_VELOCITY))) for note in notes]
        note_offs = [(note.off * DURATION, int(note.channel), int(note.pitch), 0) for note in notes]
        notes = note_ons + note_offs
        notes.sort(key=lambda n: n[0])
        start_t = time.time()
        n = 0
        while True:
            t = time.time() - start_t
            if t > notes[n][0]:
                # midi_out.send_note(notes[n][1], notes[n][2], notes[n][3])
                midi_out.send_note(1, [0, 64, 69, 71][notes[n][1]], notes[n][3])
                n += 1
                if n == len(notes):
                    break
            time.sleep(0.01)
        self.playing = False
        log.info("--> done")
def temporal_filter(features, resolution):
    try:
        log.info("--> starting temporal_filter")
        first_t = features[0]['properties']['t_utc']
        dt = datetime.datetime.utcfromtimestamp(first_t)
        dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
        start_t = util.timestamp(dt)
        log.debug("start_date %s" % util.datestring(start_t))
        log.debug("stop_date %s" % util.datestring(features[-1]['properties']['t_utc']))
        log.debug("start_t %s" % start_t)
        log.debug("step %s" % resolution)
        results = []
        index_t = start_t
        index = 0
        while True:
            # log.debug("Checking %s..." % util.datestring(index_t))
            while index < len(features) and features[index]['properties']['t_utc'] < index_t:
                index += 1
            if index == len(features):
                break
            if not (features[index]['properties']['t_utc'] > index_t + resolution):
                # log.debug("--> %s %s %s" % (index, features[index]['id'], util.datestring(features[index]['properties']['t_utc'])))
                results.append(features[index])
            index_t += resolution
        log.info("--> done temporal_filter")
        return results
    except Exception as e:
        log.error(log.exc(e))
def ingest_data(feature_type, feature):
    # note that this operates on the original datastructure
    log.info("ingest_data")
    try:
        db = Application.instance.db
    except AttributeError:
        from mongo import db
    feature = verify_geojson(feature)
    if not feature:
        return False, "Could not format as geojson"
    feature['properties'].update({'FeatureType': feature_type if 'FeatureType' not in feature['properties'] else feature['properties']['FeatureType']})
    feature = verify_t(feature)
    if not feature:
        return False, "Missing t_utc"
    feature = verify_expedition(feature)
    feature = tag_team(feature)
    feature = verify_geometry(feature)
    if feature['geometry'] is None:
        feature = estimate_geometry(feature, db)
    feature['properties'].update({'t_created': util.timestamp(ms=True)})
    try:
        feature_id = db.features.insert_one(feature).inserted_id
    except Exception as e:
        log.error(log.exc(e))
        return False, "Database error"
    log.info("--> success (%s)" % feature_id)
    return True, feature_id
def parse(request): log.info("beacon_spot.parse") content = ingest_xml_body(request) if content is None: return content, "Could not parse" # print(request.headers) data = {'FeatureType': "beacon"} try: content = content['messageList']['message'] if type(content) is not OrderedDict: content = content[0] data['latitude'] = float(content['latitude']) data['longitude'] = float(content['longitude']) data['t_utc'] = int(content['timeInGMTSecond']) data['Satellite'] = content['esnName'] data['ESN'] = content['esn'] data['ID'] = content['id'] data['MessageType'] = content['messageType'] data['MessageDetail'] = content['messageDetail'] data['batteryState'] = None if 'batteryState' not in content else content['batteryState'] data['mode'] = None if 'mode' not in content else content['mode'] except Exception as e: log.error(log.exc(e)) log.error(content) return data
def get(self, user_id):
    log.info("Home.get")
    if not len(user_id):
        return self.text("/user_id")
    user = self.db.get("SELECT * FROM users WHERE id=%s", user_id)
    if user is None:
        return self.text("User not found")
    if not self.get_argument("partial", None):
        return self.render("page.html", user=user)
    log.info("--> generating partial")
    almanac = Almanac.build(self.db, user_id)   # should be pulled from a cache
    weights = []    # initialized up front so the render call below still works when the current point is moving
    if not almanac.current_point.moving:
        total_weight = sum([weight for place, weight in almanac.current_point.place.connections.items()])   # this goes in almanac somewhere
        for place, weight in almanac.current_point.place.connections.items():
            weights.append("%s (%f)" % (place.id, (weight / total_weight)))
    place = almanac.current_point.place if almanac.current_point.place is not None else None
    return self.render("content.html", place=place, weights=weights, user=user)
def parse(request): log.info("video.parse") paths = save_files(request) if not len(paths): return None, "No files" # process the json data = None for path in paths: if path[-4:] == "json": try: with open(path) as f: data = json.loads(f.read()) except Exception as e: log.error(log.exc(e)) return None, "Could not parse" break if data is None: return None, "No data" # process the video for path in paths: if path[-4:] != "json": break if 'TeamMember' in data: data['Member'] = data['TeamMember'] del data['TeamMember'] data['Title'] = strings.titlecase(data['Title']) data['UploadPath'] = path.split('/')[-1] data['YouTubeURL'] = None return data
def open(self):
    log.info("//////////// CollarSocket.open")
    self.socket_id = strings.random_string(10)
    CollarSocket.sockets[self.socket_id] = self
    self.device_id = None
    log.info("--> new collar socket_id %s" % self.socket_id)
    CollarSocket.send(self.socket_id, {'socket_id': self.socket_id})
def post(self, nop=None):
    walk_id = self.get_argument('walk_id')
    show = self.get_argument('show')
    log.info("Sequence.post %s %s" % (walk_id, show))
    hidden = show == "false"
    model.hide(walk_id, hidden)
    return self.text("OK")
def get(self, page=None, walk_id=None):
    log.info("Home.get %s" % page)
    if not len(page):
        return self.render("home.html")
    if page == "walk":
        if len(walk_id) and walk_id == "c":
            walks = model.fetch_walks()
            if len(walks):
                walk_id = random.choice(walks)['id']
            else:
                walk_id = None
        elif not type(walk_id) == int and not len(walk_id):
            walk_id = None
        return self.render("walk.html", sequence=json.dumps(model.fetch_sequence(walk_id)), ref_id=walk_id)
    if page == "walks":
        return self.render("walks.html", walks=model.fetch_walks(hidden=True))
    if page == "choose":
        return self.render("choose.html", walks=model.fetch_walks())
    if page in ["prepare", "route", "map", "thanks", "orientation", "background"]:
        return self.render("%s.html" % page)
    return self.not_found()
def mark_clip(db, t):
    log.info("Marking clip %s" % t)
    try:
        db.execute("UPDATE clips SET posted=1 WHERE t=?", (t,))
    except Exception as e:
        log.error(log.exc(e))
        return
def __init__(self, device_name=None, baud=9600, message_handler=None, blocking=False, verbose=False):
    threading.Thread.__init__(self)
    self.daemon = True
    self.verbose = verbose
    self.message_handler = message_handler
    if device_name is None:
        for dn in os.listdir("/dev"):
            if "tty.usbserial-" in dn:
                device_name = os.path.join("/dev", dn)
                break
            if "ttyUSB" in dn:
                device_name = os.path.join("/dev", dn)
                break
        if device_name is None:
            log.info("No devices available")
            exit()
    log.info("Receiving xbee messages on %s" % device_name)
    try:
        self.connection = serial.Serial(device_name, baud)
        self.xbee = XB(self.connection)
    except Exception as e:
        if e.message != "Port is already open.":
            log.error(log.exc(e))
            return
    self.start()
    if blocking:
        try:
            while True:
                time.sleep(5)
        except (KeyboardInterrupt, SystemExit):
            self.connection.close()
            pass
def build(cls, user_id, host):
    if host == "NONE":
        return None
    visits = model.fetch_visits(user_id)
    if len(visits) < 2:
        return None
    visits = visits[:-1] if visits[-1]['host'] == host else visits  # don't include current site in analysis
    cls.sites = {}
    v = n = 0
    site = cls.get(visits[v]['host'])
    while v < len(visits):
        while n < len(visits) and visits[v]['host'] == visits[n]['host']:
            if not visits[n]['auto']:   # don't include auto-browsed sites in model
                if visits[n]['page'] != '/':    # more interesting to stick with deeper paths
                    site.pages.append(visits[n]['page'])
            n += 1
        if n == len(visits):
            break
        duration = visits[n]['t'] - visits[v]['t']
        next_site = cls.get(visits[n]['host'])
        if not visits[n]['auto']:   # don't include auto-browsed sites in model
            if next_site.host != "NONE":
                site.nexts.append(next_site)
                site.durations.append(max(min(duration, MAX_DURATION), MIN_DURATION))
        site = next_site
        v = n
    if 'NONE' in Model.sites:
        del Model.sites['NONE']
    log.info("Model size: %s" % len(Model.sites))
    return True
def __init__(self, device_name=None, baud=9600, message_handler=None, blocking=False):
    threading.Thread.__init__(self)
    self.daemon = True
    self.verbose = False
    self.message_handler = message_handler
    if device_name is None:
        for dn in os.listdir("/dev"):
            if "tty.usbserial-" in dn:
                device_name = os.path.join("/dev", dn)
                break
        if device_name is None:
            log.info("No devices available")
            exit()
    log.info("Receiving xbee messages on %s" % device_name)
    try:
        self.connection = serial.Serial(device_name, baud)
        self.xbee = XB(self.connection)
    except Exception as e:
        if e.message != "Port is already open.":
            log.error(log.exc(e))
            return
    self.start()
    if blocking:
        try:
            while True:
                time.sleep(5)
        except (KeyboardInterrupt, SystemExit):
            self.connection.close()
            pass
def ingest_audio(path, i, t_protect):
    log.info("ingest_audio %s" % path)
    dt = datetime.datetime.strptime(path.split('/')[-1], "audio %d%m%Y_%H%M.mp3")
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    # if t <= t_protect:
    #     log.warning("Protected t, skipping...")
    #     return
    fixed_path = path.replace(".mp3", ".amr")
    shutil.move(path, fixed_path)
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.wav" % (t, i))
    log.debug("CONVERTING SOUND.")
    try:
        log.debug("--> converting [%s] to [%s]" % (fixed_path, new_path))
        log.debug("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)))
        subprocess.check_call("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)), shell=True)
    except Exception as e:
        log.error(log.exc(e))
        return
    log.debug("DONE CONVERTING SOUND.")
    feature = geojson.Feature(properties={'utc_t': t, 'ContentType': "audio", 'url': "/static/data/audio/%s-%s.wav" % (t, i), 'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")})
    feature_id = model.insert_feature('audio', t, geojson.dumps(feature))
def send(cls, user_id, message):
    socket = WebSocket.sockets[user_id]
    log.info("--> sending [%s] to %s" % (message, user_id))
    try:
        socket.write_message(message)
    except Exception as e:
        log.error(log.exc(e))
def tag_team(data):
    try:
        db = Application.instance.db
    except AttributeError:
        from mongo import db
    try:
        member = data['properties']['Member']
        t = data['properties']['t_utc']
        if member is None:
            if 'Satellite' in data['properties']:
                satellite = data['properties']['Satellite']
                try:
                    team = list(db.satellites.find({'Name': satellite, 't_utc': {'$lte': t}}).sort('t_utc', -1).limit(1))[0]['Team']
                    log.info("--> team is %s" % team)
                except IndexError:
                    log.info("--> no team entry at time %s" % t)
                    team = None
            else:
                log.info("--> no info for team")
                team = None
        else:
            try:
                team = list(db.members.find({'Name': member, 't_utc': {'$lte': t}}).sort('t_utc', -1).limit(1))[0]['Team']
                log.info("--> team is %s" % team)
            except (IndexError, KeyError):
                log.info("--> no team entry at time %s" % t)
                team = None
        data['properties']['Team'] = team
        return data
    except Exception as e:
        log.error(log.exc(e))
        return data
def save_files(request):
    log.info("ingest.save_files")
    paths = []
    try:
        for key, fileinfo in request.files.items():
            fileinfo = fileinfo[0]
            path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads", "%s_%s" % (util.timestamp(), fileinfo['filename'])))
            with open(path, 'wb') as f:
                f.write(fileinfo['body'])
            log.info("--> saved %s" % path)
            if zipfile.is_zipfile(path) is True:
                log.info("Examining zip file...")
                with zipfile.ZipFile(path, 'r') as archive:
                    filenames = archive.namelist()
                    for filename in filenames:
                        if filename[0] == '.' or filename[0] == '_' or '/' in filename:
                            # zipped files cannot start with . (invisible), _ (system), or contain longer paths
                            continue
                        dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads"))
                        temp_path = archive.extract(filename, dir_path)
                        path = os.path.abspath(os.path.join(dir_path, "%s_%s" % (util.timestamp(), filename)))
                        shutil.move(temp_path, path)
                        log.info("--> saved %s" % path)
                        paths.append(path)
                log.info("--> zip file extracted")
            else:
                paths.append(path)
    except Exception as e:
        log.error(log.exc(e))
    return paths
def temp_image(self, image):
    import imaging
    self.set_header("Content-Type", "image/png")
    self.set_header("Cache-Control", "no-cache")
    if type(image) != str:
        image = imaging.to_string(image)
    self.write(image)
    log.info("200 image/png (temporary)")
def image(self, image):
    import imaging
    self.set_header("Content-Type", "image/png")
    self.set_header("Expires", "Thu, 15 Apr 2050 20:00:00 GMT")
    if type(image) != str:
        image = imaging.to_string(image)
    self.write(image)
    log.info("200 image/png")
def send(cls, score):
    data = []
    t = util.timestamp(ms=True)
    for cue in score:
        log.info("Sending %s" % (cue,))
        data.append(str(cue[0] + t))
        data.append(cue[1])
    # NOTE: the timestamped cue list built in data above is never used; the original code sends score as-is
    cls.sender.send('/cue', score)
def insert_beat(t, events):
    num = len(events)
    if num == 0:
        return
    events = json.dumps(events)
    db.execute("INSERT INTO beats (t, events) VALUES (?, ?)", (int(t), events))
    log.info("added events for %s stations" % num)
    connection.commit()
def insert_feature(db, kind, t, data):
    try:
        db.execute("INSERT INTO features (kind, t, data, t_created) VALUES (?, ?, ?, ?)", (kind, t, data, util.timestamp()))
        entry_id = db.lastrowid
    except Exception as e:
        log.error(log.exc(e))
        return
    log.info("Inserted feature (%s) %s" % (entry_id, t))
    return entry_id
def get(self, page=None):
    if page == config['sendpw']:
        message = '"%s"' % self.get_argument('message').strip('"')[:138]
        ts.queue.put(message)
        log.info("remote: %s" % message)
        return self.text("OK")
    if not len(page):
        return self.text("OK")
    return self.not_found()
def insert_hydrodrop(db, t, hydrosensor_id, lat, lon):
    try:
        db.execute("INSERT INTO hydrodrops (t, id, lat, lon, t_created) VALUES (?, ?, ?, ?, ?)", (t, hydrosensor_id, float(lat), float(lon), util.timestamp()))
        hydrodrop_id = db.lastrowid
    except Exception as e:
        log.error(log.exc(e))
        return
    log.info("Inserted hydrodrop (%s) %s %s" % (hydrodrop_id, hydrosensor_id, t))
    return hydrodrop_id
def get_user_points(user_ids):
    location = {'$geoWithin': {'$geometry': {'type': "Polygon", 'coordinates': [[[LON_1, LAT_1], [LON_2, LAT_1], [LON_2, LAT_2], [LON_1, LAT_2], [LON_1, LAT_1]]]}}}
    for u, user_id in enumerate(user_ids):
        log.info("USER %s..." % user_id)
        cursor = db.entries.find({'user_id': user_id, 'location': location, 't': {'$gt': timeutil.timestamp(timeutil.string_to_dt(config['start_date'], tz="America/New_York")), '$lt': timeutil.timestamp(timeutil.string_to_dt(config['stop_date'], tz="America/New_York"))}}).sort('t')
        points = [Point(point['location']['coordinates'][0], point['location']['coordinates'][1], point['t']) for point in cursor]
        log.info("--> %d points" % len(points))
        yield user_id, points
    yield (None, None)
def get(self, page=None): if page == config["sendpw"]: message = '"%s"' % self.get_argument("message").strip('"')[:138] ts.queue.put(message) log.info("remote: %s" % message) return self.text("OK") if not len(page): return self.text("OK") return self.not_found()
def run(self):
    while True:
        try:
            message, address = self.messages.get()
            log.info("SENDING [%s] to %s:%s" % (message, address[0], address[1]))
            self.socket.sendto(message.encode('ascii'), address)
        except Exception as e:
            log.error(log.exc(e))
def get(self, page=None):
    if page in METALS:
        point = sample()
        level = point[METALS.index(page)]
        level = int((level * 20) + 80) / 100.0  # .8 - 1.
        log.info("%s: %s" % (page, level))
        return self.text(str(level))
        # return self.text(str(1.0))
    return self.text("ANIMAS: %s" % (METALS,))
def on_key_press(info):
    global quality
    key, modifiers = info
    if key in characters:
        quality = key
        log.info("Quality is %s" % quality)
    elif key == '-':
        export()
        player.play(notes)
def run(self):
    while True:
        message = self.queue.get()[:140]
        log.info("SENDING TWEET: %s" % message)
        try:
            self.sender.statuses.update(status=message)
        except Exception as e:
            log.error(log.exc(e))
        else:
            log.info("--> sent")
def draw():
    global spectrum
    try:
        spectrum = spectrums.get_nowait()
        log.info("New spectrum: %sx%s" % (spectrum.shape[1], spectrum.shape[0]))
    except queue.Empty:
        pass
    if spectrum is None:
        return
    ctx.pixels(spectrum)
def run(self):
    while True:
        try:
            data = self.queue.get()
            response = requests.post(config['server'], json=data, timeout=5)
            log.info(response.status_code)
        except Exception as e:
            log.error(log.exc(e))
def message_handler(response):
    global start_t, stop_t, entries, rate
    log.info("[ID %s] [IP %s] [RSSI %02d] [T %.3f] [BAT %02d] [MAG %.3f]" % (response['id'], response['ip'], response['rssi'], response['t'], response['bat'], response['mag']))
    entries += 1
    current_t = response['t']
    if start_t is None:
        start_t = current_t
    elapsed_t = current_t - start_t
    if elapsed_t > 0:
        rate = entries / elapsed_t
def draw():
    global display_spectrum
    try:
        display_spectrum = display_spectrums.get_nowait()
        log.info("New spectrum: %sx%s" % (display_spectrum.shape[1], display_spectrum.shape[0]))
    except queue.Empty:
        pass
    if display_spectrum is None:
        return
    ctx.pixels(display_spectrum)
def post(self, nop=None): log.info("Home.post") try: data = json.loads(self.request.body.decode('utf-8')) hit_id = mturk.create_hit("https://s3.amazonaws.com/%s/%s.wav" % (config['s3']['bucket'], data['t'])) if hit_id != False: model.add_clip(data['t'], hit_id) except Exception as e: return self.error(e) return self.text("OK")
def export():
    global notes, filename
    if not len(notes):
        return
    calc_play()
    notes.sort(key=lambda x: x.on)
    fn = "scores/%s_%s.score" % (filename.split('.')[0], util.timestamp())
    if not os.path.isdir("scores"):
        os.mkdir("scores")
    util.save(fn, (notes, ledgers, columns))
    log.info("Saved %s" % fn)
def path_print(points, index):
    t = str(timeutil.timestamp(ms=True)).replace(".", "-")
    log.info("Drawing path...")
    ctx = drawing.Context(3000, int(3000 / RATIO), relative=True, flip=True, hsv=True)
    ctx.image("basemap/basemap.png")
    midline = sum([point.x for point in points]) / len(points)
    poss = []
    for p in range(len(points)):
        x1, y1 = points[p].x, points[p].y
        if p < len(points) - 1:
            x2, y2 = points[p + 1].x, points[p + 1].y
            ctx.line(x1, y1, x2, y2, stroke=(0., 0., .5, 1.), thickness=5.0)
        ctx.arc(x1, y1, 15 / ctx.width, 15 / ctx.height, fill=(0., 0., 0., 1.), thickness=0.0)
        flip = False
        if x1 < midline:
            flip = True
        for pos in poss:
            dist_x = abs(x1 - pos[0]) * ctx.height
            dist_y = abs(y1 - pos[1]) * ctx.height
            if dist_y <= 100 and dist_x <= 400:
                flip = not flip
        if not flip:
            x = x1 + (30 / ctx.width)
        else:
            x = x1 - (50 / ctx.width)
        y = y1 - (12 / ctx.height)
        poss.append((x, y))
        ctx.label(x, y, str(p + 1), stroke=(0., 0., 0., 1.), font="Monaco", size=36)
    for p, point in enumerate(points):
        label = "%d) %s %s%s" % (p + 1, "Wake up at" if p == 0 else "%s," % point.display_time, point.address, "" if p != (len(points) - 1) else " ... sleep")
        ctx.label((200 / ctx.width), 1.0 - ((200 + (40 * p)) / ctx.height), label, stroke=(0., 0., 0., 1.), font="Monaco", size=36)
    ctx.output("images/%s_path.png" % (index,))
    log.info("--> done")
def run(self):
    while True:
        try:
            message, address = self.socket.recvfrom(1024)
            data = message.decode('utf-8').split(',')
            if data[0] == "/a":
                self.queue.put(data[1:])
            else:
                log.info(data)
        except Exception as e:
            log.error(log.exc(e))
def json(self, data, filename=False):
    try:
        import numpy as np
    except Exception:
        output = jsonlib.dumps(data, indent=4, default=lambda obj: str(obj))
    else:
        output = jsonlib.dumps(data, indent=4, default=lambda obj: str(obj) if type(obj) != np.ndarray else list(obj))
    self.set_header("Content-Type", "text/plain")
    if filename:
        self.set_header("Content-Disposition", "attachment; filename=%s" % filename)
    self.write(output)
    log.info("200 text/plain (JSON)")
def __init__(self, handlers):
    settings = {
        'template_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "templates")),
        'static_path': os.path.abspath(os.path.join(os.path.dirname(__main__.__file__), "static"))
    }
    if 'tornado' in config:
        tornado_settings = config['tornado']
        for key in tornado_settings.keys():
            settings[key] = tornado_settings[key]
    tornado.web.Application.__init__(self, handlers, **settings)
    if 'mysql' in config:
        log.info("--> tornado initializing mysql")
        import database
        try:
            self.db = database.Connection()
        except Exception as e:
            log.error("Could not connect to MySQL: %s" % log.exc(e))
    elif 'mongo' in config:
        log.info("--> tornado initializing mongo")
        try:
            mongo = config['mongo']
            import pymongo
            connection = pymongo.Connection(mongo['host'])
            self.db = connection[mongo['database']]
        except Exception as e:
            log.error("Could not connect to mongo: %s" % log.exc(e))
    if 'redis' in config:
        log.info("--> tornado initializing redis")
        import redis
        self.redis = redis.StrictRedis()
    if 'memcache' in config:
        log.info("--> tornado initializing memcache")
        import memcache
        self.cache = memcache.Client([config['memcache']['address'] + ":" + str(config['memcache']['port'])])
    self.jobs = None
    if 'beanstalk' in config:
        log.info("--> tornado initializing beanstalk")
        import jobs
        self.jobs = jobs.Jobs()
    # initialize oauth server
    try:
        self.oauth_server = oauth2.Server(signature_methods={'HMAC-SHA1': oauth2.SignatureMethod_HMAC_SHA1()})
    except ImportError:
        self.oauth_server = None
    Application.instance = self
def run(self):
    start_t = time.time()
    while True:
        t = time.time()
        if t - start_t >= 1:
            events = []
            while not self.queue.empty():
                events.append(self.queue.get_nowait())
            hz = sum(events)
            log.info("Running at %shz" % hz)
            self.monitor.queue.put(hz)
            start_t = t
        time.sleep(0.1)
def get(self, collar_id=None, session_id=None, start_t=None, stop_t=None):
    log.info("GET")
    session_list = []
    if not len(collar_id) or not len(session_id):
        collar_ids = list(self.db.entries.find().distinct("collar_id"))
        for collar_id in collar_ids:
            sessions = list(self.db.entries.find({'collar_id': collar_id}).distinct("session"))
            session_list.append({'collar_id': collar_id, 'sessions': sessions})
        return self.render("index.html", session_list=session_list)
    if not len(start_t):
        start_t = 0
    else:
        start_t = int(start_t)
    if not len(stop_t):
        stop_t = 86400000
    else:
        stop_t = int(stop_t)
    collar_id = strings.as_numeric(collar_id)
    session_id = strings.as_numeric(session_id)
    log.info("%d (%s-%s)" % (collar_id, start_t, stop_t))
    template = {'t': {'$gt': start_t, '$lt': stop_t}, 'collar_id': collar_id, 'session': session_id}
    log.debug(template)
    results = list(self.db.entries.find(template).sort('t'))
    start_segment = None
    stop_segment = None
    if len(results):
        start_segment = timeutil.seconds_to_string(results[0]['t'])
        stop_segment = timeutil.seconds_to_string(results[-1]['t'])
    for result in results:
        del result['_id']
        del result['session']
    log.debug("Returned %s entries" % len(results))
    return self.render("home.html", data=results, collar_id=collar_id, session=session_id, start_segment=start_segment, stop_segment=stop_segment)
def on_mouse_press(data):
    global waiting, transmitting, current_string, incoming_message
    # waiting mode, nothing happens
    if waiting:
        log.debug("MODE: waiting")
        return
    # transmitting mode, we've clicked, so go into receiving mode
    if transmitting:
        log.debug("MODE: transmitting")
        ctx.textures = []
        incoming_message = []
        label.text = ""
        ctx.objects = [o for o in ctx.objects if o is label]
        draw_reception()
        transmitting = False
        flush_messages()
        return
    # receiving mode: process clicks and build message
    log.debug("MODE: receiving")
    x, y, button, modifiers = data
    x *= ctx.width
    y *= ctx.height
    for c, coord in enumerate(coords):
        if x > coord[0][0] and x < coord[1][0] and y > coord[0][1] and y < coord[1][1]:
            if c == len(CHARACTERS) + 1:
                sender.messages.put("DONE")
                ctx.textures = []
                incoming_message = []
                label.text = ""
                ctx.objects = [o for o in ctx.objects if o is label]
                result = subprocess.run(["osascript", "focus.scpt", "main_terminal"], stdout=subprocess.PIPE)
                log.info(result)
                waiting = True
            elif c == len(CHARACTERS):
                if len(incoming_message):
                    incoming_message.pop()
                    sender.messages.put("ERASE")
            else:
                character = CHARACTERS[c]
                if len(incoming_message) < 30:
                    incoming_message.append(character)
                    sender.messages.put(character)
            label.text = "".join(incoming_message)
            break