def retrieve(db, start, end, type_):
    start = util.parse_date(start, tz=config['tz'])
    start_t = util.timestamp(start)
    end = util.parse_date(end, tz=config['tz'])
    end_t = util.timestamp(end)
    results = db.entries.find({'t_utc': {'$gt': start_t, '$lt': end_t}, 'type': type_}).sort('t_utc')
    return list(results)
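# A minimal usage sketch for retrieve(), assuming a local MongoDB reachable via
# pymongo and the housepy config/util helpers imported above; the database name
# and dates here are illustrative, not from the source.
if __name__ == "__main__":
    from pymongo import MongoClient
    db = MongoClient().telemetry  # hypothetical database name
    entries = retrieve(db, "2015-07-01", "2015-07-02", "weather")
    print("--> %d entries" % len(entries))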
def start_session():
    print("STARTING SESSION")
    global current_session
    t = util.timestamp(ms=True)
    current_session = db.sessions.insert({'t': t})
    sessions.append([t, None])
    print("--> %s" % current_session)
def on_message(response):
    try:
        # print(response['sensor'], response['samples'], response['rssi'])
        t = util.timestamp(ms=True)
        sensor = config['sensors'][response['id']]
        sample = response['data']
        x, y, z = response['data']
        rms = math.sqrt(x**2 + y**2 + z**2)
        sample.append(rms)
        rssi = response['rssi']
        if current_session is not None:
            data = {
                't': t,
                'sensor': sensor,
                'sample': sample,
                'rssi': rssi,
                'session': str(current_session)
            }
            # print(json.dumps(data, indent=4))
            db.branches.insert(data)
        if sensor not in sensor_data:
            sensor_data[sensor] = deque()
            sensor_rssi[sensor] = None
        sensor_data[sensor].appendleft((t, sample))
        sensor_rssi[sensor] = t, rssi
        if len(sensor_data[sensor]) == 1000:
            sensor_data[sensor].pop()
    except Exception as e:
        log.error(log.exc(e))
def run(self):
    while True:
        t = util.timestamp(ms=True)
        for syncbox in Syncbox.boxes:
            if t - syncbox.t > config['health_rate']:
                syncbox.alive = False
        time.sleep(config['health_rate'])
def ingest_audio(path, i, t_protect):
    log.info("ingest_audio %s" % path)
    dt = datetime.datetime.strptime(path.split('/')[-1], "audio %d%m%Y_%H%M.mp3")
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    # if t <= t_protect:
    #     log.warning("Protected t, skipping...")
    #     return
    fixed_path = path.replace(".mp3", ".amr")
    shutil.move(path, fixed_path)
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.wav" % (t, i))
    log.debug("CONVERTING SOUND.")
    try:
        log.debug("--> converting [%s] to [%s]" % (fixed_path, new_path))
        log.debug("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)))
        subprocess.check_call("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)), shell=True)
    except Exception as e:
        log.error(log.exc(e))
        return
    log.debug("DONE CONVERTING SOUND.")
    feature = geojson.Feature(properties={'utc_t': t, 'ContentType': "audio", 'url': "/static/data/audio/%s-%s.wav" % (t, i), 'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")})
    feature_id = model.insert_feature('audio', t, geojson.dumps(feature))
def __init__(self, name, ip):
    self.name = name
    self.ip = ip
    self.alive = True
    self.status = "ready"
    self.t = util.timestamp(ms=True)
    Syncbox.sender.add_target(self.ip, 5280)
def save_files(request):
    log.info("ingest.save_files")
    paths = []
    try:
        for key, fileinfo in request.files.items():
            fileinfo = fileinfo[0]
            path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads", "%s_%s" % (util.timestamp(), fileinfo['filename'])))
            with open(path, 'wb') as f:
                f.write(fileinfo['body'])
            log.info("--> saved %s" % path)
            if zipfile.is_zipfile(path):
                log.info("Examining zip file...")
                with zipfile.ZipFile(path, 'r') as archive:
                    filenames = archive.namelist()
                    for filename in filenames:
                        if filename[0] == '.' or filename[0] == '_' or '/' in filename:
                            # zipped files cannot start with . (invisible), _ (system), or contain longer paths
                            continue
                        dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "uploads"))
                        temp_path = archive.extract(filename, dir_path)
                        path = os.path.abspath(os.path.join(dir_path, "%s_%s" % (util.timestamp(), filename)))
                        shutil.move(temp_path, path)
                        log.info("--> saved %s" % path)
                        paths.append(path)
                log.info("--> zip file extracted")
            else:
                paths.append(path)
    except Exception as e:
        log.error(log.exc(e))
    return paths
def draw():
    t_now = util.timestamp(ms=True)
    ctx.translate(-1., -0.85, -1.5)
    ctx.rotate(*rotation_x)
    ctx.rotate(*rotation_y)
    colors = (1., 1., 1., 1.), (.7, 1., 1., 1.), (1., .7, .7, 1.),
    for s, sensor in enumerate(list(sensor_data)):
        samples = sensor_data[sensor]
        if len(samples):
            # x = [((t_now - sample[0]) / 10.0, (sample[1][0] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)]
            # y = [((t_now - sample[0]) / 10.0, (sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)]
            # z = [((t_now - sample[0]) / 10.0, (sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)]
            ts = [(t_now - sample[0]) / 10.0 for sample in samples]
            ys = [(sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0]) for sample in samples]
            zs = [(sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0]) - 0.5 for sample in samples]
            # ys = list(sp.smooth(sp.remove_shots(ys)))
            # zs = list(sp.smooth(sp.remove_shots(zs)))
            # ys = list(sp.remove_shots(ys))
            # zs = list(sp.remove_shots(zs))
            ys = (np.array(ys) * 2.0) - 0.5
            zs = (np.array(zs) * 2.0) - 0.5
            ys = sp.smooth(ys, 20)
            zs = sp.smooth(zs, 20)
            # combo_yz = [((t_now - sample[0]) / 10.0, (sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0]), ((sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0])) - 0.5) for sample in list(samples)]
            combo_yz = [(ts[i], ys[i], zs[i]) for i in range(len(ys))]
            ctx.lines3D(combo_yz, color=colors[s], thickness=2.0)
def ingest_image_api(path):
    log.info("ingest_image %s" % path)
    file_name = path.split('/')[-1]
    file_name = file_name.split('.')[0]
    front = 'img'
    if '_' in file_name:
        front = file_name.split('_')[0]
        date_string = file_name.split('_')[1]
    else:
        date_string = file_name
    log.info("ingest_image %s" % date_string)  # 060814154100
    dt = datetime.datetime.strptime(date_string.split('_')[0], "%d%m%y%H%M%S")
    log.info("datetime %s" % dt)
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    log.info("timestamp %s" % t)
    try:
        image = Image.open(path)
        width, height = image.size
    except Exception as e:
        log.error(log.exc(e))
        width, height = None, None
    coords = model.get_coords_by_time(t)
    feature = geojson.Feature(geometry=coords, properties={'utc_t': t, 'ContentType': "image", 'url': "/static/data/images/%s_%s.jpg" % (front, t), 'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z"), 'size': [width, height]})
    feature_id = model.insert_feature('image', t, geojson.dumps(feature))
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "images", "%s_%s.jpg" % (front, t))
    shutil.copy(path, new_path)
def message_handler(response):
    # log.info(response)
    try:
        # print(response['sensor'], response['samples'], response['rssi'])
        t = util.timestamp(ms=True)
        sensor = response['sensor']
        samples = response['samples']
        if type(samples[0]) is not list:
            samples = [samples]
        for sample in samples:
            x, y, z = sample
            rms = math.sqrt(x**2 + y**2 + z**2)
            sample.append(rms)
            rssi = response['rssi']
            if current_session is not None:
                data = {
                    't': t,
                    'sensor': sensor,
                    'sample': sample,
                    'rssi': rssi,
                    'session': str(current_session)
                }
                # print(json.dumps(data, indent=4))
                db.branches.insert(data)
            if sensor not in sensor_data:
                sensor_data[sensor] = deque()
                sensor_rssi[sensor] = None
            sensor_data[sensor].appendleft((t, sample))
            sensor_rssi[sensor] = t, rssi
            if len(sensor_data[sensor]) == 1000:
                sensor_data[sensor].pop()
    except Exception as e:
        log.error(log.exc(e))
def ingest_geo_feature(path, kind):
    log.info("ingest_geo_feature %s" % path)
    t_protect = model.get_protect(kind)
    sightings = []
    headings = {}
    with open(path) as f:
        rows = csv.reader(f)
        for r, row in enumerate(rows):
            if r == 0:
                for i, item in enumerate(row):
                    headings[item] = i
                continue
            try:
                dt = util.parse_date("%s %s" % (row[headings['Date']], row[headings['Time']]), tz=config['local_tz'], dayfirst=True)
                t = util.timestamp(dt)
                if t <= t_protect:
                    log.warning("Protected t, skipping...")
                    continue
                try:
                    coordinates = strings.as_numeric(row[headings['Longitude']]), strings.as_numeric(row[headings['Latitude']]), strings.as_numeric(row[headings['Altitude']])
                except Exception as e:
                    log.error("Missing coordinates! Skipping...")
                    continue
                properties = {'DateTime': dt.strftime("%Y-%m-%dT%H:%M:%S%z"), 't_utc': t, 'ContentType': kind}
                for heading in headings:
                    if heading not in ['Date', 'Time', 'Latitude', 'Longitude', 'Altitude']:
                        try:
                            properties[heading] = strings.as_numeric(row[headings[heading]])
                        except IndexError:
                            pass
                feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
                model.insert_feature(kind, t, geojson.dumps(feature))
            except Exception as e:
                log.error("Row failed: " + log.exc(e))
                continue
def temporal_filter(features, resolution):
    try:
        log.info("--> starting temporal_filter")
        first_t = features[0]['properties']['t_utc']
        dt = datetime.datetime.utcfromtimestamp(first_t)
        dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
        start_t = util.timestamp(dt)
        log.debug("start_date %s" % util.datestring(start_t))
        log.debug("stop_date %s" % util.datestring(features[-1]['properties']['t_utc']))
        log.debug("start_t %s" % start_t)
        log.debug("step %s" % resolution)
        results = []
        index_t = start_t
        index = 0
        while True:
            # log.debug("Checking %s..." % util.datestring(index_t))
            while index < len(features) and features[index]['properties']['t_utc'] < index_t:
                index += 1
            if index == len(features):
                break
            if not (features[index]['properties']['t_utc'] > index_t + resolution):
                # log.debug("--> %s %s %s" % (index, features[index]['id'], util.datestring(features[index]['properties']['t_utc'])))
                results.append(features[index])
            index_t += resolution
        log.info("--> done temporal_filter")
        return results
    except Exception as e:
        log.error(log.exc(e))
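# A minimal sketch exercising temporal_filter() with synthetic features: one
# feature every 10 seconds, thinned to at most one per minute. The input shape
# mirrors the 'properties'/'t_utc' access above; the values are illustrative.
if __name__ == "__main__":
    fake = [{'id': i, 'properties': {'t_utc': 1400000000 + (i * 10)}} for i in range(100)]
    kept = temporal_filter(fake, 60)
    print("--> kept %s of %s features" % (len(kept), len(fake)))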
def parse(request): log.info("ambit_geo.parse") sample = ingest_json_body(request) if sample is None: return sample, "Could not parse" data = {} for key, value in sample.items(): if key == "UTC": dt = util.parse_date(sample['UTC']) # these are marked UTC in the data t = util.timestamp(dt) data['t_utc'] = t continue if key == "Longitude": data['longitude'] = math.degrees(float(sample['Longitude'])) continue if key == "Latitude": data['latitude'] = math.degrees(float(sample['Latitude'])) continue if key == "GPSAltitude": data['altitude'] = float(sample['GPSAltitude']) continue if type(value) != str: continue data[key] = strings.as_numeric(value) try: log.debug("%s %s %s" % (data['longitude'], data['latitude'], data['altitude'])) except: log.error("MISSING GEO") return data
def ingest_data(feature_type, feature):
    # note that this operates on the original datastructure
    log.info("ingest_data")
    try:
        db = Application.instance.db
    except AttributeError:
        from mongo import db
    feature = verify_geojson(feature)
    if not feature:
        return False, "Could not format as geojson"
    feature['properties'].setdefault('FeatureType', feature_type)
    feature = verify_t(feature)
    if not feature:
        return False, "Missing t_utc"
    feature = verify_expedition(feature)
    feature = tag_team(feature)
    feature = verify_geometry(feature)
    if feature['geometry'] is None:
        feature = estimate_geometry(feature, db)
    feature['properties'].update({'t_created': util.timestamp(ms=True)})
    try:
        feature_id = db.features.insert_one(feature).inserted_id
    except Exception as e:
        log.error(log.exc(e))
        return False, "Database error"
    log.info("--> success (%s)" % feature_id)
    return True, feature_id
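# A minimal call sketch. Assumption: this module's verify_* helpers accept a
# bare properties dict and wrap it as GeoJSON; the type and values here are
# illustrative, not from the source.
if __name__ == "__main__":
    success, value = ingest_data("weather", {'t_utc': util.timestamp(), 'Temperature': 21.5})
    print(success, value)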
def send(cls, score):
    data = []
    t = util.timestamp(ms=True)
    for cue in score:
        log.info("Sending %s" % (cue,))
        data.append(str(cue[0] + t))
        data.append(cue[1])
    cls.sender.send('/cue', data)  # send the time-shifted cue list built above, not the raw score
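# Usage sketch: each cue appears to be an (offset_seconds, payload) pair whose
# offset is shifted to an absolute timestamp before sending over OSC. The
# class name here is hypothetical; only send() is shown in this snippet.
# Sender.send([(0.0, "start"), (2.5, "accent"), (5.0, "stop")])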
def stop_session():
    print("STOPPING SESSION")
    global current_session
    t = util.timestamp(ms=True)
    start_t = db.sessions.find_one({'_id': current_session})['t']
    duration = t - start_t
    result = db.sessions.update({'_id': current_session}, {'$set': {'duration': duration}})
    sessions[-1][-1] = t
    current_session = None
def retrieve(db, source, start, end, filters, page=None):
    if filters is None:
        filters = {}
    sources = [clean(source) for source in source.split(",")]
    start_t = 0 if start == "*" else util.timestamp(util.parse_date(start, tz=config['tz']))
    end_t = min(2147483647, sys.maxsize) if end == "*" else util.timestamp(util.parse_date(end, tz=config['tz']))
    template = {'t_utc': {'$gt': start_t, '$lt': end_t}, '$or': [{'source': source} for source in sources]}
    template.update(filters)
    log.info("QUERY %s" % template)
    results = db.entries.find(template).sort('t_utc')
    count = results.count()
    if page is None:
        page = (count // 100) + 1
    skip = (page - 1) * 100
    log.debug("page %s, skip %s" % (page, skip))
    results = results.skip(skip).limit(100)
    log.info("--> done")
    return list(results), start_t, end_t, count, page
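# Usage sketch for the paginated variant: "*" leaves a time bound open and
# pages hold 100 entries, defaulting to the last page. Assumes a pymongo
# handle as in the other retrieve(); the database name is illustrative.
if __name__ == "__main__":
    from pymongo import MongoClient
    db = MongoClient().telemetry  # hypothetical database name
    results, start_t, end_t, count, page = retrieve(db, "beacon,ambit", "*", "*", None)
    print("--> page %s, %s entries total" % (page, count))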
def ingest_hydrosensor(hydrosensor_id, content, dt):
    log.info("ingest_hydrosensor")
    t_protect = model.get_protect('hydrosensor')
    lat, lon = model.get_drop_by_id(hydrosensor_id)
    # HACK
    coordinates = [0, 0, 0]
    t = util.timestamp(dt)
    # properties = {'DateTime': dt.strftime("%Y-%m-%dT%H:%M:%S%z"), 't_utc': t, 'ContentType': "hydrosensor"}
    properties = {'ContentType': "hydrosensor"}
    try:
        lines = content.split('\n')
        for line in lines:
            if not len(line.strip()):
                continue
            try:
                # Date: Sat, Sep 13, 2014 at 5:23 AM
                if "Date" in line:
                    dt = util.parse_date(line.replace("Date: ", "").strip(), tz=config['local_tz'])
                    t = util.timestamp(dt)
                    properties['DateTime'] = dt.strftime("%Y-%m-%dT%H:%M:%S%z")
                    properties['t_utc'] = t
                    log.info(dt.strftime("%Y-%m-%dT%H:%M:%S%z"))
                if "Temp" in line:
                    temperature = strings.as_numeric(line.replace("Temp (deg C) = (", "").replace(")", "").strip())
                    properties['temperature'] = temperature
                if "pH" in line:
                    ph = strings.as_numeric(line.replace("pH = (", "").replace(")", "").strip())
                    properties['ph'] = ph
                if "Conductivity" in line:
                    conductivity = line.replace("Conductivity (Cond,TDS,Sal,SG) = (", "").replace(")", "").strip()
                    conductivity = [strings.as_numeric(element) for element in conductivity.split(",")]
                    properties['conductivity'] = conductivity
            except Exception as e:
                log.error(log.exc(e))
                continue
        # if t <= t_protect:
        #     log.warning("Protected t, skipping...")
        #     return
        feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
        feature_id = model.insert_feature('hydrosensor', t, geojson.dumps(feature))
    except Exception as e:
        log.error(log.exc(e))
def message_handler(response):
    try:
        # print(response['sensor'], response['samples'], response['rssi'])
        t_utc = util.timestamp(ms=True)
        sensor = response['sensor']
        sample = response['samples']
        rssi = response['rssi']
        data = {'t_utc': t_utc, 'type': TYPE, 'sensor': sensor, 'sample': sample, 'rssi': rssi}
        data_sender.queue.put(data)
    except Exception as e:
        log.error(log.exc(e))
def export():
    global notes, filename
    if not len(notes):
        return
    calc_play()
    notes.sort(key=lambda x: x.on)
    fn = "scores/%s_%s.score" % (filename.split('.')[0], util.timestamp())
    if not os.path.isdir("scores"):
        os.mkdir("scores")
    util.save(fn, (notes, ledgers, columns))
    log.info("Saved %s" % fn)
def process_image(path, member=None, t_utc=None):
    # try to get EXIF data
    log.info("process_image %s..." % path)
    data = {}
    if member is not None:
        data['Member'] = member
    if t_utc is not None:
        data['t_utc'] = t_utc
    try:
        image = Image.open(path)
        width, height = image.size
        data['Dimensions'] = width, height
        try:
            exif = {ExifTags.TAGS[k]: v for (k, v) in image._getexif().items() if k in ExifTags.TAGS}
        except Exception as e:
            log.warning("--> no EXIF data in image: %s" % e)
            if 't_utc' not in data:
                log.warning("--> substituting current time for t_utc")
                data['t_utc'] = util.timestamp()
        else:
            # log.debug(json.dumps(exif, indent=4, default=lambda x: str(x)))
            date_field = exif['DateTimeOriginal'] if 'DateTimeOriginal' in exif else exif['DateTime']
            if date_field[4] == ":" and date_field[7] == ":":
                date_field = list(date_field)
                date_field[4] = "-"
                date_field[7] = "-"
                date_field = ''.join(date_field)
            date = util.parse_date(date_field, tz=config['local_tz'])
            data['t_utc'] = util.timestamp(date)  ## careful about this overriding
            data['DateTime'] = util.datestring(data['t_utc'], tz=config['local_tz'])
            data['Make'] = exif['Make'].replace("\u0000", '').strip() if 'Make' in exif else None
            data['Model'] = exif['Model'].replace("\u0000", '').strip() if 'Model' in exif else None
        filename = "%s_%s.jpg" % (data['t_utc'], str(uuid.uuid4()))
        new_path = os.path.join(os.path.dirname(__file__), "..", "static", "data", "images", filename)
        shutil.copy(path, new_path)
        data['Url'] = "/static/data/images/%s" % filename
    except Exception as e:
        log.error(log.exc(e))
        return None
    return data
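# Usage sketch: Member and t_utc are optional, and an EXIF timestamp wins over
# a passed t_utc per the logic above. The file path and member name here are
# hypothetical.
if __name__ == "__main__":
    import json
    result = process_image("test.jpg", member="jer")
    print(json.dumps(result, indent=4, default=str))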
def draw():
    t_now = util.timestamp(ms=True)
    # ctx.line3(0., 0., 0., .5, .5, .5)
    # ctx.line(0., 0., .5, .5)

    # draw session highlighting
    for (start_t, stop_t) in sessions:
        if stop_t is None:
            stop_t = t_now
        if t_now - stop_t > 10.0:
            continue
        # ctx.line((t_now - stop_t) / 10.0, .99, (t_now - start_t) / 10.0, .99, color=(1., 0., 0., .2), thickness=10.0)
        x1 = (t_now - stop_t) / 10.0
        x2 = (t_now - start_t) / 10.0
        ctx.rect(x1, 0.0, x2 - x1, 1.0, color=(1., 0., 0., 0.25))

    # do labels
    for s, (sensor, (t, rssi)) in enumerate(sensor_rssi.copy().items()):
        if t_now - t > 3:
            bar = 0.01
        else:
            bar = 1.0 - (max(abs(rssi) - 25, 0) / 100)
        x = (20 + (s * 20)) / ctx.width
        ctx.line(x, .1, x, (bar * 0.9) + .1, color=colors[sensor], thickness=10)
        if sensor not in labels:
            print("Adding label for sensor %s" % sensor)
            labels.append(sensor)
            ctx.label(x, .05, str(sensor), font="Monaco", size=10, width=10, center=True)

    # data
    for s, sensor in enumerate(list(sensor_data)):
        samples = sensor_data[sensor]
        if len(samples):
            # ctx.lines([((t_now - sample[0]) / 10.0, (sample[1][0] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)], color=(1., 0., 0., 1.))
            # ctx.lines([((t_now - sample[0]) / 10.0, (sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)], color=(0., 1., 0., 1.))
            # ctx.lines([((t_now - sample[0]) / 10.0, (sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)], color=(0., 0., 1., 1.))
            ctx.lines([((t_now - sample[0]) / 10.0, ((sample[1][3] / 2) - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)], color=colors[sensor])  # hack to bring down rms to similar range
def main():
    ## called via tweet_grabber.py
    twitter = Twython(AUTH['app_key'], AUTH['app_secret'], AUTH['oauth_token'], AUTH['oauth_token_secret'])
    twitter.verify_credentials()  ## what does this do if it fails?
    for a, account in enumerate(ACCOUNTS):
        log.info("Checking %s..." % account)
        try:
            timeline = twitter.get_user_timeline(screen_name=account)
        except TwythonError as e:
            log.error(log.exc(e))
            continue
        log.info("--> %s has %s total tweets" % (account, len(timeline)))
        for t, tweet in enumerate(timeline):
            # log.debug(json.dumps(tweet, indent=4, default=lambda x: str(x)))
            log.info("Tweet %s:%s" % (a, t))
            text = tweet.get('text')
            if a == 0 or HASHTAG.lower() in text.lower():
                # the first entry in the accounts is the official account -- all tweets are processed
                try:
                    data = {}
                    dt = datetime.datetime.strptime(tweet.get('created_at'), '%a %b %d %H:%M:%S +0000 %Y')
                    data['t_utc'] = util.timestamp(dt)
                    data['Member'] = MEMBERS[a]
                    data['Handle'] = account
                    data['Text'] = text
                    data['Retweet'] = text[:2] == "RT"
                    data['Url'] = "https://twitter.com/%s/status/%s" % (account, tweet.get('id'))
                    data['TweetID'] = tweet.get('id')
                    data['Images'] = []
                    dup = db.features.find_one({'properties.FeatureType': 'tweet', 'properties.TweetID': data['TweetID']})
                    if dup is not None:
                        log.info("--> skipping duplicate tweet")
                        continue
                    try:
                        for image in tweet['extended_entities']['media']:
                            if image['type'] != "photo":
                                continue
                            data['Images'].append({'Url': image['media_url']})
                            log.info("--> added image %s" % image['media_url'])
                    except KeyError:
                        pass
                    log.info("--> %s (RT: %s): %s" % (account, data['Retweet'], data['Text']))
                    success, value = ingest_data("tweet", data)
                    if not success:
                        log.error("--> failed: %s" % value)
                    else:
                        log.info("--> %s" % value)
                except Exception as e:
                    log.error(log.exc(e))
                    continue
            else:
                log.info("--> skipping unrelated tweet")
def insert(db, data):
    for key in list(data.keys()):  # copy the keys: mutating a dict while iterating it raises RuntimeError
        if type(key) is not str:
            del data[key]
            continue  # skip non-string keys entirely
        fixed_key = strings.slugify(strings.depunctuate(key, "_"))
        if key != fixed_key:
            data[fixed_key] = data[key]
            del data[key]
    if 't_utc' not in data:
        data['t_utc'] = util.timestamp()
    log.info(json.dumps(data, indent=4))
    entry_id = db.entries.insert_one(data).inserted_id
    return entry_id
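# A minimal sketch of the key cleaning above. Assumptions: a pymongo handle,
# and that depunctuate/slugify normalize a key like "Temp (C)" to something
# like "temp_c"; the database name and values are illustrative.
if __name__ == "__main__":
    from pymongo import MongoClient
    db = MongoClient().telemetry  # hypothetical database name
    entry_id = insert(db, {'Temp (C)': 21.5, 'Wind MPH': 3})
    print("--> %s" % entry_id)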
def run(self): while True: t = util.timestamp() log.info("record %s" % t) try: command = "/usr/bin/arecord -D plughw:1,0 -d %s -f S16_LE -c1 -r11025 -t wav %s/%s.wav" % (DURATION, AUDIO_TMP, t) # 10s of mono 11k PCM log.info("%s" % command) subprocess.check_call(command, shell=True) except Exception as e: log.error(log.exc(e)) time.sleep(DURATION) continue log.info("--> wrote audio_tmp/%s.wav" % t) self.out_queue.put(t)
def main():
    ## called via tweet_grabber.py
    for a, account in enumerate(ACCOUNTS):
        log.info("Checking %s..." % account)
        try:
            feed = "https://medium.com/feed/@%s" % account
            data = feedparser.parse(feed)['entries']
        except Exception as e:
            log.error(log.exc(e))
            continue
        for entry in data:
            try:
                entry = {strings.camelcase(key): value for (key, value) in entry.items() if key in ['title', 'link', 'summary', 'published']}
                entry['Member'] = MEMBERS[a]
                entry['t_utc'] = util.timestamp(util.parse_date(entry['Published']))
                if entry['t_utc'] < util.timestamp(util.parse_date(str(config['start_date'][config['expedition']]))) - (3 * 24 * 60 * 60):  ## hack, minus three days to get jer's post
                    log.info("--> skipping too early blog post")
                    continue
                del entry['Published']
                entry['Url'] = entry['Link']
                del entry['Link']
                entry['Summary'] = strings.strip_html(entry['Summary']).replace("Continue reading on Medium \u00bb", "")
                entry['FeatureType'] = "blog"
                dup = db.features.find_one({'properties.FeatureType': 'blog', 'properties.Url': entry['Url']})
                if dup is not None:
                    log.info("--> skipping duplicate blog post")
                    continue
                log.info("--> %s" % entry)
                success, value = ingest_data("tweet", entry)  # FeatureType is already set to "blog" above, so ingest_data keeps it despite the "tweet" argument
                if not success:
                    log.error("--> failed: %s" % value)
                else:
                    log.info("--> %s" % value)
            except Exception as e:
                log.error(log.exc(e))
                continue
def get_timeline(self):
    skip = self.get_argument('skip', 1)
    kinds = self.get_argument('types', "beacon").split(',')
    kinds = [kind.rstrip('s') for kind in kinds if kind.rstrip('s') in ['ambit', 'ambit_geo', 'sighting', 'breadcrumb', 'image', 'audio', 'beacon', 'heart_spike']]  # sanitizes
    try:
        dt = self.get_argument('date', datetime.datetime.now(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%d"))
        log.debug(dt)
        dt = util.parse_date(dt, tz=config['local_tz'])
        days = int(self.get_argument('days', 1))
    except Exception as e:
        return self.error("Bad parameters: %s" % log.exc(e))
    t = util.timestamp(dt)
    log.debug("--> search for kinds: %s" % kinds)
    features = model.fetch_features(kinds, t, t + (days * 24 * 60 * 60), skip)
    feature_collection = geojson.FeatureCollection(features)
    return self.json(feature_collection)
def insert(db, data):
    for key in list(data.keys()):  # copy the keys: mutating a dict while iterating it raises RuntimeError
        if type(key) is not str:
            del data[key]
            continue
        clean_key = clean(key)
        if key != clean_key:
            data[clean_key] = data[key]
            del data[key]
            key = clean_key
        data[key] = strings.as_numeric(data[key])
    if 't_utc' not in data:
        data['t_utc'] = util.timestamp()
    data['date'] = util.datestring(data['t_utc'], tz=config['tz'])
    log.info(json.dumps(data, indent=4))
    entry_id = db.entries.insert_one(data).inserted_id
    return entry_id
def ingest_audio_api(path):
    log.info("ingest_audio %s" % path)
    file_name = path.split('/')[-1]
    file_name = file_name.split('.')[0]
    front = 'mp3'
    if '_' in file_name:
        front = file_name.split('_')[0]
        date_string = file_name.split('_')[1]
    else:
        date_string = file_name
    # dt = datetime.datetime.strptime(path.split('/')[-1], "audio %d%m%Y_%H%M.mp3")
    dt = datetime.datetime.strptime(date_string.split('_')[0], "%d%m%y%H%M%S")
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    # if t <= t_protect:
    #     log.warning("Protected t, skipping...")
    #     return
    """
    fixed_path = path  # .replace(".mp3", ".amr")
    shutil.move(path, fixed_path)
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.wav" % (front, t))
    log.debug("CONVERTING SOUND.")
    try:
        log.debug("--> converting [%s] to [%s]" % (fixed_path, new_path))
        log.debug("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)))
        subprocess.check_call("%s -y -i '%s' '%s'" % (config['ffmpeg'], os.path.abspath(fixed_path), os.path.abspath(new_path)), shell=True)
    except Exception as e:
        log.debug("ERROR.")
        log.error(log.exc(e))
        return
    log.debug("DONE CONVERTING SOUND.")
    """
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.mp3" % (front, t))
    shutil.move(path, new_path)
    coords = model.get_coords_by_time(t)
    feature = geojson.Feature(geometry=coords, properties={'utc_t': t, 'ContentType': "audio", 'url': "/static/data/audio/%s-%s.mp3" % (front, t), 'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")})
    feature_id = model.insert_feature('audio', t, geojson.dumps(feature))
def parse(request): log.info("ambit.parse") sample = ingest_json_body(request) if sample is None: return sample, "Could not parse" data = {} for key, value in sample.items(): if key == "UTC": dt = util.parse_date(value) # these are marked UTC in the data t = util.timestamp(dt) data['t_utc'] = t continue if type(value) != str: continue data[key] = strings.as_numeric(value) return data
def main(): log.info("geo_estimator...") t = util.timestamp() # for data tagged to a Member, find something else that's geotagged with that Member, best case ambit_geo, worst case take the beacon if they are core, otherwise fail ## for non-ambit wearers, this will keep querying, unfortunately log.info("Updating features with a Member...") features = db.features.find({'properties.Expedition': config['expedition'], 'properties.EstimatedGeometry': {'$exists': True, '$ne': 'ambit_geo'}, 'properties.Member': {'$ne': None}}) for feature in features: try: # if t - feature['properties']['t_utc'] > 60 * 60 * 48: ## after 48 hours, don't worry about it # continue log.info("Updating geometry for %s %s (currently from %s)..." % (feature['properties']['FeatureType'], feature['_id'], feature['properties']['EstimatedGeometry'])) feature = estimate_geometry(feature, db) db.features.update({"_id" : feature['_id']}, feature) except Exception as e: log.error(log.exc(e))
def run(self):
    while True:
        try:
            message, address = self.receiver.recvfrom(1024)
            data = message.decode('utf-8').split(',')
            data = {
                'id': str(data[0]),
                'rssi': int(data[1]),
                'ip': address,
                't_utc': util.timestamp(ms=True),
                'data': [float(v) for v in data[2:]]
            }
            self.data.put(data)
            if data['id'] not in self.events:
                self.events[data['id']] = queue.Queue()
            self.events[data['id']].put(1)
        except Exception as e:
            log.error(log.exc(e))
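# A minimal sender sketch for the packet format this receiver parses: a
# comma-separated "id,rssi,v0,v1,..." UTF-8 string over UDP. The host and port
# are assumptions; the listening port isn't shown in this snippet.
import socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(b"sensor1,-42,0.01,0.98,0.03", ("127.0.0.1", 5280))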
#!/usr/bin/env python3

import requests, json, time, os
from housepy import config, log, util, process

process.secure_pid(os.path.abspath(os.path.join(os.path.dirname(__file__), "run")))

while True:

    # current conditions
    try:
        url = "http://api.wunderground.com/api/%s/conditions/q/MA/North_Adams.json" % config['wunderground']
        response = requests.get(url)
        data = response.json()['current_observation']
        # log.info(json.dumps(data, indent=4))
        entry = {'t_utc': util.timestamp(), 'type': 'weather', 'temp_f': data['temp_f'], 'wind_mph': data['wind_mph']}
        log.info(json.dumps(entry, indent=4))
        response = requests.post(config['server'], json=entry)
        log.info(response)
    except Exception as e:
        log.error(log.exc(e))

    time.sleep(60)

    # # hourly
    # url = "http://api.wunderground.com/api/%s/hourly/q/MA/North_Adams.json" % config['wunderground']
    # response = requests.get(url)
    # data = response.json()
def draw():
    t_now = util.timestamp(ms=True)
    # ctx.translate(0, 0, -1)
    # ctx.translate(-1.5, -0.5, -1)
    # ctx.translate(-1.3, -0.85, -2)
    ctx.translate(-1., -0.85, -1.5)
    ctx.rotate(*rotation_x)
    ctx.rotate(*rotation_y)

    # axes
    # ctx.line3D(-.25, 0, 0, .25, 0, 0, color=(1., 1., 0., 1.))
    # ctx.line3D(0, -.25, 0, 0, .25, 0, color=(0., 1., 1., 1.))
    # ctx.line3D(0, 0, -.25, 0, 0, .25, color=(1., 0., 1., 1.))

    # for (start_t, stop_t) in sessions:
    #     if stop_t is None:
    #         stop_t = t_now
    #     if t_now - stop_t > 10.0:
    #         continue
    #     # ctx.line((t_now - stop_t) / 10.0, .99, (t_now - start_t) / 10.0, .99, color=(1., 0., 0., .2), thickness=10.0)
    #     x1 = (t_now - stop_t) / 10.0
    #     x2 = (t_now - start_t) / 10.0
    #     ctx.rect(x1, 0.0, x2 - x1, 1.0, color=(1., 0., 0., 0.25))

    # for s, (sensor, (t, rssi)) in enumerate(sensor_rssi.items()):
    #     if t_now - t > 3:
    #         bar = 0.01
    #     else:
    #         bar = 1.0 - (max(abs(rssi) - 25, 0) / 100)
    #     x = (20 + (s * 20)) / ctx.width
    #     ctx.line(x, .1, x, (bar * 0.9) + .1, color=(0., 0., 0., 0.5), thickness=10)
    #     if sensor not in labels:
    #         print("Adding label for sensor %s" % sensor)
    #         labels.append(sensor)
    #         ctx.label(x, .05, str(sensor), font="Monaco", size=10, width=10, center=True)

    colors = (1., 1., 1., 1.), (.7, 1., 1., 1.), (1., .7, .7, 1.),
    for s, sensor in enumerate(list(sensor_data)):
        samples = sensor_data[sensor]
        if len(samples):
            # x = [((t_now - sample[0]) / 10.0, (sample[1][0] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)]
            # y = [((t_now - sample[0]) / 10.0, (sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)]
            # z = [((t_now - sample[0]) / 10.0, (sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0])) for sample in list(samples)]
            ts = [(t_now - sample[0]) / 10.0 for sample in samples]
            ys = [(sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0]) for sample in samples]
            zs = [(sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0]) - 0.5 for sample in samples]
            # ys = list(sp.smooth(sp.remove_shots(ys)))
            # zs = list(sp.smooth(sp.remove_shots(zs)))
            # ys = list(sp.remove_shots(ys))
            # zs = list(sp.remove_shots(zs))
            ys = (np.array(ys) * 2.0) - 0.5
            zs = (np.array(zs) * 2.0) - 0.5
            ys = sp.smooth(ys, 20)
            zs = sp.smooth(zs, 20)
            # combo_yz = [((t_now - sample[0]) / 10.0, (sample[1][2] - RANGE[0]) / (RANGE[1] - RANGE[0]), ((sample[1][1] - RANGE[0]) / (RANGE[1] - RANGE[0])) - 0.5) for sample in list(samples)]
            combo_yz = [(ts[i], ys[i], zs[i]) for i in range(len(ys))]
            ctx.lines3D(combo_yz, color=colors[s], thickness=2.0)
            signal = sp.normalize(signal)
            signal = sp.smooth(signal, 15)
            signals.append(signal)
            # color = colors[i]
            color = i / len(labels), .8, .8, 1.
            ctx.plot(signal, stroke=color, thickness=2)
            ctx.line(10 / ctx.width, 1 - ((10 + (i * 10)) / ctx.height), 30 / ctx.width, 1 - ((10 + (i * 10)) / ctx.height), stroke=color, thickness=2)
            ctx.label(35 / ctx.width, 1 - ((13 + (i * 10)) / ctx.height), label.upper(), size=8)
        except KeyError as e:
            log.error(log.exc(e))
            log.error(values)

ctx.output("charts/")

points = []
for i in range(len(signals[0])):
    point = [signal[i] for signal in signals]
    points.append(point)
points = np.array(points)

util.save("data/%s.pkl" % util.timestamp(), (points, rates))
util.save("data/last.pkl", (points, rates))
def get_recent(db):
    t = util.timestamp()
    db.execute("SELECT * FROM clips WHERE t>=? AND posted=0", (t - config['lag'],))
    clips = [dict(clip) for clip in db.fetchall()]
    return clips
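# Usage sketch. Assumptions: db is a sqlite3 cursor whose connection uses
# sqlite3.Row as row_factory (which is what dict(clip) above implies), and
# config['lag'] is a window in seconds; the filename is illustrative.
if __name__ == "__main__":
    import sqlite3
    connection = sqlite3.connect("clips.db")  # hypothetical database file
    connection.row_factory = sqlite3.Row
    print(get_recent(connection.cursor()))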
def main(session_id):
    result = db.branches.find({'session': session_id}).sort([('t', ASCENDING)])
    if not result.count():
        print("NO DATA!")
        exit()
    log.info("Start processing...")
    result = list(result)
    ts = [r['t'] for r in result]
    rms = [r['sample'][3] for r in result]
    duration = ts[-1] - ts[0]
    SAMPLING_RATE = 60  # hz
    log.info("DURATION %fs" % duration)
    signal = sp.resample(ts, rms, duration * SAMPLING_RATE)
    signal = sp.remove_shots(signal)
    signal = sp.normalize(signal)
    signal = sp.smooth(signal, 15)

    # this number should match some lower frequency bound. ie, put this in hz.
    # the smaller the number, the more it will affect small motion
    # so this should be higher than the slowest motion we care about
    # ie, dont care about motion over 0.5hz, which is 120 samples
    trend = sp.smooth(signal, 120)
    signal -= trend
    signal += 0.5

    atrend = sp.smooth(signal, 500)

    ## autocorrelation

    auto = sp.autocorrelate(signal)
    # this should be small -- if 60hz, fastest gesture would reasonably be half of that, so 30
    peaks, valleys = sp.detect_peaks(auto, 10)
    peaks = [peak for peak in peaks[1:] if peak[1] > 0.5]
    partials = []
    for peak in peaks:
        frequency = SAMPLING_RATE / peak[0]
        partial = frequency * 1000
        partials.append([partial, float(peak[1])])
        log.info("%d samps\t%fhz\t%f magnitude\t%f map" % (peak[0], frequency, peak[1], partial))
    log.info(partials)

    ctx = drawing.Context(2000, 750)
    ctx.plot(auto, stroke=(0.0, 0.0, 0.0, 1.0), thickness=2.0)
    for peak in peaks:
        x = peak[0] / len(auto)
        ctx.line(x, 0.0, x, peak[1], stroke=(1.0, 0.0, 0.0, 1.0))
    ctx.output("graphs")

    ## audio

    audio_signal = sp.make_audio(signal)
    spectrum(audio_signal, SAMPLING_RATE)
    AUDIO_RATE = 11025
    filename = "%s.wav" % util.timestamp()
    sound.write_audio(audio_signal, filename, AUDIO_RATE)
    subprocess.call(["open", filename])
    log.info("AUDIO DURATION %fs" % (duration / (AUDIO_RATE / SAMPLING_RATE)))

    ctx = drawing.Context(2000, 750)
    ctx.plot(signal, stroke=(0.0, 0.0, 0.0, 1.0), thickness=2.0)
    ctx.plot(trend, stroke=(1.0, 0.0, 0.0, 1.0), thickness=2.0)
    ctx.plot(atrend, stroke=(0.0, 0.0, 1.0, 1.0), thickness=2.0)
    ctx.output("graphs")

    log.info("--> done")  # around 300ms
def generate():
    # load data into t and count arrays per species
    species = OrderedDict()
    start_t = util.timestamp(util.parse_date(str(config['start'])))
    end_t = util.timestamp(util.parse_date(str(config['end'])))
    max_count = 0
    with open("data.csv") as f:
        data = csv.reader(f)
        for r, row in enumerate(data):
            if r == 0:
                continue
            plot = row[1]
            name = row[2]
            if len(config['species_list']) and name not in config['species_list']:
                continue
            dt = datetime.datetime(int(row[3]), 1, 1) + datetime.timedelta(int(row[4]) - 1)
            t = util.timestamp(dt)
            if t < start_t or t > end_t:
                continue
            count = 0 if row[5] == "NA" else int(row[5])
            if count > max_count:
                max_count = count
            if name not in species:
                species[name] = {'ts': [start_t, t - 1], 'counts': [0, 0]}
            species[name]['ts'].append(t)
            species[name]['counts'].append(count)
    species = OrderedDict(sorted(species.items()))
    print("--> loaded")

    # add a zero count at the start and end of every year
    yts = [util.timestamp(datetime.datetime(y, 1, 1)) for y in range(1974, 2017)]
    for name in species:
        ts = species[name]['ts']
        for yt in yts:
            i = 0
            while i < len(ts) and ts[i] < yt:
                i += 1
            if i > 0:
                end_season_t = ts[i - 1]
                if i < len(ts):
                    start_season_t = ts[i]
                    ts.insert(i, start_season_t - config['tail'])
                    species[name]['counts'].insert(i, 0)
                ts.insert(i, end_season_t + config['tail'])
                species[name]['counts'].insert(i, 0)
        species[name]['ts'].append(end_t)
        species[name]['counts'].append(0)
    print("--> onsets added")

    # create and draw signals
    signals = []
    names = []
    i = 0
    for name, data in species.items():
        print("Processing %s..." % name)

        # create signal from bloom counts
        signal = sp.resample(data['ts'], data['counts'])
        if config['normalize']:
            signal = sp.normalize(signal)
        else:
            signal = sp.normalize(signal, 0, max_count)
        signal = sp.smooth(signal, size=8)
        signal = sp.limit(signal, max(signal))  # get rid of noise below 0 for onset detection

        # add spikes for peaks
        if config['peak_spikes']:
            peaks, valleys = sp.detect_peaks(signal, lookahead=50)
            peak_signal = np.zeros(len(signal))
            for peak in peaks:
                peak_signal[peak[0]] = 1.0
            signal += peak_signal

        # add spikes for onsets
        if config['onset_spikes']:
            onsets = sp.detect_onsets(signal)
            onset_signal = np.zeros(len(signal))
            for onset in onsets:
                onset_signal[onset] = 0.5
                onset_signal[onset + 1] = 0.4
                onset_signal[onset + 2] = 0.25
            signal += onset_signal

        # limit
        signal = sp.limit(signal, 1.0)
        signal *= 0.9  # hack, just controlling gain

        signals.append(signal)
        names.append(name)
        i += 1
    return signals, names