Exemplo n.º 1
0
 def run(self):
     """Continuously read XBee frames, normalize their fields, and dispatch them.

     Runs forever; each frame becomes a plain dict handed to self.message_handler.
     Errors are logged and the loop keeps going.
     """
     while True:
         try:
             frame = self.xbee.wait_read_frame()
             if self.verbose:
                 log.debug(frame)
             message = {}
             if 'source_addr' in frame:
                 message['sensor'] = int(frame['source_addr'][1])
             if 'frame_id' in frame:
                 message['frame'] = str(frame['frame_id'], 'ascii')
             if 'parameter' in frame:
                 message['parameter'] = int.from_bytes(frame['parameter'], 'little')
             if 'rssi' in frame:
                 message['rssi'] = int.from_bytes(frame['rssi'], 'little')
             if 'samples' in frame:
                 # one row per sample set, values ordered by channel name
                 message['samples'] = []
                 for sample_set in frame['samples']:
                     ordered = sorted(sample_set.items(), key=lambda item: item[0])
                     message['samples'].append([value for _, value in ordered])
                 if len(message['samples']) == 1:
                     # single set: unwrap to a flat list
                     message['samples'] = message['samples'][0]
             if self.message_handler is not None:
                 self.message_handler(message)
         except Exception as e:
             log.error(log.exc(e))
Exemplo n.º 2
0
def parse(request):
    """Parse an Ambit geo JSON sample from the request body.

    Converts UTC to a t_utc timestamp, Longitude/Latitude from radians to
    degrees, GPSAltitude to float, and keeps every other string value as a
    numeric when possible. Returns the data dict, or (None, message) when
    the body cannot be parsed.
    """
    log.info("ambit_geo.parse")
    sample = ingest_json_body(request)
    if sample is None:
        return sample, "Could not parse"

    data = {}
    for key, value in sample.items():
        if key == "UTC":
            dt = util.parse_date(sample['UTC'])  # these are marked UTC in the data
            data['t_utc'] = util.timestamp(dt)
        elif key == "Longitude":
            data['longitude'] = math.degrees(float(sample['Longitude']))
        elif key == "Latitude":
            data['latitude'] = math.degrees(float(sample['Latitude']))
        elif key == "GPSAltitude":
            data['altitude'] = float(sample['GPSAltitude'])
        elif type(value) == str:
            # only string values survive; everything else is dropped
            data[key] = strings.as_numeric(value)

    try:
        log.debug("%s %s %s" % (data['longitude'], data['latitude'], data['altitude']))
    except KeyError:
        # narrow the original bare except: only missing keys mean missing geo
        log.error("MISSING GEO")

    return data
Exemplo n.º 3
0
def ingest_json_api(path):
    """Load a JSON record from *path* and store it as a categorized GeoJSON feature.

    Category is inferred from keys: 'Exhaustion' -> ethnographic,
    'Hardness' -> hydro, anything else -> sighting.
    """
    log.info("ingest_json_api %s" % path)

    # with-block guarantees the handle closes even if parsing raises
    # (the original left the file open on any exception)
    with open(path) as f:
        data = json.loads(f.read())

    t = data['t_utc']
    # the original bound these to swapped names (lat <- Longitude); the
    # coordinate values below are unchanged, just correctly labeled now
    lon = data['Longitude']
    lat = data['Latitude']

    coords = [float(lon), float(lat), 0]  # GeoJSON order: [lon, lat, alt]
    log.debug(data)

    feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coords}, properties=data)

    if 'Exhaustion' in data:
        feature_id = model.insert_feature('ethnographic', t, geojson.dumps(feature))
        log.info("ingest_json_api ETHNO")
    elif 'Hardness' in data:
        feature_id = model.insert_feature('hydro', t, geojson.dumps(feature))
        log.info("ingest_json_api HYDRO")
    else:
        feature_id = model.insert_feature('sighting', t, geojson.dumps(feature))
        log.info("ingest_json_api SIGHTING")
Exemplo n.º 4
0
 def on_data(self, data):
     """Handle one raw tweet payload from the stream.

     Logs the tweet, pushes a tick onto the queue, and always returns True
     so the stream stays connected even on bad payloads.
     """
     try:
         tweet = json.loads(data)
         log.debug("@%s: %s" % (tweet['user']['screen_name'], tweet['text']))
         self.queue.put(1)
     except Exception as e:
         log.error(log.exc(e))
     return True
Exemplo n.º 5
0
def find_future(user_id, host):
    """Predict where the user currently on *host* will go next.

    Returns whatever Model.find_next produces, or None when the model
    cannot be built or is too small to be predictive.
    """
    if Model.build(user_id, host) is None:
        return None
    if __name__ == "__main__":
        # debug dump when run as a script; use a distinct loop variable so
        # the 'host' argument is not clobbered before Model.get(host) below
        # (the original reused 'host' here, corrupting the prediction)
        for site_host, site in Model.sites.items():
            log.debug(site)
        log.debug("Novelty: %f" % Model.calc_novelty())
    if len(Model.sites) < max(2, MIN_MODEL_SIZE):                                # the model is too small
        return None
    future = Model.get(host).find_next()
    return future
Exemplo n.º 6
0
def fetch_walks(db, hidden=False, desc=False):
    """Return all walks as dicts ordered by start_time.

    hidden=True includes hidden walks; desc=True reverses the ordering.
    Returns [] if the query fails.
    """
    where = "" if hidden else "WHERE hidden=0"
    order = "DESC" if desc else ""
    try:
        log.debug(hidden)
        query = "SELECT * FROM walks %s ORDER BY start_time %s" % (where, order)
        log.debug(query)
        db.execute(query)
        walks = [dict(row) for row in db.fetchall()]
    except Exception as e:
        log.error(log.exc(e))
        walks = []
    return walks
Exemplo n.º 7
0
 def geocode(self):
     """Reverse-geocode self.lat/self.lon via Google and set self.address.

     Trims the ", NY <zip>" tail and ", New York" suffix from the result.
     Failures are logged; self.address is left untouched on error.
     """
     result = None  # keep defined so the except block can reference it safely
     try:
         url = "https://maps.googleapis.com/maps/api/geocode/json?latlng=%s,%s" % (
             self.lat, self.lon)
         result = requests.get(url).json()
         self.address = result['results'][0]['formatted_address']
         self.address = self.address.split(", NY ")[0].replace(
             ", New York", "")
     except Exception as e:
         log.error(log.exc(e))
         # the original referenced 'result' unconditionally here, raising a
         # NameError whenever requests.get itself failed
         if result is not None:
             log.debug(json.dumps(result, indent=4))
Exemplo n.º 8
0
def generate():
    """Sample a synthetic day of PERIODS categorical outputs from the model.

    Seeds with a random training sequence, then autoregressively samples one
    category per period at the configured temperature.
    """
    seed = random.choice(range(len(X)))
    x = X[seed]
    day = []
    for _ in range(PERIODS):
        # predict a distribution from the trailing MEMORY steps, then sample it
        distribution = model.predict(np.array([x[-MEMORY:]]), verbose=0, batch_size=1)[0]
        y = sample(distribution, config['temperature'])
        x = np.append(x, to_categorical(y, CATEGORIES), axis=0)
        day.append(y)
    log.debug(day)
    return day
Exemplo n.º 9
0
 def get(self, collar_id=None, session_id=None, start_t=None, stop_t=None):
     """Render the session index, or the entries for one collar/session window.

     All parameters arrive as strings from the route and may be None or empty.
     Without a collar and session, renders an index of available sessions;
     otherwise renders the matching entries between start_t and stop_t.
     """
     log.info("GET")
     session_list = []
     # no collar or session requested -> index page. Truthiness also covers
     # the None defaults; the original called len(None) and raised TypeError.
     if not collar_id or not session_id:
         collar_ids = list(self.db.entries.find().distinct("collar_id"))
         for collar_id in collar_ids:
             sessions = list(
                 self.db.entries.find({
                     'collar_id': collar_id
                 }).distinct("session"))
             session_list.append({
                 'collar_id': collar_id,
                 'sessions': sessions
             })
         return self.render("index.html", session_list=session_list)
     # default to a full day (in ms) when no explicit window is given
     start_t = int(start_t) if start_t else 0
     stop_t = int(stop_t) if stop_t else 86400000
     collar_id = strings.as_numeric(collar_id)
     session_id = strings.as_numeric(session_id)
     log.info("%d (%s-%s)" % (collar_id, start_t, stop_t))
     template = {
         't': {
             '$gt': start_t,
             '$lt': stop_t
         },
         'collar_id': collar_id,
         'session': session_id
     }
     log.debug(template)
     results = list(self.db.entries.find(template).sort('t'))
     start_segment = None
     stop_segment = None
     if len(results):
         start_segment = timeutil.seconds_to_string(results[0]['t'])
         stop_segment = timeutil.seconds_to_string(results[-1]['t'])
     for result in results:
         # strip Mongo-internal / redundant fields before templating
         del result['_id']
         del result['session']
     log.debug("Returned %s entries" % len(results))
     return self.render("home.html",
                        data=results,
                        collar_id=collar_id,
                        session=session_id,
                        start_segment=start_segment,
                        stop_segment=stop_segment)
Exemplo n.º 10
0
def fetch_features(db, kinds, start_t, stop_t, skip=1):
    """Fetch features of the given kinds in [start_t, stop_t), keeping every skip-th row.

    Returns a list of geojson features with .id set to the source rowid.
    """
    # parameterize the kind values instead of interpolating them into the SQL
    # string (the original was injectable through 'kinds')
    kindq = " OR kind=?" * len(kinds)
    query = "SELECT rowid, data FROM features WHERE rowid %% ? = 0 AND (1=0%s) AND t>=? AND t<? ORDER BY t" % kindq
    log.debug(query)
    db.execute(query, (skip, *kinds, start_t, stop_t))
    features = []
    # this is slow
    for row in db.fetchall():
        feature = geojson.loads(row['data'])
        feature.id = row['rowid']
        features.append(feature)
    return features
Exemplo n.º 11
0
def temporal_filter(features, resolution):
    """Downsample features to at most one per *resolution*-second bin.

    Bins start at midnight UTC of the first feature's day. Assumes features
    are ordered by t_utc. Returns the filtered list, or None on error.
    """
    try:

        log.info("--> starting temporal_filter")
        first_t = features[0]['properties']['t_utc']
        midnight = datetime.datetime.utcfromtimestamp(first_t)
        midnight = midnight.replace(hour=0, minute=0, second=0, microsecond=0)
        start_t = util.timestamp(midnight)
        log.debug("start_date %s" % util.datestring(start_t))
        log.debug("stop_date %s" % util.datestring(features[-1]['properties']['t_utc']))
        log.debug("start_t %s" % start_t)
        log.debug("step %s" % resolution)

        results = []
        bin_t = start_t
        i = 0
        total = len(features)
        while True:
            # advance to the first feature at or after the current bin start
            while i < total and features[i]['properties']['t_utc'] < bin_t:
                i += 1
            if i == total:
                break
            # keep the feature only if it falls inside this bin
            if features[i]['properties']['t_utc'] <= bin_t + resolution:
                results.append(features[i])
            bin_t += resolution

        log.info("--> done temporal_filter")
        return results
    except Exception as e:
        log.error(log.exc(e))
Exemplo n.º 12
0
def ingest_audio(path, i, t_protect):
    """Convert an uploaded audio file to wav and insert it as an 'audio' feature.

    The timestamp is parsed from the filename ("audio DDMMYYYY_HHMM.mp3"),
    localized, and converted to UTC. t_protect is currently unused (the
    protection check below is disabled).
    """
    log.info("ingest_audio %s" % path)
    dt = datetime.datetime.strptime(path.split('/')[-1], "audio %d%m%Y_%H%M.mp3")
    tz = pytz.timezone(config['local_tz'])
    dt = tz.localize(dt)
    t = util.timestamp(dt)
    # if t <= t_protect:
    #     log.warning("Protected t, skipping...")
    #     return
    # the uploads are actually AMR data mislabeled as mp3
    fixed_path = path.replace(".mp3", ".amr")
    shutil.move(path, fixed_path)
    new_path = os.path.join(os.path.dirname(__file__), "static", "data", "audio", "%s-%s.wav" % (t, i))

    log.debug("CONVERTING SOUND.")
    try:
        log.debug("--> converting [%s] to [%s]" % (fixed_path, new_path))
        # argument list + shell=False avoids shell injection through the file
        # path (the original interpolated paths into a shell=True string)
        subprocess.check_call([config['ffmpeg'], "-y", "-i", os.path.abspath(fixed_path), os.path.abspath(new_path)])
    except Exception as e:
        log.error(log.exc(e))
        return

    log.debug("DONE CONVERTING SOUND.")
    feature = geojson.Feature(properties={'utc_t': t, 'ContentType': "audio", 'url': "/static/data/audio/%s-%s.wav" % (t, i), 'DateTime': dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")})
    feature_id = model.insert_feature('audio', t, geojson.dumps(feature))
Exemplo n.º 13
0
def parse(request):
    """Parse a Twilio SMS webhook post into a sensor-reading dict.

    The SMS body is a CSV payload: a local-time timestamp first, then
    alternating key,value pairs. Returns the reading dict with 't_utc',
    'FeatureType' and 'CoreExpedition' set, or None if the post is malformed.
    """
    log.info("sensor.parse")
    try:
        message = ingest_plain_body(request)
        data = net.urldecode(message)    
        log.debug(data)
        number = data['From']
        body = data['Body']
        content = strings.singlespace(body.strip())

        # 1436477098,Lat,-18.111946,Long,21.665733,TDS,4.000,Sal,0.000,WatTemp,16.88,AirTemp,18.50,Hum,48.90
        tokens = content.split(',')
        # first token is a local timestamp; convert to UTC
        t = util.delocalize_timestamp(strings.as_numeric(tokens[0]), config['local_tz'])        
        data = {'t_utc': t, 'FeatureType': "sensor", 'CoreExpedition': False}
        # remaining tokens alternate key,value
        tokens = tokens[1:]
        keys = tokens[0::2]
        values = tokens[1::2]
        for k, key in enumerate(keys):
            data[strings.camelcase(key)] = strings.as_numeric(values[k])


        # tokens = content.split(" ")

        # if tokens[0] == 'location':
        #     lon, lat = tokens[3], tokens[2]   ## note reversal                        
        #     # insert a record for this location update
        #     data = {'t_utc': util.timestamp(), 'SensorName': tokens[1], 'longitude': lon, 'latitude': lat, 'LocationUpdate': True}

        #     # send a verification
        #     send_sms(number, tokens[1], lon, lat)

        # else:
        #     # find the most recent location update for this sensor
        #     db = Application.instance.db            
        #     results = list(db.features.find({'properties.FeatureType': "sensor", 'properties.SensorName': tokens[1], 'properties.LocationUpdate': True}).sort([('properties.t_utc', DESCENDING)]).limit(1))
        #     georef = results[0] if len(results) else None
        #     geometry = georef['geometry'] if georef is not None else None
        #     log.debug(geometry)
        #     if tokens[0] == "*":
        #         t = util.timestamp()
        #     else:                    
        #         t = util.delocalize_timestamp(tokens[0], config['local_tz']) # converts from local time timestamp to t_utc
        #     data = {'t_utc': t, 'SensorName': tokens[1], strings.camelcase(tokens[2]): tokens[3], 'LocationUpdate': False, 'geometry': geometry}

    except Exception as e:
        log.error("Twilio post is malformed: %s" % log.exc(e))
        return None
    return data
Exemplo n.º 14
0
 def get_timeline(self):
     """Return a GeoJSON FeatureCollection of features for a date window.

     Query args: types (comma-separated kinds), date (YYYY-MM-DD in local
     tz, default today), days (window length, default 1), skip (keep every
     skip-th row, default 1).
     """
     kinds = self.get_argument('types', "beacon").split(',')
     # sanitize: singularize and whitelist ('breadcrumb' was listed twice)
     allowed = ['ambit', 'ambit_geo', 'sighting', 'breadcrumb', 'image', 'audio', 'beacon', 'heart_spike']
     kinds = [kind.rstrip('s') for kind in kinds if kind.rstrip('s') in allowed]
     try:
         # arguments arrive as strings when supplied; cast skip so the
         # downstream modulo arithmetic works (the original passed it raw)
         skip = int(self.get_argument('skip', 1))
         dt = self.get_argument('date', datetime.datetime.now(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%d"))
         log.debug(dt)
         dt = util.parse_date(dt, tz=config['local_tz'])
         days = int(self.get_argument('days', 1))
     except Exception as e:
         return self.error("Bad parameters: %s" % log.exc(e))
     t = util.timestamp(dt)
     log.debug("--> search for kinds: %s" % kinds)
     features = model.fetch_features(kinds, t, t + (days * (24 * 60 * 60)), skip)
     feature_collection = geojson.FeatureCollection(features)
     return self.json(feature_collection)
Exemplo n.º 15
0
 def on_close(self):
     """Tear down a closed socket: drop its user and un-entangle any partner."""
     log.info("//////////// WebSocket.on_close")
     # find the user id registered for this socket (last match wins)
     matches = [uid for uid, sock in WebSocket.sockets.items() if sock == self]
     user_id = matches[-1] if matches else None
     log.info("--> closing user_id %s" % user_id)
     if user_id is None:
         log.warning("socket for %s not found" % user_id)
         return
     if user_id in WebSocket.users:
         del WebSocket.users[user_id]
     # anyone entangled with this user gets released back to the pool
     for uid, partner_id in WebSocket.users.items():
         if partner_id == user_id:
             WebSocket.send(uid, "unentangled")
             WebSocket.users[uid] = None
     log.debug("--> users %s" % WebSocket.users)
     log.info("--> complete")
Exemplo n.º 16
0
def retrieve(db, source, start, end, filters, page=None):
    """Query entries for one or more comma-separated sources in a time window.

    start/end are date strings, or "*" for open-ended. Returns
    (results, start_t, end_t, count, page) with 100 results per page;
    page defaults to the last (most recent) page.
    """
    if filters is None:  # identity check; '== None' is un-idiomatic and can misfire on odd __eq__
        filters = {}
    sources = [clean(source) for source in source.split(",")]
    start_t = 0 if start == "*" else util.timestamp(util.parse_date(start, tz=config['tz']))
    end_t = min(2147483647, sys.maxsize) if end == "*" else util.timestamp(util.parse_date(end, tz=config['tz']))
    template = {'t_utc': {'$gt': start_t, '$lt': end_t}, '$or': [{'source': source} for source in sources]}
    template.update(filters)
    log.info("QUERY %s" % template)
    results = db.entries.find(template).sort('t_utc')
    count = results.count()
    if page is None:
        page = (count // 100) + 1   # default to the most recent page
    skip = (page - 1) * 100
    log.debug("page %s, skip %s" % (page, skip))
    results = results.skip(skip).limit(100)
    log.info("--> done")
    return list(results), start_t, end_t, count, page
Exemplo n.º 17
0
 def open(self):
     """Register a new connection and entangle it with a waiting partner if any."""
     log.info("//////////// WebSocket.open")
     user_id = strings.random_string(10)
     WebSocket.sockets[user_id] = self
     log.info("--> new user_id %s" % user_id)
     # users with no partner yet are available for entanglement
     available_users = [uid for uid in WebSocket.users.keys() if WebSocket.users[uid] is None]
     log.info("--> available users: %s" % available_users)
     if not available_users:
         WebSocket.users[user_id] = None
         log.debug("--> no partner to entangle")
     else:
         partner_id = random.choice(available_users)
         WebSocket.users[partner_id] = user_id
         WebSocket.users[user_id] = partner_id
         log.info("--> entangled %s with %s" % (user_id, partner_id))
         WebSocket.send(user_id, "entangled")
         WebSocket.send(partner_id, "entangled")
     log.debug("--> users %s" % WebSocket.users)
     # tell the client its own id
     WebSocket.send(user_id, user_id)
Exemplo n.º 18
0
def ingest_beacon(content):
    """Parse a satellite beacon message body into a GeoJSON Point feature and store it.

    content: plain text with "Position Time:", "Map:", "Altitude:", "Speed:"
    and "Heading:" lines. Per-line failures are logged and skipped so one bad
    line does not lose the rest of the message.
    """
    log.info("ingest_beacon")
    t_protect = model.get_protect('beacon')    
    properties = {}
    coordinates = [None, None, None]  # GeoJSON order: [lon, lat, altitude]
    t = None
    try:
        lines = content.split('\n')
        for line in lines:
            log.debug("%s" % line)
            try:
                if "Position Time:" in line:
                    line = line.replace("Position Time:", "").strip()
                    dt = util.parse_date(line)
                    t = util.timestamp(dt)
                    properties['DateTime'] = dt.astimezone(pytz.timezone(config['local_tz'])).strftime("%Y-%m-%dT%H:%M:%S%z")
                    properties['t_utc'] = t
                if "Map:" in line:
                    # the map URL query string carries "lat,lon" in its q parameter
                    line = line.split('?')[1].strip()
                    result = net.urldecode(line)
                    lat, lon = result['q'].split(' ')[0].split(',')
                    coordinates[0], coordinates[1] = strings.as_numeric(lon), strings.as_numeric(lat)
                if "Altitude:" in line:
                    altitude = strings.as_numeric(line.replace("Altitude:", "").replace("meters", "").strip())
                    coordinates[2] = altitude
                if "Speed:" in line:
                    speed = strings.as_numeric(line.replace("Speed:", "").replace("Knots", "").strip())
                    properties['Speed'] = speed
                if "Heading:" in line:
                    heading = strings.as_numeric(line.replace("Heading:", "").replace("°", "").strip())
                    properties['Heading'] = heading
            except Exception as e:
                log.error(log.exc(e))
                continue
        # protection against re-ingesting old data is currently disabled
        # if t <= t_protect:
        #     log.warning("Protected t, skipping...")
        #     return                                
        feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
        feature_id = model.insert_feature('beacon', t, geojson.dumps(feature))
    except Exception as e:
        log.error(log.exc(e))
Exemplo n.º 19
0
def format_csv(data):
    """Flatten a feature list into a CSV string (one column per property).

    Properties, Taxonomy and geometry coordinates are merged into each
    feature; the union of keys becomes the sorted header. Commas inside
    values are stripped so the output stays a valid single-delimiter CSV.
    """
    features = data[0]['features']

    # build header: flatten properties/Taxonomy/geometry into each feature dict
    header = []
    for feature in features:
        feature.update(feature['properties'])
        if 'Taxonomy' in feature and feature['Taxonomy'] is not None:
            feature.update(feature['Taxonomy'])
            del feature['Taxonomy']
        if feature['geometry'] is not None:
            feature.update({"Longitude": feature['geometry']['coordinates'][0], "Latitude": feature['geometry']['coordinates'][1]})
        del feature['properties']
        del feature['geometry']
        for key in feature:
            if key not in header:
                header.append(key)
    header.sort()
    log.debug(header)

    # populate rows (the original also opened data.csv for writing but never
    # wrote to it, leaving an empty file behind -- dropped)
    rows = [','.join(header)]
    for feature in features:
        row = []
        for column in header:
            if column in feature:
                value = feature[column]
                if type(value) == str:
                    value = strings.singlespace(value)
                    # str.replace returns a new string; the original discarded it
                    value = value.replace('"', "'")
                row.append(str(value).replace(",", ""))
            else:
                row.append("None")
        rows.append(','.join(row))
    return '\n'.join(rows)

    # print(json.dumps(features, indent=4, default=lambda x: str(x)))
Exemplo n.º 20
0
def main():
    """Generate the signals JSON database for the current time window.

    Writes signals/<t>_<DURATION>.json via CrashDB, processing each reading
    and event type; optionally draws them into a PNG when config['draw'] is
    set. Mutates the module globals t, db and ctx. Returns the json filename.
    """
    global t, db, ctx

    t = int(time.time())
    # t = int(time.mktime(util.parse_date('2013-07-10 19:00:00').timetuple()))

    filename = "signals/%s_%s.json" % (t, DURATION)
    log.info("Generating %s..." % filename)
    db = CrashDB(filename)

    if config['draw']:
        from housepy import drawing
        ctx = drawing.Context(width=2000, height=500, background=(0., 0., 1.), hsv=True, flip=True, relative=True)

    # each signal gets its own color (hsv tuples)
    process_readings('heat', (0., 1., 1.))   # red
    # # process_readings('rain', (.1, 1., 1.))    # orange
    process_readings('humidity', (.1, 1., 1.))    # orange
    process_readings('wind', (.3, 1., 1.))  # green
    process_readings('visibility', (.6, 1., 1.))    # blue
    process_readings('sun', (0., 0., 0.))    # black
    # process_readings('tide', (0., 0., 0.5))    # gray

    process_readings('checkins', (.8, .8, 1.))    # purple
    process_readings('checkouts', (.9, .8, 1.), 1)    # thin purple

    process_events('tweets', (0.55, 1., 1.))    # matrix
    # process_events('motion', (0.76, 1., 1.))    # 
    process_events('sound', (0.92, 1., 1.))    # crimson

    db.close()

    log.debug("%s" % (db.keys(),))
    log.info("--> ok")

    if config['draw']:
        image_filename = "signals/%s_%s.png" % (t, DURATION)
        ctx.image.save(image_filename, "PNG")
        if __name__ == "__main__":
            ctx.show()        

    return filename
Exemplo n.º 21
0
def parse(request):
    """Ingest a sensornet JSON post and normalize it into a sensor feature dict.

    Returns the normalized dict, or (None, message) on unexpected input.
    """
    log.info("sensornet.parse")
    data = ingest_json_body(request)
    try:
        t_local = strings.as_numeric(data['t_local'])
        data = data['data']
        log.debug(json.dumps(data, indent=4, default=lambda x: str(x)))
        # promote raw gps fields to the canonical Longitude/Latitude keys
        if 'gps_long' in data and 'gps_lat' in data:
            data['Longitude'] = data.pop('gps_long')
            data['Latitude'] = data.pop('gps_lat')
        data['FeatureType'] = "sensor"
        data['FeatureSubType'] = "hybrid"
        data['SensorName'] = "sensornet"
        data['CoreExpedition'] = False
        data['t_utc'] = util.delocalize_timestamp(t_local, tz=config['local_tz'])
    except Exception as e:
        log.error("--> failed: %s" % log.exc(e))
        return None, "Unexpected format"
    return data
Exemplo n.º 22
0
def get_data(hidden=False):
    """Build the note list for all qualifying walks and return it as a JSON string.

    Each note is (normalized_onset, voice, channel) where channel is 0 for a
    left step and 1 for a right step. Returns {} on failure.
    """

    log.debug("HIDDEN %s" % hidden)

    data = {}

    try:

        walks = model.fetch_walks(hidden=hidden)

        notes = []
        ids = []
        voice = 0
        for walk in walks:
            sequence = model.fetch_sequence(walk['id'])
            if len(sequence) < config['min_steps']:
                continue  # too short to contribute a voice
            for step in sequence:
                onset, foot = step[0], step[1]
                notes.append((onset, voice, 0 if foot == 'left' else 1))
            voice += 1
            ids.append(walk['id'])

        # sort by onset time and rescale onsets to a normalized range
        notes.sort(key=lambda note: note[0])
        normalized = sp.normalize([note[0] for note in notes])
        notes = [(normalized[i], voice, channel) for i, (_, voice, channel) in enumerate(notes)]

        log.info("NOTES %s" % len(notes))

        data['notes'] = notes
        data['walk_ids'] = ids

    except Exception as e:
        log.error(log.exc(e))
        return {}

    return json.dumps(data)
Exemplo n.º 23
0
def get_data(hidden=False):
    """Collect step notes from stored walks and return them serialized as JSON.

    Notes are (normalized_onset, track, side) tuples; side is 0 for 'left',
    1 otherwise. Walks shorter than config['min_steps'] are skipped.
    Returns {} if anything fails.
    """

    log.debug("HIDDEN %s" % hidden)

    payload = {}

    try:

        walks = model.fetch_walks(hidden=hidden)

        notes = []
        walk_ids = []
        track = 0
        for walk in walks:
            steps = model.fetch_sequence(walk['id'])
            if len(steps) < config['min_steps']:
                # too few steps to be a usable walk
                continue
            notes.extend((step[0], track, 0 if step[1] == 'left' else 1) for step in steps)
            track += 1
            walk_ids.append(walk['id'])

        # order by onset, then rescale the onsets to a normalized range
        notes.sort(key=lambda note: note[0])
        scaled = sp.normalize([note[0] for note in notes])
        notes = [(scaled[i], track, side) for i, (_, track, side) in enumerate(notes)]

        log.info("NOTES %s" % len(notes))

        payload['notes'] = notes
        payload['walk_ids'] = walk_ids

    except Exception as e:
        log.error(log.exc(e))
        return {}

    return json.dumps(payload)
Exemplo n.º 24
0
 def run(self):
     """Read XBee frames forever, convert their fields, and dispatch them.

     Unlike batch variants, only the first sample set of a frame is kept.
     Errors are logged and the loop continues.
     """
     while True:
         try:
             frame = self.xbee.wait_read_frame()
             if self.verbose:
                 log.debug(frame)
             parsed = {}
             if 'source_addr' in frame:
                 parsed['sensor'] = int(frame['source_addr'][1])
             if 'frame_id' in frame:
                 parsed['frame'] = str(frame['frame_id'], 'ascii')
             if 'parameter' in frame:
                 parsed['parameter'] = int.from_bytes(frame['parameter'], 'little')
             if 'rssi' in frame:
                 parsed['rssi'] = int.from_bytes(frame['rssi'], 'little')
             if 'samples' in frame:
                 # first sample set only, values ordered by channel name
                 first = list(frame['samples'][0].items())
                 first.sort(key=lambda item: item[0])
                 parsed['samples'] = [value for _, value in first]
             if self.message_handler is not None:
                 self.message_handler(parsed)
         except Exception as e:
             log.error(log.exc(e))
Exemplo n.º 25
0
def parse(request):
    """Extract satellite beacon fields from a plain-text message body.

    Returns a dict of whatever fields were found; per-line failures are
    logged and skipped. Returns (None, message) when the body can't be read.
    """
    log.info("beacon.parse")
    content = ingest_plain_body(request)
    if content is None:
        return content, "Could not parse"

    data = {}
    for line in content.split('\n'):
        log.debug("%s" % line)
        try:
            if "sat4rent" in line.lower():
                # the last 8 characters carry the satellite ID
                data['Satellite'] = line[-8:].upper()
            if "Position Time:" in line:
                stamp = line.replace("Position Time:", "").strip()
                data['t_utc'] = util.timestamp(util.parse_date(stamp))
            if "Map:" in line:
                # the map URL query carries "lat,lon" in its q parameter
                query = net.urldecode(line.split('?')[1].strip())
                lat, lon = query['q'].split(' ')[0].split(',')
                data['longitude'], data['latitude'] = strings.as_numeric(lon), strings.as_numeric(lat)
            if "Altitude:" in line:
                data['altitude'] = strings.as_numeric(line.replace("Altitude:", "").replace("meters", "").strip())
            if "Speed:" in line:
                data['Speed'] = strings.as_numeric(line.replace("Speed:", "").replace("Knots", "").strip())
            if "Heading:" in line:
                data['Heading'] = strings.as_numeric(line.replace("Heading:", "").replace("°", "").strip())
        except Exception as e:
            log.error(log.exc(e))
            continue

    return data
Exemplo n.º 26
0
def parse(request):
    """Ingest a databoat JSON file of readings, inserting each as a sensor feature.

    Returns True on success, or (None, message) on unexpected input.
    """
    log.info("databoat.parse")
    data = ingest_json_file(request)
    try:
        for key, reading in data.items():
            t_local = strings.as_numeric(reading['t_local'])
            reading = reading['data']
            log.debug(json.dumps(reading, indent=4, default=lambda x: str(x)))
            # promote raw gps fields to the canonical Longitude/Latitude keys
            if 'gps_long' in reading and 'gps_lat' in reading:
                reading['Longitude'] = reading.pop('gps_long')
                reading['Latitude'] = reading.pop('gps_lat')
            reading['FeatureType'] = "sensor"
            reading['FeatureSubType'] = "hybrid"
            reading['SensorName'] = "databoat"
            reading['t_utc'] = util.delocalize_timestamp(t_local, tz=config['local_tz'])
            success, value = ingest_data("sensor", reading)
            if not success:
                log.error("--> failed: %s" % value)
    except Exception as e:
        log.error("--> failed: %s" % log.exc(e))
        return None, "Unexpected format"
    return True
Exemplo n.º 27
0
 def current_week(self):
     """Return the entry for the current ISO week, or None if it is absent."""
     now = datetime.datetime.now()
     year = now.year
     week = now.isocalendar()[1] - 1   # zero-based week index
     log.debug("Current year: %s" % year)
     log.debug("Current week: %s" % week)
     try:
         return self[year][week]
     except Exception:
         # missing year or week: fall through and return None
         log.debug("Week not found")
Exemplo n.º 28
0
def on_mouse_press(data):
    """Dispatch a mouse press according to the current UI mode.

    data: (x, y, button, modifiers) with x/y normalized 0..1.
    Modes (module-global flags): waiting ignores clicks; transmitting resets
    the canvas and switches to receiving; receiving maps the click onto the
    character grid and appends to / erases from the incoming message.
    """
    global waiting, transmitting, current_string, incoming_message

    # waiting mode, nothing happens
    if waiting:
        log.debug("MODE: waiting")
        return

    # transmitting mode, we've clicked, so go into receiving mode
    if transmitting:
        log.debug("MODE: transmitting")
        ctx.textures = []
        incoming_message = []
        label.text = ""
        # keep only the label on screen
        ctx.objects = [o for o in ctx.objects if o is label]
        draw_reception()
        transmitting = False
        flush_messages()
        return

    # receiving mode: process clicks and build message
    log.debug("MODE: receiving")
    x, y, button, modifiers = data
    # scale normalized coordinates to pixels
    x *= ctx.width
    y *= ctx.height
    for c, coord in enumerate(coords):
        # hit-test the click against each character cell's bounding box
        if x > coord[0][0] and x < coord[1][0] and y > coord[0][
                1] and y < coord[1][1]:
            if c == len(CHARACTERS) + 1:
                # the "done" cell: send, reset the canvas, refocus the
                # terminal, and go back to waiting
                sender.messages.put("DONE")
                ctx.textures = []
                incoming_message = []
                label.text = ""
                ctx.objects = [o for o in ctx.objects if o is label]
                result = subprocess.run(
                    ["osascript", "focus.scpt", "main_terminal"],
                    stdout=subprocess.PIPE)
                log.info(result)
                waiting = True
            elif c == len(CHARACTERS):
                # the "erase" cell: drop the last character, if any
                if len(incoming_message):
                    incoming_message.pop()
                    sender.messages.put("ERASE")
            else:
                character = CHARACTERS[c]
                if len(incoming_message) < 30:  # cap the message length
                    incoming_message.append(character)
                    sender.messages.put(character)
            label.text = "".join(incoming_message)
            break
Exemplo n.º 29
0
    params = net.urlencode(params)
    request_string = "https://api.foursquare.com/v2/venues/explore?%s" % params

    try:
        response = net.read(request_string)
    except Exception as e:
        log.error(log.exc(e))
        continue
    data = json.loads(response)    
    # print(json.dumps(data, indent=4))
    try:
        venues = data['response']['groups'][0]['items']
        for venue in venues:
            venue = venue['venue']
            checkins = venue['stats']['checkinsCount']
            if checkins == 0:
                continue
            if 'hereNow' in venue:
                people = venue['hereNow']['count']
            else:
                people = 0
            venue_id = venue['id']
            venue = {'venue_id': venue_id, 'people': people}
            model.add_venue(venue)
            total_venues += 1
    except Exception as e:
        log.error(log.exc(e))
        log.error(json.dumps(data, indent=4))

log.debug("added %s venues" % total_venues)
Exemplo n.º 30
0
#!/usr/bin/env python

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import json, model, datetime, math
from housepy import config, log, net

# Poll Foursquare's herenow endpoint for every known venue and tally how many
# people checked in or out since the last stored count.
log.info("////////// foursquare //////////")

update = []     # venues whose counts changed and need persisting
checkins = 0    # total arrivals observed this run
checkouts = 0   # total departures observed this run

for venue in model.get_venues():
    log.debug("checking %s" % venue['venue_id'])
    try:
        params = {'client_id': config['foursquare']['key'], 'client_secret': config['foursquare']['secret'], 'v': "20130704"}
        params = net.urlencode(params)
        request_string = "https://api.foursquare.com/v2/venues/%s/herenow?%s" % (venue['venue_id'], params)
        response = net.read(request_string, timeout=1)
        data = json.loads(response)
        people = data['response']['hereNow']['count']
        if people != venue['people']:
            # the count moved: attribute the delta to checkins or checkouts
            if people > venue['people']:
                checkins += people - venue['people']
            else:
                checkouts += venue['people'] - people
            venue['people'] = people
            update.append(venue)
    except Exception as e:
        # per-venue failures (timeouts, bad JSON) are logged and skipped
        log.error(log.exc(e))
Exemplo n.º 31
0
                    site = random.choice(self.nexts)    # duplicate entries mean prob distribution is correct, abusing memory space a bit, how's that gonna scale...
            else:                                       
                site = random.choice(Model.sites_exclude(self))              # jump the chain to a random site in the model
            page = random.choice(site.pages) if len(site.pages) else '/'     # keep with deep paths if possible
            return site.host, page, int(seconds * 1000)


    def __str__(self):
        """One-line summary: host, mean/std of visit durations, and known pages."""
        if len(self.durations):
            mean = np.mean(self.durations)
            std = np.std(self.durations)
        else:
            mean, std = 0.0, 0.0
        return "%s\n\t%f, %f\n\t%s\n" % (self.host, mean, std, self.pages)


def find_future(user_id, host):
    """Predict the user's next (host, page, dwell-ms) after visiting `host`.

    Returns None when the model cannot be built or is too small to trust;
    otherwise whatever `find_next()` returns for the requested host.
    """
    if Model.build(user_id, host) is None:
        return None
    if __name__ == "__main__":
        # Debug dump when run as a script. BUG FIX: the original named the
        # loop variable 'host', shadowing the parameter, so the Model.get(host)
        # call below looked up whichever host happened to iterate last instead
        # of the one the caller asked for.
        for site_host, site in Model.sites.items():
            log.debug(site)
        log.debug("Novelty: %f" % Model.calc_novelty())
    if len(Model.sites) < max(2, MIN_MODEL_SIZE):                                # the model is too small
        return None
    return Model.get(host).find_next()


if __name__ == "__main__":
    user_id = "e29d909b34d7ced11f67440a74f95f1e"
    t = time.clock()
    future = find_future(user_id, "twitter.com")
    log.debug(future)
    log.info("%fms" % ((time.clock() - t) * 1000))
Exemplo n.º 32
0
def hide(db, walk_id, hidden=True):
    """Set (or clear, with hidden=False) the hidden flag on a walk row."""
    message = "Hiding walk %s: %s" % (walk_id, hidden)
    log.debug(message)
    query = "UPDATE walks SET hidden=? WHERE id=?"
    db.execute(query, (hidden, walk_id,))
Exemplo n.º 33
0
def process(t):
    """Detect sound events in audio_tmp/<t>.wav and POST them to the server.

    t -- epoch timestamp naming the capture file; it is also the time base
    for the reported event timestamps. Removes the file afterwards except on
    the archival device.
    """
    log.info("////////// process %s //////////" % t)
    filename = "audio_tmp/%s.wav" % t
    sample_rate, signal = wavfile.read(filename)
    log.info("AUDIO SAMPLES %s" % len(signal))
    log.info("SAMPLE RATE %s" % sample_rate)
    duration = float(len(signal)) / sample_rate
    log.info("AUDIO DURATION %ss" % util.format_time(duration))
    signal = (np.array(signal).astype('float') / (2**16 * 0.5))   # assuming 16-bit PCM, -1 - 1

    log.info("--> preprocessing")
    magnitude = abs(signal)
    thresholded_magnitude = (magnitude > THRESHOLD) * magnitude
    # level = sp.smooth(thresholded_magnitude, size=10000)      # shit -- smooth is too expensive for raspi
    level = thresholded_magnitude

    log.info("--> scanning")
    # Within a tenth of a second, same sound (poor man's smoothing?).
    # BUG FIX: the original used sample_rate / 10, which is a float under
    # Python 3; combined with the exact `zeros == TOLERANCE` test below, a
    # sample rate not divisible by 10 meant a chunk could never close.
    TOLERANCE = sample_rate // 10
    indexes = []        # start sample index of each detected chunk
    maxes = []          # peak level within each chunk
    durations = []      # chunk length, in samples
    zeros = 0
    on_chunk = False
    for index, sample in enumerate(level):
        if sample > 0.0:
            if not on_chunk:
                indexes.append(index)
                durations.append(0)
                maxes.append(0)
                on_chunk = True
            durations[-1] += 1
            if sample > maxes[-1]:
                maxes[-1] = sample
            zeros = 0
        if sample == 0.0:
            if on_chunk:
                zeros += 1
                if zeros >= TOLERANCE:  # >= (not ==): close once enough silence has passed
                    on_chunk = False
    # Build one (peak value, absolute time, duration-in-seconds) per chunk.
    events = []
    for i in range(len(indexes)):   # range, not Python 2's xrange
        value, t_, duration = maxes[i], t + int(float(indexes[i]) / sample_rate), float(durations[i]) / sample_rate
        events.append((value, t_, duration))
    for event in events:
        log.debug(event)

    if 'draw' in config and config['draw']:
        from housepy import drawing
        log.info("--> drawing")
        ctx = drawing.Context(width=2000, height=500, background=(0., 0., 1.), hsv=True, flip=True, relative=True)
        ctx.line([(float(i) / len(magnitude), sample) for (i, sample) in enumerate(magnitude)], thickness=1, stroke=(0., 0., 0.5))
        ctx.line([(float(i) / len(thresholded_magnitude), sample) for (i, sample) in enumerate(thresholded_magnitude)], thickness=1, stroke=(0., 0., 0.))
        ctx.line([(float(i) / len(level), sample) for (i, sample) in enumerate(level)], thickness=1, stroke=(0., 1., 1.))
        level = sp.normalize(level)
        ctx.line([(float(i) / len(level), sample) for (i, sample) in enumerate(level)], thickness=1, stroke=(0.15, 1., 1.))
        ctx.line(0.0, THRESHOLD, 1.0, THRESHOLD, thickness=1, stroke=(0.55, 1., 1.))
        ctx.show()

    try:
        # Ship the detected events to the collection server as JSON.
        data = []
        for event in events:
            value, t_, duration = event
            data.append({'device': config['device'], 'kind': "sound", 'value': value, 't': t_, 'duration': duration})
        response = net.read("http://%s:%s" % (config['server']['host'], config['server']['port']), json.dumps(data))
        log.info(response)
    except Exception as e:
        log.error(log.exc(e))

    # Keep the raw audio only on the archival device.
    if config['device'] != "Granu":
        os.remove(filename)
Exemplo n.º 34
0
import time, json, threading, subprocess, queue, platform, os, sys
import numpy as np
from housepy import log, config, strings, net, s3, util, process, drawing
from scipy.io import wavfile

DURATION = 10
AUDIO_TMP = os.path.abspath(os.path.join(os.path.dirname(__file__), "audio_tmp"))

# The capture timestamp is passed on the command line and names the wav file.
t = sys.argv[1]

filename = "%s/%s.wav" % (AUDIO_TMP, t)
sample_rate, signal = wavfile.read(filename)
log.debug("samples %s" % len(signal))
log.debug("sample_rate %s" % sample_rate)
duration = float(len(signal)) / sample_rate
log.debug("duration %ss" % strings.format_time(duration))
signal = (np.array(signal).astype('float') / (2**16 * 0.5))   # assuming 16-bit PCM, -1 - 1
signal = abs(signal)    # magnitude

# Plot the magnitude with the configured noise floor overlaid in red.
ctx = drawing.Context()
ctx.plot(signal)
ctx.line(0, config['noise_threshold'], 1, config['noise_threshold'], stroke=(255, 0, 0))
ctx.output("screenshots")

log.debug("noise threshold is %s" % config['noise_threshold'])
log.debug("found magnitude")
# Vectorized count of samples above the noise floor. The original looped in
# Python over every sample, which is needlessly slow for audio-length arrays;
# this one comparison + count runs at C speed and yields the same number.
content_samples = int(np.count_nonzero(signal > config['noise_threshold']))
total_content_time = float(content_samples) / sample_rate
Exemplo n.º 35
0
            }
        }
    }
})

log.info("POINTS %s" % results.count())
users = len(results.distinct('user_id'))
log.info("USERS %s" % users)

points = np.array([(result['location']['coordinates'][0],
                    result['location']['coordinates'][1], result['user_id'])
                   for result in results])

min_lon, max_lon = (np.min(points[:, 0]), np.max(points[:, 0]))
min_lat, max_lat = (np.min(points[:, 1]), np.max(points[:, 1]))
log.debug("%f %f %f %f" % (min_lon, max_lon, min_lat, max_lat))

min_x, max_y = geo.project((min_lon, max_lat))
max_x, min_y = geo.project((max_lon, min_lat))

ratio = (max_x - min_x) / (max_y - min_y)

ctx = drawing.Context(1000,
                      int(1000 / ratio),
                      relative=True,
                      flip=True,
                      hsv=True)
log.info("Drawing %d %d..." % (ctx.width, ctx.height))

for point in points:
Exemplo n.º 36
0
def estimate_geometry(data, db):
    """Estimate the location of a geotagged object for a new feature that's missing it.

    For data tagged to a Member, find something else that's geotagged with
    that Member -- best case ambit_geo, worst case take the beacon if they
    are core, otherwise fail. For non-member data, just tag it to the beacon.
    (These notes were three separate stacked string statements in the
    original; only the first one actually became the docstring.)

    Returns `data` with `geometry` linearly interpolated in time between the
    closest fixes before and after its t_utc, and
    properties.EstimatedGeometry recording the source FeatureType (None when
    no fix was found). On unexpected errors the feature is returned as-is.
    """
    log.info("Estimating geometry...")
    t = data['properties']['t_utc']
    feature_type = data['properties']['FeatureType']    # read for parity; also surfaces a KeyError early on malformed features
    log.info("--> t is %s (%s)" % (t, util.datestring(t, tz=config['local_tz'])))
    try:

        # find geodata from this Member
        ## bh16 restrict this to ambit_geo. why wasnt it before?
        member_closest_before = None
        member_closest_after = None
        if 'Member' in data['properties'] and data['properties']['Member'] is not None:
            member = data['properties']['Member']
            log.info("--> member is %s" % member)
            try:
                member_closest_before = list(db.features.find({'properties.Member': member, 'properties.FeatureType': "ambit_geo", 'properties.t_utc': {'$lte': t}}).sort('properties.t_utc', -1).limit(1))[0]
                member_closest_after =  list(db.features.find({'properties.Member': member, 'properties.FeatureType': "ambit_geo", 'properties.t_utc': {'$gte': t}}).sort('properties.t_utc', 1).limit(1))[0]
            except IndexError:
                pass    # no ambit_geo fix on that side of t for this member

        # find geodata from the nearest beacon. bh16: satellite/beacon lookup
        # is temporarily disabled, so these stay None and the member fixes
        # (when present) always win in the selection below.
        core_sat = config['satellites'][0] # first satellite is core expedition
        beacon_closest_before = None
        beacon_closest_after = None

        # pick the best ones: latest fix at or before t, earliest at or after
        if member_closest_before is not None and beacon_closest_before is not None:
            closest_before = beacon_closest_before if beacon_closest_before['properties']['t_utc'] > member_closest_before['properties']['t_utc'] else member_closest_before
        elif member_closest_before is not None:
            closest_before = member_closest_before
        else:
            closest_before = beacon_closest_before

        if member_closest_after is not None and beacon_closest_after is not None:
            closest_after = beacon_closest_after if beacon_closest_after['properties']['t_utc'] < member_closest_after['properties']['t_utc'] else member_closest_after
        elif member_closest_after is not None:
            closest_after = member_closest_after
        else:
            closest_after = beacon_closest_after

        if closest_before is None or closest_after is None:
            data['properties']['EstimatedGeometry'] = None
            log.warning("--> closest not found")
            return data

        # average the times and positions: this is naive calculation not taking projection into account
        data['geometry'] = closest_before['geometry'] # make sure the fields are there (if theres an error it will default to this assignment)
        if len(data['geometry']['coordinates']) == 2:  # add altitude if it's missing
            data['geometry']['coordinates'].append(None)
        data['properties']['EstimatedGeometry'] = closest_before['properties']['FeatureType']   # note what we used

        t1 = closest_before['properties']['t_utc']
        t2 = closest_after['properties']['t_utc']
        if t1 != t2:
            # linear interpolation weight between the two fixes
            p = (t - t1) / (t2 - t1)
            data['geometry']['coordinates'][0] = (closest_before['geometry']['coordinates'][0] * (1 - p)) + (closest_after['geometry']['coordinates'][0] * p)
            data['geometry']['coordinates'][1] = (closest_before['geometry']['coordinates'][1] * (1 - p)) + (closest_after['geometry']['coordinates'][1] * p)
            try:
                data['geometry']['coordinates'][2] = (closest_before['geometry']['coordinates'][2] * (1 - p)) + (closest_after['geometry']['coordinates'][2] * p)
            except (IndexError, TypeError):     # altitude absent or None on a fix (was a bare except)
                data['geometry']['coordinates'][2] = None
        log.debug(data['geometry']['coordinates'])

        log.info("--> derived from %s" % data['properties']['EstimatedGeometry'])

    except Exception as e:
        log.error(log.exc(e))
    return data
Exemplo n.º 37
0
def parse(request):
    """Parse a sighting upload: one JSON metadata file plus optional images.

    Returns the parsed feature dict on success, or (None, error_message) on
    failure (missing files, bad JSON, or missing SpeciesName).
    """
    log.info("sighting.parse")

    paths = save_files(request)
    if not len(paths):
        return None, "No files"

    # process the json
    data = None
    for path in paths:
        if path.endswith("json"):
            try:
                with open(path) as f:
                    data = json.loads(f.read())
            except Exception as e:
                log.error(log.exc(e))
                return None, "Could not parse JSON"
            break
    if data is None:
        return None, "No data"

    # make corrections
    # "Bird Name" (any spacing/case) should be SpeciesName.
    # BUG FIX: iterate over a snapshot -- the original deleted keys while
    # iterating data.items(), which raises RuntimeError in Python 3.
    for key, item in list(data.items()):
        modkey = key.strip().lower().replace(' ', '')
        if modkey == "birdname":
            data['SpeciesName'] = item
            del data[key]
    if 'TeamMember' in data:
        data['Member'] = data['TeamMember']
        del data['TeamMember']

    # purge blanks
    data = {key: value for (key, value) in data.items() if type(value) != str or len(value.strip())}
    if 'SpeciesName' not in data:
        log.error("Missing SpeciesName")
        return None, "Missing SpeciesName"
    data['SpeciesName'] = strings.titlecase(data['SpeciesName'])

    if 'Count' not in data and 'count' not in data:
        data['Count'] = 1
    log.debug(json.dumps(data, indent=4))
    data['Taxonomy'] = get_taxonomy(data['SpeciesName'])

    # process the image(s); each one is also ingested as its own "image" feature
    images = []
    for path in paths:
        if not path.endswith("json"):
            log.info("Inserting image... %s" % path.split('/')[-1])
            image_data = process_image(path, data['Member'] if 'Member' in data else None, data['t_utc'] if 't_utc' in data else None)
            if image_data is None:
                log.info("--> no image data")
                continue
            success, value = ingest_data("image", image_data.copy())   # make a second request for the image featuretype
            if not success:
                log.error(value)
            if 'Member' in image_data:
                del image_data['Member']
            images.append(image_data)
            log.info("--> image added")
    data['Images'] = images

    # use image data to assign a timestamp to the sighting
    if 'getImageTimestamp' in data and data['getImageTimestamp'] == True and len(data['Images']) and 't_utc' in data['Images'][0]:
        data['t_utc'] = data['Images'][0]['t_utc']
        log.info("--> replaced sighting t_utc with image data")
    if 'getImageTimestamp' in data:
        del data['getImageTimestamp']

    return data
Exemplo n.º 38
0
#!/usr/bin/env python3

import json, os
import sender_new, sender_exit, sender_persisting, sender_alone
from housepy import config, log
from tweeter import t

# initialize Twitter and retrieve followers
# Fetch the current follower ids; bail out quietly if Twitter is unreachable.
try:
    result = t.followers.ids(screen_name="angryhermitbot")
except Exception as e:
    log.debug(log.exc(e))
    exit()
current_ids = result['ids']

# we are so happy
if not len(current_ids):
    log.info("No followers! So happy.")
    sender_alone.send()
    exit()

# sort followers: followers.txt holds one follower id per line from last run
past_ids = []
if os.path.isfile('followers.txt'):
    with open('followers.txt', 'r') as f:
        past_ids = [int(line.strip()) for line in f.readlines()]
current_set = set(current_ids)
past_set = set(past_ids)
new_ids = list(current_set - past_set)          # followed since last run
exit_ids = list(past_set - current_set)         # unfollowed since last run
persisting_ids = list(current_set - set(new_ids))
#put run info into log file
#put run info into log file
Exemplo n.º 39
0
def on_data(data):
    """Forward one incoming reading to the rate and monitor queues.

    `data` appears to be (epoch_time, (x, y, z)) -- inferred from the debug
    format string below; confirm against the producer.
    """
    if config['debug']:
        log.debug("Epoch time: [{0}] - X: {1}, Y: {2}, Z: {3}".format(
            data[0], *data[1]))
    rater.queue.put(1)      # one tick per sample, for rate accounting
    monitor.queue.put(data)