Example #1
def get_sensor_id(s, hostname, type):
    # Check the sensor_ids cache first; on a miss, look the sensor up and cache its id.
    if (hostname, type) in sensor_ids:
        sensor_id = sensor_ids[(hostname, type)]
    else:
        sensor = lookup_sensor(s, name=hostname, type=type)
        if sensor is not None:
            sensor_id = sensor.id
            sensor_ids[(hostname, type)] = sensor_id
        else:
            logger.debug("No sensor associated with %s (%s)", hostname, type)
            return None
    return sensor_id
Example #2
def get_sensor_id(s, hostname, type):
    # Check the sensor_ids cache first; on a miss, look the sensor up and cache its id.
    if (hostname, type) in sensor_ids:
        sensor_id = sensor_ids[(hostname, type)]
    else:
        sensor = lookup_sensor(s, name=hostname, type=type)
        if sensor is not None:
            sensor_id = sensor.id
            sensor_ids[(hostname, type)] = sensor_id
        else:
            logger.debug("No sensor associated with %s (%s)", hostname, type)
            return None
    return sensor_id
Example #3
def get_sensor_id(s, mac, type):
    # Check the sensor_ids cache first; on a miss, look the sensor up and cache its id.
    if (mac, type) in sensor_ids:
        sensor_id = sensor_ids[(mac, type)]
    else:
        sensor = lookup_sensor(s, name=mac, type=type)
        if sensor is not None:
            sensor_id = sensor.id
            sensor_ids[(mac, type)] = sensor_id
        else:
            logger.debug("No sensor associated with %s (%s)", mac, type)
            #print "NONE SENSOR", (mac, type)
            return None
    return sensor_id
Example #4
def get_sensor_id(s, mac, type):
    # Check the sensor_ids cache first; on a miss, look the sensor up and cache its id.
    if (mac, type) in sensor_ids:
        sensor_id = sensor_ids[(mac, type)]
    else:
        sensor = lookup_sensor(s, name=mac, type=type)
        if sensor is not None:
            sensor_id = sensor.id
            sensor_ids[(mac, type)] = sensor_id
        else:
            logger.debug("No sensor associated with %s (%s)", mac, type)
            #print "NONE SENSOR", (mac, type)
            return None
    return sensor_id
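The four variants above rely on a module-level sensor_ids cache, a logger, and a lookup_sensor helper that are not shown; lookup_sensor presumably corresponds to the doppelserver.utils.lookup_sensor used in the later examples. A minimal sketch of that assumed context, with a hypothetical call site (the definitions and the call below are assumptions, not code from the original project):

import logging

logger = logging.getLogger(__name__)
sensor_ids = {}  # cache: (hostname or mac, type) -> sensor id, shared across calls

# hypothetical call site: the id is resolved once, then served from the cache
# sensor_id = get_sensor_id(s, "some-host", "rfid")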
Example #5
#!/usr/bin/python

from doppelserver.models import Event, load_session
from datetime import datetime
import json, urllib, time
import doppelserver.utils as utils
rooms = [274, 333, 348, 445, 474, 514, 548, 674]

if __name__ == "__main__":
    s = load_session()
    sensors = {}
    for room in rooms:
        sensors[room] = utils.lookup_sensor(s, "rfid_%d" % room, "rfid")

    while True:
        now = datetime.now()
        for room in rooms:
            url = "http://tagnet.media.mit.edu/getRfidUserProfiles?readerid=e14-%d-1" % room
            tags = json.load(urllib.urlopen(url))["tags"]
            event_json = {"tags": []}
            for tag in tags:
                tag_json = {}
                for k in "first_name", "last_name", "id", "picture_url", "user_name":
                    if k in tag:
                        tag_json[k] = tag[k]
                if tag_json:
                    event_json["tags"].append(tag_json)
            
            # in order to save on row space, we deduplicate empty entries
            last_entry = s.query(Event).filter_by(sensor=sensors[room]).order_by(Event.time.desc()).first()
            if last_entry is not None and last_entry.json["tags"] == [] and tags == []:
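                # NOTE: the original snippet is truncated at this point. The lines below
                # are a hypothetical completion based on the comment above, not the
                # original code: skip the duplicate empty reading, otherwise store it as
                # an Event (constructor fields follow Examples #6 and #9).
                continue
            s.add(Event(sensor=sensors[room], time=now, json=event_json))
        s.commit()
        time.sleep(30)  # polling interval is a guess; the original value is not shown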
Example #6
    # deal with AM/PM
    if time_data["PM"] == 1:
        time_data["hour"] += 12
    del time_data["PM"]

    json["ms"] = time_data["ms"]
    del time_data["ms"]
    dt = datetime(**time_data)

    return Event(sensor=sensor, time=dt, json=json)
    
    
if __name__ == "__main__":
    s = load_session()
    s.autoflush = False
    sensor = utils.lookup_sensor(s, "Ping pong table", "ping_pong")

    while True:
        # last_event is the most recent ping pong update
        last_event = s.query(Event).filter_by(sensor=sensor).order_by(Event.time.desc()).first()

        xml = etree.XML(urllib.urlopen("http://18.111.48.39/~tmg/pppp/hits.txt").read())
        for hit in xml:
            event = hit_to_event(hit, sensor=sensor)
            # don't add stuff that's older than the last update
            if last_event is not None and event.time <= last_event.time:
                s.expunge(event)

        s.commit()
        time.sleep(1)
Example #7
#!/usr/bin/python

from doppelserver.models import Event, load_session
from datetime import datetime
import json, urllib, time
import doppelserver.utils as utils
rooms = [274, 333, 348, 445, 474, 514, 548, 674]

if __name__ == "__main__":
    s = load_session()
    sensors = {}
    for room in rooms:
        sensors[room] = utils.lookup_sensor(s, "rfid_%d" % room, "rfid")

    while True:
        now = datetime.now()
        for room in rooms:
            url = "http://tagnet.media.mit.edu/getRfidUserProfiles?readerid=e14-%d-1" % room
            tags = json.load(urllib.urlopen(url))["tags"]
            event_json = {"tags": []}
            for tag in tags:
                tag_json = {}
                for k in "first_name", "last_name", "id", "picture_url", "user_name":
                    if k in tag:
                        tag_json[k] = tag[k]
                if tag_json:
                    event_json["tags"].append(tag_json)

            # in order to save on row space, we deduplicate empty entries
            last_entry = s.query(Event).filter_by(
                sensor=sensors[room]).order_by(Event.time.desc()).first()
Example #8
#!/usr/bin/python

from doppelserver.models import load_session, StaticSample
import urllib, time
import doppelserver.utils as utils
from lxml import etree

if __name__ == "__main__":
    s = load_session()
    while True:
        f = urllib.urlopen("http://tac.mit.edu/E14/data.asp")
        xml = etree.XML(f.read())
        f.close()
        power = int(xml.xpath("string()").strip())
        sensor = utils.lookup_sensor(s, "Building power usage", "power")
        sample = StaticSample(time=None, data=power, sensor=sensor)  # time=None presumably lets the model default the timestamp
        s.add(sample)
        s.commit()
        time.sleep(45)
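Example #8 has no error handling around the fetch, so one network failure or malformed response ends the process. A hedged variant of the start of the loop body with a basic guard (the exception choices and the back-off are assumptions, not part of the original script):

        try:
            f = urllib.urlopen("http://tac.mit.edu/E14/data.asp")
            xml = etree.XML(f.read())
            f.close()
        except (IOError, etree.XMLSyntaxError):
            time.sleep(45)  # back off and retry instead of crashing
            continue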
Example #9
    # deal with AM/PM
    if time_data["PM"] == 1:
        time_data["hour"] += 12
    del time_data["PM"]

    json["ms"] = time_data["ms"]
    del time_data["ms"]
    dt = datetime(**time_data)

    return Event(sensor=sensor, time=dt, json=json)


if __name__ == "__main__":
    s = load_session()
    s.autoflush = False
    sensor = utils.lookup_sensor(s, "Ping pong table", "ping_pong")

    while True:
        # last_event is the most recent ping pong update
        last_event = s.query(Event).filter_by(sensor=sensor).order_by(
            Event.time.desc()).first()

        xml = etree.XML(
            urllib.urlopen("http://18.111.48.39/~tmg/pppp/hits.txt").read())
        for hit in xml:
            event = hit_to_event(hit, sensor=sensor)
            # don't add stuff that's older than the last update
            if last_event is not None and event.time <= last_event.time:
                s.expunge(event)

        s.commit()
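The AM/PM handling in Examples #6 and #9 assumes the feed never reports an hour of 12: as written, a 12 PM reading becomes hour 24 and datetime() raises ValueError, while a 12 AM reading stays at 12. If 12 o'clock readings are possible, the usual 12-hour normalization is sketched below (a suggestion, not the original code):

def to_24h(hour, pm):
    # 12 AM -> 0, 12 PM -> 12, 1-11 PM -> 13-23
    if hour == 12:
        hour = 0
    if pm:
        hour += 12
    return hour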