def send_tag_updates(self, tags):
    """Record the tag locations given in 'tags' (tag_id -> location) and, once
    the update period has elapsed, send the most recent location of each tag
    over the socket.

    Returns a dict tag_id -> (x, y) of the updates actually sent; empty when
    the update period has not yet elapsed.
    """
    # FIXME: Uses plain text interface. Should use proper LAT XML interface.
    result = {}
    # Add these tag updates to our history.  setdefault replaces the
    # deprecated has_key()/manual-initialisation dance.
    for tag_id, location in tags.items():
        self.tags.setdefault(tag_id, []).append(location)
    now = clock.get_time()
    if now - self.last_update_time > self.update_period:
        for tag_id, locations in self.tags.items():
            # Only the most recent location for each tag is reported.
            x, y = locations[-1]
            result[tag_id] = x, y
            logging.info("Sending tag update: %d (%.2f, %.2f)" % (tag_id, x, y))
            if self.socket:
                self.socket.send("%d %.2f %.2f\r\n" % (tag_id, x, y))
        # Reset for next time.  Reuse 'now' rather than reading the clock a
        # second time, so the recorded update time matches the comparison above.
        self.tags = {}
        self.last_update_time = now
    return result
def loop(self):
    """Accept client connections and service incoming data forever, pushing
    location updates to all connected clients when the next update is due."""
    self.rdset = [self.socket]
    self.clients = []
    logging.info("Starting fake location server loop.")
    while True:
        # Deal with any incoming data first.  The short timeout keeps the
        # loop responsive to the periodic push below.
        (readers, writers, exceptors) = select.select(self.rdset + self.clients, [], [], 0.1)
        for reader in readers:
            if reader == self.socket:
                # Accept a new connection from the listening socket.
                conn, addr = self.socket.accept()
                self.clients.append(conn)
                logging.info("New client connection: %s" % (str(addr)))
            else:
                # Service any data that has come from a client connection.
                message = reader.recv(1024)
                if message:
                    logging.info("Received message from client: %s" % message.strip())
                else:
                    # An empty read means the peer closed the connection.
                    # Close our end (previously leaked) and remove the client
                    # without mutating the list while iterating over it.
                    logging.info("Dropping client connection")
                    reader.close()
                    self.clients.remove(reader)
        # Then, see if it's time to push some data.
        if clock.get_time() > self.next_update_time:
            self.push_updates()
def push_updates(self):
    "Send distance readings to all the clients, update self.next_update_time."
    now = clock.get_time()
    updates = ""
    reading = self.readings[self.next_reading]
    # Batch every reading whose timestamp has already passed into one payload.
    while reading["timestamp"] < now:
        distance_reading = Reading(distance=reading["distance"], tag_id=reading["tag_id"], anchor_id=reading["anchor_id"], error_code=0)
        updates += "%s%s" % (str(distance_reading), LocationServer.line_separator)
        self.next_reading += 1
        if self.next_reading >= len(self.readings):
            # NOTE(review): 'self.loop' has the same name as the loop() method;
            # a bound method is always truthy, so this test only behaves as a
            # flag if an instance attribute shadows it -- confirm in __init__.
            if self.loop:
                # Set the next reading to the first, and set the time three seconds before the first reading.
                self.next_reading = 0
                first_timestamp = self.readings[0]["timestamp"]
                clock.set_time(first_timestamp - 3.0)
                self.next_update_time = first_timestamp
            # Stop after the last reading; any batched updates are still sent.
            break
        reading = self.readings[self.next_reading]
    # NOTE(review): in the non-wrapping path self.next_update_time is not
    # advanced here -- presumably maintained by the caller; verify.
    if updates:
        logging.info("Pushing updates of length %d" % len(updates))
        logging.debug("Updates:\n%s" % updates)
        for client in self.clients:
            client.send(updates)
def ground_truth(self, tag_id, timestamp=None):
    """Return the ground-truth (x, y) position of 'tag_id' at 'timestamp'
    (defaulting to the current clock time), or None when no ground-truth
    record covers that time.

    Raises an Exception when more than one record matches.
    """
    if timestamp is None:
        timestamp = clock.get_time()
    cursor = self.connection.cursor()
    cursor.execute(self.ground_truth_sql, (tag_id, timestamp, timestamp))
    rows = cursor.fetchall()
    if not rows:
        return None
    if len(rows) > 1:
        # Log the offending rows instead of the bare debug 'print' that was
        # left in here.
        logging.error("Ambiguous ground truth rows: %s" % (rows,))
        raise Exception("Tag %d at %.2f matches more than one ground truth (%d)" % (tag_id, timestamp, len(rows)))
    start_x, start_y, end_x, end_y, start_time, end_time = rows[0]
    # Static point: no end location recorded.
    if end_x is None and end_y is None:
        return start_x, start_y
    # Linear interpolation between the start and end points.
    alpha = (timestamp - start_time) / (end_time - start_time)
    dx = alpha * (end_x - start_x)
    dy = alpha * (end_y - start_y)
    x = start_x + dx
    y = start_y + dy
    return x, y
def start_ground_truth(self, tag_id, label, location):
    """Open a new ground-truth record for 'tag_id' at 'location' (x, y),
    starting at the current clock time, and return its database id."""
    loc_x, loc_y = location
    params = (label, tag_id, clock.get_time(), loc_x, loc_y)
    self.cursor.execute(self.start_ground_truth_sql, params)
    new_id = self.last_rowid()
    self.connection.commit()
    return new_id
def perturb(self):
    """Perturb each of the particles in the cloud according to the given perturbation function"""
    # Time elapsed since the last perturbation drives the magnitude.
    elapsed = clock.get_time() - self.last_perturb_time
    # NOTE(review): self.last_perturb_time is not advanced here -- presumably
    # the caller resets it; confirm, otherwise 'elapsed' grows on every call.
    for p in self.particles:
        p.perturb(elapsed)
def add_estimate(self, tag_id, x, y):
    "Add an estimate by the current location module."
    # Guard clause: nothing to record without a configuration id.
    if not self.configuration_id:
        logging.warning("Ignored distance estimate because current configuration ID is not set.")
        return
    sql = ("INSERT INTO estimate(tag_id, x, y, timestamp, configuration_id) "
           "VALUES (?, ?, ?, ?, ?)")
    self.insert(sql, (tag_id, x, y, clock.get_time(), self.configuration_id))
def __init__(self, anchor_id, tag_id, distance, timestamp=None):
    """A single distance reading from anchor 'anchor_id' to tag 'tag_id'.

    'timestamp' defaults to the current clock time when not supplied.
    """
    super(DistanceReading, self).__init__()
    self.anchor_id = anchor_id
    self.tag_id = tag_id
    self.distance = distance
    # 'is None' (identity) rather than '== None' for the sentinel test.
    if timestamp is None:
        timestamp = clock.get_time()
    self.timestamp = timestamp
def median_distance(self, distance_readings, max_age=2.0):
    "Return the median distance of all the readings within max_age"
    # A reading is fresh when its timestamp is later than this cutoff.
    cutoff = clock.get_time() - max_age
    recent = [reading.distance for reading in distance_readings if reading.timestamp > cutoff]
    if not recent:
        return None
    logging.debug("Returning median of %d distances" % len(recent))
    return numpy.median(recent)
def end_ground_truth(self, ground_truth_id, location=None, label=None):
    """Close the ground-truth record 'ground_truth_id' at the current clock
    time, optionally recording an end 'location' (x, y) and updating 'label'."""
    # 'is None' instead of '== None' for the sentinel tests.
    if location is None:
        x, y = None, None
    else:
        x, y = location
    self.cursor.execute(self.end_ground_truth_sql, (clock.get_time(), x, y, ground_truth_id))
    if label is not None:
        self.cursor.execute(self.update_ground_truth_label_sql, (label, ground_truth_id))
    self.connection.commit()
def cull_old(self):
    "Delete any position updates older than 'max_age'"
    oldest = clock.get_time() - self.max_age
    for position_updates in self.tag_updates.values():
        # Rebuild each list in place (slice assignment) instead of the O(n^2)
        # delete-while-scanning loop; anything holding a reference to the
        # list still sees the culled result.
        position_updates[:] = [p for p in position_updates if p.timestamp >= oldest]
def ground_truth_id(self, tag_id, timestamp=None):
    """Return the id of the ground-truth record covering 'tag_id' at
    'timestamp' (defaulting to the current clock time), or None when no
    record covers that time.

    Raises an Exception when more than one record matches.
    """
    if timestamp is None:
        timestamp = clock.get_time()
    sql = "SELECT id FROM ground_truth WHERE tag_id = ? AND start_time <= ? AND end_time > ?"
    rows = self.query(sql, (tag_id, timestamp, timestamp)).fetchall()
    if not rows:
        return None
    if len(rows) > 1:
        raise Exception("Tag %d at %.2f matches more than one ground truth (%d)" % (tag_id, timestamp, len(rows)))
    return rows[0][0]
def tag_locations(self, tag_ids=None):
    "A dictionary of tag locations: tag_id -> (x,y)"
    # None sentinel replaces the mutable [] default argument; calling with no
    # arguments still yields an empty result, so callers are unaffected.
    if tag_ids is None:
        tag_ids = []
    now = clock.get_time()
    result = {}
    self.cull_old()
    for tag_id in tag_ids:
        # Rate-limit per tag: skip tags refreshed within update_rate seconds.
        # NOTE(review): assumes self.last_updates has an entry for every
        # requested tag (e.g. a defaultdict) -- confirm against __init__.
        if self.update_rate and (now - self.last_updates[tag_id] < self.update_rate):
            continue
        position_updates = self.tag_updates.get(tag_id)
        if position_updates:
            result[tag_id] = self.filter_function(position_updates)
            self.last_updates[tag_id] = now
    return result
def run_locmod(experiment, locmod, config):
    "Run the locmod for a particular configuration against the distance readings to generate a new set of estimates."
    last_anchor_id = 0
    logging.info("Running location module: %s" % (config.locmod_filename))
    experiment.register_configuration(config.filename, config.text, config.locmod_filename, config.locmod_text)
    # Replay all recorded readings in timestamp order.
    sql = "SELECT anchor_id, tag_id, distance, timestamp FROM distance_reading ORDER BY timestamp"
    for anchor_id, tag_id, distance, timestamp in experiment.query(sql):
        # Skip if the reading is not relavent to this configuration
        if not anchor_id in config.anchors:
            logging.debug("Ignoring reading from unknown anchor: %d" % anchor_id)
            continue
        if not tag_id in config.tag_ids:
            logging.debug("Ignoring reading from unknown tag: %d" % tag_id)
            continue
        # Anchor IDs come in order, so if we roll back to an earlier one it means we have all the reading for this update, and it's time to do the location update.
        if anchor_id < last_anchor_id:
            # NOTE(review): only the current tag_id is asked for here; if a
            # sweep can carry several tags, other tags never get estimates --
            # confirm intended behaviour.
            update = locmod.update_locations([tag_id])
            if update.has_key(tag_id):
                location = update[tag_id]
                x, y = location
                # Compare the estimate against ground truth (when known) and
                # store the resulting error with the estimate.
                ground_truth = experiment.ground_truth(tag_id)
                if ground_truth:
                    gx, gy = ground_truth
                    error = math.hypot(x - gx, y - gy)
                    ground_truth_id = experiment.ground_truth_id(tag_id)
                    # Reuses the 'sql' name; safe because the SELECT cursor
                    # above was already created from the original string.
                    sql = "INSERT INTO estimate(tag_id, x, y, timestamp, ground_truth_id, error, configuration_id) VALUES (?, ?, ?, ?, ?, ?, ?)"
                    experiment.cursor.execute(sql, (tag_id, x, y, clock.get_time(), ground_truth_id, error, experiment.configuration_id))
                    logging.debug("Inserted estimate: (%06.2f, %06.2f) error %05.2fm from (%06.2f, %06.2f)" % (x, y, error, gx, gy))
            else:
                logging.debug("Not adding estimate, location not known.")
        # Drive the simulated clock from the recorded timestamps, then feed
        # the reading to the location module.
        clock.set_time(timestamp)
        locmod.add_reading(anchor_id, tag_id, distance)
        last_anchor_id = anchor_id
    experiment.connection.commit()
def add_readings(self, tag_id, distances, ground_truth_id=None):
    """Like add_reading for many readings, but all with an identical timestamp.

    'distances' is an iterable of (anchor_id, distance) pairs.
    """
    timestamp = clock.get_time()
    for anchor_id, distance in distances:
        # Bug fix: the original passed the undefined name 'ground_truth'
        # here (the parameter is 'ground_truth_id'), raising NameError on
        # every call.
        self.cursor.execute(self.add_reading_sql, (anchor_id, tag_id, distance, ground_truth_id, timestamp))
    # NOTE(review): unlike add_reading, no commit is issued here --
    # presumably the caller commits the batch; confirm.
def add_reading(self, anchor_id, tag_id, distance, ground_truth=None):
    "Add a reading, now. Don't worry about comparing to the ground truth distance."
    params = (anchor_id, tag_id, distance, ground_truth, clock.get_time())
    self.cursor.execute(self.add_reading_sql, params)
    self.connection.commit()
from Almada.clock import shared_clock as clock
import clock_test_b
import time

if __name__ == "__main__":
    # Exercise the shared clock: advance it by increasing amounts while
    # real time passes, and report the wall-clock duration of each step.
    b = clock_test_b.B()
    for step in range(10):
        started = clock.get_time()
        b.advance_time(step)
        time.sleep(1)
        elapsed = clock.get_time() - started
        print("Slept for %.2f seconds" % elapsed)
def __init__(self, x, y):
    """A position fix stamped with the clock time at construction."""
    super(PositionUpdate, self).__init__()
    self.x, self.y = x, y
    self.timestamp = clock.get_time()
def advance_time(self, interval):
    """Advance the shared clock by 'interval' seconds and print how far the
    clock ends up from the requested target."""
    target = clock.get_time() + interval
    clock.set_time(target)
    print("Error: %.2f" % (clock.get_time() - target))