def _setup(self, settings_module=None):
    """
    Load the settings module pointed to by the environment variable.

    This is used the first time we need any settings at all, if the user
    has not previously configured the settings manually.
    """
    if settings_module is None:
        try:
            settings_module = os.environ[ENVIRONMENT_VARIABLE]
            if not settings_module:  # if it's set but is an empty string
                raise KeyError
        except KeyError:
            settings_module = None
            logger.warning("Environment variable %s is undefined, using "
                           "global_settings." % ENVIRONMENT_VARIABLE)
        else:
            logger.info("Settings module is specified in environment "
                        "variable %s." % ENVIRONMENT_VARIABLE)
    if settings_module is not None:
        logger.info("Using module %s to override global_settings."
                    % settings_module)
    self._wrapped = Settings(settings_module)
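# Hedged usage sketch for the lazy setup above: a guess at how _setup is
# normally triggered, not confirmed pymote API. The variable name
# 'PYMOTE_SETTINGS_MODULE', the 'mysettings' module and the import path
# of the lazy proxy are assumptions.
import os
os.environ['PYMOTE_SETTINGS_MODULE'] = 'mysettings'  # hypothetical module
from pymote.conf import settings  # assumed location of the lazy proxy
print(settings.COMM_RANGE)  # first attribute access calls _setup()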
def __init__(self, environment=None, channelType=None, algorithms=(),
             networkRouting=True, propagation_type=2, **kwargs):
    Graph.__init__(self)
    self._environment = environment or Environment()
    # assert(isinstance(self.environment, Environment))
    self.channelType = channelType or ChannelType(self._environment)
    if isinstance(self.channelType, Doi):
        doi = kwargs.pop('doi', 0)
        logger.debug("In DOI %s" % doi)
        self.channelType.set_params(doi=doi)
    self.channelType.environment = self._environment
    self.propagation = propagation.PropagationModel(
        propagation_type=propagation_type)
    self.pos = {}
    self.ori = {}
    self.labels = {}
    # self.star = star_graph
    self.name = "WSN"
    self._algorithms = ()
    self.algorithms = algorithms or settings.ALGORITHMS
    self.algorithmState = {'index': 0, 'step': 1, 'finished': False}
    self.outbox = []
    self.networkRouting = networkRouting
    self.comm_range = kwargs.pop('commRange', None) or settings.COMM_RANGE
    logger.info("Instance of Network has been initialized with %s (%s)"
                % (self.propagation, self.comm_range))
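# Hedged construction sketch for the constructor above; the Doi channel,
# the 'doi' value and commRange are illustrative assumptions. Note that
# 'doi' and 'commRange' travel through **kwargs and are popped inside.
env = Environment()
net = Network(environment=env, channelType=Doi(env),
              doi=0.4, propagation_type=2, commRange=100)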
def remove_node(self, node):
    """ Remove node from network. """
    if node not in self.nodes():
        logger.error("Node not in network")
        return
    Graph.remove_node(self, node)
    del self.pos[node]
    del self.labels[node]
    node.network = None
    logger.info('Node with id %d is removed.' % node.id)
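# Hypothetical add/remove round trip; the add_node signature is an
# assumption inferred from the attributes touched in remove_node above.
node = net.add_node(pos=(50, 50))
net.remove_node(node)   # detaches node and clears its pos/label entries
net.remove_node(node)   # second call only logs "Node not in network"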
def estimate_position(self, node):
    THRESHOLD = .1
    MAX_ITER = 10
    landmarks = []
    # get landmarks with hopsize data
    if self.dataKey in node.memory:
        landmarks = node.memory[self.dataKey].keys()
    # calculate estimated distances
    if len(landmarks) >= 3:
        dist = lambda x, y: sqrt(dot(x - y, x - y))
        landmark_max_positions = [array(node.memory[self.dataKey][lm][:2])
                                  for lm in landmarks]
        # take centroid as initial estimation
        pos = average(landmark_max_positions, axis=0)
        landmark_distances = []
        landmark_positions = []
        # keep only reliable anchors
        for lp in node.memory[self.dataKey].values():
            threshold = FloodingUpdate.lookup.get(lp[2], 0.75) * \
                node.commRange
            hl = dist(lp[:2], pos) / lp[2]
            logger.debug("Node=%s, Hop=%s, threshold=%s, hoplen=%s"
                         % (node.id, lp[2], threshold, hl))
            if hl > threshold and self.hopsizeKey in node.memory:
                # reliable
                landmark_distances.append(
                    lp[2] * (node.memory[self.hopsizeKey] or 1))
                landmark_positions.append(array(lp[:2]))
        if len(landmark_positions) < 3:
            # not enough reliable anchors for a 2D position fix
            return
        W = diag(ones(len(landmark_positions)))
        counter = 0
        while True:
            J = array([(lp - pos) / dist(lp, pos)
                       for lp in landmark_positions])
            range_correction = array([dist(landmark_positions[li], pos) -
                                      landmark_distances[li]
                                      for li, lm
                                      in enumerate(landmark_positions)])
            pos_correction = dot(linalg.inv(dot(dot(J.T, W), J)),
                                 dot(dot(J.T, W), range_correction))
            logger.debug("Est. %s, %s, %s" % (node.id, pos, pos_correction))
            pos = pos + pos_correction
            counter += 1
            if sqrt(sum(pos_correction ** 2)) < THRESHOLD or \
                    counter >= MAX_ITER:
                logger.info("Trilaterate break %s" % counter)
                break
        if counter < MAX_ITER:  # store only if the iteration converged
            node.memory[self.positionKey] = pos
            node.memory['reliable'] = landmark_positions
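# The loop above is a weighted least-squares (Gauss-Newton) trilateration:
# pos += inv(J'WJ) * J'W * r, with r the vector of range residuals. Below
# is a self-contained numpy sketch of the same update on synthetic
# anchors; all names and values are illustrative, not part of pymote.
from numpy import array, sqrt, dot, diag, ones, average, linalg

anchors = [array([0., 0.]), array([10., 0.]), array([0., 10.])]
true_pos = array([3., 4.])
dists = [sqrt(dot(true_pos - a, true_pos - a)) for a in anchors]

pos = average(anchors, axis=0)      # centroid as initial estimate
W = diag(ones(len(anchors)))        # identity weights
for _ in range(10):
    J = array([(a - pos) / sqrt(dot(a - pos, a - pos)) for a in anchors])
    r = array([sqrt(dot(a - pos, a - pos)) - d
               for a, d in zip(anchors, dists)])
    correction = dot(linalg.inv(dot(dot(J.T, W), J)), dot(dot(J.T, W), r))
    pos = pos + correction
    if sqrt(sum(correction ** 2)) < .1:
        break
print(pos)  # converges towards (3, 4)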
def read_pickle(path, not_found_raises=True):
    """
    Read object in Python pickle format.

    If not_found_raises is True then raise an exception if file is
    missing.
    """
    try:
        fh = _get_fh(str(path), 'rb')
        obj = pickle.load(fh)
        logger.info('instance of %s loaded: %s'
                    % (str(obj.__class__), path))
        return obj
    except IOError, e:
        # if the error is anything other than errno.ENOENT
        # ('file not found'), re-raise it
        if not_found_raises or e.errno != errno.ENOENT:
            raise
        return None
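# Hedged round-trip sketch combining read_pickle with write_pickle
# (defined further below); the file names and data are hypothetical.
data = {'nodes': 100}
write_pickle(data, 'out/data.gz')
restored = read_pickle('out/data.gz')
missing = read_pickle('no_such_file.gz', not_found_raises=False)  # None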
def __init__(self, environment=None, channelType=None, algorithms=(),
             networkRouting=True, **kwargs):
    self._environment = environment or Environment()
    # assert(isinstance(self.environment, Environment))
    self.channelType = channelType or ChannelType(self._environment)
    self.channelType.environment = self._environment
    self.pos = {}
    self.ori = {}
    self.labels = {}
    Graph.__init__(self)
    self._algorithms = ()
    self.algorithms = algorithms or settings.ALGORITHMS
    self.algorithmState = {'index': 0, 'step': 1, 'finished': False}
    self.outbox = []
    self.networkRouting = networkRouting
    logger.info("Instance of Network has been initialized.")
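# Hedged sketch of the algorithms tuple consumed by the setter above:
# (AlgorithmClass, params_dict) pairs, following pymote convention. The
# DVHop class name and the key names shown are assumptions.
net = Network()
net.algorithms = ((DVHop, {'truePositionKey': 'tp',
                           'hopsizeKey': 'hs',
                           'dataKey': 'I'}),)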
def estimate_position(self, node):
    THRESHOLD = .1
    MAX_ITER = 10
    landmarks = []
    # get landmarks with hopsize data
    if self.dataKey in node.memory:
        landmarks = node.memory[self.dataKey].keys()
    # calculate estimated distances
    if len(landmarks) >= 3:
        landmark_distances = []
        landmark_positions = []
        try:
            landmark_distances = [node.memory[self.dataKey][lm][2] *
                                  node.memory[self.hopsizeKey]
                                  for lm in landmarks]
            landmark_positions = [array(node.memory[self.dataKey][lm][:2])
                                  for lm in landmarks]
        except KeyError:
            # hopsize data not in memory yet
            pass
        if not landmark_positions:
            return
        # take centroid as initial estimation
        pos = average(landmark_positions, axis=0)
        W = diag(ones(len(landmarks)))
        counter = 0
        dist = lambda x, y: sqrt(dot(x - y, x - y))
        while not isnan(pos).any():  # stop if the estimate degenerates
            J = array([(lp - pos) / dist(lp, pos)
                       for lp in landmark_positions])
            range_correction = array([dist(landmark_positions[li], pos) -
                                      landmark_distances[li]
                                      for li, lm in enumerate(landmarks)])
            pos_correction = dot(linalg.inv(dot(dot(J.T, W), J)),
                                 dot(dot(J.T, W), range_correction))
            pos = pos + pos_correction
            logger.debug("Est. %s, %s, %s" % (node.id, pos, pos_correction))
            counter += 1
            if sqrt(sum(pos_correction ** 2)) < THRESHOLD or \
                    counter >= MAX_ITER:
                logger.info("Trilaterate break %s" % counter)
                break
        if counter < MAX_ITER:  # store only if the iteration converged
            node.memory[self.positionKey] = pos
            node.memory['reliable'] = landmark_positions
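# DV-hop flavour of the estimator above: ranges come from hop counts
# multiplied by an average hop size. A tiny illustrative calculation;
# the (x, y, hopcount) layout is taken from the [:2] and [2] indexing
# above, the numbers are made up.
hopsize = 42.0                           # average metres per hop
landmark_entry = (120.0, 80.0, 3)        # hypothetical (x, y, hops)
est_dist = landmark_entry[2] * hopsize   # 3 hops -> 126.0 m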
def validate_params(self, params):
    """ Validate if given network params match its real params. """
    logger.info('Validating params')
    count = params.get('count', None)  # for unit tests
    if count:
        if isinstance(count, list):
            assert len(self) in count
        else:
            assert len(self) == count
    n_min = params.get('n_min', 0)
    n_max = params.get('n_max', Inf)
    assert n_min <= len(self) <= n_max
    for param, value in params.items():
        if param == 'connected':
            assert not value or is_connected(self)
        elif param == 'degree':
            assert allclose(self.avg_degree(), value,
                            atol=settings.DEG_ATOL)
        elif param == 'environment':
            assert self.environment.__class__ == value.__class__
        elif param == 'channelType':
            assert self.channelType.__class__ == value.__class__
        elif param == 'comm_range':
            for node in self:
                assert node.commRange == value
        elif param == 'sensors':
            compositeSensor = CompositeSensor(Node(), value)
            for node in self:
                assert all(map(lambda s1, s2: pymote_equal_objects(s1, s2),
                               node.sensors, compositeSensor.sensors))
        elif param == 'aoa_pf_scale':
            for node in self:
                for sensor in node.sensors:
                    if sensor.name() == 'AoASensor':
                        assert sensor.probabilityFunction.scale == value
        elif param == 'dist_pf_scale':
            for node in self:
                for sensor in node.sensors:
                    if sensor.name() == 'DistSensor':
                        assert sensor.probabilityFunction.scale == value
    # TODO: refactor this part as setting algorithms resets nodes
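# Hedged usage sketch for the validator above; the param names mirror
# the branches it checks, the values are made up.
net.validate_params({'count': [99, 100, 101],
                     'connected': True,
                     'comm_range': 100})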
def estimate_position(self, node): TRESHOLD = .1 MAX_ITER = 10 landmarks = [] # get landmarks with hopsize data if self.dataKey in node.memory: landmarks = node.memory[self.dataKey].keys() # calculate estimated distances if len(landmarks) >= 3: landmark_distances = [] landmark_positions = [] try : landmark_distances = [node.memory[self.dataKey][lm][2] * node.memory[self.hopsizeKey] for lm in landmarks] landmark_positions = [array(node.memory[self.dataKey][lm][:2]) for lm in landmarks] except: pass # take centroid as initial estimation pos = average(landmark_positions, axis=0) W = diag(ones(len(landmarks))) counter = 0 dist = lambda x, y: sqrt(dot(x - y, x - y)) while pos.any() != nan: J = array([(lp - pos) / dist(lp, pos) for lp in landmark_positions]) range_correction = array([dist(landmark_positions[li], pos) - landmark_distances[li] for li, lm in enumerate(landmarks)]) pos_correction = dot(linalg.inv(dot(dot(J.T, W), J)), dot(dot(J.T, W), range_correction)) pos = pos + pos_correction logger.debug("Est. %s, %s, %s" %(node.id, pos, pos_correction)) counter += 1 if sqrt(sum(pos_correction ** 2)) < \ TRESHOLD or counter >= MAX_ITER: logger.info("Trilaterate break %s" % counter) break if counter <= MAX_ITER: node.memory[self.positionKey] = pos node.memory['reliable'] = landmark_positions
def __init__(self, settings_module=None):
    # update this dict from global settings, but only for ALL_CAPS settings
    for setting in dir(global_settings):
        if setting == setting.upper():
            logger.info('Setting %s on global value: %s'
                        % (setting, str(getattr(global_settings, setting))))
            setattr(self, setting, getattr(global_settings, setting))

    # store the settings module in case someone later cares
    self.SETTINGS_MODULE = settings_module

    if self.SETTINGS_MODULE:
        try:
            mod = import_module(self.SETTINGS_MODULE)
        except ImportError, e:
            raise ImportError("Could not import settings '%s' (Is it on "
                              "sys.path? Does it have syntax errors?): %s"
                              % (self.SETTINGS_MODULE, e))
        for setting in dir(mod):
            if setting == setting.upper():
                logger.info('Override %s on value in module: %s'
                            % (setting, str(getattr(mod, setting))))
                setattr(self, setting, getattr(mod, setting))
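# Hedged sketch of a custom settings module consumed by __init__ above:
# only ALL_CAPS attributes are copied over. The module name 'mysettings'
# and its values are hypothetical.
#
# mysettings.py:
#     COMM_RANGE = 120
#     DEG_ATOL = 0.5
#
settings = Settings('mysettings')
print(settings.COMM_RANGE)  # 120, overriding global_settings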
def reset_all_nodes(self):
    for node in self.nodes():
        node.reset()
    logger.info('Resetting all nodes.')
def reset(self):
    logger.info('Resetting network.')
    self.algorithmState = {'index': 0, 'step': 1, 'finished': False}
    self.reset_all_nodes()
def write_pickle(obj, path, makedir=True):
    """Write object in Python pickle format."""
    # TODO: use normal pickling by implementing pickling protocol for
    # Network class
    # http://docs.python.org/library/pickle.html#the-pickle-protocol
    # TODO: find out origin of maximum recursion depth problem,
    # hack solution:
    sys.setrecursionlimit(6000)
    if makedir:
        try:
            os.makedirs(os.path.split(path)[0])
        except OSError, e:
            if e.errno != errno.EEXIST and e.filename != '':
                raise
    fh = _get_fh(str(path), mode='wb')
    pickle.dump(obj, fh, pickle.HIGHEST_PROTOCOL)
    fh.close()
    logger.info('instance of %s saved in %s' % (str(obj.__class__), path))

write_npickle = write_pickle
def wait(self, node, message):
    if message.header == self.TRA:
        print("------------------------------")
        logger.info("Message to sink received!")
        print("------------------------------")