def __new__(cls):
    """Create new instance."""
    LOG.debug('Starting publisher')
    instance = super().__new__(cls)
    instance.pub = NoisyPublisher('l2processor')
    instance.pub.start()
    return instance
def __init__(self, config):
    """Initialize the fire mapper state from *config* (a dict)."""
    # Configuration dictionary for ForestFire class
    self.config = config
    # Unprojected data with all the required channels, angles and
    # coordinates
    self.data = None
    # Channel metadata
    self.metadata = None
    # Common mask for all the datasets in self.data.  All invalid pixels
    # are set as True; after processing, the locations marked as False
    # are the valid forest fires.
    self.mask = None
    # Built-in cloud mask
    self.cloud_mask = None
    # NWC SAF PPS cloud mask
    self.nwc_mask = None
    # Result of fire mapping
    self.fires = {}
    # Publisher: created only when configured and NoisyPublisher is
    # available; otherwise both handles stay None.
    self._pub = None
    self.pub = None
    if "publisher" in self.config and NoisyPublisher:
        self._pub = NoisyPublisher("satfire", **self.config["publisher"])
        self.pub = self._pub.start()
def __init__(self, topic, instrument, config_item, posttroll_port=0,
             filepattern=None, aliases=None, tbus_orbit=False, history=0,
             granule_length=0, custom_vars=None, nameservers=None,
             watchManager=None):
    """Set up the file-event handler and start its publisher.

    Bug fix: *nameservers* previously defaulted to a mutable list
    literal (``[]``), which Python evaluates once and shares between
    every call; it now defaults to ``None`` and a fresh list is created
    per instance.  Callers that passed nothing still effectively get
    an empty list, so the change is backward compatible.
    """
    super(EventHandler, self).__init__()
    # Avoid the shared-mutable-default pitfall.
    if nameservers is None:
        nameservers = []
    self._pub = NoisyPublisher("trollstalker_" + config_item,
                               posttroll_port, topic,
                               nameservers=nameservers)
    self.pub = self._pub.start()
    self.topic = topic
    self.info = OrderedDict()
    # Fall back to a placeholder pattern when none was configured.
    if filepattern is None:
        filepattern = '(unknown)'
    self.file_parser = Parser(filepattern)
    self.instrument = instrument
    self.aliases = aliases
    self.custom_vars = custom_vars
    self.tbus_orbit = tbus_orbit
    self.granule_length = granule_length
    # Bounded history of already-seen events (maxlen == history).
    self._deque = deque([], history)
    self._watchManager = watchManager
    self._watched_dirs = dict()
def __init__(self, config):
    """Build a watchdog-based stalker from *config*."""
    self.config = config.copy()
    # Normalise a single pattern into a list of patterns.
    patterns = self.config["filepattern"]
    if isinstance(patterns, (str, bytes)):
        patterns = [patterns]
        self.config["filepattern"] = patterns
    self.parsers = [Parser(pattern) for pattern in patterns]
    self.aliases = parse_aliases(config)
    self.topic = self.config["topic"]
    self.tbus_orbit = self.config.get("tbus_orbit", False)
    globs = [parser.globify() for parser in self.parsers]
    logger.debug("Looking for: %s", str(globs))
    AbstractWatchDogProcessor.__init__(self, globs,
                                       config.get("watcher", "Observer"))
    self._pub = NoisyPublisher("trollstalker",
                               int(self.config["posttroll_port"]),
                               self.config["topic"])
    self.pub = None
    # Drop settings that are only meaningful at construction time, plus
    # all alias_* entries (already folded into self.aliases).
    obsolete_keys = ["topic", "filepattern", "tbus_orbit", "posttroll_port",
                     "watch", "config_item", "configuration_file"]
    for key in list(self.config.keys()):
        if key in obsolete_keys or key.startswith("alias_"):
            del self.config[key]
def _setup_and_start_communication(self):
    """Set up the Posttroll communication and start the publisher."""
    LOG.debug("Input topic: %s", self.input_topic)
    self.publisher = NoisyPublisher("end_user_notifier")
    self.publisher.start()
    self.listener = ListenerContainer(topics=[self.input_topic])
    self.loop = True
    # Allow a clean shutdown when the process receives SIGTERM.
    signal.signal(signal.SIGTERM, self.signal_shutdown)
def __setstate__(self, kwargs):
    """Set things running even when loading from YAML."""
    port = kwargs.get('port', 0)
    nameservers = kwargs.get('nameservers', None)
    self.port = port
    self.nameservers = nameservers
    LOG.debug('Starting publisher')
    self.pub = NoisyPublisher('l2processor', port=port,
                              nameservers=nameservers)
    self.pub.start()
def send_message(topic, info, message_type):
    '''Send message with the given topic and info.

    Bug fix: the original used a Python 2 print *statement*
    (``print "..." % ...``), which is a SyntaxError on Python 3; it is
    now the print() function.
    '''
    pub_ = NoisyPublisher("dummy_sender", 0, topic)
    pub = pub_.start()
    # Give the nameserver/subscribers a moment to connect before sending.
    time.sleep(2)
    msg = Message(topic, message_type, info)
    print("Sending message: %s" % str(msg))
    pub.send(str(msg))
    pub_.stop()
def create_publisher(cfgfile):
    """Create and start the publisher named in *cfgfile*, or return None."""
    cfg = ConfigParser()
    cfg.read(cfgfile)
    try:
        name = cfg.get("local_reception", "publisher")
    except NoOptionError:
        # No publisher option configured at all.
        return None
    if not name:
        # Option present but empty: behave as if unconfigured.
        return None
    from posttroll.publisher import NoisyPublisher
    publisher = NoisyPublisher(name, 0)
    publisher.start()
    return publisher
def _setup_and_start_communication(self):
    """Set up the Posttroll communication and start the publisher."""
    logger.debug("Starting up... Input topic: %s", self.input_topic)
    local_now = datetime_from_utc_to_local(datetime.now(), self.timezone)
    logger.debug("Output times for timezone: {zone} Now = {time}".format(
        zone=str(self.timezone), time=local_now))
    self.listener = ListenerContainer(topics=[self.input_topic])
    self.publisher = NoisyPublisher("active_fires_postprocessing")
    self.publisher.start()
    self.loop = True
    # Allow a clean shutdown when the process receives SIGTERM.
    signal.signal(signal.SIGTERM, self.signal_shutdown)
def __setstate__(self, kwargs):
    """Set things running even when loading from YAML."""
    LOG.debug('Starting publisher')
    self.port = kwargs.get('port', 0)
    # NOTE(review): the default is "" (not None), so a missing
    # 'nameservers' key takes the NoisyPublisher branch; only an explicit
    # null/None in the loaded state selects the plain Publisher — confirm
    # this is intended.
    self.nameservers = kwargs.get('nameservers', "")
    if self.nameservers is not None:
        self.pub = NoisyPublisher('l2processor', port=self.port,
                                  nameservers=self.nameservers)
    else:
        self.pub = Publisher("tcp://*:" + str(self.port), "l2processor")
    self.pub.start()
def __init__(self, pub, interval=30, **kwargs):
    """Create a heartbeat thread sending *kwargs* every *interval* seconds."""
    Thread.__init__(self)
    self._loop = True
    self._event = Event()
    self._to_send = kwargs
    self._interval = interval
    if pub is None:
        # No publisher supplied: create our own and remember that we
        # are responsible for stopping it.
        self._pub = NoisyPublisher("Heart", 0)
        self._pub.start()
        self._stop_pub = True
    else:
        self._pub = pub
        self._stop_pub = False
def setup_publisher(self):
    """Initialize publisher, unless one already exists."""
    if self._np is not None:
        return
    try:
        nameservers = self._config["nameservers"]
        if nameservers:
            nameservers = nameservers.split()
        self._np = NoisyPublisher("move_it_" + self._name,
                                  port=self._config["publish_port"],
                                  nameservers=nameservers)
        self.publisher = self._np.start()
    except (KeyError, NameError):
        # Missing config keys or unavailable NoisyPublisher: run without.
        pass
def __init__(self, config_file, publish_port=None, publish_nameservers=None):
    """Initialize dispatcher class."""
    super().__init__()
    self.config = None
    self.topics = None
    self.listener = None
    if publish_port is None:
        self.publisher = None
    else:
        self.publisher = NoisyPublisher("dispatcher", port=publish_port,
                                        nameservers=publish_nameservers)
        self.publisher.start()
    self.loop = True
    # Watch the configuration file and reload on change.
    self.config_handler = DispatchConfig(config_file, self.update_config)
    signal.signal(signal.SIGTERM, self.signal_shutdown)
def __init__(self, topic, instrument, posttroll_port=0, filepattern=None,
             aliases=None, tbus_orbit=False):
    """Set up the file-event handler and start its publisher."""
    super(EventHandler, self).__init__()
    self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
    self.pub = self._pub.start()
    self.topic = topic
    self.info = {}
    # Fall back to a placeholder pattern when none was configured.
    pattern = '(unknown)' if filepattern is None else filepattern
    self.file_parser = Parser(pattern)
    self.instrument = instrument
    self.aliases = aliases
    self.tbus_orbit = tbus_orbit
def __init__(self, config):
    """Set up compositing state, messaging and the target area definition."""
    self.config = config
    # self.slots maps slot start times to per-composite state:
    #   {datetime(): {composite: {"img": None, "num": 0},
    #                 "timeout": None}}
    self.slots = {}
    self._parse_settings()
    self._listener = ListenerContainer(topics=config["topics"])
    self._set_message_settings()
    self._publisher = NoisyPublisher(
        "WorldCompositePublisher",
        port=self.port,
        aliases=self.aliases,
        broadcast_interval=self.broadcast_interval,
        nameservers=self.nameservers,
    )
    self._publisher.start()
    self._loop = False
    # Accept either an area name (resolved via get_area_def) or a
    # ready-made area definition object.
    area = config["area_def"]
    self.adef = get_area_def(area) if isinstance(area, str) else area
def __init__(self, name, config):
    """Init a chain object."""
    super(Chain, self).__init__()
    self._config = config
    self._name = name
    self.publisher = None
    self.listeners = {}
    self.listener_died_event = Event()
    self.running = True
    # Set up the publisher; silently skip when the config keys are
    # missing or NoisyPublisher is unavailable.
    try:
        ns = self._config["nameservers"]
        ns = ns.split() if ns else ns
        self.publisher = NoisyPublisher("move_it_" + self._name,
                                        port=self._config["publish_port"],
                                        nameservers=ns)
        self.publisher.start()
    except (KeyError, NameError):
        pass
def test_listener_container(self):
    """Test listener container"""
    from posttroll.listener import ListenerContainer
    from posttroll.message import Message
    from posttroll.publisher import NoisyPublisher
    pub = NoisyPublisher("test")
    pub.start()
    sub = ListenerContainer(topics=["/counter"])
    # Give the nameserver time to propagate addresses.
    time.sleep(2)
    for idx in range(5):
        received_ok = False
        sent = Message("/counter", "info", str(idx))
        pub.send(str(sent))
        received = sub.output_queue.get(True, 1)
        if received is not None:
            self.assertEqual(str(received), str(sent))
            received_ok = True
        self.assertTrue(received_ok)
    pub.stop()
    sub.stop()
def __init__(self, cmd_args):
    """Initialize client."""
    super(MoveItClient, self).__init__(cmd_args, "client")
    publisher = NoisyPublisher("move_it_client")
    self._np = publisher
    self.sync_publisher = publisher.start()
    self.setup_watchers(cmd_args)
def reload_config(filename, disable_backlog=False):
    """Rebuild chains if needed (if the configuration changed) from *filename*.
    """
    LOGGER.debug("New config file detected! %s", filename)
    new_chains = read_config(filename)
    old_glob = []
    config_changed = False
    for key, val in new_chains.items():
        identical = True
        if key in chains:
            # Compare everything except the runtime-only objects to decide
            # whether this chain actually changed.
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["notifier", "publisher"]) and
                        ((key2 not in chains[key]) or
                         (chains[key][key2] != val2))):
                    identical = False
                    config_changed = True
                    break
            if identical:
                continue
            # The chain changed: stop its old notifier/publisher before
            # replacing the definition.
            chains[key]["notifier"].stop()
            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()
        chains[key] = val
        try:
            chains[key]["publisher"] = NoisyPublisher("move_it_" + key,
                                                      val["publish_port"])
        except (KeyError, NameError):
            # No publish port configured, or NoisyPublisher unavailable.
            pass
        chains[key]["notifier"] = create_notifier(val)
        # create logger too!
        if "publisher" in chains[key]:
            pub = chains[key]["publisher"].start()
        chains[key]["notifier"].start()
        old_glob.append(globify(val["origin"]))
        if "publisher" in chains[key]:
            # val=val, pub=pub bind the current loop values as defaults so
            # each chain's hooks keep their own configuration (avoids the
            # late-binding-closure pitfall).
            def copy_hook(pathname, dest, val=val, pub=pub):
                # Publish a 'file' message announcing *pathname* copied to
                # *dest* (an already-parsed URL).
                fname = os.path.basename(pathname)
                destination = urlunparse((dest.scheme,
                                          dest.hostname,
                                          os.path.join(dest.path, fname),
                                          dest.params,
                                          dest.query,
                                          dest.fragment))
                # Static metadata from the config: "k1=v1;k2=v2" pairs;
                # comma-separated values become lists.
                info = val.get("info", "")
                if info:
                    info = dict(
                        (elt.strip().split('=') for elt in info.split(";")))
                    for infokey, infoval in info.items():
                        if "," in infoval:
                            info[infokey] = infoval.split(",")
                else:
                    info = {}
                # Metadata parsed from the file name; fall back to the
                # origin pattern stripped of its extension.
                try:
                    info.update(
                        parse(os.path.basename(val["origin"]),
                              os.path.basename(pathname)))
                except ValueError:
                    info.update(
                        parse(
                            os.path.basename(
                                os.path.splitext(val["origin"])[0]),
                            os.path.basename(pathname)))
                info['uri'] = destination
                info['uid'] = fname
                msg = Message(val["topic"], 'file', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: %s", str(msg))
            chains[key]["copy_hook"] = copy_hook

            def delete_hook(pathname, val=val, pub=pub):
                # Publish a 'del' message when *pathname* is removed.
                fname = os.path.basename(pathname)
                info = val.get("info", "")
                if info:
                    info = dict(
                        (elt.strip().split('=') for
                         elt in info.split(";")))
                info['uri'] = pathname
                info['uid'] = fname
                msg = Message(val["topic"], 'del', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: %s", str(msg))
            chains[key]["delete_hook"] = delete_hook
        if not identical:
            LOGGER.debug("Updated %s", key)
        else:
            LOGGER.debug("Added %s", key)
    # Remove chains that disappeared from the configuration.
    for key in (set(chains.keys()) - set(new_chains.keys())):
        chains[key]["notifier"].stop()
        del chains[key]
        LOGGER.debug("Removed %s", key)
    if config_changed:
        LOGGER.debug("Reloaded config from %s", filename)
    else:
        LOGGER.debug("No changes to reload in %s", filename)
    # Backlog handling: touch pre-existing files matching the watched
    # patterns so the notifiers pick them up as new events.
    if old_glob and not disable_backlog:
        fnames = []
        for pattern in old_glob:
            fnames += glob.glob(pattern)
        if fnames:
            time.sleep(3)
            LOGGER.debug("Touching old files")
            for fname in fnames:
                if os.path.exists(fname):
                    fp_ = open(fname, "ab")
                    fp_.close()
        old_glob = []
        LOGGER.info("Old files transferred")
def __new__(cls):
    """Build the instance and attach a started 'l2processor' publisher."""
    obj = super().__new__(cls)
    LOG.debug('Starting publisher')
    obj.pub = NoisyPublisher('l2processor')
    obj.pub.start()
    return obj
def reload_config(filename, chains, callback=request_push, pub_instance=None):
    """Rebuild chains if needed (if the configuration changed) from *filename*.

    Updated or new chains get a fresh publisher and fresh listeners;
    chains that disappeared from the configuration are stopped and
    removed.  *chains* is updated in place.

    Bug fix: the teardown loop used ``dict.iteritems()`` (removed in
    Python 3) and deleted entries from the dict it was iterating (a
    RuntimeError on Python 3); it now iterates over a snapshot with
    ``list(...items())``.
    """
    LOGGER.debug("New config file detected! " + filename)
    new_chains = read_config(filename)
    # setup new chains
    for key, val in new_chains.items():
        identical = True
        if key in chains:
            # Compare everything except the runtime-only objects.
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["listeners", "publisher"]) and
                        ((key2 not in chains[key]) or
                         (chains[key][key2] != val2))):
                    identical = False
                    break
            if identical:
                continue
            # The chain changed: stop its old publisher and listeners.
            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()
            for provider in chains[key]["providers"]:
                chains[key]["listeners"][provider].stop()
                del chains[key]["listeners"][provider]
        chains[key] = val
        try:
            chains[key]["publisher"] = NoisyPublisher("move_it_" + key,
                                                      val["publish_port"])
        except (KeyError, NameError):
            # No publish port configured, or NoisyPublisher unavailable.
            pass
        chains[key].setdefault("listeners", {})
        try:
            topics = []
            if "topic" in val:
                topics.append(val["topic"])
            if val.get("heartbeat", False):
                topics.append(HEARTBEAT_TOPIC)
            for provider in chains[key]["providers"]:
                chains[key]["listeners"][provider] = Listener(
                    provider, topics, callback, pub_instance=pub_instance,
                    **chains[key])
                chains[key]["listeners"][provider].start()
        except Exception as err:
            LOGGER.exception(str(err))
            raise
        # create logger too!
        if "publisher" in chains[key]:
            chains[key]["publisher"].start()
        if not identical:
            LOGGER.debug("Updated " + key)
        else:
            LOGGER.debug("Added " + key)
    # disable old chains
    for key in (set(chains.keys()) - set(new_chains.keys())):
        # Iterate over a snapshot: the loop body mutates the dict.
        for provider, listener in list(chains[key]["providers"].items()):
            listener.stop()
            del chains[key]["providers"][provider]
        if "publisher" in chains[key]:
            chains[key]["publisher"].stop()
        del chains[key]
        LOGGER.debug("Removed " + key)
    LOGGER.debug("Reloaded config from " + filename)
def _create_publisher(self):
    """Start a 'dispatcher' publisher when a publish port is configured."""
    if self._publish_port is None:
        return
    self.publisher = NoisyPublisher("dispatcher",
                                    port=self._publish_port,
                                    nameservers=self._publish_nameservers)
    self.publisher.start()
def __init__(self, cmd_args):
    """Initialize the move_it client and its file watchers."""
    super(MoveItClient, self).__init__(cmd_args, "client")
    np_ = NoisyPublisher("move_it_client")
    self._np = np_
    self.pub = np_.start()
    self.setup_watchers(cmd_args)
# NOTE(review): fragment — the statement above this chunk (presumably a
# logging.handlers.TimedRotatingFileHandler(...) call whose opening is cut
# off — TODO confirm) and the body of chains_stop() below are outside this
# view.
                                              "midnight", backupCount=7)
else:
    # No log file configured: log to the console instead.
    fh = logging.StreamHandler()
formatter = logging.Formatter(log_format)
fh.setFormatter(formatter)
LOGGER.addHandler(fh)
LOGGER = logging.getLogger('move_it_client')
# Route pyinotify's own log output through the same handler.
pyinotify.log.handlers = [fh]
LOGGER.info("Starting up.")
NP = NoisyPublisher("move_it_client")
PUB = NP.start()
# Watch for file writes, moves into and creations in the config directory.
mask = (pyinotify.IN_CLOSE_WRITE |
        pyinotify.IN_MOVED_TO |
        pyinotify.IN_CREATE)
watchman = pyinotify.WatchManager()

def reload_cfg_file(filename, *args, **kwargs):
    # Reload the chains, handing every listener the shared publisher.
    reload_config(filename, chains, *args, pub_instance=PUB, **kwargs)

notifier = pyinotify.ThreadedNotifier(
    watchman,
    EventHandler(reload_cfg_file, cmd_filename=cmd_args.config_file))
watchman.add_watch(os.path.dirname(cmd_args.config_file), mask)

def chains_stop(*args):
def __init__(self, name, port=0):
    """Create a logging handler that forwards records via a publisher."""
    logging.Handler.__init__(self)
    publisher = NoisyPublisher(name, port)
    publisher.start()
    self._publisher = publisher
def reload_config(filename, chains, callback=request_push, pub_instance=None):
    """Rebuild chains if needed (if the configuration changed) from *filename*."""
    LOGGER.debug("New config file detected: %s", filename)
    new_chains = read_config(filename)
    # setup new chains
    for key, val in new_chains.items():
        identical = True
        if key in chains:
            # Compare everything except the runtime-only objects to decide
            # whether this chain actually changed.
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["listeners", "publisher"]) and
                        ((key2 not in chains[key]) or
                         (chains[key][key2] != val2))):
                    identical = False
                    break
            if identical:
                continue
            # The chain changed: stop its old publisher and listeners.
            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()
            for provider in chains[key]["providers"]:
                chains[key]["listeners"][provider].stop()
                del chains[key]["listeners"][provider]
        chains[key] = val
        try:
            # nameservers may be a whitespace-separated string in the config.
            nameservers = val["nameservers"]
            if nameservers:
                nameservers = nameservers.split()
            chains[key]["publisher"] = NoisyPublisher(
                "move_it_" + key,
                port=val["publish_port"],
                nameservers=nameservers)
        except (KeyError, NameError):
            # No publishing configured, or NoisyPublisher unavailable.
            pass
        chains[key].setdefault("listeners", {})
        try:
            topics = []
            if "topic" in val:
                topics.append(val["topic"])
            if val.get("heartbeat", False):
                topics.append(SERVER_HEARTBEAT_TOPIC)
            for provider in chains[key]["providers"]:
                if '/' in provider.split(':')[-1]:
                    # The provider URI carries a path component: split it
                    # into the address part and a topic (the path).
                    parts = urlparse(provider)
                    if parts.scheme != '':
                        provider = urlunparse(
                            (parts.scheme, parts.netloc, '', '', '', ''))
                    else:
                        # If there's no scheme, urlparse thinks the
                        # URI is a local file
                        provider = urlunparse(
                            ('tcp', parts.path, '', '', '', ''))
                    topics.append(parts.path)
                    # NOTE(review): *topics* is shared across this loop, so
                    # a path appended here is also used for every following
                    # provider of the chain — confirm this accumulation is
                    # intended.
                LOGGER.debug("Add listener for %s with topic %s",
                             provider, str(topics))
                chains[key]["listeners"][provider] = Listener(
                    provider, topics, callback, pub_instance=pub_instance,
                    **chains[key])
                chains[key]["listeners"][provider].start()
        except Exception as err:
            LOGGER.exception(str(err))
            raise
        # create logger too!
        if "publisher" in chains[key]:
            chains[key]["publisher"].start()
        if not identical:
            LOGGER.debug("Updated %s", key)
        else:
            LOGGER.debug("Added %s", key)
    # disable old chains
    for key in (set(chains.keys()) - set(new_chains.keys())):
        for provider in chains[key]["providers"]:
            listener = chains[key]["listeners"][provider]
            listener.stop()
            del chains[key]["listeners"][provider]
        if "publisher" in chains[key]:
            chains[key]["publisher"].stop()
        del chains[key]
        LOGGER.debug("Removed %s", key)
    LOGGER.debug("Reloaded config from %s", filename)