def main():
    """Command-line entry point for the sensors-asus-ec tool.

    With --daemon, starts a background TCP server that publishes sensor
    readings.  Without it, queries a running server on 127.0.0.1
    (falling back to a direct read) and prints the values.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-d', '--daemon', dest='daemon',
                            action='store_true', default=False, required=False)
    arg_parser.add_argument('-f', '--foreground', dest='foreground',
                            action='store_true', default=False, required=False)
    arg_parser.add_argument('-p', '--pidfile', dest='pidfile',
                            default='/var/run/sensors-asus-ec.pid', required=False)
    # type=int so a port supplied on the command line is usable below;
    # previously only the default was an int and a user-supplied value
    # arrived as a string, breaking Server(...) / socket.connect(...).
    arg_parser.add_argument('-P', '--port', dest='port', type=int,
                            default=2787, required=False)
    args = arg_parser.parse_args()

    if args.daemon:
        reader = Reader(daemonize=True)
        server = Server(args.port, reader)
        daemon = Daemonize(app='sensors-asus-ec', pid=args.pidfile,
                           foreground=args.foreground, action=server.run)
        daemon.start()
    else:
        data = None
        # Best effort: ask a running daemon first.  Narrow exception
        # types and a context-managed socket replace the original bare
        # ``except`` and leaked socket; failure just falls through.
        try:
            with socket.socket() as s:
                s.connect(('127.0.0.1', args.port))
                data = s.recv(1024).decode()
            data = json.loads(data)
        except (OSError, ValueError):
            data = None
        if not data:
            # No daemon answered: read the sensors directly.
            try:
                reader = Reader()
                data = reader.get()
            except Exception:
                print("Unable to read data.")
                sys.exit(1)
        for k, v in data.items():
            print('{:20}: {}'.format(k, v))
def start_reactor(
    appname,
    soft_file_limit,
    gc_thresholds,
    pid_file,
    daemonize,
    cpu_affinity,
    logger,
):
    """ Run the reactor in the main process

    Daemonizes if necessary, and then configures some resources, before starting
    the reactor

    Args:
        appname (str): application name which will be sent to syslog
        soft_file_limit (int):
        gc_thresholds:
        pid_file (str): name of pid file to write to if daemonize is True
        daemonize (bool): true to run the reactor in a background process
        cpu_affinity (int|None): cpu affinity mask
        logger (logging.Logger): logger instance to pass to Daemonize
    """

    def run():
        # make sure that we run the reactor with the sentinel log context,
        # otherwise other PreserveLoggingContext instances will get confused
        # and complain when they see the logcontext arbitrarily swapping
        # between the sentinel and `run` logcontexts.
        with PreserveLoggingContext():
            logger.info("Running")
            if cpu_affinity is not None:
                # `affinity` is an optional dependency; bail out with an
                # install hint rather than failing with a NameError later.
                if not affinity:
                    quit_with_error(
                        "Missing package 'affinity' required for cpu_affinity\n"
                        "option\n\n"
                        "Install by running:\n\n"
                        "   pip install affinity\n\n"
                    )
                logger.info("Setting CPU affinity to %s" % cpu_affinity)
                affinity.set_process_affinity_mask(0, cpu_affinity)
            # Resource limits and GC thresholds must be applied in the
            # process that actually runs the reactor (i.e. after any fork).
            change_resource_limit(soft_file_limit)
            if gc_thresholds:
                gc.set_threshold(*gc_thresholds)
            reactor.run()

    # Either hand `run` to Daemonize (which forks and runs it in the
    # detached child) or invoke it directly in this process.
    if daemonize:
        daemon = Daemonize(
            app=appname,
            pid=pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
def main():
    '''Wrapper main(), just enough to decide to daemonize or not'''
    global args

    argp = argparse.ArgumentParser(
        description='load ACARS logs into a mongodb instance',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    argp.add_argument('-b', '--bind', dest='bind', type=str, metavar='IP',
                      default='localhost',
                      help='hostname or IP address to listen on')
    argp.add_argument('-p', '--port', dest='port', type=int, metavar='PORT',
                      default=5555, help='port to listen on')
    argp.add_argument('-f', '--file', dest='file', type=str, metavar='FILE',
                      default=None,
                      help='Read file instead of doing network I/O')
    argp.add_argument('-m', '--mongodb', dest='db', metavar='MONGO',
                      default=None, help='MongoDB server url')
    argp.add_argument('-v', '--verbose', dest='verbose', action='count',
                      default=0, help='increase verbosity')
    argp.add_argument('-d', '--daemon', dest='daemon', action='store_true',
                      default=False, help='detach from controlling terminal')
    args = argp.parse_args()

    # Foreground run is the simple case; bail out early.
    if not args.daemon:
        skyshark_acars_loader()
        return

    procname = 'skyshark_acars_loader'
    Daemonize(app=procname,
              pid='/tmp/{}.pid'.format(procname),
              action=skyshark_acars_loader).start()
def start(config_options):
    """Boot the synchrotron worker: load config, build the server object,
    start listening, and run the reactor (daemonized if configured)."""
    try:
        config = HomeServerConfig.load_config("Synapse synchrotron",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    # This entry point only makes sense for the synchrotron worker app.
    assert config.worker_app == "synapse.app.synchrotron"

    setup_logging(config, use_worker_options=True)

    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def run():
        # make sure that we run the reactor with the sentinel log context,
        # otherwise other PreserveLoggingContext instances will get confused
        # and complain when they see the logcontext arbitrarily swapping
        # between the sentinel and `run` logcontexts.
        with PreserveLoggingContext():
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        # Deferred until the reactor is running.
        ss.get_datastore().start_profiling()
        ss.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    # Run the reactor in a daemonized child or directly in this process.
    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-synchrotron",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
def start_replica(self):
    """
    The method starts a new replica process.
    Is compulsory to specify a source name when running this method.
    """
    # pid file path for this source's replica process
    replica_pid = os.path.expanduser(
        '%s/%s.pid' % (self.config["pid_dir"], self.args.source))
    if self.args.source == "*":
        print("You must specify a source name using the argument --source")
    else:
        self.pg_engine.connect_db()
        self.logger.info(
            "Checking if the replica for source %s is stopped " %
            (self.args.source))
        replica_status = self.pg_engine.get_replica_status()
        # Refuse to start when a replica is already active or syncing.
        if replica_status in ['syncing', 'running', 'initialising']:
            print(
                "The replica process is already started or is syncing. Aborting the command."
            )
        elif replica_status == 'error':
            # Error state requires an explicit operator action first.
            print("The replica process is in error state.")
            print(
                "You may need to check the replica status first. To enable it run the following command."
            )
            print("chameleon.py enable_replica --config %s --source %s " %
                  (self.args.config, self.args.source))
        else:
            # Discard stale, unprocessed batches before (re)starting.
            self.logger.info(
                "Cleaning not processed batches for source %s" %
                (self.args.source))
            self.pg_engine.clean_not_processed_batches()
            self.pg_engine.disconnect_db()
            if self.args.debug:
                # Debug mode: run in-process, no daemon.
                self.__run_replica()
            else:
                # Logging to stdout forces the daemon to stay in the
                # foreground so the output remains visible.
                if self.config["log_dest"] == 'stdout':
                    foreground = True
                else:
                    foreground = False
                print("Starting the replica process for source %s" %
                      (self.args.source))
                keep_fds = [self.logger_fds]
                app_name = "%s_replica" % self.args.source
                replica_daemon = Daemonize(app=app_name, pid=replica_pid,
                                           action=self.__run_replica,
                                           foreground=foreground,
                                           keep_fds=keep_fds)
                try:
                    replica_daemon.start()
                except:
                    # NOTE(review): bare except also catches the SystemExit
                    # that Daemonize.start() raises when the pid file already
                    # exists -- presumably intentional so a duplicate start
                    # only prints a message; confirm before narrowing.
                    print(
                        "The replica process is already started. Aborting the command."
                    )
def main():
    """Parse command-line options and launch the awsspotd daemon."""
    parser = optparse.OptionParser()

    aws_opts = optparse.OptionGroup(parser, 'AWS Options')
    aws_opts.add_option('--region', default='us-east-1',
                        help='AWS region to manage Spot Instances in')
    parser.add_option_group(aws_opts)

    parser.add_option('--verbose', action='store_true', default=False,
                      help='Enable verbose logging')
    parser.add_option('--daemon', action='store_false', dest='foreground',
                      default=True, help='Start awsspotd in the background')
    parser.add_option('--pidfile', default=PIDFILE,
                      help='Location of PID file')

    poll_opts = optparse.OptionGroup(parser, 'Polling Options')
    poll_opts.add_option('--polling-interval', '-p', type='int',
                         default=SPOT_INSTANCE_POLLING_INTERVAL,
                         metavar='<value>',
                         help='Polling interval in seconds (default: %default)')
    parser.add_option_group(poll_opts)

    opts, extra = parser.parse_args()

    # Validate the region name against what boto knows about.
    known = [r for r in boto.ec2.regions() if r.name == opts.region]
    if not known:
        sys.stderr.write('Error: Invalid EC2 region [{0}] specified\n'.format(
            opts.region))
        sys.exit(1)

    app = AWSSpotdAppClass(opts, extra)

    Daemonize(app='awsspotd',
              pid=opts.pidfile,
              action=app.run,
              verbose=opts.verbose,
              foreground=opts.foreground).start()
def start(config_options):
    """Boot the media-repository worker: load config, build the server,
    start listening, and run the reactor (daemonized if configured)."""
    try:
        config = HomeServerConfig.load_config("Synapse media repository",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    # This entry point only makes sense for the media_repository worker.
    assert config.worker_app == "synapse.app.media_repository"

    setup_logging(config.worker_log_config, config.worker_log_file)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    ss = MediaRepositoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.get_handlers()
    ss.start_listening(config.worker_listeners)

    def run():
        # Apply resource limits / GC thresholds in the process that
        # actually runs the reactor (i.e. after any daemon fork).
        with LoggingContext("run"):
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        # Deferred until the reactor is running.
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()
        ss.replicate()

    reactor.callWhenRunning(start)

    # Run the reactor in a daemonized child or directly in this process.
    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-media-repository",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
def watch():
    """
    Watches for figures.

    Detaches watch_daemon into a background process via Daemonize.
    """
    watcher = Daemonize(
        app='inkscape-figures',
        pid='/tmp/inkscape-figures.pid',
        action=watch_daemon,
    )
    watcher.start()
    print("Watching figures.")
def start_daemon(processor, logger, file_descriptors):
    """Run *processor* under Daemonize in the foreground.

    The given file descriptors (typically log handler fds) are kept
    open across the fork.
    """
    Daemonize(
        app=processor.SYSLOG_NAME,
        pid=processor.pid_path,
        action=processor.start,
        keep_fds=file_descriptors,
        logger=logger,
        foreground=True,
    ).start()
def setup_daemon():
    """
    Setup the daemon.

    Set the PID_FILE, action, logger config.  Returns the prepared
    (not yet started) Daemonize instance.
    """
    daemon_logger = logging.getLogger("leexportpy_daemon")
    return Daemonize(app='leexportpy',
                     pid=PID_FILE_PATH,
                     action=start_leexportpy_jobs,
                     logger=daemon_logger)
def main():
    """CLI entry point for the KOLEJKA observer.

    Parses arguments, configures logging verbosity and the process
    title, then serves the observer socket either in the foreground or
    detached as a daemon.
    """
    import argparse
    import logging
    import setproctitle
    from daemonize import Daemonize
    # OBSERVER_PID_FILE was referenced below but never imported in the
    # original, raising NameError while building the parser; unused
    # ``os`` and ``traceback`` imports dropped.
    from kolejka.common.settings import OBSERVER_PID_FILE, OBSERVER_SOCKET
    from kolejka.observer import KolejkaObserverServer

    parser = argparse.ArgumentParser(description='KOLEJKA observer')
    parser.add_argument("-s", "--socket", type=str, default=OBSERVER_SOCKET,
                        help='listen on socket')
    parser.add_argument("-v", "--verbose", action="store_true", default=False,
                        help='show more info')
    parser.add_argument("-d", "--debug", action="store_true", default=False,
                        help='show debug info')
    parser.add_argument("--detach", action="store_true", default=False,
                        help='run in background')
    parser.add_argument("--pid-file", type=str, default=OBSERVER_PID_FILE,
                        help='pid file')
    args = parser.parse_args()

    # Most verbose flag wins: -d beats -v beats the WARNING default.
    level = logging.WARNING
    if args.verbose:
        level = logging.INFO
    if args.debug:
        level = logging.DEBUG
    logging.basicConfig(level=level)
    setproctitle.setproctitle('kolejka-observer')

    with KolejkaObserverServer(args.socket) as server:
        def action():
            return server.serve_forever()

        if args.detach:
            daemon = Daemonize(app='kolejka-observer', pid=args.pid_file,
                               action=action,
                               verbose=(args.debug or args.verbose))
            daemon.start()
        else:
            action()
def start():
    """Detach and run TPyNode as a daemon, using the configured pidfile."""
    logger.info('Starting TPyNode in Daemon Mode')
    pid_path = config['TPyNode'].get('pidfile')

    def launch():
        run_tpynode(config, modules)

    Daemonize(app="tpynode", pid=pid_path, action=launch).start()
def main():
    """Run the monitor; '-d' as first argument means daemonize."""
    monitor = Monitor()
    wants_daemon = len(sys.argv) > 1 and sys.argv[1] == '-d'
    if wants_daemon:
        Daemonize(app='monitoring',
                  pid='/tmp/watchman.pid',
                  action=monitor.run).start()
    else:
        monitor.run()
def create_daemon(config, server):
    """Build (but do not start) a Daemonize wrapper around server.start.

    The pid file location is read from config["locald"]["pid_path"].
    """
    return Daemonize(
        app="locald",
        pid=config["locald"]["pid_path"],
        action=server.start,
    )
def start(self):
    """Initialise logging, then daemonize self.loopRun.

    The pid file name is derived from self.prefix; open fds are kept,
    the working directory stays at "." and output goes to self.slogger.
    """
    self.initLog()
    Daemonize(
        app=self.prefix,
        pid=self.prefix + ".pid",
        action=self.loopRun,
        auto_close_fds=False,
        chdir=".",
        foreground=False,
        logger=self.slogger,
    ).start()
def run(background=False):
    """Run the agent, optionally detached as a daemon.

    Args:
        background (bool): when true, fork and daemonize runAgent with a
            pid file under /tmp named after this script; otherwise run
            runAgent in the current process.
    """
    if background:  # idiomatic truthiness instead of ``== True``
        # NOTE(review): after the fork only the parent daemonizes; the
        # child simply returns from run() and keeps executing the caller
        # -- confirm this double-process behaviour is intended.
        if os.fork():
            script = basename(sys.argv[0])
            myname, _ = os.path.splitext(script)
            pidfile = '/tmp/%s.pid' % myname
            daemon = Daemonize(app=myname, pid=pidfile, action=runAgent)
            daemon.start()
    else:
        runAgent()
def start(config_options):
    """Boot the synchrotron worker (legacy logging setup): load config,
    build the server, start listening, and run the reactor (daemonized
    if configured)."""
    try:
        config = HomeServerConfig.load_config("Synapse synchrotron",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    # This entry point only makes sense for the synchrotron worker app.
    assert config.worker_app == "synapse.app.synchrotron"

    setup_logging(config.worker_log_config, config.worker_log_file)

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def run():
        # Apply resource limits / GC thresholds in the process that
        # actually runs the reactor (i.e. after any daemon fork).
        with LoggingContext("run"):
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        # Deferred until the reactor is running.
        ss.get_datastore().start_profiling()
        ss.replicate()
        ss.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    # Run the reactor in a daemonized child or directly in this process.
    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-synchrotron",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
def __init__(self):
    """ Init all objects and init handlers """
    # Background services managed by this object.
    self.services = [self.communication_handler]
    # Packet-processing callbacks, ordered from link layer up to DNS.
    self.handlers = [
        self.handle_datalink, self.handle_networklayer,
        self.handle_transportlayer, self.handle_dns
    ]
    # Prepared (not yet started) daemon wrapper around the app service.
    self.daemonObj = Daemonize(app=config.APP_NAME,
                               pid=config.PID_FILE,
                               action=self.start_app_service)
def _start_service(port):
    """Fork; in the child, daemonize a ServerProcess listening on *port*.

    The parent returns immediately.
    """
    if os.fork() != 0:
        return

    def start_server():
        proc = ServerProcess(port)
        proc.start()
        proc.join()

    Daemonize(
        app='ancypwn_terminal_server',
        pid=DAEMON_PID,
        action=start_server,
    ).start()
def lockdown_forward(lockdown, src_port, dst_port, daemonize):
    """ forward tcp port """
    forwarder = TcpForwarder(lockdown, src_port, dst_port)
    if daemonize:
        with tempfile.NamedTemporaryFile('wt') as pid_file:
            # NOTE(review): the temporary pid file is deleted as soon as
            # this ``with`` block exits -- confirm the daemonized child
            # does not need the pid file to outlive the parent.
            daemon = Daemonize(app=f'forwarder {src_port}->{dst_port}',
                               pid=pid_file.name,
                               action=forwarder.start)
            daemon.start()
    else:
        # Foreground: run the forwarder directly in this process.
        forwarder.start()
def start(configfile):
    """Load configuration, then fork: the child runs the web server in
    the foreground while the parent daemonizes main()."""
    _load_config(configfile)
    os.environ["UPD89_DATADIR"] = _config.getDataDir()
    newpid = os.fork()
    if newpid == 0:
        # Child process: serve the web UI.
        import websrv
        websrv.start(_config)
    else:
        # Parent process: detach and run main(), keeping log fds open
        # across the daemon fork.
        daemon = Daemonize(app="test_app", pid=_pid, action=main,
                           keep_fds=_log.getKeepfds())
        daemon.start()
def main(args: Optional[Sequence[str]] = None) -> int:
    """
    main(args: Optional[Sequence[str]]) -> int
    Main entrypoint of the program.

    Parses command line flags, loads the configuration and either runs
    in the foreground or detaches as a daemon.  Returns a process exit
    code.
    """
    config_filename = DDBUM_CONFIG_FILENAME
    foreground = False
    pidfile = "/var/run/dynamodb-user-manager.pid"

    if args is None:
        args = argv[1:]

    try:
        opts, args = getopt(
            list(args), "hc:fp:",
            ["help", "config=", "foreground", "pidfile="])
        for flag, value in opts:
            if flag in ("-h", "--help"):
                usage(stdout)
                return 0
            elif flag in ("-c", "--config"):
                config_filename = value
            elif flag in ("-f", "--foreground"):
                foreground = True
            elif flag in ("-p", "--pidfile"):
                pidfile = value
    except GetoptError as e:
        print(str(e), file=stderr)
        return 1

    # Positional arguments are not accepted.
    if args:
        print(f"Unknown argument {args[0]}", file=stderr)
        usage()
        return 1

    # Ingest the config file.
    try:
        config = parse_config(config_filename)
    except IOError as e:
        print(f"{config_filename}: {e}", file=stderr)
        return 1

    if foreground:
        return run(config)

    daemonize = Daemonize(app="dynamodb-user-manager", pid=pidfile,
                          action=partial(run, config))
    daemonize.start()
    return 0
def handle(self, verbosity, *args, **options):
    """Handle the management command.

    Configures (optionally file-based, time-rotated) logging according
    to *verbosity*, then either daemonizes a NotificationListener
    (--daemonize, which requires --pid-file) or runs one in-process,
    writing the pid file first if one was requested.
    """
    log_handler = None
    log_file = options.get('log_file')
    if log_file is not None:
        # Create a new log file every 30 days
        log_handler = logging.handlers.TimedRotatingFileHandler(
            log_file, when='D', interval=30)
        log_handler.suffix = '%Y%m%d'
    if verbosity >= 3:
        configure_logging(level=logging.DEBUG, handler=log_handler)
        configure_logging(level=logging.DEBUG, logger='ExchangeSession',
                          handler=log_handler)
    elif verbosity >= 2 or log_handler is not None:
        configure_logging(handler=log_handler)

    pid_file = options.get('pid_file')
    if options['daemonize']:
        if not pid_file:
            self.stderr.write(
                self.style.ERROR(
                    "--daemonize requires also the --pid-file argument"))
            exit(1)
        kwargs = {}
        if log_handler:
            # Keep the log file descriptor open across the daemon fork.
            kwargs['keep_fds'] = [log_handler.stream.fileno()]

        listener = None

        def run_listener():
            # ``nonlocal`` so stop_listener() sees the listener created
            # here; previously this assignment made a new local and the
            # atexit hook tried to close the outer None, raising
            # AttributeError at shutdown.
            nonlocal listener
            atexit.register(stop_listener)
            listener = NotificationListener(sync_after_start=True)
            listener.start()

        def stop_listener():
            logger.info("Stopping listener")
            # Guard against exiting before the listener was created.
            if listener is not None:
                listener.close()

        daemon = Daemonize(app='respa_exchange_listen', pid=pid_file,
                           action=run_listener, logger=logger, **kwargs)
        daemon.start()
    else:
        pid_file = options.get('pid_file')
        if pid_file:
            pid = str(os.getpid())
            with open(pid_file, 'w') as f:
                f.write(pid)
        with closing(NotificationListener()) as listener:
            listener.start()
def watch(daemon):
    """
    Watches for figures.

    When *daemon* is true the watcher is detached via Daemonize,
    otherwise it runs in the current process.
    """
    if not daemon:
        log.info("Watching figures.")
        watch_daemon()
        return

    Daemonize(
        app='inkscape-figures',
        pid='/tmp/inkscape-figures.pid',
        action=watch_daemon,
    ).start()
    log.info("Watching figures.")
def run(daemonize, logfile, pidfile, max_inactive_minutes, notebook_instance_name):
    """Start the knockout loop, optionally detached as a daemon.

    Logging to stdout ('-' logfile) is rejected when daemonizing.
    """
    setup_logger(dst=logfile)
    minutes = int(max_inactive_minutes)

    if not daemonize:
        knockout_loop(max_inactive_minutes=minutes,
                      notebook_instance_name=notebook_instance_name)
        return

    if logfile == '-':
        print('Logfile cannot be stdout if --daemonize is passed')
        exit(1)

    task = functools.partial(knockout_loop,
                             max_inactive_minutes=minutes,
                             notebook_instance_name=notebook_instance_name)
    Daemonize(app='sagemaker_knockout',
              pid=pidfile,
              action=task,
              auto_close_fds=False,
              logger=logging.getLogger('')).start()
def main(argv=None):
    """Configure file logging and launch the downtimer daemon."""
    logger.setLevel(CONF.log_level)
    file_handler = logging.FileHandler(CONF.log_file)
    file_handler.setFormatter(logging.Formatter(CONF.log_format))
    logger.addHandler(file_handler)

    # Keep the log file descriptor open across the daemon fork.
    Daemonize(app="Downtimer",
              pid=CONF.pid_file,
              action=downtimer_starter,
              logger=logger,
              keep_fds=[file_handler.stream.fileno()]).start()
def start_server(self, debug=False):
    """Start the HedgeLog server: in-process when *debug*, otherwise as
    a daemon that keeps the logging fds open across the fork."""
    keep_fds = init_logging(self.log_dict_path)
    if debug:
        self.ignite_server()
    else:
        print(self.pid_path)
        log_daemon = Daemonize(app='HedgeLog', pid=self.pid_path,
                               action=self.ignite_server,
                               keep_fds=keep_fds, foreground=False)
        try:
            log_daemon.start()
            print("HedgeLog started...")
        except:
            # NOTE(review): bare except also swallows the SystemExit that
            # Daemonize.start() raises on failure (e.g. existing pid file)
            # -- presumably intentional so a duplicate start only prints;
            # confirm before narrowing.
            print("HedgeLog is already started OR port %d is occupied // %s"
                  % (self.port, self.pid_path))
def main():
    '''Wrapper main(), just enough to decide to daemonize or not'''
    global args
    args = do_argparse()
    if args.cache:
        args.cache = realpath(args.cache)

    # Foreground run is the simple case; bail out early.
    if not args.daemon:
        skyshark_adsb_loader()
        return

    procname = 'skyshark_adsb_loader'
    Daemonize(app=procname,
              pid='/tmp/{}.pid'.format(procname),
              action=skyshark_adsb_loader).start()
def main():
    """Launch the module launcher, optionally as a daemon.

    Refuses to start when the socket file already exists.
    """
    parser, opts = parseopt()
    if path.exists(opts.socket):
        print('+ socket file {} already exists'.format(opts.socket))
        exit(1)

    if not opts.daemonize:
        _main(opts)
        return

    print('+ daemonizing - pid file {}'.format(opts.pidfile))
    Daemonize(app='module_launcher',
              pid=opts.pidfile,
              action=lambda: _main(opts)).start()
def start_daemon():
    """Daemonize run_subprocess using the settings from daemon_config."""
    logger.info("Prepare to daemonize program <%s>." % daemon_config.app_name)
    worker = Daemonize(app=daemon_config.app_name,
                       pid=daemon_config.pidfile,
                       action=run_subprocess,
                       keep_fds=keep_fds)
    logger.info("Program <%s> will be started!" % daemon_config.app_daemon_name)
    worker.start()
    logger.info("<%s> has done!\n" % daemon_config.app_daemon_name)