Example #1
def start_daemon_process():
    """Starts the daemon."""
    config = configparser.ConfigParser()
    config.read(GSHELL_CONFIG)
    if not os.path.exists(config[MAIN_SECTION][utils.Section.CREDENTIALS]):
        raise ValueError('Credentials json file not found. Make sure you have '
                         'the correct path in the gshell.config file')
    if utils.needs_setup(STORAGE_PATH, FILE_ID_PATH, TOKEN_PATH):
        utils.create_lib_dir(STORAGE_PATH)
        # Passing command-line arguments breaks the setup method
        # (more precisely, its _get_http_client call), so clear sys.argv first.
        sys.argv = []
        fid = setup(config)
        url = 'https://docs.google.com/document/d/{}'.format(fid)
        print('Document created successfully')
        print('You can access your document here:', url)

    pid_path = config[MAIN_SECTION][utils.Section.PID_PATH]
    sleep_time = int(config[MAIN_SECTION][utils.Section.SLEEP_TIME])
    with daemon.DaemonContext(
            working_directory='./',
            umask=0o002,
            files_preserve=[handler.stream],
            stderr=handler.stream,
            pidfile=pidfile.PIDLockFile(pid_path),
    ):
        logger.info('Starting daemon')
        while True:
            logger.info('Running gdoc process.')
            _run()
            time.sleep(sleep_time)
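
Example #1 passes files_preserve=[handler.stream] because DaemonContext closes every open file descriptor when it detaches; listing the log handler's stream (and reusing it as stderr) is what keeps logging alive inside the daemon. A minimal sketch of just that pattern, with illustrative paths and logger names that are not taken from the gshell project:

import logging
import daemon
from daemon import pidfile

# Illustrative names and paths.
handler = logging.FileHandler('/tmp/mydaemon.log')
logger = logging.getLogger('mydaemon')
logger.addHandler(handler)
logger.setLevel(logging.INFO)

with daemon.DaemonContext(
        files_preserve=[handler.stream],  # keep the log file open across the fork
        stderr=handler.stream,            # uncaught tracebacks go to the same file
        pidfile=pidfile.PIDLockFile('/tmp/mydaemon.pid')):
    logger.info('daemon running')
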
Example #2
File: cbot.py Project: a-yarohovich/bot
def main(argv):
    app = Application(argv)
    file_pid_lock = __file__ + ".lock"
    LOG.debug("{}".format(file_pid_lock))
    try:

        def run():
            if app.run():
                gloop.global_ev_loop.run_forever()

        if not app.initialize():
            sys.exit(5)
        if app.is_daemon():
            LOG.info(
                "Daemonize this application. Lock file: {} Pwd: {}".format(
                    file_pid_lock, os.getcwd()))
            with daemon.DaemonContext(
                    working_directory=os.getcwd(),  # Get working dir
                    umask=0o002,
                    pidfile=pidfile.PIDLockFile(
                        file_pid_lock),  # Create a pid file
                    files_preserve=[
                        LOG.log_file_handler.stream,
                    ],  # Keep the logger file after fork
            ):
                run()
        else:
            run()
    except Exception as ex:
        LOG.error("Unknown exception caught: {}".format(ex.args[-1]))
Example #3
 def start(self) -> None:
     """
     Starts the daemon.
     """
     if not self.__get_pid():
         with DaemonContext(pidfile=pidfile.PIDLockFile(self.__file_path),
                            working_directory=WADES_DIR_PATH):
             self.run()
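
Example #3 enters the DaemonContext only when __get_pid() finds no running instance. The same guard can be written directly against the PID lock file; a rough, self-contained sketch (the path is a placeholder, and note that is_locked() also reports a stale file left behind by a crashed process):

import time
from daemon import DaemonContext, pidfile

lock = pidfile.PIDLockFile('/tmp/wades-example.pid')  # placeholder path
if not lock.is_locked():  # otherwise a previous start still holds the PID file
    with DaemonContext(pidfile=lock):
        time.sleep(60)  # stands in for the daemon's work loop
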
Example #4
 def run(self):
     """
     Run gRPC server with new daemon process.
     """
     pid_lock_file = pidfile.PIDLockFile(self.pidfile_path)
     with daemon.DaemonContext(stdout=self.stdout_file,
                               stderr=self.stderr_file,
                               pidfile=pid_lock_file,
                               detach_process=True):
         self.serve()
Example #5
def stop(args):
    """Stop a running core server. """
    _setup_console_logging(args)

    if not os.path.exists(args.pid_file):
        logging.warning("Nothing to stop. Quitting.")
    else:
        lock = pidlockfile.PIDLockFile(args.pid_file)

        if lock.is_locked():
            pid = lock.read_pid()
            logging.info("Sending TERM signal to core process (pid=%s)" % pid)
            os.kill(pid, signal.SIGTERM)
        else:
            logging.warning("No core running but lock file found. Cleaning up.")
            os.unlink(args.pid_file)
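
Example #5 stops a daemon by reading the PID from the lock file and sending SIGTERM. On the receiving side, later examples (#7, #10 and #11) install handlers for that signal through signal_map. A minimal sketch of that receiving side, with an illustrative path and shutdown function:

import signal
import time
import daemon
from daemon import pidfile

def shutdown(signum, frame):
    # Standard signal-handler signature; exiting unwinds the DaemonContext.
    raise SystemExit(0)

with daemon.DaemonContext(
        pidfile=pidfile.PIDLockFile('/tmp/core-example.pid'),  # illustrative path
        signal_map={signal.SIGTERM: shutdown}):
    while True:
        time.sleep(1)
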
Example #6
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('env',
                        help='environment (e.g. development)',
                        nargs='?',
                        default=None)
    args = parser.parse_args()

    env = args.env or os.getenv('aker_warehouse_service_env', 'development')

    if env not in ('development', 'test', 'staging', 'production'):
        raise ValueError("Unrecognised environment: %r" % env)

    config = Config('%s/config/%s.cfg' %
                    (os.path.dirname(os.path.realpath(__file__)), env))

    # See https://pagure.io/python-daemon/blob/master/f/daemon/daemon.py#_63 for docs
    with DaemonContext(working_directory=os.getcwd(),
                       stdout=open(config.process.logfile, 'a'),
                       stderr=open(config.process.errorlog, 'a'),
                       pidfile=pidfile.PIDLockFile(config.process.pidfile)):

        on_message_partial = partial(on_message, env=env, config=config)

        try:
            credentials = pika.PlainCredentials(config.message_queue.user,
                                                config.message_queue.password)
            parameters = pika.ConnectionParameters(
                config.message_queue.host, config.message_queue.port,
                config.message_queue.virtual_host, credentials)

            with closing(pika.BlockingConnection(parameters)) as connection:
                channel = connection.channel()
                channel.basic_consume(on_message_partial,
                                      config.message_queue.queue)
                try:
                    print('Listening on %s ...' % config.message_queue.queue)
                    channel.start_consuming()
                finally:
                    channel.stop_consuming()
        except Exception as ex:
            print('Error connecting to RabbitMQ: %s' % ex)
Example #7
    signal.SIGTERM: shutdown,
    signal.SIGTSTP: shutdown,
    signal.SIGUSR1: shutdown,
}

if __name__ == '__main__':
    log = AppLogger(name='streamfixdaemon')

    log.info('[STARTUP]')
    log.info('Entering daemon context')
    with DaemonContext(
            signal_map=signal_map,
            detach_process=True,  # set to False to run in the foreground
            stdin=None,
            stdout=sys.stdout,  # or None to discard output
            stderr=sys.stderr,  # or None to discard output
            pidfile=pidfile.PIDLockFile('{}/{}'.format(os.getcwd(),
                                                       STREAMFIX_PID_FILE)),
            chroot_directory=None,  # no chroot jail
            working_directory='{}/'.format(os.getcwd())):

        stream = NifStream()
        time.sleep(1)
        log.info('RECOVERY start errors False')
        stream.recover(errors=False)
        time.sleep(1)
        log.info('RECOVERY start errors True')
        stream.recover(errors=True)
        log.info('[FINISHED] all done fixing')
        log.info('Exiting daemon context')
Example #8
    print("</service>")
    sys.exit()

if command == 'start':
    if not daemon_supported:
        print("daemon mode not supported on this platform", file=sys.stderr)
        sys.exit(-1)

    # run in the background
    d = os.path.dirname(out_file_path)
    if not os.path.exists(d):
        os.makedirs(d)

    with open(out_file_path, 'a+') as out:
        context = daemon.DaemonContext(
            pidfile=pidfile.PIDLockFile(pid_file_path),
            stdout=out,
            stderr=out,
        )
        with context:
            # this block is the main() for the forked daemon process
            child = None
            cmd = java_cmd % {'java': java, 'root_logger': 'INFO'}

            # notify the child when we're killed
            def handler(signum, frame):
                if child:
                    child.send_signal(signum)
                sys.exit(0)

            signal.signal(signal.SIGTERM, handler)
Example #9
File: kibun.py Project: Hanaasagi/kibun
nlp = BosonNLP(bosonnlp_token)
sentiment = partial(nlp.sentiment, model='weibo')

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)


def kibun_job():
    today = datetime.utcnow().date()
    recent_tweets = api.user_timeline()
    tweets = takewhile(lambda x: x.created_at.date() == today, recent_tweets)

    grouped_probability = zip(*map(lambda x: sentiment(x.text).pop(), tweets))
    result = map(lambda nums: sum(nums) / len(nums), grouped_probability)

    api.update_status(template.format(*(list(result) or (0, 0))))


if __name__ == '__main__':
    import os
    from daemon import DaemonContext, pidfile
    current_path = os.getcwd()
    pid_path = os.path.join(current_path, 'kibun.pid')
    with DaemonContext(working_directory=current_path,
                       pidfile=pidfile.PIDLockFile(pid_path)):
        schedule.every().day.at(exec_time).do(kibun_job)
        while True:
            schedule.run_pending()
            time.sleep(10)
Example #10
    signal.SIGTSTP: shutdown_workers,
    signal.SIGUSR1: reboot_workers,
}

if __name__ == '__main__':
    log = AppLogger(name='syncdaemon')

    log.info('[STARTUP]')
    log.info('** ENV: {} **'.format(NIF_REALM))
    log.info('Entering daemon context')
    with DaemonContext(signal_map=signal_map,
                       detach_process=True,  # set to False to run in the foreground
                       stdin=None,
                       stdout=None,  # or sys.stdout to keep standard output
                       stderr=None,  # or sys.stderr to keep error output
                       pidfile=pidfile.PIDLockFile(
                           '{}/{}'.format(os.getcwd(), SYNCDAEMON_PID_FILE)),
                       chroot_directory=None,  # no chroot jail
                       working_directory='{}/'.format(os.getcwd())
                       ):

        pyro = PyroWrapper(workers_stop=workers_stop,
                           pyro_stop=pyro_stop,
                           workers_started=workers_started
                           )
        pyro.start()

        # Main loop
        while not pyro_stop.is_set():
            # Housecleaning?
            time.sleep(5)
Example #11
def main():
    pass

if __name__ == '__main__':

    bc = Client('','')
    bd = BinanceDB()
    exchinf = bc.get_exchange_info()
#   bd.UpdateCommonSchema(exchinf)
#   bd.UpdateSymbolSchema(exchinf)
    trades   = ['{symbol}@trade'.format(symbol=s['symbol'].lower()) for s in exchinf['symbols']]
    klines = [['{symbol}@kline_{interval}'.format(symbol=s['symbol'].lower(),interval=kl) for s in exchinf['symbols']] for kl in enum_kline_intervals]
    minitickers = '!miniTicker@arr'
    tickers = '!ticker@arr'
#   wss_data = [ trades, ] + klines + [ minitickers , tickers ]
    wss_data = klines
    
    bm = BinanceSocketManager(bc)
    # start any sockets here, i.e a trade socket
    conns = [ bm.start_multiplex_socket(d, bd.wssSaveMsg) for d in wss_data ]
    # then start the socket manager
    bm.start()

if __name__ == '__main__':
    
    ARGS = parse_args()

    CONFIG = load_config(ARGS.config)
    if CONFIG is None:
        exit(2)

    if getattr(logging, ARGS.log_level.upper()) < getattr(logging, CONFIG['DAEMON_OPTIONS']['log_level'].upper()):
        CONFIG['DAEMON_OPTIONS']['log_level'] = ARGS.log_level

    pw = get_user_info(CONFIG['DAEMON_OPTIONS']['user'])
    if pw is not None:
        CONFIG['DAEMON_OPTIONS']['user_id'] = pw.pw_uid
        CONFIG['DAEMON_OPTIONS']['group_id'] = pw.pw_gid
    else:
        exit(2)

    if not make_directory(CONFIG['DAEMON_OPTIONS']['pid_file'],CONFIG['DAEMON_OPTIONS']['user_id'],CONFIG['DAEMON_OPTIONS']['group_id']):
        exit(2)

    with DaemonContext(uid=CONFIG['DAEMON_OPTIONS']['user_id'],
                       gid=CONFIG['DAEMON_OPTIONS']['group_id'],
                       pidfile=pidfile.PIDLockFile(CONFIG['DAEMON_OPTIONS']['pid_file']),
                       signal_map= {
                           signal.SIGTERM: signal_handler
                       }):

        if not make_directory(CONFIG['DAEMON_OPTIONS']['log_file'],CONFIG['DAEMON_OPTIONS']['user_id'],CONFIG['DAEMON_OPTIONS']['group_id']):
            exit(2)
    
        LOG = init_daemon_log(DAEMON_INFO['prog'],CONFIG['DAEMON_OPTIONS']['log_file'],CONFIG['DAEMON_OPTIONS']['log_level'])
        if LOG is None:
            exit(2)
    
        LOG.info('Binance Exchange data collector service started')
        LOG.info('PID file {fname} created successfully.'.format(fname=CONFIG['DAEMON_OPTIONS']['pid_file']))
        LOG.info('Current process id {pid}.'.format(pid=str(os.getpid())))
        LOG.info('Log file {fname} created successfully.'.format(fname=CONFIG['DAEMON_OPTIONS']['log_file']))
        LOG.info('Log level {l}'.format(l=CONFIG['DAEMON_OPTIONS']['log_level']))
    
        DO_MAIN()
        
        LOG.info('Main worker stopped.')
        try:
            os.remove(CONFIG['DAEMON_OPTIONS']['pid_file'])
        except OSError:
            LOG.error('Cannot remove PID file {fname}'.format(fname=CONFIG['DAEMON_OPTIONS']['pid_file']))
    
        LOG.info('Exit')
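
For comparison with the project-specific excerpts above, here is a minimal self-contained skeleton of the common pattern; the path and loop body are illustrative only:

import os
import time
import daemon
from daemon import pidfile

PID_PATH = os.path.join(os.getcwd(), 'example.pid')  # illustrative path

with daemon.DaemonContext(
        working_directory=os.getcwd(),
        umask=0o002,
        pidfile=pidfile.PIDLockFile(PID_PATH)):
    # The PID lock is acquired when the context is entered and released
    # (the file is removed) when the context exits cleanly.
    for _ in range(60):
        time.sleep(1)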