Example #1
def main(args=None):
    if args is None:
        args = sys.argv[1:]
    parsed_args = docopt(__doc__, args)
    if parsed_args['--debug']:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    logger.debug("Debugging enabled")
    logger.debug(parsed_args)
    if parsed_args['status']:
        pid = pidlockfile.read_pid_from_pidfile(PID_FILE)
        if pid:
            logger.debug("Pid is %s", pid)
            p = psutil.Process(pid)
            if not p.is_running():
                logger.warning("db-designer not running")
                return 0
            else:
                logger.info("db-designer is running")
                return 0
        else:
            logger.warning("db-designer not running")
            return 1
    if parsed_args['stop']:
        pid = pidlockfile.read_pid_from_pidfile(PID_FILE)
        if pid:
            logger.debug("Sending terminate to %s", pid)
            os.kill(pid, signal.SIGTERM)
        else:
            logger.warning("db-designer not running")
        return
    elif parsed_args['kill']:
        pid = pidlockfile.read_pid_from_pidfile(PID_FILE)
        if pid:
            logger.debug("Sending kill to %s", pid)
            os.kill(pid, signal.SIGKILL)
        else:
            logger.warning("db-designer not running")
        return
    elif parsed_args['--foreground']:
        context = NullContext()
        with context:
            server_main(int(parsed_args['--port']))
    else:
        log_file = open(LOG_FILE, 'a')
        pid_file = TimeoutPIDLockFile(PID_FILE, 1)
        context = daemon.DaemonContext(pidfile=pid_file,
                                       stdout=log_file,
                                       stderr=log_file,
                                       files_preserve=range(10),
                                       signal_map={signal.SIGTSTP: None,
                                                   signal.SIGTTIN: None,
                                                   signal.SIGTTOU: None,
                                                   signal.SIGINT: terminate,
                                                   signal.SIGTERM: terminate,
                                                   signal.SIGHUP: terminate,
                                                   signal.SIGUSR1: terminate,
                                                   signal.SIGUSR2: terminate})
        with context:
            server_main(int(parsed_args['--port']))
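
The main() entry point above hands its commands and options to docopt(__doc__, args), so every key it reads (status, stop, kill, --debug, --foreground, --port) must appear in the module docstring, which the snippet does not show. A usage string consistent with those keys could look like the following sketch (an illustration only; the program name is taken from the log messages and the default port is invented):

"""db-designer daemon control.

Usage:
    db-designer [--debug] [--foreground] [--port=PORT]
    db-designer status [--debug]
    db-designer stop [--debug]
    db-designer kill [--debug]

Options:
    --debug        Enable debug logging.
    --foreground   Run the server in the foreground instead of daemonizing.
    --port=PORT    TCP port the server listens on [default: 8000].
"""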
 def test_returns_none_when_file_nonexist(self):
     """ Should return None when the PID file does not exist. """
     self.scenario = self.scenarios['nonexist']
     pidfile_path = self.scenario['path']
     pid = pidlockfile.read_pid_from_pidfile(pidfile_path)
     scaffold.mock_restore()
     self.failUnlessIs(None, pid)
 def test_returns_pid_given_valid_input(self):
     """ Should return the PID if valid input file content. """
     pidfile_path = self.scenario['path']
     expect_pid = self.scenario['pidfile_pid']
     valid_inputs = [
         template % vars()
         for template in [
             "%(expect_pid)d\n",
             "%(expect_pid)09d\n",
             "%(expect_pid)d",
             "  %(expect_pid)09d  \n",
             "%(expect_pid)d\t",
             "%(expect_pid)d\n\n",
             "%(expect_pid)d\nFOO\n",
             ]
         ]
     for input_text in valid_inputs:
         self.scenario['pidfile'] = FakeFileDescriptorStringIO(input_text)
         pid = pidlockfile.read_pid_from_pidfile(pidfile_path)
         self.failUnlessEqual(
             expect_pid, pid,
             msg=(
                 "Input file content %(input_text)r"
                 " should give PID result %(expect_pid)r"
                 " (instead got %(pid)r)"
                 % vars()))
 def test_reads_pid_from_file(self):
     """ Should read the PID from the specified file. """
     pidfile_path = self.scenario['path']
     expect_pid = self.scenario['pidfile_pid']
     pid = pidlockfile.read_pid_from_pidfile(pidfile_path)
     scaffold.mock_restore()
     self.failUnlessEqual(expect_pid, pid)
Example #5
 def stop(self):
     pid = pidlockfile.read_pid_from_pidfile(settings.PIDFILE)
     if pid is None:
         sys.stderr.write('Failed to read PID from pidfile "%(pidfile)s".\n' % {
             'pidfile': settings.PIDFILE,
         })
         sys.exit(1)
     elif not self._is_pid_running(pid):
         sys.stderr.write('PID %(pid)s in pidfile "%(pidfile)s" is not running.\n' % {
             'pid': pid,
             'pidfile': settings.PIDFILE,
         })
         sys.exit(7)
     else:
         try:
             os.kill(pid, signal.SIGTERM)
             sys.stdout.write('Stopping')
             while self._is_pid_running(pid):
                 sys.stdout.write('.')
                 sys.stdout.flush()
                 time.sleep(1)
             sys.stdout.write(' Ok!\n')
             pidlockfile.remove_existing_pidfile(settings.PIDFILE)
             sys.exit(0)
         except OSError as e:
             sys.stderr.write('Failed to terminate PID %(pid)s: %(message)s.\n' % {
                 'pid': pid,
                 'message': e,
             })
             sys.exit(1)
Example #6
 def status(self):
     pid = pidlockfile.read_pid_from_pidfile(settings.PIDFILE)
     if pid is None or not self._is_pid_running(pid):
         sys.stdout.write('Stopped.\n')
         sys.exit(3)
     else:
         sys.stdout.write('Running (PID %s).\n' % pid)
         sys.exit(0)
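
Both stop() and status() above depend on a self._is_pid_running(pid) helper that the snippet does not include. A common way to implement such a check is to send signal 0, which performs error checking without delivering a signal; the standalone sketch below is an assumption, not the project's actual code (psutil.pid_exists(pid) would be an equivalent one-liner):

import errno
import os


def _is_pid_running(pid):
    """Return True if a process with this PID exists (assumed helper, not the project's code)."""
    try:
        os.kill(pid, 0)  # signal 0: existence/permission check only, no signal is delivered
    except OSError as e:
        if e.errno == errno.ESRCH:  # no such process
            return False
        if e.errno == errno.EPERM:  # process exists but belongs to another user
            return True
        raise
    return True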
 def test_opens_specified_filename(self):
     """ Should attempt to open specified pidfile filename. """
     pidfile_path = self.scenario['path']
     expect_mock_output = """\
         Called __builtin__.open(%(pidfile_path)r, 'r')
         """ % vars()
     dummy = pidlockfile.read_pid_from_pidfile(pidfile_path)
     scaffold.mock_restore()
     self.failUnlessMockCheckerMatch(expect_mock_output)
def stop_worker(args):
    """Sends SIGTERM to Celery worker"""
    # Read PID from file
    pid_file_path, _, _, _ = setup_locations(process=WORKER_PROCESS_NAME)
    pid = read_pid_from_pidfile(pid_file_path)

    # Send SIGTERM
    if pid:
        worker_process = psutil.Process(pid)
        worker_process.terminate()

    # Remove pid file
    remove_existing_pidfile(pid_file_path)
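
Note that worker_process.terminate() only sends SIGTERM and returns immediately, so stop_worker() may remove the PID file while the worker is still shutting down. A caller that needs to block until the worker has actually exited could extend the same psutil pattern, roughly as below (a sketch, not the project's code; the helper name and the 30 second grace period are invented):

import psutil


def stop_worker_and_wait(pid, grace_seconds=30):
    """Terminate the worker and block until it exits (hypothetical helper)."""
    worker_process = psutil.Process(pid)
    worker_process.terminate()  # send SIGTERM
    try:
        worker_process.wait(timeout=grace_seconds)  # block until the worker exits
    except psutil.TimeoutExpired:
        worker_process.kill()  # escalate to SIGKILL if SIGTERM is ignored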
 def test_returns_none_given_invalid_input(self):
     """ Should return None if invalid input file content. """
     pidfile_path = self.scenario['path']
     invalid_inputs = [
         "",
         "\n",
         "B0GUS\n",
         "0x42\n",
         "  1e17  \n",
         ]
     for input_text in invalid_inputs:
         self.scenario['pidfile'] = FakeFileDescriptorStringIO(input_text)
         pid = pidlockfile.read_pid_from_pidfile(pidfile_path)
         self.failUnlessEqual(
             None, pid,
             msg=(
                 "Input file content %(input_text)r"
                 " should return None (instead got %(pid)r)"
                 % vars()))
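
Taken together, the valid-input and invalid-input tests above pin down the contract of read_pid_from_pidfile(): only the first line of the file is considered, surrounding whitespace and leading zeroes are tolerated, and a missing file or anything that is not a plain decimal integer yields None. A minimal implementation consistent with what these tests assert (a sketch, not necessarily the library's actual source) would be:

def read_pid_from_pidfile(pidfile_path):
    """Return the PID recorded in `pidfile_path`, or None if it cannot be read."""
    try:
        with open(pidfile_path, 'r') as pidfile:
            first_line = pidfile.readline().strip()  # ignore everything after the first line
            return int(first_line)                   # e.g. "000000042\n" -> 42
    except (EnvironmentError, ValueError):
        # Missing or unreadable file, or non-numeric content ("", "B0GUS", "0x42", "1e17").
        return None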
Example #10
def webserver(args):
    """Starts Airflow Webserver"""
    print(settings.HEADER)

    access_logfile = args.access_logfile or conf.get('webserver',
                                                     'access_logfile')
    error_logfile = args.error_logfile or conf.get('webserver',
                                                   'error_logfile')
    num_workers = args.workers or conf.get('webserver', 'workers')
    worker_timeout = args.worker_timeout or conf.get(
        'webserver', 'web_server_worker_timeout')
    ssl_cert = args.ssl_cert or conf.get('webserver', 'web_server_ssl_cert')
    ssl_key = args.ssl_key or conf.get('webserver', 'web_server_ssl_key')
    if not ssl_cert and ssl_key:
        raise AirflowException(
            'An SSL certificate must also be provided for use with ' + ssl_key)
    if ssl_cert and not ssl_key:
        raise AirflowException(
            'An SSL key must also be provided for use with ' + ssl_cert)

    if args.debug:
        print(
            f"Starting the web server on port {args.port} and host {args.hostname}."
        )
        app = create_app(testing=conf.getboolean('core', 'unit_test_mode'))
        app.run(
            debug=True,
            use_reloader=not app.config['TESTING'],
            port=args.port,
            host=args.hostname,
            ssl_context=(ssl_cert, ssl_key) if ssl_cert and ssl_key else None,
        )
    else:
        # This pre-warms the cache, and lets possible errors
        # be reported earlier (i.e. before daemonization)
        os.environ['SKIP_DAGS_PARSING'] = 'True'
        app = cached_app(None)
        os.environ.pop('SKIP_DAGS_PARSING')

        pid_file, stdout, stderr, log_file = setup_locations(
            "webserver", args.pid, args.stdout, args.stderr, args.log_file)

        # Check if the webserver is already running; if not, remove the old pidfile
        check_if_pidfile_process_is_running(pid_file=pid_file,
                                            process_name="webserver")

        print(
            textwrap.dedent('''\
                Running the Gunicorn Server with:
                Workers: {num_workers} {workerclass}
                Host: {hostname}:{port}
                Timeout: {worker_timeout}
                Logfiles: {access_logfile} {error_logfile}
                =================================================================\
            '''.format(
                num_workers=num_workers,
                workerclass=args.workerclass,
                hostname=args.hostname,
                port=args.port,
                worker_timeout=worker_timeout,
                access_logfile=access_logfile,
                error_logfile=error_logfile,
            )))

        run_args = [
            'gunicorn',
            '--workers',
            str(num_workers),
            '--worker-class',
            str(args.workerclass),
            '--timeout',
            str(worker_timeout),
            '--bind',
            args.hostname + ':' + str(args.port),
            '--name',
            'airflow-webserver',
            '--pid',
            pid_file,
            '--config',
            'python:airflow.www.gunicorn_config',
        ]

        if args.access_logfile:
            run_args += ['--access-logfile', str(args.access_logfile)]

        if args.error_logfile:
            run_args += ['--error-logfile', str(args.error_logfile)]

        if args.daemon:
            run_args += ['--daemon']

        if ssl_cert:
            run_args += ['--certfile', ssl_cert, '--keyfile', ssl_key]

        run_args += ["airflow.www.app:cached_app()"]

        gunicorn_master_proc = None

        def kill_proc(signum, _):  # pylint: disable=unused-argument
            log.info("Received signal: %s. Closing gunicorn.", signum)
            gunicorn_master_proc.terminate()
            with suppress(TimeoutError):
                gunicorn_master_proc.wait(timeout=30)
            # If the master is still running after the grace period, force-kill it.
            if gunicorn_master_proc.poll() is None:
                gunicorn_master_proc.kill()
            sys.exit(0)

        def monitor_gunicorn(gunicorn_master_pid: int):
            # Register signal handlers
            signal.signal(signal.SIGINT, kill_proc)
            signal.signal(signal.SIGTERM, kill_proc)

            # These run forever until SIG{INT, TERM, KILL, ...} signal is sent
            GunicornMonitor(
                gunicorn_master_pid=gunicorn_master_pid,
                num_workers_expected=num_workers,
                master_timeout=conf.getint('webserver',
                                           'web_server_master_timeout'),
                worker_refresh_interval=conf.getint('webserver',
                                                    'worker_refresh_interval',
                                                    fallback=30),
                worker_refresh_batch_size=conf.getint(
                    'webserver', 'worker_refresh_batch_size', fallback=1),
                reload_on_plugin_change=conf.getboolean(
                    'webserver', 'reload_on_plugin_change', fallback=False),
            ).start()

        if args.daemon:
            handle = setup_logging(log_file)

            base, ext = os.path.splitext(pid_file)
            with open(stdout, 'w+') as stdout, open(stderr, 'w+') as stderr:
                ctx = daemon.DaemonContext(
                    pidfile=TimeoutPIDLockFile(f"{base}-monitor{ext}", -1),
                    files_preserve=[handle],
                    stdout=stdout,
                    stderr=stderr,
                )
                with ctx:
                    subprocess.Popen(run_args, close_fds=True)

                    # Read the PID of the gunicorn master, as it will differ from
                    # that of the process spawned above.
                    while True:
                        sleep(0.1)
                        gunicorn_master_proc_pid = read_pid_from_pidfile(
                            pid_file)
                        if gunicorn_master_proc_pid:
                            break

                    # Run Gunicorn monitor
                    gunicorn_master_proc = psutil.Process(
                        gunicorn_master_proc_pid)
                    monitor_gunicorn(gunicorn_master_proc.pid)

        else:
            gunicorn_master_proc = subprocess.Popen(run_args, close_fds=True)
            monitor_gunicorn(gunicorn_master_proc.pid)
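
In the daemon branch above, the monitor process gets its own PID file derived from the webserver's: os.path.splitext() splits off the extension and "-monitor" is inserted before it, so the gunicorn master keeps pid_file while the daemonized monitor locks the "-monitor" variant. For example (the path is hypothetical, for illustration only):

import os

pid_file = "/run/airflow/airflow-webserver.pid"  # hypothetical path
base, ext = os.path.splitext(pid_file)
monitor_pid_file = f"{base}-monitor{ext}"        # "/run/airflow/airflow-webserver-monitor.pid"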
def webserver(args):
    """Starts Airflow Webserver"""
    print(settings.HEADER)

    # Check for old/insecure config, and fail safe (i.e. don't launch) if the config is wildly insecure.
    if conf.get('webserver', 'secret_key') == 'temporary_key':
        from rich import print as rich_print

        rich_print(
            "[red][bold]ERROR:[/bold] The `secret_key` setting under the webserver config has an insecure "
            "value - Airflow has failed safe and refuses to start. Please change this value to a new, "
            "per-environment, randomly generated string, for example using this command `[cyan]openssl rand "
            "-hex 30[/cyan]`",
            file=sys.stderr,
        )
        sys.exit(1)

    access_logfile = args.access_logfile or conf.get('webserver',
                                                     'access_logfile')
    error_logfile = args.error_logfile or conf.get('webserver',
                                                   'error_logfile')
    access_logformat = args.access_logformat or conf.get(
        'webserver', 'access_logformat')
    num_workers = args.workers or conf.get('webserver', 'workers')
    worker_timeout = args.worker_timeout or conf.get(
        'webserver', 'web_server_worker_timeout')
    ssl_cert = args.ssl_cert or conf.get('webserver', 'web_server_ssl_cert')
    ssl_key = args.ssl_key or conf.get('webserver', 'web_server_ssl_key')
    if not ssl_cert and ssl_key:
        raise AirflowException(
            'An SSL certificate must also be provided for use with ' + ssl_key)
    if ssl_cert and not ssl_key:
        raise AirflowException(
            'An SSL key must also be provided for use with ' + ssl_cert)

    if args.debug:
        print(
            f"Starting the web server on port {args.port} and host {args.hostname}."
        )
        app = create_app(testing=conf.getboolean('core', 'unit_test_mode'))
        app.run(
            debug=True,
            use_reloader=not app.config['TESTING'],
            port=args.port,
            host=args.hostname,
            ssl_context=(ssl_cert, ssl_key) if ssl_cert and ssl_key else None,
        )
    else:

        pid_file, stdout, stderr, log_file = setup_locations(
            "webserver", args.pid, args.stdout, args.stderr, args.log_file)

        # Check if the webserver is already running; if not, remove the old pidfile
        check_if_pidfile_process_is_running(pid_file=pid_file,
                                            process_name="webserver")

        print(
            textwrap.dedent(f'''\
                Running the Gunicorn Server with:
                Workers: {num_workers} {args.workerclass}
                Host: {args.hostname}:{args.port}
                Timeout: {worker_timeout}
                Logfiles: {access_logfile} {error_logfile}
                Access Logformat: {access_logformat}
                ================================================================='''
                            ))

        run_args = [
            sys.executable,
            '-m',
            'gunicorn',
            '--workers',
            str(num_workers),
            '--worker-class',
            str(args.workerclass),
            '--timeout',
            str(worker_timeout),
            '--bind',
            args.hostname + ':' + str(args.port),
            '--name',
            'airflow-webserver',
            '--pid',
            pid_file,
            '--config',
            'python:airflow.www.gunicorn_config',
        ]

        if args.access_logfile:
            run_args += ['--access-logfile', str(args.access_logfile)]

        if args.error_logfile:
            run_args += ['--error-logfile', str(args.error_logfile)]

        if args.access_logformat and args.access_logformat.strip():
            run_args += ['--access-logformat', str(args.access_logformat)]

        if args.daemon:
            run_args += ['--daemon']

        if ssl_cert:
            run_args += ['--certfile', ssl_cert, '--keyfile', ssl_key]

        run_args += ["airflow.www.app:cached_app()"]

        gunicorn_master_proc = None

        def kill_proc(signum, _):
            log.info("Received signal: %s. Closing gunicorn.", signum)
            gunicorn_master_proc.terminate()
            with suppress(TimeoutError):
                gunicorn_master_proc.wait(timeout=30)
            # If the master is still running after the grace period, force-kill it.
            if gunicorn_master_proc.poll() is None:
                gunicorn_master_proc.kill()
            sys.exit(0)

        def monitor_gunicorn(gunicorn_master_pid: int):
            # Register signal handlers
            signal.signal(signal.SIGINT, kill_proc)
            signal.signal(signal.SIGTERM, kill_proc)

            # These run forever until SIG{INT, TERM, KILL, ...} signal is sent
            GunicornMonitor(
                gunicorn_master_pid=gunicorn_master_pid,
                num_workers_expected=num_workers,
                master_timeout=conf.getint('webserver',
                                           'web_server_master_timeout'),
                worker_refresh_interval=conf.getint('webserver',
                                                    'worker_refresh_interval',
                                                    fallback=30),
                worker_refresh_batch_size=conf.getint(
                    'webserver', 'worker_refresh_batch_size', fallback=1),
                reload_on_plugin_change=conf.getboolean(
                    'webserver', 'reload_on_plugin_change', fallback=False),
            ).start()

        if args.daemon:
            # This lets possible errors be reported before daemonization
            os.environ['SKIP_DAGS_PARSING'] = 'True'
            app = create_app(None)
            os.environ.pop('SKIP_DAGS_PARSING')

            handle = setup_logging(log_file)

            base, ext = os.path.splitext(pid_file)
            with open(stdout, 'w+') as stdout, open(stderr, 'w+') as stderr:
                ctx = daemon.DaemonContext(
                    pidfile=TimeoutPIDLockFile(f"{base}-monitor{ext}", -1),
                    files_preserve=[handle],
                    stdout=stdout,
                    stderr=stderr,
                )
                with ctx:
                    subprocess.Popen(run_args, close_fds=True)

                    # Read the PID of the gunicorn master, as it will differ from
                    # that of the process spawned above.
                    while True:
                        sleep(0.1)
                        gunicorn_master_proc_pid = read_pid_from_pidfile(
                            pid_file)
                        if gunicorn_master_proc_pid:
                            break

                    # Run Gunicorn monitor
                    gunicorn_master_proc = psutil.Process(
                        gunicorn_master_proc_pid)
                    monitor_gunicorn(gunicorn_master_proc.pid)

        else:
            with subprocess.Popen(run_args,
                                  close_fds=True) as gunicorn_master_proc:
                monitor_gunicorn(gunicorn_master_proc.pid)
Example #12
def stop_daemon(pidfile):
    pid = read_pid_from_pidfile(pidfile)
    if pid is not None:  # None means the pidfile is missing or its content is not a valid PID
        os.kill(pid, signal.SIGKILL)
    if os.path.exists(pidfile):
        os.unlink(pidfile)
Example #13
    def start(self, detach=True, debug=False):
        # Check if the service is already running.
        pid = pidlockfile.read_pid_from_pidfile(settings.PIDFILE)
        if pid is not None and self._is_pid_running(pid):
            sys.stderr.write('PID %(pid)s in pidfile "%(pidfile)s" is already running.\n' % {
                'pid': pid,
                'pidfile': settings.PIDFILE,
            })
            sys.exit(1)

        # Remove the pidfile (it may not exist).
        pidlockfile.remove_existing_pidfile(settings.PIDFILE)

        # Adjust daemon context.
        if not detach:
            self.detach_process = False
            self.stdout = sys.stdout
            self.stderr = sys.stderr

        # Enter daemon context.
        with self:
            # Initializations.
            pid = os.getpid()
            self._shutdown_event = multiprocessing.Event()
            self._logging_queue = multiprocessing.Queue(-1)
            self._init_logger(debug=debug, console=not detach)

            # Log.
            logging.getLogger('varnishsentry').info(
                'Starting varnishsentry service (PID %d)', pid)

            # Launch consumers.
            for id, config in settings.WORKERS.iteritems():
                worker = Consumer(
                    pid,
                    self._shutdown_event,
                    self._logging_queue,
                    id,
                    config,
                    debug)
                self._workers.append(worker)
                worker.start()

            # Periodically check for termination and for terminated workers.
            while not self._sigterm:
                try:
                    # Wait for incoming messages (up to 1 second).
                    record = self._logging_queue.get(True, 1)

                    # Process incoming message.
                    if record is not None:
                        logging.getLogger(record.name).handle(record)
                except Exception:
                    # Typically Queue.Empty when no log record arrives within the
                    # one second timeout; ignore it and keep polling.
                    pass

                # Some worker has terminated? => rebuild the list of workers.
                if self._sigchld > 0:
                    workers = []
                    for worker in self._workers:
                        if worker.is_alive():
                            workers.append(worker)
                        else:
                            worker.terminate()
                            logging.getLogger('varnishsentry').error(
                                'Worker %s terminated unexpectedly with status '
                                'code %d', worker.name, worker.exitcode)
                            workers.append(worker.restart())
                    self._workers = workers
                    self._sigchld -= 1

            # Set shutdown event and wait a few seconds for a graceful shutdown
            # of all workers.
            self._shutdown_event.set()
            retries = 5
            while retries > 0:
                if any([worker.is_alive() for worker in self._workers]):
                    retries = retries - 1
                    time.sleep(1)
                else:
                    break

            # After timeout, force shutdown of any alive worker.
            for worker in self._workers:
                if worker.is_alive():
                    worker.terminate()

            # Wait for all workers termination.
            for worker in self._workers:
                worker.join()

        # Clean up and exit.
        pidlockfile.remove_existing_pidfile(settings.PIDFILE)
        logging.getLogger('varnishsentry').info(
            'Shutting down varnishsentry service (PID %d)', pid)
        sys.exit(0)
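
The start() method above enters `with self:` and assigns self.detach_process, self.stdout, and self.stderr, which suggests the service class extends daemon.DaemonContext. A skeleton consistent with that usage might look like the following (an assumption for illustration; the class name, import paths, and constructor arguments are guesses, not the project's code, and the signal handlers that set _sigterm and _sigchld would still need to be wired up, e.g. via DaemonContext's signal_map):

import daemon
from daemon.pidfile import TimeoutPIDLockFile

from varnishsentry import settings  # hypothetical import path


class Service(daemon.DaemonContext):  # hypothetical class name
    def __init__(self):
        super(Service, self).__init__(
            pidfile=TimeoutPIDLockFile(settings.PIDFILE, acquire_timeout=1),
        )
        self._workers = []
        self._sigterm = False
        self._sigchld = 0
        self._shutdown_event = None
        self._logging_queue = None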