Code Example #1
def start_arcyd(daemonize=True, loop=True, restart=False, stop_message=''):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            if restart:
                stop_arcyd_pid(pid, fs.layout.killfile, stop_message)
            else:
                raise Exception("already running")

        if daemonize:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        for line in open(fs.layout.root_config):
            params.append(line.strip())

        if not loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    def logger_config():
        _setup_logger(fs, daemonize)

    with phlsys_multiprocessing.logging_context(logger_config):
        _LOGGER.debug("start with args: {}".format(args))
        while True:
            _LOGGER.info("arcyd started")
            try:
                exit_code = abdi_processrepos.process(args, repo_configs)
                _LOGGER.debug("arcyd process loop exit_code: %s" % exit_code)
                if exit_code == abdi_processexitcodes.ExitCodes.ec_exit:
                    break
            finally:
                _LOGGER.info("arcyd stopped")

            _LOGGER.debug("reloading arcyd configuration")
            try:
                with fs.lockfile_context():
                    repo_configs = abdi_repoargs.parse_config_file_list(
                        fs.repo_config_path_list())
            except phlsys_fs.LockfileExistsError:
                _LOGGER.error("couldn't acquire lockfile, reload failed")
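
Examples #1, #2, #4 and #5 all build their argparse parameter list by reading fs.layout.root_config one line at a time, so the root config file is effectively a list of command-line tokens, one token per line. A minimal, self-contained sketch of that pattern (the file name and options below are hypothetical illustrations, not arcyd's real configuration) might look like this:

import argparse

# Hypothetical config file, one command-line token per line, e.g.:
#   --instance-uri
#   https://phab.example.com/api/
#   --arcyd-email
#   arcyd@example.com
CONFIG_PATH = 'example_config'

parser = argparse.ArgumentParser()
parser.add_argument('--instance-uri')
parser.add_argument('--arcyd-email')

params = []
with open(CONFIG_PATH) as f:
    for line in f:
        line = line.strip()
        if line:  # ignore blank lines
            params.append(line)

args = parser.parse_args(params)
print(args)
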
Code Example #2
def process(args):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    pid = fs.get_pid_or_none()
    if pid is not None and phlsys_pid.is_running(pid):
        raise Exception("already running")

    if not args.foreground:
        phlsys_daemonize.do(
            stdout_path=fs.layout.stdout,
            stderr_path=fs.layout.stderr)

    # important that we do this *after* daemonizing
    pid = phlsys_pid.get()
    fs.set_pid(pid)

    parser = argparse.ArgumentParser()
    params = []

    for line in open(fs.layout.root_config):
        params.append(line.strip())

    if args.no_loop:
        params.append('--no-loop')

    abdi_processrepos.setupParser(parser)
    args = parser.parse_args(params)
    abdi_processrepos.process(args, fs.repo_config_path_list())
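
Note the comment "important that we do this *after* daemonizing", which recurs in examples #1, #2, #4 and #5: phlsys_daemonize.do presumably forks (classic daemonization uses a double fork), so the PID has to be read after it returns; a PID taken earlier would name the original, short-lived process rather than the daemon. A tiny, hypothetical POSIX-only demonstration of the PID changing across a fork:

import os

pid_before_fork = os.getpid()
child = os.fork()
if child == 0:
    # The child (roughly what survives a classic double-fork daemonization)
    # has a different PID from the original process.
    print(os.getpid() != pid_before_fork)  # prints True
    os._exit(0)
os.waitpid(child, 0)
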
Code Example #3
    def test_A_Breathing(self):
        # CONCERN: can run phlsys_signal.set_exit_on_sigterm
        phlsys_signal.set_exit_on_sigterm()

        # CONCERN: exit_level is 0 before exit contexts are active
        self.assertEqual(phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
                         0)
        with phlsys_signal.no_exit_context():
            # CONCERN: exit_level is 1 while single exit context is active
            self.assertEqual(
                phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level, 1)
        # CONCERN: exit_level is 0 after single exit context finishes
        self.assertEqual(phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
                         0)
Code Example #4
def start_arcyd(daemonize=True, loop=True, restart=False):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            if restart:
                stop_arcyd_pid(pid)
            else:
                raise Exception("already running")

        if daemonize:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        for line in open(fs.layout.root_config):
            params.append(line.strip())

        if not loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    def logger_config():
        _setup_logger(fs)

    with phlsys_multiprocessing.logging_context(logger_config):
        _LOGGER.debug("start with args: {}".format(args))
        _LOGGER.info("arcyd started")
        try:
            abdi_processrepos.process(args, repo_configs)
        finally:
            _LOGGER.info("arcyd stopped")
Code Example #5
def process(args):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("already running")

        if not args.foreground:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        for line in open(fs.layout.root_config):
            params.append(line.strip())

        if args.no_loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    # setup to log everything to fs.layout.log_info, with a timestamp
    logging.Formatter.converter = time.gmtime
    logging.basicConfig(
        format='%(asctime)s UTC: %(levelname)s: %(message)s',
        level=logging.INFO,
        filename=fs.layout.log_info)

    _LOGGER.info("arcyd started")
    try:
        abdi_processrepos.process(args, repo_configs)
    finally:
        _LOGGER.info("arcyd stopped")
Code Example #6
    def test_A_Breathing(self):
        # CONCERN: can run phlsys_signal.set_exit_on_sigterm
        phlsys_signal.set_exit_on_sigterm()

        # CONCERN: exit_level is 0 before exit contexts are active
        self.assertEqual(
            phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
            0)
        with phlsys_signal.no_exit_context():
            # CONCERN: exit_level is 1 while single exit context is active
            self.assertEqual(
                phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
                1)
        # CONCERN: exit_level is 0 after single exit context finishes
        self.assertEqual(
            phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
            0)
Code Example #7
    def test_A_Breathing(self):
        # We must restore the original signal handler when we're done testing
        # or nose will hang indefinitely when we run the tests.
        handler = signal.getsignal(signal.SIGTERM)

        # CONCERN: can run phlsys_signal.set_exit_on_sigterm
        phlsys_signal.set_exit_on_sigterm()

        # CONCERN: exit_level is 0 before exit contexts are active
        self.assertEqual(phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
                         0)
        with phlsys_signal.no_exit_context():
            # CONCERN: exit_level is 1 while single exit context is active
            self.assertEqual(
                phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level, 1)
        # CONCERN: exit_level is 0 after single exit context finishes
        self.assertEqual(phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
                         0)

        signal.signal(signal.SIGTERM, handler)
Code Example #8
def process(args):

    phlsys_signal.set_exit_on_sigterm()
    if args.sendmail_binary:
        phlsys_sendmail.Sendmail.set_default_binary(
            args.sendmail_binary)

    if args.sendmail_type:
        phlsys_sendmail.Sendmail.set_default_params_from_type(
            args.sendmail_type)

    reporter_data = abdt_shareddictoutput.ToFile(args.status_path)
    reporter = abdt_arcydreporter.ArcydReporter(
        reporter_data, args.io_log_file)

    if args.external_error_logger:
        full_path = os.path.abspath(args.external_error_logger)
        reporter.set_external_system_error_logger(full_path)

    on_exception = abdt_exhandlers.make_exception_message_handler(
        args, reporter, None, "arcyd stopped with exception", "")

    arcyd_reporter_context = abdt_logging.arcyd_reporter_context
    with contextlib.closing(reporter), arcyd_reporter_context(reporter):

        try:
            _process(args, reporter)
        except BaseException:
            on_exception("Arcyd will now stop")
            print("stopping")
            raise

        if not args.no_loop:
            # we should never get here, raise and handle an exception if we do
            try:
                raise Exception("Arcyd stopped unexpectedly")
            except Exception:
                on_exception("Arcyd will now stop")
Code Example #9
    def test_A_Breathing(self):
        # We must restore the original signal handler when we're done testing
        # or nose will hang indefinitely when we run the tests.
        handler = signal.getsignal(signal.SIGTERM)

        # CONCERN: can run phlsys_signal.set_exit_on_sigterm
        phlsys_signal.set_exit_on_sigterm()

        # CONCERN: exit_level is 0 before exit contexts are active
        self.assertEqual(
            phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
            0)
        with phlsys_signal.no_exit_context():
            # CONCERN: exit_level is 1 while single exit context is active
            self.assertEqual(
                phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
                1)
        # CONCERN: exit_level is 0 after single exit context finishes
        self.assertEqual(
            phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
            0)

        signal.signal(signal.SIGTERM, handler)
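
The tests in examples #3, #6, #7 and #9 all assert on phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level around a no_exit_context() block. As a rough sketch of that counter-and-context-manager idea (an assumption for illustration only, not phlsys_signal's actual implementation), it could be written like this:

import contextlib
import signal
import sys


class _Flags(object):

    def __init__(self):
        self.delay_sigterm_exit_level = 0  # depth of active no-exit contexts
        self.got_sigterm = False  # a SIGTERM arrived while exit was delayed


_SIGNAL_FLAGS = _Flags()


def set_exit_on_sigterm():

    def handler(signum, frame):
        if _SIGNAL_FLAGS.delay_sigterm_exit_level == 0:
            sys.exit(1)  # no delaying context is active, exit right away
        _SIGNAL_FLAGS.got_sigterm = True  # remember the signal, exit later

    signal.signal(signal.SIGTERM, handler)


@contextlib.contextmanager
def no_exit_context():
    _SIGNAL_FLAGS.delay_sigterm_exit_level += 1
    try:
        yield
    finally:
        _SIGNAL_FLAGS.delay_sigterm_exit_level -= 1
        if not _SIGNAL_FLAGS.delay_sigterm_exit_level:
            if _SIGNAL_FLAGS.got_sigterm:
                sys.exit(1)
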
Code Example #10
import os
import sys

# append our module dirs to sys.path, which is the list of paths to search
# for modules; this is so we can import our libraries directly
# N.B. this magic is only really passable up-front in the entrypoint module
PARENT_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
BASE_DIR = os.path.dirname(PARENT_DIR)
sys.path.append(os.path.join(BASE_DIR, "py", "phl"))

import phlsys_fs
import phlsys_signal

phlsys_signal.set_exit_on_sigterm()

filename = "testfile"
count = 0
with phlsys_fs.lockfile_retry_context("lockfile", attempts=3, wait_secs=0.1):

    if os.path.isfile(filename):
        with open(filename) as f:
            count = int(f.read())

    count += 1
    with open(filename, "w") as f:
        f.write(str(count))


Code Example #11
import os
import sys

# append our module dirs to sys.path, which is the list of paths to search
# for modules; this is so we can import our libraries directly
# N.B. this magic is only really passable up-front in the entrypoint module
PARENT_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
BASE_DIR = os.path.dirname(PARENT_DIR)
sys.path.append(os.path.join(BASE_DIR, "py", "phl"))

import phlsys_fs
import phlsys_signal

phlsys_signal.set_exit_on_sigterm()

filename = 'testfile'
count = 0
with phlsys_fs.lockfile_retry_context('lockfile', attempts=3, wait_secs=0.1):

    if os.path.isfile(filename):
        with open(filename) as f:
            count = int(f.read())

    count += 1
    with open(filename, 'w') as f:
        f.write(str(count))
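
Examples #10 and #11 increment a counter in "testfile" while holding a lock taken with phlsys_fs.lockfile_retry_context, so several copies of the script can run at once without losing updates. A small, hypothetical harness (assuming the script above is saved as count_up.py, a name chosen here for illustration) to exercise that:

import subprocess

# Launch a few copies of the counter script concurrently; each one retries
# for the lockfile before reading and incrementing 'testfile'.
procs = [subprocess.Popen(['python', 'count_up.py']) for _ in range(5)]
for proc in procs:
    proc.wait()

with open('testfile') as f:
    # Expect '5' on a fresh run, provided every process managed to take the
    # lock within its three retry attempts.
    print(f.read())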
