def process(args):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()
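    # 'fs' is an accessor for arcyd's working directory: the pid file, the
    # daemon's stdout/stderr paths, the root config and the per-repository
    # config files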

    pid = fs.get_pid_or_none()
    if pid is not None and phlsys_pid.is_running(pid):
        raise Exception("already running")

    if not args.foreground:
        phlsys_daemonize.do(
            stdout_path=fs.layout.stdout,
            stderr_path=fs.layout.stderr)

    # important that we do this *after* daemonizing
    pid = phlsys_pid.get()
    fs.set_pid(pid)

    parser = argparse.ArgumentParser()
    params = []

    # each line of the root config file is treated as a command-line parameter
    with open(fs.layout.root_config) as config_file:
        for line in config_file:
            params.append(line.strip())

    if args.no_loop:
        params.append('--no-loop')

    abdi_processrepos.setupParser(parser)
    args = parser.parse_args(params)
    abdi_processrepos.process(args, fs.repo_config_path_list())
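
For illustration only (this wrapper is not part of the arcyd source), the
entry point above could be driven by a small front-end parser that defines
the '--foreground' and '--no-loop' flags which process() reads:

    import argparse

    def main():
        # hypothetical front-end; the flag names mirror args.foreground and
        # args.no_loop used in process() above
        parser = argparse.ArgumentParser(description="run the arcyd daemon")
        parser.add_argument('--foreground', action='store_true')
        parser.add_argument('--no-loop', action='store_true')
        process(parser.parse_args())

    if __name__ == '__main__':
        main()
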
Example #2
def start_arcyd(daemonize=True, loop=True, restart=False, stop_message=''):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
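        # hold the lockfile while reading and updating shared state (pid,
        # root config, per-repository configs)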
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            if restart:
                stop_arcyd_pid(pid, fs.layout.killfile, stop_message)
            else:
                raise Exception("already running")

        if daemonize:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if not loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    def logger_config():
        _setup_logger(fs, daemonize)

    with phlsys_multiprocessing.logging_context(logger_config):
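        # everything below runs with the logger configuration installed by
        # logger_config()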
        _LOGGER.debug("start with args: {}".format(args))
        while True:
            _LOGGER.info("arcyd started")
            try:
                exit_code = abdi_processrepos.process(args, repo_configs)
                _LOGGER.debug("arcyd process loop exit_code: %s", exit_code)
                if exit_code == abdi_processexitcodes.ExitCodes.ec_exit:
                    break
            finally:
                _LOGGER.info("arcyd stopped")

            _LOGGER.debug("reloading arcyd configuration")
            try:
                with fs.lockfile_context():
                    repo_configs = abdi_repoargs.parse_config_file_list(
                        fs.repo_config_path_list())
            except phlsys_fs.LockfileExistsError:
                _LOGGER.error("couldn't acquire lockfile, reload failed")
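
For illustration only (not from the original source), a single foreground
pass of start_arcyd() could be requested like this:

    # run one pass in the foreground, without forking into a daemon
    start_arcyd(daemonize=False, loop=False)
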
Example #3
    def test_B_Stdio(self):
        # look up the file descriptors of the original stdio objects
        stdio_fo = [sys.__stdin__, sys.__stdout__, sys.__stderr__]
        stdio_fd = [f.fileno() for f in stdio_fo]

        # create a subprocess to daemonize; the pipe will be used to
        # communicate information about the daemonized process back to the
        # parent
        pipe_r, pipe_w = os.pipe()

        # forkpty is used to make the situation more difficult for
        # phlsys_daemonize: the child becomes a controlling process, which
        # makes its children potential targets of SIGHUP on its exit
        child_pid, child_pty = os.forkpty()

        if child_pid == 0:
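            # child process: close the unused read end of the pipe before
            # daemonizing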
            os.close(pipe_r)

            phlsys_daemonize.do()

            pipe_wf = os.fdopen(pipe_w, 'w')
            try:
                # report that process survived the daemonization
                pipe_wf.write('daemonized\n')

                # dump file names of files opened as stdin, stdout and stderr
                proc_dir = '/proc/%i/' % os.getpid()
                for fd in stdio_fd:
                    proc_entry = 'fd/%i' % fd
                    target = os.readlink(proc_dir + proc_entry)
                    pipe_wf.write(proc_entry + ' => ' + target + '\n')

            except Exception as e:
                pipe_wf.write(str(e))

            sys.exit(0)
Example #4
def start_arcyd(daemonize=True, loop=True, restart=False):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            if restart:
                stop_arcyd_pid(pid)
            else:
                raise Exception("already running")

        if daemonize:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if not loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    def logger_config():
        _setup_logger(fs)

    with phlsys_multiprocessing.logging_context(logger_config):
        _LOGGER.debug("start with args: {}".format(args))
        _LOGGER.info("arcyd started")
        try:
            abdi_processrepos.process(args, repo_configs)
        finally:
            _LOGGER.info("arcyd stopped")
Example #5
def process(args):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("already running")

        if not args.foreground:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if args.no_loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    # setup to log everything to fs.layout.log_info, with a timestamp
    logging.Formatter.converter = time.gmtime
    logging.basicConfig(
        format='%(asctime)s UTC: %(levelname)s: %(message)s',
        level=logging.INFO,
        filename=fs.layout.log_info)

    _LOGGER.info("arcyd started")
    try:
        abdi_processrepos.process(args, repo_configs)
    finally:
        _LOGGER.info("arcyd stopped")