Example #1
def start_arcyd(daemonize=True, loop=True, restart=False, stop_message=''):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            if restart:
                stop_arcyd_pid(pid, fs.layout.killfile, stop_message)
            else:
                raise Exception("already running")

        if daemonize:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if not loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    def logger_config():
        _setup_logger(fs, daemonize)

    with phlsys_multiprocessing.logging_context(logger_config):
        _LOGGER.debug("start with args: {}".format(args))
        while True:
            _LOGGER.info("arcyd started")
            try:
                exit_code = abdi_processrepos.process(args, repo_configs)
                _LOGGER.debug("arcyd process loop exit_code: %s", exit_code)
                if exit_code == abdi_processexitcodes.ExitCodes.ec_exit:
                    break
            finally:
                _LOGGER.info("arcyd stopped")

            _LOGGER.debug("reloading arcyd configuration")
            try:
                with fs.lockfile_context():
                    repo_configs = abdi_repoargs.parse_config_file_list(
                        fs.repo_config_path_list())
            except phlsys_fs.LockfileExistsError:
                _LOGGER.error("couldn't acquire lockfile, reload failed")
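
The '# important that we do this *after* daemonizing' comment above is load-bearing: daemonizing forks, so a pid captured beforehand would name a process that is about to exit. A minimal stdlib-only sketch of that ordering (it assumes phlsys_daemonize forks in the usual style; its internals are not shown here):

import os


def pid_after_fork_demo():
    # A pid recorded before the fork belongs to the soon-to-exit parent;
    # only the child's pid is worth writing to the pidfile.
    pre_fork_pid = os.getpid()
    if os.fork():
        os._exit(0)  # parent exits, as the first fork of a daemonize does
    assert os.getpid() != pre_fork_pid
    print("pid to record:", os.getpid())


if __name__ == '__main__':
    pid_after_fork_demo()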
Example #2
def _determine_name_from_url(fs, repo_url):
    """Return the string name of the repository for 'repo_url' or raise.

    If there is not exactly one repository config that refers to 'repo_url'
    then raise.

    :fs: abdt_fs.Accessor to use to determine the repo name from the url
    :repo_url: string clone url of the repository we fetch from / push to
    :returns: string name of the matching config

    """
    repo_config_path_list = fs.repo_config_path_list()
    repo_name_config_list = abdi_repoargs.parse_config_file_list(
        repo_config_path_list)

    candidate_names = []

    for repo_name, repo_config in repo_name_config_list:
        url = abdi_repoargs.get_repo_url(repo_config)
        if repo_url == url:
            candidate_names.append(repo_name)

    if not candidate_names:
        raise Exception("url '{url}' didn't match any names".format(
            url=repo_url))

    if len(candidate_names) > 1:
        raise Exception("url '{url}' matches many names:\n{names}\n".format(
            url=repo_url,
            names=candidate_names))

    return candidate_names[0]
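
A usage sketch, assuming an initialised arcyd working directory; the clone URL here is hypothetical:

fs = abdt_fs.make_default_accessor()

# 'git@git.example.com:widgets.git' is a made-up URL for illustration.
try:
    name = _determine_name_from_url(fs, 'git@git.example.com:widgets.git')
    print('matched repo config:', name)
except Exception as e:
    print('no unique match:', e)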
Example #3
def process(args):

    fs = abdt_fs.make_default_accessor()

    exit_code = 0

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("cannot fsck whilst arcyd is running.")

        repo_config_path_list = _determine_repo_config_path_list(
            fs, args.repos)

        if not _check_repo_config_path_list(repo_config_path_list):
            exit_code = 1

        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        if not _check_repo_name_config_list(args, repo_name_config_list):
            exit_code = 1

    if exit_code != 0 and not args.fix:
        print("use '--fix' to attempt to fix the issues")

    return exit_code
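
A sketch of how a check command like this maps onto a shell exit status; the parser is an assumption, with flags spelled to match the args.fix and args.repos attributes read above:

import argparse
import sys


def main():
    # Hypothetical wiring: argparse maps '--fix' to args.fix and
    # '--repos' to args.repos, the attributes process() reads.
    parser = argparse.ArgumentParser(description='check the arcyd files')
    parser.add_argument('--fix', action='store_true')
    parser.add_argument('--repos', nargs='*', default=None)
    sys.exit(process(parser.parse_args()))


if __name__ == '__main__':
    main()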
Example #4
def _determine_name_from_url(fs, repo_url):
    """Return the string name of the repository for 'repo_url' or raise.

    If there is not exactly one repository config that refers to 'repo_url'
    then raise.

    :fs: abdt_fs.Accessor to use to determine the repo name from the url
    :repo_url: string clone url of the repository we fetch from / push to
    :returns: string name of the matching config

    """
    repo_config_path_list = fs.repo_config_path_list()
    repo_name_config_list = abdi_repoargs.parse_config_file_list(
        repo_config_path_list)

    candidate_names = []

    for repo_name, repo_config in repo_name_config_list:
        url = abdi_repoargs.get_repo_url(repo_config)
        if repo_url == url:
            candidate_names.append(repo_name)

    if not candidate_names:
        raise Exception(
            "url '{url}' didn't match any names".format(url=repo_url))

    if len(candidate_names) > 1:
        raise Exception("url '{url}' matches many names:\n{names}\n".format(
            url=repo_url, names=candidate_names))

    return candidate_names[0]
Example #5
def process(args):

    _ = args  # NOQA
    fs = abdt_fs.make_default_accessor()

    repo_config_path_list = fs.repo_config_path_list()
    repo_name_config_list = abdi_repoargs.parse_config_file_list(
        repo_config_path_list)

    url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
        fs.layout.urlwatcher_cache_path)

    url_watcher_wrapper.watcher.refresh()

    for repo_name, repo_config in repo_name_config_list:
        print(repo_name + '..', end=' ')
        snoop_url = abdi_repoargs.get_repo_snoop_url(repo_config)

        abd_repo = abdt_git.Repo(
            phlsys_git.Repo(repo_config.repo_path),
            "origin",
            repo_config.repo_desc)

        did_fetch = abdi_processrepoargs.fetch_if_needed(
            url_watcher_wrapper.watcher,
            snoop_url,
            abd_repo,
            repo_config.repo_desc)

        if did_fetch:
            print('fetched')
        else:
            print('skipped')

        url_watcher_wrapper.save()
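
The snoop URL is a cheap change detector: hit one page on the hosting server and only run the expensive fetch when its content moves. A stdlib-only sketch of that idea (phlurl_watcher's actual API is not reproduced here):

import hashlib
import urllib.request


def fetch_if_snoop_changed(snoop_url, last_digest, fetch):
    # Hash the snoop page; fetch only when the digest differs from the
    # one cached on the previous pass. Returns the digest to cache next.
    with urllib.request.urlopen(snoop_url) as response:
        digest = hashlib.sha1(response.read()).hexdigest()
    if digest != last_digest:
        fetch()
    return digest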
Example #6
def process(args):

    _ = args  # NOQA
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("cannot fetch whilst arcyd is running.")

        repo_config_path_list = fs.repo_config_path_list()
        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
            fs.layout.urlwatcher_cache_path)

        # Let the user know what's happening before potentially blocking for a
        # while.
        print('Refreshing repository snoop status ..', end=' ')
        # Make sure that the output is actually visible by flushing stdout
        # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
        sys.stdout.flush()

        url_watcher_wrapper.watcher.refresh()
        print("done")

        for repo_name, repo_config in repo_name_config_list:
            print(repo_name + ' ..', end=' ')

            # Make sure that the output is actually visible by flushing stdout
            # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
            sys.stdout.flush()

            snoop_url = abdi_repoargs.get_repo_snoop_url(repo_config)

            sys_repo = phlsys_git.Repo(repo_config.repo_path)
            refcache_repo = phlgitx_refcache.Repo(sys_repo)
            differ_cache = abdt_differresultcache.Cache(refcache_repo)
            abd_repo = abdt_git.Repo(
                refcache_repo,
                differ_cache,
                "origin",
                repo_config.repo_desc)

            did_fetch = abdi_processrepoarglist.fetch_if_needed(
                url_watcher_wrapper.watcher,
                snoop_url,
                abd_repo,
                repo_config.repo_desc)

            if did_fetch:
                print('fetched')
            else:
                print('skipped')

            url_watcher_wrapper.save()
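
On Python 3.3 and later the flushes above fold into the print call itself, which is exactly what the XXX comments anticipate:

# Python 3.3+ equivalent of print-then-flush:
print('Refreshing repository snoop status ..', end=' ', flush=True)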
Example #7
def process(args):

    _ = args  # NOQA
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("cannot fetch whilst arcyd is running.")

        repo_config_path_list = fs.repo_config_path_list()
        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
            fs.layout.urlwatcher_cache_path)

        # Let the user know what's happening before potentially blocking for a
        # while.
        print('Refreshing repository snoop status ..', end=' ')
        # Make sure that the output is actually visible by flushing stdout
        # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
        sys.stdout.flush()

        url_watcher_wrapper.watcher.refresh()
        print("done")

        for repo_name, repo_config in repo_name_config_list:
            print(repo_name + ' ..', end=' ')

            # Make sure that the output is actually visible by flushing stdout
            # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
            sys.stdout.flush()

            snoop_url = abdi_repoargs.get_repo_snoop_url(repo_config)

            sys_repo = phlsys_git.Repo(repo_config.repo_path)
            refcache_repo = phlgitx_refcache.Repo(sys_repo)
            differ_cache = abdt_differresultcache.Cache(refcache_repo)
            abd_repo = abdt_git.Repo(refcache_repo, differ_cache, "origin",
                                     repo_config.repo_desc)

            did_fetch = abdi_processrepoarglist.fetch_if_needed(
                url_watcher_wrapper.watcher, snoop_url, abd_repo,
                repo_config.repo_desc)

            if did_fetch:
                print('fetched')
            else:
                print('skipped')

            url_watcher_wrapper.save()
Example #8
def do(
        repo_config_path_list,
        sys_admin_emails,
        kill_file,
        reset_file,
        pause_file,
        sleep_secs,
        is_no_loop,
        reporter):

    repo_configs = abdi_repoargs.parse_config_file_list(repo_config_path_list)

    # TODO: test write access to repos here

    operations = []
    conduits = {}

    fs_accessor = abdt_fs.make_default_accessor()
    url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
        fs_accessor.layout.urlwatcher_cache_path)

    # refresh cache after loading and before any repos are processed, otherwise
    # we may not pull when we need to on the first run around the loop.
    # TODO: wrap in usual retry handlers so that we can start up in unstable
    #       environments
    url_watcher_wrapper.watcher.refresh()

    _append_operations_for_repos(
        operations,
        reporter,
        conduits,
        url_watcher_wrapper,
        sys_admin_emails,
        repo_configs)

    _append_interrupt_operations(
        operations,
        sys_admin_emails,
        kill_file,
        reset_file,
        pause_file,
        sleep_secs,
        reporter)

    operations.append(
        abdi_operation.RefreshCaches(
            conduits, url_watcher_wrapper.watcher, reporter))

    _process_operations(
        is_no_loop, operations, sys_admin_emails, reporter)
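
The operations list is the scheduling backbone of do(): repo processing, interrupt checks, and cache refreshes are all queued as uniform entries and polled in order, every cycle. A minimal sketch of that shape (the real interface in abdi_operation is an assumption; here an operation is a callable returning False to request shutdown):

def run_operations(operations, is_no_loop):
    # Poll every operation in order, each cycle, until one asks to stop.
    while True:
        for operation in operations:
            if operation() is False:
                return
        if is_no_loop:
            return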
Example #9
def process(args):
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():

        repo_config_path_list = fs.repo_config_path_list()
        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        for _, repo_config in repo_name_config_list:
            if args.only_formatted_repo_urls:
                print(abdi_repoargs.get_repo_url(repo_config))
            else:
                print(repo_config)
Example #10
def start_arcyd(daemonize=True, loop=True, restart=False):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            if restart:
                stop_arcyd_pid(pid)
            else:
                raise Exception("already running")

        if daemonize:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if not loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    def logger_config():
        _setup_logger(fs)

    with phlsys_multiprocessing.logging_context(logger_config):
        _LOGGER.debug("start with args: {}".format(args))
        _LOGGER.info("arcyd started")
        try:
            abdi_processrepos.process(args, repo_configs)
        finally:
            _LOGGER.info("arcyd stopped")
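
Note that logger_config is handed over as a callable rather than invoked here, so the logging context can apply it in whichever process ends up emitting records. A stdlib-only sketch of that inversion (phlsys_multiprocessing's real mechanics are not shown):

import contextlib
import logging


@contextlib.contextmanager
def logging_context(configure):
    # Run the caller's configuration on entry; a multiprocessing-aware
    # version would repeat this in each worker process it spawns.
    configure()
    try:
        yield
    finally:
        logging.shutdown()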
Example #11
def process(args):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("already running")

        if not args.foreground:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if args.no_loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    # setup to log everything to fs.layout.log_info, with a timestamp
    logging.Formatter.converter = time.gmtime
    logging.basicConfig(
        format='%(asctime)s UTC: %(levelname)s: %(message)s',
        level=logging.INFO,
        filename=fs.layout.log_info)

    _LOGGER.info("arcyd started")
    try:
        abdi_processrepos.process(args, repo_configs)
    finally:
        _LOGGER.info("arcyd stopped")
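
Setting logging.Formatter.converter to time.gmtime, as above, switches asctime rendering to UTC process-wide, since converter is a class attribute that defaults to time.localtime; a quick standalone check:

import logging
import time

logging.Formatter.converter = time.gmtime
logging.basicConfig(format='%(asctime)s UTC: %(levelname)s: %(message)s')
logging.error('timestamps now render in UTC')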
Example #12
def process(args):
    # exit gracefully if this process is killed
    phlsys_signal.set_exit_on_sigterm()

    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("already running")

        if not args.foreground:
            phlsys_daemonize.do(
                stdout_path=fs.layout.stdout,
                stderr_path=fs.layout.stderr)

        # important that we do this *after* daemonizing
        pid = phlsys_pid.get()
        fs.set_pid(pid)

        parser = argparse.ArgumentParser()
        params = []

        with open(fs.layout.root_config) as config_file:
            for line in config_file:
                params.append(line.strip())

        if args.no_loop:
            params.append('--no-loop')

        repo_configs = abdi_repoargs.parse_config_file_list(
            fs.repo_config_path_list())

        abdi_processrepos.setupParser(parser)
        args = parser.parse_args(params)

    abdi_processrepos.process(args, repo_configs)
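
Flag wiring that these start-style commands expect, sketched from the attributes read above (args.foreground and args.no_loop imply the spellings; the parser itself is an assumption):

import argparse


def main():
    # argparse converts '--no-loop' to args.no_loop and '--foreground'
    # to args.foreground, matching the attributes process() reads.
    parser = argparse.ArgumentParser(description='start arcyd')
    parser.add_argument('--foreground', action='store_true')
    parser.add_argument('--no-loop', action='store_true')
    process(parser.parse_args())


if __name__ == '__main__':
    main()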