Example #1
0
def process(args):
    """Check that every configured repository exists on disk.

    Missing repositories are reported; with '--fix' they are cloned from
    their configured URL instead of counting as a failure.

    :args: parsed command-line argument namespace (uses 'args.fix')
    :returns: 0 if everything is fine, 1 if problems remain

    """
    accessor = abdt_fs.make_default_accessor()

    result = 0

    for config_path in accessor.repo_config_path_list():
        # each repo config file holds one arcyd repo argument per line
        arg_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
        abdi_repoargs.setup_parser(arg_parser)

        with open(config_path) as config_file:
            params = arg_parser.parse_args(
                [raw.strip() for raw in config_file])

        if os.path.isdir(params.repo_path):
            continue

        print("'{}' is missing repo '{}'".format(
            config_path, params.repo_path))

        if not args.fix:
            result = 1
            continue

        url = abdi_repoargs.get_repo_url(params)
        print("cloning '{}' ..".format(url))
        abdi_repo.setup_repo(url, params.repo_path)

    if result != 0 and not args.fix:
        print("use '--fix' to attempt to fix the issues")

    return result
def _repos_from_configs(repo_configs):
    """Build a list of (name, params) tuples from repo config references.

    :repo_configs: list of single-item lists, each like '@<path-to-config>'
    :returns: list of (repo_name, parsed argparse namespace) tuples

    """
    result = []

    for config in repo_configs:
        arg_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
        abdi_repoargs.setup_parser(arg_parser)

        # the reference arrives (oddly) as a single-item list, e.g.
        # ['@var/repo/myrepo']; the repo name is the final path component
        # with the '@' prefix removed
        name = config[0][1:].split('/')[-1]

        parsed = arg_parser.parse_args(config)
        abdi_repoargs.validate_args(parsed)
        result.append((name, parsed))

    return result
Example #3
0
def process(args):
    """Check each configured repository exists and ignores ident attributes.

    With '--fix', missing repositories are cloned and repositories that do
    not ignore ident attributes are adjusted to do so.  All work happens
    under the arcyd file-system lock.

    :args: parsed command-line argument namespace (uses 'args.fix')
    :returns: 0 if everything is fine, 1 if problems remain

    """
    accessor = abdt_fs.make_default_accessor()

    result = 0

    with accessor.lockfile_context():
        for config_path in accessor.repo_config_path_list():
            arg_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
            abdi_repoargs.setup_parser(arg_parser)

            with open(config_path) as config_file:
                params = arg_parser.parse_args(
                    [raw.strip() for raw in config_file])

            if not os.path.isdir(params.repo_path):
                print("'{}' is missing repo '{}'".format(
                    config_path, params.repo_path))
                if args.fix:
                    url = abdi_repoargs.get_repo_url(params)
                    print("cloning '{}' ..".format(url))
                    abdi_repo.setup_repo(url, params.repo_path)
                else:
                    result = 1
                continue

            # the repo exists; make sure it ignores ident attributes
            is_ignoring = phlgitx_ignoreident.is_repo_definitely_ignoring
            if is_ignoring(params.repo_path):
                continue

            print("'{}' is not ignoring ident attributes".format(
                params.repo_path))
            if args.fix:
                print("setting {} to ignore ident ..".format(
                    params.repo_path))
                phlgitx_ignoreident.ensure_repo_ignoring(params.repo_path)
            else:
                result = 1

    if result != 0 and not args.fix:
        print("use '--fix' to attempt to fix the issues")

    return result
Example #4
0
def process(args):
    """Create and register a new repository configuration (with push URL).

    Derives a name and description when they are not supplied, renders the
    config file, verifies the snoop URL (if any), then clones the repo and
    saves the config under the arcyd file-system lock.

    :args: parsed command-line argument namespace
    :returns: None

    """
    accessor = abdt_fs.make_default_accessor()

    name = args.name if args.name is not None else _repo_name_for_params(
        args.phabricator_name, args.repohost_name, args.repo_url)

    desc = args.repo_desc if args.repo_desc is not None else (
        _repo_desc_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url))

    try_touch_path = accessor.layout.repo_try(name)
    ok_touch_path = accessor.layout.repo_ok(name)
    repo_path = accessor.layout.repo(name)

    # refuse to clobber an existing repository
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # these lookups also ensure the referenced configs actually exist
    phab_config_path = accessor.get_phabricator_config_rel_path(
        args.phabricator_name)
    repohost_config_path = accessor.get_repohost_config_rel_path(
        args.repohost_name)

    # render the config file content
    config = _CONFIG.format(
        phabricator_config=phab_config_path,
        repohost_config=repohost_config_path,
        repo_desc=desc,
        repo_url=args.repo_url,
        repo_path=repo_path,
        try_touch_path=try_touch_path,
        ok_touch_path=ok_touch_path)

    if args.admin_emails:
        admin_block = _CONFIG_ADMIN_EMAILS_FORMAT.format(
            admin_emails='\n'.join(args.admin_emails))
        config = '\n'.join([config, admin_block])

    # round-trip the config through the real repo argument parser
    arg_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    abdi_repoargs.setup_parser(arg_parser)
    params = arg_parser.parse_args(config.splitlines())

    abdi_repoargs.validate_args(params)

    # fail early if the snoop URL is unusable
    snoop_url = abdi_repoargs.get_repo_snoop_url(params)
    if snoop_url:
        phlurl_request.get(snoop_url)

    # the definitive fetch / push urls come from the parsed params
    url = abdi_repoargs.get_repo_url(params)
    push_url = abdi_repoargs.get_repo_push_url(params)

    with accessor.lockfile_context():
        with abdi_repo.setup_repo_context(url, repo_path, push_url):
            accessor.create_repo_config(name, config)
Example #5
0
def process(args):
    """Add a new repository configuration to this arcyd instance.

    Derives a name and description when they are not supplied, renders the
    config file, verifies the snoop URL (if any), then clones the repo and
    saves the config under the arcyd file-system lock.

    :args: parsed command-line argument namespace
    :returns: None

    """
    accessor = abdt_fs.make_default_accessor()

    if args.name is not None:
        name = args.name
    else:
        name = _repo_name_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    if args.repo_desc is not None:
        description = args.repo_desc
    else:
        description = _repo_desc_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    try_path = accessor.layout.repo_try(name)
    ok_path = accessor.layout.repo_ok(name)
    repo_path = accessor.layout.repo(name)

    # refuse to clobber an existing repository
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # these lookups also ensure the referenced configs actually exist
    phab_config = accessor.get_phabricator_config_rel_path(
        args.phabricator_name)
    repohost_config = accessor.get_repohost_config_rel_path(
        args.repohost_name)

    # render the config file content
    config = _CONFIG.format(
        phabricator_config=phab_config,
        repohost_config=repohost_config,
        repo_desc=description,
        repo_url=args.repo_url,
        repo_path=repo_path,
        try_touch_path=try_path,
        ok_touch_path=ok_path)

    if args.admin_emails:
        config = '\n'.join([
            config,
            _CONFIG_ADMIN_EMAILS_FORMAT.format(
                admin_emails='\n'.join(args.admin_emails))])

    # round-trip the config through the real repo argument parser
    repo_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    abdi_repoargs.setup_parser(repo_parser)
    params = repo_parser.parse_args(config.splitlines())

    abdi_repoargs.validate_args(params)

    # fail early if the snoop URL is unusable
    snoop_url = abdi_repoargs.get_repo_snoop_url(params)
    if snoop_url:
        phlurl_request.get(snoop_url)

    # the definitive fetch url comes from the parsed params
    fetch_url = abdi_repoargs.get_repo_url(params)

    with accessor.lockfile_context():
        with abdi_repo.setup_repo_context(fetch_url, repo_path):
            accessor.create_repo_config(name, config)
Example #6
0
def _iter_repo_args(abdt_accessor):
    """Generate parsed repo arguments for every configured repository.

    :abdt_accessor: accessor used to enumerate repo config file paths
    :yields: one parsed argparse namespace per repo config file

    """
    for config_path in abdt_accessor.repo_config_path_list():
        repo_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
        abdi_repoargs.setup_parser(repo_parser)
        # '@<path>' makes argparse read the arguments from the file itself
        yield repo_parser.parse_args(['@' + config_path])
def _process(args, reporter):
    """Drive the main arcyd processing of every configured repository.

    Builds one retrying operation per repository plus housekeeping
    operations (special-file checks, sleeping between passes, cache
    refreshes), then either processes them once ('--no-loop') or loops
    forever.

    :args: parsed command-line argument namespace
    :reporter: reporter object handed to handlers and operations
    :returns: None (may call sys.exit(1) in the '--no-loop' branch)

    """

    retry_delays = _get_retry_delays()

    # parse each '@<path>' repo config reference into a (name, params) pair
    repos = []
    for repo in args.repo_configs:
        parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
        abdi_repoargs.setup_parser(parser)
        repo_name = repo[0]  # oddly this comes to us as a list
        repo_name = repo_name[1:]  # strip off the '@' prefix
        repo_args = (repo_name, parser.parse_args(repo))
        repos.append(repo_args)

    out = phlsys_statusline.StatusLine()

    # TODO: test write access to repos here

    operations = []
    conduits = {}
    url_watcher = phlurl_watcher.Watcher()

    urlwatcher_cache_path = os.path.abspath('.arcyd.urlwatcher.cache')

    # load the url watcher cache (if any)
    if os.path.isfile(urlwatcher_cache_path):
        with open(urlwatcher_cache_path) as f:
            url_watcher.load(f)

    for repo, repo_args in repos:

        # create a function to update this particular repo.
        #
        # use partial to ensure we capture the value of the variables,
        # note that a closure would use the latest value of the variables
        # rather than the value at declaration time.
        process_func = functools.partial(
            process_single_repo,
            repo,
            repo_args,
            out,
            reporter,
            conduits,
            url_watcher,
            urlwatcher_cache_path)

        # each repo gets its own repo-specific exception delay handler
        on_exception_delay = abdt_exhandlers.make_exception_delay_handler(
            args, reporter, repo)
        operation = phlsys_scheduleunreliables.DelayedRetryNotifyOperation(
            process_func,
            list(retry_delays),  # make a copy to be sure
            on_exception_delay)

        operations.append(operation)

    def on_pause():
        # a repo-independent handler (repo=None) used while paused
        on_exception_delay = abdt_exhandlers.make_exception_delay_handler(
            args, reporter, None)
        on_exception_delay("until_file_removed")

    operations.append(
        FileCheckOperation(
            args.kill_file,
            args.reset_file,
            args.pause_file,
            on_pause))

    operations.append(
        DelayedRetrySleepOperation(
            out, args.sleep_secs, reporter))

    operations.append(
        RefreshCachesOperation(
            conduits, url_watcher, reporter))

    if args.no_loop:
        def process_once():
            return phlsys_scheduleunreliables.process_once(list(operations))

        # NOTE(review): 'on_exception_delay' here is the leftover loop
        # variable from the last repo in the loop above; if 'repos' is
        # empty this raises NameError.  Confirm whether a repo-independent
        # handler (as built in on_pause) was intended instead.
        new_ops = tryHandleSpecialFiles(process_once, on_exception_delay)
        if new_ops != set(operations):
            print 'ERROR: some operations failed'
            sys.exit(1)
    else:
        def loopForever():
            phlsys_scheduleunreliables.process_loop_forever(list(operations))

        while True:
            # NOTE(review): also reuses the last repo's 'on_exception_delay'
            # - see the note in the '--no-loop' branch above
            tryHandleSpecialFiles(loopForever, on_exception_delay)