def _process_repo(repo, unused_repo_name, args, arcyd_conduit, url_watcher, mail_sender):
    """Fetch `repo` when needed, then process its managed review branches.

    :repo: the git repository to operate on
    :unused_repo_name: ignored
    :args: the parsed repository config arguments
    :arcyd_conduit: conduit to use when updating reviews
    :url_watcher: watcher consulted to decide whether a fetch is needed
    :mail_sender: sender used to construct the admin mailer
    :returns: None

    """
    fetch_if_needed(
        url_watcher,
        abdi_repoargs.get_repo_snoop_url(args),
        repo,
        args.repo_desc)

    # TODO: this should be a URI for users not conduit
    mailer = abdmail_mailer.Mailer(
        mail_sender,
        set(_flatten_list(args.admin_emails)),
        args.repo_desc,
        args.instance_uri)

    # only build a branch-url callable when a format string is configured
    url_maker = None
    if args.branch_url_format:

        def make_branch_url(branch_name):
            return args.branch_url_format.format(
                branch=branch_name,
                repo_url=args.repo_url)

        url_maker = make_branch_url

    naming = abdt_compositenaming.Naming(
        abdt_classicnaming.Naming(),
        abdt_rbranchnaming.Naming())

    managed = abdt_git.get_managed_branches(
        repo,
        args.repo_desc,
        naming,
        url_maker)

    abdi_processrepo.process_branches(managed, arcyd_conduit, mailer)
def process(args):
    """Fetch each configured repository, skipping those whose snoop URL is unchanged.

    :args: parsed command-line arguments (unused)
    :returns: None

    """
    _ = args  # NOQA
    accessor = abdt_fs.make_default_accessor()

    name_config_pairs = abdi_repoargs.parse_config_file_list(
        accessor.repo_config_path_list())

    watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
        accessor.layout.urlwatcher_cache_path)
    watcher_wrapper.watcher.refresh()

    for name, config in name_config_pairs:
        print(name + '..', end=' ')

        repo = abdt_git.Repo(
            phlsys_git.Repo(config.repo_path),
            "origin",
            config.repo_desc)

        did_fetch = abdi_processrepoargs.fetch_if_needed(
            watcher_wrapper.watcher,
            abdi_repoargs.get_repo_snoop_url(config),
            repo,
            config.repo_desc)

        print('fetched' if did_fetch else 'skipped')

    watcher_wrapper.save()
def process(args):
    """Fetch all configured repositories that need it, refusing to race arcyd.

    Takes the filesystem lock, aborts if an arcyd instance is running, then
    refreshes the URL-watcher cache and fetches each repo whose snoop URL
    indicates a fetch is needed.

    :args: parsed command-line arguments (unused)
    :returns: None
    :raises: Exception if arcyd is currently running

    """
    _ = args  # NOQA
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        # refuse to fetch concurrently with a running arcyd instance
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("cannot fetch whilst arcyd is running.")

        repo_config_path_list = fs.repo_config_path_list()
        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
            fs.layout.urlwatcher_cache_path)

        # Let the user know what's happening before potentially blocking for a
        # while.
        print('Refreshing repository snoop status ..', end=' ')
        # Make sure that the output is actually visible by flushing stdout
        # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
        sys.stdout.flush()

        # BUGFIX: perform the refresh before announcing "done"; previously
        # "done" was printed before the (potentially slow) refresh ran.
        url_watcher_wrapper.watcher.refresh()
        print("done")

        for repo_name, repo_config in repo_name_config_list:
            print(repo_name + ' ..', end=' ')
            # Make sure that the output is actually visible by flushing stdout
            # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
            sys.stdout.flush()

            snoop_url = abdi_repoargs.get_repo_snoop_url(repo_config)

            sys_repo = phlsys_git.Repo(repo_config.repo_path)
            refcache_repo = phlgitx_refcache.Repo(sys_repo)
            differ_cache = abdt_differresultcache.Cache(refcache_repo)
            abd_repo = abdt_git.Repo(
                refcache_repo,
                differ_cache,
                "origin",
                repo_config.repo_desc)

            did_fetch = abdi_processrepoarglist.fetch_if_needed(
                url_watcher_wrapper.watcher,
                snoop_url,
                abd_repo,
                repo_config.repo_desc)

            if did_fetch:
                print('fetched')
            else:
                print('skipped')

        url_watcher_wrapper.save()
def process(args):
    """Fetch all configured repositories that need it, refusing to race arcyd.

    Takes the filesystem lock, aborts if an arcyd instance is running, then
    refreshes the URL-watcher cache and fetches each repo whose snoop URL
    indicates a fetch is needed.

    :args: parsed command-line arguments (unused)
    :returns: None
    :raises: Exception if arcyd is currently running

    """
    _ = args  # NOQA
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        # refuse to fetch concurrently with a running arcyd instance
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("cannot fetch whilst arcyd is running.")

        repo_config_path_list = fs.repo_config_path_list()
        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
            fs.layout.urlwatcher_cache_path)

        # Let the user know what's happening before potentially blocking for a
        # while.
        print('Refreshing repository snoop status ..', end=' ')
        # Make sure that the output is actually visible by flushing stdout
        # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
        sys.stdout.flush()

        # BUGFIX: perform the refresh before announcing "done"; previously
        # "done" was printed before the (potentially slow) refresh ran.
        url_watcher_wrapper.watcher.refresh()
        print("done")

        for repo_name, repo_config in repo_name_config_list:
            print(repo_name + ' ..', end=' ')
            # Make sure that the output is actually visible by flushing stdout
            # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
            sys.stdout.flush()

            snoop_url = abdi_repoargs.get_repo_snoop_url(repo_config)

            sys_repo = phlsys_git.Repo(repo_config.repo_path)
            refcache_repo = phlgitx_refcache.Repo(sys_repo)
            differ_cache = abdt_differresultcache.Cache(refcache_repo)
            abd_repo = abdt_git.Repo(
                refcache_repo, differ_cache, "origin", repo_config.repo_desc)

            did_fetch = abdi_processrepoarglist.fetch_if_needed(
                url_watcher_wrapper.watcher,
                snoop_url,
                abd_repo,
                repo_config.repo_desc)

            if did_fetch:
                print('fetched')
            else:
                print('skipped')

        url_watcher_wrapper.save()
def _process_repo(
        repo, unused_repo_name, args, arcyd_conduit, url_watcher, mail_sender):
    """Fetch `repo` when needed, then process its managed review branches.

    :repo: the git repository to operate on
    :unused_repo_name: ignored
    :args: the parsed repository config arguments
    :arcyd_conduit: conduit to use when updating reviews
    :url_watcher: watcher consulted to decide whether a fetch is needed
    :mail_sender: sender used to construct the admin mailer
    :returns: None

    """
    # fetch only if the watcher / snoop URL indicates it's needed
    fetch_if_needed(
        url_watcher,
        abdi_repoargs.get_repo_snoop_url(args),
        repo,
        args.repo_desc)

    # de-duplicate admin addresses; config may list the same one repeatedly
    admin_emails = set(_flatten_list(args.admin_emails))

    # TODO: this should be a URI for users not conduit
    mailer = abdmail_mailer.Mailer(
        mail_sender,
        admin_emails,
        args.repo_desc,
        args.instance_uri)

    # a branch URL is optional; only build the callable when a format
    # string has been configured
    branch_url_callable = None
    if args.branch_url_format:

        def make_branch_url(branch_name):
            return args.branch_url_format.format(
                branch=branch_name,
                repo_url=args.repo_url)

        branch_url_callable = make_branch_url

    # combine the 'classic' and 'rbranch' naming schemes so branches in
    # either scheme are recognised
    branch_naming = abdt_compositenaming.Naming(
        abdt_classicnaming.Naming(),
        abdt_rbranchnaming.Naming())

    branches = abdt_git.get_managed_branches(
        repo,
        args.repo_desc,
        branch_naming,
        branch_url_callable)

    abdi_processrepo.process_branches(branches, arcyd_conduit, mailer)
def process(args):
    """Create and register a new repository configuration from `args`.

    Derives a repo name and description when not supplied, renders the
    config file text, validates it by re-parsing with the real repo parser,
    verifies the snoop URL is reachable, then sets up the repository and
    writes the config under the filesystem lock.

    :args: the parsed command-line arguments
    :returns: None
    :raises: Exception if the repository path already exists

    """
    fs = abdt_fs.make_default_accessor()

    repo_name = args.name
    if repo_name is None:
        # derive a name from the phabricator / repohost / url parameters
        repo_name = _repo_name_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    repo_desc = args.repo_desc
    if repo_desc is None:
        # derive a description from the phabricator / repohost / url params
        repo_desc = _repo_desc_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    try_touch_path = fs.layout.repo_try(repo_name)
    ok_touch_path = fs.layout.repo_ok(repo_name)
    repo_path = fs.layout.repo(repo_name)

    # make sure the repo doesn't exist already
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # make sure the phabricator config exists
    phab_config_path = fs.get_phabricator_config_rel_path(
        args.phabricator_name)

    # make sure the repohost config exists
    repohost_config_path = fs.get_repohost_config_rel_path(args.repohost_name)

    # generate the config file
    config = _CONFIG.format(
        phabricator_config=phab_config_path,
        repohost_config=repohost_config_path,
        repo_desc=repo_desc,
        repo_url=args.repo_url,
        repo_path=repo_path,
        try_touch_path=try_touch_path,
        ok_touch_path=ok_touch_path)

    if args.admin_emails:
        config = '\n'.join([
            config,
            _CONFIG_ADMIN_EMAILS_FORMAT.format(
                admin_emails='\n'.join(args.admin_emails))
        ])

    # parse the arguments again, as a real repo
    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    abdi_repoargs.setup_parser(parser)
    repo_args = config.splitlines()
    repo_params = parser.parse_args(repo_args)

    abdi_repoargs.validate_args(repo_params)

    # make sure we can use the snoop URL
    repo_snoop_url = abdi_repoargs.get_repo_snoop_url(repo_params)
    if repo_snoop_url:
        phlurl_request.get(repo_snoop_url)

    # determine the repo url from the parsed params
    repo_url = abdi_repoargs.get_repo_url(repo_params)

    # determine the repo push url from the parsed params
    repo_push_url = abdi_repoargs.get_repo_push_url(repo_params)

    with fs.lockfile_context():
        with abdi_repo.setup_repo_context(repo_url, repo_path, repo_push_url):
            fs.create_repo_config(repo_name, config)
def _do(repo, args, reporter, arcyd_reporter, conduits, url_watcher):
    """Fetch, configure and process the review branches of a single repo.

    :repo: the git repository to operate on
    :args: the parsed repository config arguments
    :reporter: per-repo reporter; receives config, tracebacks and completion
    :arcyd_reporter: arcyd-wide reporter used for timing instrumentation
    :conduits: conduit pool used by _connect
    :url_watcher: watcher consulted to decide whether a fetch is needed
    :returns: None

    """
    with arcyd_reporter.tag_timer_context('process branches prolog'):
        # time the individual git operations under the 'git' tag
        arcyd_reporter.tag_timer_decorate_object_methods_individually(
            repo, 'git')

        # fetch only if the watcher / snoop URL indicates it's needed
        fetch_if_needed(
            url_watcher,
            abdi_repoargs.get_repo_snoop_url(args),
            repo,
            args.repo_desc)

        options = _determine_options(args, repo)

        arcyd_conduit = _connect(conduits, args, arcyd_reporter)

        reporter.set_config(options)

        sender = phlmail_sender.MailSender(
            phlsys_sendmail.Sendmail(), arcyd_reporter.arcyd_email)

        # TODO: this should be a URI for users not conduit
        mailer = abdmail_mailer.Mailer(
            sender,
            options.admin_emails,
            options.description,
            args.instance_uri)

        pluginManager = phlsys_pluginmanager.PluginManager(
            args.plugins, args.trusted_plugins)

        # a branch URL is optional; only build the callable when a format
        # string has been configured
        branch_url_callable = None
        if options.branch_url_format:

            def make_branch_url(branch_name):
                return options.branch_url_format.format(
                    branch=branch_name,
                    repo_url=args.repo_url)

            branch_url_callable = make_branch_url

        # combine the 'classic' and 'rbranch' naming schemes so branches in
        # either scheme are recognised
        branch_naming = abdt_compositenaming.Naming(
            abdt_classicnaming.Naming(),
            abdt_rbranchnaming.Naming())

        branches = abdt_git.get_managed_branches(
            repo,
            options.description,
            branch_naming,
            branch_url_callable)

        # time the individual branch operations under the 'branch' tag
        for branch in branches:
            arcyd_reporter.tag_timer_decorate_object_methods_individually(
                branch, 'branch')

    try:
        with arcyd_reporter.tag_timer_context('process branches'):
            abdi_processrepo.process_branches(
                branches,
                arcyd_conduit,
                mailer,
                pluginManager,
                reporter)
    except Exception:
        # record the traceback against this repo before re-raising so the
        # failure is visible in the report
        reporter.on_traceback(traceback.format_exc())
        raise

    reporter.on_completed()
def process(args):
    """Create and register a new repository configuration from `args`.

    Derives a repo name and description when not supplied, renders the
    config file text, validates it by re-parsing with the real repo parser,
    verifies the snoop URL is reachable, then sets up the repository and
    writes the config under the filesystem lock.

    :args: the parsed command-line arguments
    :returns: None
    :raises: Exception if the repository path already exists

    """
    fs = abdt_fs.make_default_accessor()

    repo_name = args.name
    if repo_name is None:
        # derive a name from the phabricator / repohost / url parameters
        repo_name = _repo_name_for_params(
            args.phabricator_name,
            args.repohost_name,
            args.repo_url)

    repo_desc = args.repo_desc
    if repo_desc is None:
        # derive a description from the phabricator / repohost / url params
        repo_desc = _repo_desc_for_params(
            args.phabricator_name,
            args.repohost_name,
            args.repo_url)

    try_touch_path = fs.layout.repo_try(repo_name)
    ok_touch_path = fs.layout.repo_ok(repo_name)
    repo_path = fs.layout.repo(repo_name)

    # make sure the repo doesn't exist already
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # make sure the phabricator config exists
    phab_config_path = fs.get_phabricator_config_rel_path(
        args.phabricator_name)

    # make sure the repohost config exists
    repohost_config_path = fs.get_repohost_config_rel_path(
        args.repohost_name)

    # generate the config file
    config = _CONFIG.format(
        phabricator_config=phab_config_path,
        repohost_config=repohost_config_path,
        repo_desc=repo_desc,
        repo_url=args.repo_url,
        repo_path=repo_path,
        try_touch_path=try_touch_path,
        ok_touch_path=ok_touch_path)

    if args.admin_emails:
        config = '\n'.join([
            config,
            _CONFIG_ADMIN_EMAILS_FORMAT.format(
                admin_emails='\n'.join(args.admin_emails))])

    # parse the arguments again, as a real repo
    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    abdi_repoargs.setup_parser(parser)
    repo_args = config.splitlines()
    repo_params = parser.parse_args(repo_args)

    abdi_repoargs.validate_args(repo_params)

    # make sure we can use the snoop URL
    repo_snoop_url = abdi_repoargs.get_repo_snoop_url(repo_params)
    if repo_snoop_url:
        phlurl_request.get(repo_snoop_url)

    # determine the repo url from the parsed params
    repo_url = abdi_repoargs.get_repo_url(repo_params)

    with fs.lockfile_context():
        with abdi_repo.setup_repo_context(repo_url, repo_path):
            fs.create_repo_config(repo_name, config)