def test_get(self):
    self.assertEqual(
        phlurl_request.get(self._url('http://{host}:{port}/index')),
        (401, 'Authentication required'))
    self.assertEqual(
        phlurl_request.get(
            self._url('http://*****:*****@{host}:{port}/index')),
        (200, 'Basic Zm9vOmJhcg=='))
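# The tests above rely on a `_url` helper that is not shown in this excerpt.
# A minimal sketch of how such a helper could look, assuming the test fixture
# records the local test server's address in `self._host` and `self._port`
# (those attribute names are assumptions, not taken from the source):
def _url(self, template):
    # Substitute the fixture's server address into the URL template,
    # e.g. 'http://{host}:{port}/index' -> 'http://localhost:8000/index'.
    return template.format(host=self._host, port=self._port)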
def has_url_recently_changed(self, url):
    if url in self._results:
        old_result = self._results[url].has_changed
        if old_result:
            hash_hexdigest = self._results[url].hash_hexdigest
            self._results[url] = _HashHexdigestHasChanged(
                hash_hexdigest, False)
        return old_result

    content = phlurl_request.get(url)

    # pylint: disable=E1101
    self._results[url] = _HashHexdigestHasChanged(
        hashlib.sha1(content).hexdigest(), False)
    # pylint: enable=E1101

    return True
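# `_HashHexdigestHasChanged` is used above but not defined in this excerpt.
# Its usage (constructed from two positional values, read back through
# `.hash_hexdigest` and `.has_changed`) suggests a simple value type; a
# minimal sketch, assuming a namedtuple is sufficient:
import collections

_HashHexdigestHasChanged = collections.namedtuple(
    '_HashHexdigestHasChanged',
    ['hash_hexdigest', 'has_changed'])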
def process(args):
    fs = abdt_fs.make_default_accessor()

    repo_name = args.name
    if repo_name is None:
        repo_name = _repo_name_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    repo_desc = args.repo_desc
    if repo_desc is None:
        repo_desc = _repo_desc_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    try_touch_path = fs.layout.repo_try(repo_name)
    ok_touch_path = fs.layout.repo_ok(repo_name)
    repo_path = fs.layout.repo(repo_name)

    # make sure the repo doesn't exist already
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # make sure the phabricator config exists
    phab_config_path = fs.get_phabricator_config_rel_path(
        args.phabricator_name)

    # make sure the repohost config exists
    repohost_config_path = fs.get_repohost_config_rel_path(
        args.repohost_name)

    # generate the config file
    config = _CONFIG.format(
        phabricator_config=phab_config_path,
        repohost_config=repohost_config_path,
        repo_desc=repo_desc,
        repo_url=args.repo_url,
        repo_path=repo_path,
        try_touch_path=try_touch_path,
        ok_touch_path=ok_touch_path)

    if args.admin_emails:
        config = '\n'.join([
            config,
            _CONFIG_ADMIN_EMAILS_FORMAT.format(
                admin_emails='\n'.join(args.admin_emails))])

    # parse the arguments again, as a real repo
    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    abdi_repoargs.setup_parser(parser)
    repo_args = config.splitlines()
    repo_params = parser.parse_args(repo_args)

    abdi_repoargs.validate_args(repo_params)

    # make sure we can use the snoop URL
    repo_snoop_url = abdi_repoargs.get_repo_snoop_url(repo_params)
    if repo_snoop_url:
        phlurl_request.get(repo_snoop_url)

    # determine the repo url from the parsed params
    repo_url = abdi_repoargs.get_repo_url(repo_params)

    # determine the repo push url from the parsed params
    repo_push_url = abdi_repoargs.get_repo_push_url(repo_params)

    with fs.lockfile_context():
        with abdi_repo.setup_repo_context(repo_url, repo_path, repo_push_url):
            fs.create_repo_config(repo_name, config)
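# `_CONFIG` is not shown in this excerpt. `process()` feeds the rendered text
# to `parser.parse_args(config.splitlines())`, so the template has to expand
# to one command-line argument per line, the same shape argparse expects from
# '@argfile' inputs. A minimal sketch of such a template, with illustrative
# flag names that are assumptions rather than the real options:
_CONFIG_SKETCH = """
--phabricator-config
{phabricator_config}
--repohost-config
{repohost_config}
--repo-desc
{repo_desc}
--repo-url
{repo_url}
--repo-path
{repo_path}
--try-touch-path
{try_touch_path}
--ok-touch-path
{ok_touch_path}
""".strip()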
def process(args):
    fs = abdt_fs.make_default_accessor()

    repo_name = args.name
    if repo_name is None:
        repo_name = _repo_name_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    repo_desc = args.repo_desc
    if repo_desc is None:
        repo_desc = _repo_desc_for_params(
            args.phabricator_name, args.repohost_name, args.repo_url)

    try_touch_path = fs.layout.repo_try(repo_name)
    ok_touch_path = fs.layout.repo_ok(repo_name)
    repo_path = fs.layout.repo(repo_name)

    # make sure the repo doesn't exist already
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # make sure the phabricator config exists
    phab_config_path = fs.get_phabricator_config_rel_path(
        args.phabricator_name)

    # make sure the repohost config exists
    repohost_config_path = fs.get_repohost_config_rel_path(
        args.repohost_name)

    # generate the config file
    config = _CONFIG.format(
        phabricator_config=phab_config_path,
        repohost_config=repohost_config_path,
        repo_desc=repo_desc,
        repo_url=args.repo_url,
        repo_path=repo_path,
        try_touch_path=try_touch_path,
        ok_touch_path=ok_touch_path)

    if args.admin_emails:
        config = '\n'.join([
            config,
            _CONFIG_ADMIN_EMAILS_FORMAT.format(
                admin_emails='\n'.join(args.admin_emails))])

    # parse the arguments again, as a real repo
    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    abdi_repoargs.setup_parser(parser)
    repo_args = config.splitlines()
    repo_params = parser.parse_args(repo_args)

    abdi_repoargs.validate_args(repo_params)

    # make sure we can use the snoop URL
    repo_snoop_url = abdi_repoargs.get_repo_snoop_url(repo_params)
    if repo_snoop_url:
        phlurl_request.get(repo_snoop_url)

    # determine the repo url from the parsed params
    repo_url = abdi_repoargs.get_repo_url(repo_params)

    with fs.lockfile_context():
        with abdi_repo.setup_repo_context(repo_url, repo_path):
            fs.create_repo_config(repo_name, config)
def test_get(self):
    self.assertEqual(
        phlurl_request.get(self._url('http://{host}:{port}/index')),
        (200, 'OK'))
def process(args):
    fs = abdt_fs.make_default_accessor()

    try_touch_path = fs.layout.repo_try(args.name)
    ok_touch_path = fs.layout.repo_ok(args.name)
    repo_path = fs.layout.repo(args.name)

    # make sure the repo doesn't exist already
    if os.path.exists(repo_path):
        raise Exception('{} already exists'.format(repo_path))

    # make sure the phabricator config exists
    phab_config_path = fs.get_phabricator_config_rel_path(
        args.phabricator_name)

    # make sure we can use the snoop URL
    if args.repo_snoop_url:
        phlurl_request.get(args.repo_snoop_url)

    # generate the config file
    config = _CONFIG.format(
        phabricator_config=phab_config_path,
        repo_desc=args.repo_desc,
        repo_path=repo_path,
        try_touch_path=try_touch_path,
        ok_touch_path=ok_touch_path,
        arcyd_email=args.arcyd_email,
        admin_email=args.admin_email)

    if args.repo_snoop_url:
        config = '\n'.join([
            config,
            _CONFIG_SNOOP_URL.format(
                repo_snoop_url=args.repo_snoop_url)])

    if args.review_url_format:
        config = '\n'.join([
            config,
            _CONFIG_REVIEW_URL.format(
                review_url_format=args.review_url_format)])

    if args.branch_url_format:
        config = '\n'.join([
            config,
            _CONFIG_BRANCH_URL.format(
                branch_url_format=args.branch_url_format)])

    # if there's any failure after cloning then we should remove the repo
    phlsys_subprocess.run(
        'git', 'clone', args.repo_url, repo_path)
    try:
        repo = phlsys_git.Repo(repo_path)

        # test pushing to master
        repo.call('checkout', 'origin/master')
        phlgit_commit.allow_empty(repo, 'test commit for pushing')
        repo.call('push', 'origin', '--dry-run', 'HEAD:refs/heads/master')
        repo.call('checkout', '-')

        # test push to special refs
        repo.call(
            'push', 'origin', '--dry-run', 'HEAD:refs/arcyd/test')
        repo.call(
            'push', 'origin', '--dry-run', 'HEAD:refs/heads/dev/arcyd/test')

        # fetch the 'landed' and 'abandoned' refs if they exist
        ref_list = set(repo.call('ls-remote').split()[1::2])
        special_refs = [
            (abdt_git.ARCYD_ABANDONED_REF,
             abdt_git.ARCYD_ABANDONED_BRANCH_FQ),
            (abdt_git.ARCYD_LANDED_REF,
             abdt_git.ARCYD_LANDED_BRANCH_FQ),
        ]
        for ref in special_refs:
            if ref[0] in ref_list:
                repo.call('fetch', 'origin', '{}:{}'.format(ref[0], ref[1]))

        # success, write out the config
        fs.create_repo_config(args.name, config)
    except Exception:
        # clean up the git repo
        shutil.rmtree(repo_path)
        raise
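# Illustration of the ls-remote parsing used above: `git ls-remote` prints
# whitespace-separated "<sha> <refname>" pairs, so flattening the output with
# split() and slicing [1::2] keeps only the ref names. The values below are
# made up purely for illustration.
_example_ls_remote_output = (
    '1111aaaa\trefs/heads/master\n'
    '2222bbbb\trefs/arcyd/landed\n')
_example_ref_names = set(_example_ls_remote_output.split()[1::2])
assert _example_ref_names == {'refs/heads/master', 'refs/arcyd/landed'}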