def _verify_membership(user: str, group):
    """Check that ``user`` belongs to the GitLab ``group``.

    Owners pass with a success message; non-owner members pass with a
    warning about reduced functionality.

    Raises:
        plug.BadCredentials: If ``user`` is not a member of ``group`` at all.
    """
    memberships = group.members.list(all=True)
    owner_names = {
        member.username
        for member in memberships
        if member.access_level == gitlab.const.OWNER_ACCESS
    }
    if user in owner_names:
        plug.echo(
            f"SUCCESS: User {user} is an owner of group {group.name}"
        )
    else:
        plug.log.warning(
            f"{user} is not an owner of {group.name}. "
            "Some features may not be available."
        )
        non_owner_names = {
            member.username
            for member in memberships
            if member.access_level != gitlab.const.OWNER_ACCESS
        }
        if user not in non_owner_names:
            raise plug.BadCredentials(
                f"user {user} is not a member of {group.name}"
            )
def _verify_user(self) -> None:
    """Verify that the configured token belongs to the configured user."""
    response = self._request(requests.get, "/user", error_msg="bad token")
    authenticated_login = response.json()["login"]
    if authenticated_login != self._user:
        raise plug.BadCredentials(
            f"token does not belong to user '{self._user}'")
    plug.echo("Token and user OK")
def callback(args: argparse.Namespace, api: plug.PlatformAPI) -> None:
    """Open issues in student repos according to the parsed CLI arguments.

    Issues are read either from a multi-issues file or from a directory of
    per-repo issue files, then opened one by one (with an interactive
    confirmation unless batch mode is enabled).
    """
    repo_name_to_team: Mapping[str, plug.StudentTeam] = {
        plug.generate_repo_name(team.name, assignment): team
        for team in args.students
        for assignment in args.assignments
    }
    repo_names = list(repo_name_to_team.keys())

    if "multi_issues_file" in args and args.multi_issues_file is not None:
        issues_file = pathlib.Path(args.multi_issues_file).resolve()
        all_issues = _parse_multi_issues_file(issues_file)
    else:
        issues_dir = pathlib.Path(args.issues_dir).resolve()
        all_issues = _collect_issues(repo_names, issues_dir)

    expected_issues = _extract_expected_issues(
        all_issues, repo_names, args.allow_missing
    )
    for repo_name, issue in expected_issues:
        should_open = args.batch_mode or _ask_for_open(
            issue, repo_name, args.truncation_length
        )
        if not should_open:
            plug.echo("Skipping {}".format(repo_name))
            continue
        repo = api.get_repo(repo_name, repo_name_to_team[repo_name].name)
        api.create_issue(issue.title, issue.body, repo)
def command(self) -> None:
    """Uninstall a plugin."""
    # built-in plugins cannot be uninstalled, so filter them out up front
    uninstallable = {
        name: attrs
        for name, attrs in disthelpers.get_installed_plugins().items()
        if not attrs.get("builtin")
    }

    if not self.plugin_name:
        # interactive uninstall: let the user pick from a menu
        if not uninstallable:
            plug.echo("No plugins installed")
            return
        plug.echo("Installed plugins:")
        _list_installed_plugins(
            uninstallable, disthelpers.get_active_plugins()
        )
        target = bullet.Bullet(
            prompt="Select a plugin to uninstall:",
            choices=list(uninstallable.keys()),
        ).launch()
    elif self.plugin_name in uninstallable:
        # non-interactive uninstall
        target = self.plugin_name
    else:
        raise plug.PlugError(
            f"no plugin '{self.plugin_name}' installed"
        )

    _uninstall_plugin(target, uninstallable)
def _verify_base_url(self) -> None:
    """Check that the configured base url answers the /version endpoint."""
    version_response = self._request(requests.get, "/version")
    if version_response.status_code != 200:
        raise plug.ServiceNotFoundError(
            f"bad base url '{self._base_url}'",
            status=version_response.status_code,
        )
    plug.echo(f"Base url '{self._base_url}' OK")
def check_peer_review_progress(
    assignment_names: Iterable[str],
    teams: Iterable[plug.Team],
    title_regex: str,
    num_reviews: int,
    api: plug.PlatformAPI,
) -> None:
    """Check which teams have opened peer review issues in their allotted
    review repos.

    Args:
        assignment_names: Names of assignments.
        teams: An iterable of student teams.
        title_regex: A regex to match against issue titles.
        num_reviews: Amount of reviews each student is expected to have made.
        api: An implementation of :py:class:`repobee_plug.PlatformAPI` used to
            interface with the platform (e.g. GitHub or GitLab) instance.
    """
    teams = list(teams)
    reviews = collections.defaultdict(list)

    review_team_names = [
        plug.generate_review_team_name(team, assignment_name)
        for team in teams
        for assignment_name in assignment_names
    ]
    review_teams = progresswrappers.get_teams(
        review_team_names, api, desc="Processing review teams"
    )
    for review_team in review_teams:
        associated_repos = list(api.get_team_repos(review_team))
        if len(associated_repos) != 1:
            plug.log.warning(
                f"Expected {review_team.name} to have 1 associated "
                f"repo, found {len(associated_repos)}. "
                f"Skipping...")
            continue

        reviewed_repo = associated_repos[0]
        expected_reviewers = set(review_team.members)
        reviewing_teams = _extract_reviewing_teams(teams, expected_reviewers)
        # authors of any issue whose title matches the review pattern
        done_reviewers = {
            issue.author
            for issue in api.get_repo_issues(reviewed_repo)
            if re.match(title_regex, issue.title)
        }
        for team in reviewing_teams:
            done = any(member in done_reviewers for member in team.members)
            reviews[str(team)].append(
                plug.Review(repo=reviewed_repo.name, done=done)
            )

    plug.echo(
        formatters.format_peer_review_progress_output(
            reviews, [team.name for team in teams], num_reviews
        )
    )
def clone_repos(
    repos: Iterable[plug.StudentRepo],
    update_local: bool,
    api: plug.PlatformAPI,
) -> Mapping[str, List[plug.Result]]:
    """Clone all student repos related to the provided master repos and
    student teams.

    Args:
        repos: The repos to be cloned. This function does not use the
            ``implementation`` attribute, so it does not need to be set.
        update_local: Whether or not to attempt to update student repos that
            already exist locally.
        api: An implementation of :py:class:`repobee_plug.PlatformAPI` used
            to interface with the platform (e.g. GitHub or GitLab) instance.

    Returns:
        A mapping from repo name to a list of hook results.
    """
    plug.echo("Cloning into student repos ...")
    with tempfile.TemporaryDirectory() as tmpdir:
        local_repos = _clone_repos_no_check(
            repos, pathlib.Path(tmpdir), update_local, api
        )
        for p in plug.manager.get_plugins():
            if "post_clone" in dir(p):
                # at least one plugin implements post_clone: run the clone
                # tasks while the temporary directory still exists
                local_repos_progress = plug.cli.io.progress_bar(
                    local_repos, desc="Executing post_clone hooks"
                )
                return plugin.execute_clone_tasks(local_repos_progress, api)
    # no plugin implements post_clone; nothing to report
    return {}
def _handle_hook_results(hook_results, filepath):
    """Serialize ``hook_results`` to JSON and write them to ``filepath``."""
    plug.log.warning(
        "Storing hook results to file is an alpha feature, the file format "
        "is not final")
    destination = pathlib.Path(filepath)
    serialized = plug.result_mapping_to_json(hook_results)
    util.atomic_write(serialized, destination)
    plug.echo("Hook results stored to {}".format(filepath))
def _list_all_plugins(
    plugins: dict, installed_plugins: dict, active_plugins: List[str]
) -> None:
    """Echo a table of all known plugins with their latest and installed
    versions, marking active plugins with a check mark.
    """
    headers = [
        "Name",
        "Description",
        "URL",
        "Latest",
        "Installed\n(√ = active)",
    ]
    rows = []
    for plugin_name, attrs in plugins.items():
        # versions are ordered with the most recent first
        latest_version = next(iter(attrs["versions"].keys()))
        installed = installed_plugins.get(plugin_name) or {}
        if attrs.get("builtin"):
            base_version = "built-in"
        else:
            base_version = installed.get("version") or "-"
        active_marker = " √" if plugin_name in active_plugins else ""
        rows.append([
            plugin_name,
            _wrap_cell(attrs["description"]),
            attrs["url"],
            latest_version,
            base_version + active_marker,
        ])
    pretty_table = _format_table(
        rows,
        headers,
        max_width=_get_terminal_width(),
        column_elim_order=[2, 3, 4, 1, 0],
    )
    plug.echo(pretty_table)
def check_reviews_repobee_4(allocations_file: pathlib.Path,
                            title_regex: str,
                            api: plug.PlatformAPI) -> None:
    """Preview version of the `reviews check` command for RepoBee 4."""
    data = json.loads(allocations_file.read_text(sys.getdefaultencoding()))
    review_allocations = data["allocations"]
    num_reviews = int(data["num_reviews"])

    # map each reviewed repo's url to the members expected to review it
    expected_reviewers = {
        allocation["reviewed_repo"]["url"]:
            allocation["review_team"]["members"]
        for allocation in review_allocations
    }
    reviewed_repos = progresswrappers.get_repos(expected_reviewers.keys(), api)
    reviews = collections.defaultdict(list)
    for reviewed_repo in reviewed_repos:
        # authors of any issue whose title matches the review pattern
        done_reviewers = {
            issue.author
            for issue in api.get_repo_issues(reviewed_repo)
            if re.match(title_regex, issue.title)
        }
        for reviewer in expected_reviewers[reviewed_repo.url]:
            reviews[reviewer].append(
                plug.Review(
                    repo=reviewed_repo.name,
                    done=reviewer in done_reviewers,
                )
            )

    all_reviewers = itertools.chain.from_iterable(expected_reviewers.values())
    plug.echo(
        formatters.format_peer_review_progress_output(
            reviews, list(all_reviewers), num_reviews
        )
    )
def open_issues_from_hook_results(
    hook_results: Mapping[str, List[plug.Result]],
    repos: Iterable[plug.StudentRepo],
    api: plug.PlatformAPI,
) -> None:
    """Open all issues from the hook results in the given repos.

    Issues given in the hook results that do not belong to the repos are
    ignored, and repos provided without corresponding issues in the hook
    results have no effect.

    Args:
        hook_results: A hook results dictionary.
        repos: Student repos to open issues in.
        api: plug.PlatformAPI,
    """
    repos_by_url = {repo.url: repo for repo in repos}
    for repo_url, repo_data in hook_results["repos"][0].data.items():
        if repo_url not in repos_by_url or not repo_data["issues"]:
            continue
        repo = repos_by_url[repo_url]
        platform_repo = api.get_repo(repo.name, repo.team.name)

        for issue_data in repo_data["issues"].values():
            issue = api.create_issue(
                issue_data["title"], issue_data["body"], platform_repo
            )
            plug.echo(
                f"Opened issue {repo.name}/#{issue.number}-'{issue.title}'"
            )
def _verify_org(self, org_name: str) -> None:
    """Verify that the organization exists and is reachable."""
    self._request(
        requests.get,
        f"/orgs/{org_name}",
        error_msg=f"could not find organization '{org_name}'",
    )
    plug.echo(f"Organization '{org_name}' OK")
def _ask_for_open(issue: plug.Issue, repo_name: str, trunc_len: int) -> bool:
    """Show a preview of ``issue`` and ask whether to open it in the repo.

    Returns:
        True if and only if the user answered "y".
    """
    indented_body = _indent_issue_body(issue.body, trunc_len)
    plug.echo(
        f'\nProcessing issue "{issue.title}" for {repo_name}:\n{indented_body}'
    )
    answer = input(f'Open issue "{issue.title}" in repo {repo_name}? (y/n) ')
    return answer == "y"
def command(self):
    """Generate a multi-issues file for the configured students and
    assignments.
    """
    file_content = _generate_multi_issues_file_content(
        self.args.students, self.args.assignments
    )
    pathlib.Path(MULTI_ISSUES_FILENAME).write_text(
        file_content, encoding=sys.getdefaultencoding()
    )
    plug.echo(f"Created multi-issues file '{MULTI_ISSUES_FILENAME}'")
def update_student_repos(
    template_repo_urls: plug.types.SizedIterable[str],
    teams: plug.types.SizedIterable[plug.StudentTeam],
    api: plug.PlatformAPI,
    issue: Optional[plug.Issue] = None,
) -> Mapping[str, List[plug.Result]]:
    """Attempt to update all student repos related to one of the master repos.

    Args:
        template_repo_urls: URLs to master repos. Must be in the organization
            that the api is set up for.
        teams: An iterable of student teams.
        api: An implementation of :py:class:`repobee_plug.PlatformAPI` used to
            interface with the platform (e.g. GitHub or GitLab) instance.
        issue: An optional issue to open in repos to which pushing fails.

    Returns:
        A mapping from name to a list of hook results.

    Raises:
        ValueError: If ``template_repo_urls`` contains duplicates.
    """
    if len(set(template_repo_urls)) != len(template_repo_urls):
        raise ValueError("template_repo_urls contains duplicates")

    with tempfile.TemporaryDirectory() as tmpdir:
        workdir = pathlib.Path(tmpdir)
        template_repos = [
            plug.TemplateRepo(
                name=urlutil.extract_repo_name(url),
                url=url,
                _path=workdir / api.extract_repo_name(url),
            )
            for url in template_repo_urls
        ]

        plug.log.info("Cloning into master repos ...")
        _clone_all(template_repos, cwd=workdir, api=api)
        hook_results = plugin.execute_setup_tasks(
            template_repos, api, cwd=workdir
        )

        push_tuples = _create_update_push_tuples(teams, template_repos, api)
        push_tuples_with_progress = plug.cli.io.progress_bar(
            push_tuples,
            desc="Setting up student repos",
            total=len(teams) * len(template_repos),
        )
        successful_pts, failed_pts = git.push(
            push_tuples=push_tuples_with_progress
        )

        if failed_pts and issue:
            plug.echo("Opening issue in repos to which push failed")
            # strip credentials from the urls before looking up the repos
            urls_without_auth = [
                re.sub("https://.*?@", "https://", pt.repo_url)
                for pt in failed_pts
            ]
            _open_issue_by_urls(urls_without_auth, issue, api)

        plug.log.info("Done!")
        return hook_results
def _ask_for_open(issue: plug.Issue, repo_name: str, trunc_len: int) -> bool:
    """Echo a truncated view of ``issue`` and prompt for confirmation.

    Returns:
        True if and only if the user answered "y".
    """
    truncated_body = issue.body[:trunc_len]
    ellipsis = "[...]" if len(issue.body) > trunc_len else ""
    plug.echo('Processing issue "{}" for {}: {}{}'.format(
        issue.title, repo_name, truncated_body, ellipsis,
    ))
    prompt = 'Open issue "{}" in repo {}? (y/n) '.format(
        issue.title, repo_name
    )
    return input(prompt) == "y"
def _echo_state_change(
    active_before: List[str], active_after: List[str]
) -> None:
    """Report which plugins were activated and which were deactivated."""
    before, after = set(active_before), set(active_after)
    turned_on = after - before
    turned_off = before - after
    if turned_on:
        plug.echo(f"Activating: {' '.join(turned_on)}")
    if turned_off:
        plug.echo(f"Deactivating: {' '.join(turned_off)}")
def _list_plugin(plugin_name: str, plugins: dict) -> None:
    """Echo detailed information about a single plugin as a table."""
    attrs = plugins[plugin_name]
    rows = [
        ["Name", plugin_name],
        ["Description", _wrap_cell(attrs["description"])],
        ["Versions", _wrap_cell(" ".join(attrs["versions"].keys()))],
        ["URL", attrs["url"]],
    ]
    plug.echo(tabulate.tabulate(rows, tablefmt="fancy_grid"))
def command(self) -> None:
    """Echo stored hook results, filtered by student and assignment."""
    results_path = pathlib.Path(self.hook_results_file).resolve()
    if not results_path.exists():
        raise plug.PlugError(f"no such file: {str(results_path)}")

    contents = results_path.read_text(encoding=sys.getdefaultencoding())
    hook_results_mapping = plug.json_to_result_mapping(contents)
    selected = _filter_hook_results(
        hook_results_mapping, self.args.students, self.args.assignments
    )
    plug.echo(formatters.format_hook_results_output(selected))
def _uninstall_plugin(plugin_name: str, installed_plugins: dict):
    """Uninstall ``plugin_name`` and update the installed/active plugin
    registries accordingly.
    """
    plugin_version = installed_plugins[plugin_name]["version"]
    plug.echo(f"Uninstalling {plugin_name}@{plugin_version}")

    # single-file plugins are not pip-installed, so only pip-uninstall others
    if not installed_plugins[plugin_name].get("single_file"):
        _pip_uninstall_plugin(plugin_name)

    del installed_plugins[plugin_name]
    disthelpers.write_installed_plugins(installed_plugins)
    remaining_active = [
        name
        for name in disthelpers.get_active_plugins()
        if name != plugin_name
    ]
    disthelpers.write_active_plugins(remaining_active)
    plug.echo(f"Successfully uninstalled {plugin_name}")
def dispatch_command(
    args: argparse.Namespace, api: plug.PlatformAPI, config: plug.Config
) -> Mapping[str, List[plug.Result]]:
    """Handle parsed CLI arguments and dispatch commands to the appropriate
    functions. Expected exceptions are caught and turned into SystemExit
    exceptions, while unexpected exceptions are allowed to propagate.

    Args:
        args: A namespace of parsed command line arguments.
        api: An initialized plug.API instance.
        config: The active configuration.

    Returns:
        A mapping from action name to a list of hook results (empty if the
        dispatched command produced none).
    """
    hook_results: Mapping[str, List[plug.Result]] = {}
    dispatch_table = {
        plug.cli.CoreCommand.repos: _dispatch_repos_command,
        plug.cli.CoreCommand.issues: _dispatch_issues_command,
        plug.cli.CoreCommand.config: _dispatch_config_command,
        plug.cli.CoreCommand.reviews: _dispatch_reviews_command,
        plug.cli.CoreCommand.teams: _dispatch_teams_command,
    }

    is_ext_command = "_extension_command" in args
    if is_ext_command:
        ext_cmd = args._extension_command
        # only pass the api to extension commands that declare they need it
        res = (
            ext_cmd.command(api=api)
            if ext_cmd.__requires_api__()
            else ext_cmd.command()
        )
        hook_results = (
            {str(ext_cmd.__settings__.action): [res]} if res else hook_results
        )
    else:
        category = args.category
        hook_results = (
            dispatch_table[category](args, config, api) or hook_results
        )

    # only extension commands and the repos setup/update/clone actions
    # echo their hook results to the user
    if is_ext_command or args.action in [
        plug.cli.CoreCommand.repos.setup,
        plug.cli.CoreCommand.repos.update,
        plug.cli.CoreCommand.repos.clone,
    ]:
        if hook_results and any(hook_results.values()):
            plug.echo(formatters.format_hook_results_output(hook_results))
    if hook_results and "hook_results_file" in args and args.hook_results_file:
        _handle_hook_results(
            hook_results=hook_results, filepath=args.hook_results_file
        )
    return hook_results
def _pre_init_error_handler():
    """Context-manager generator that translates pre-initialization errors
    into user-friendly output.

    Expected errors are logged and re-raised; anything else is delegated to
    the generic unexpected-exception handler with a traceback.
    """
    try:
        yield
    except (
        exception.ParseError,
        exception.PluginLoadError,
        exception.FileError,
    ) as err:
        plug.echo(_PRE_INIT_ERROR_MESSAGE)
        plug.log.error(f"{err.__class__.__name__}: {err}")
        raise
    except Exception as err:
        plug.echo(_PRE_INIT_ERROR_MESSAGE)
        _handle_unexpected_exception(err, traceback=True)
def _verify_org(org_name: str, user: str, g: github.MainClass.Github):
    """Check that the organization exists and that the user is an owner."""
    plug.echo("Trying to fetch organization {} ...".format(org_name))
    org_not_found_msg = (
        "organization {} could not be found. Possible "
        "reasons: org does not exist, user does not have "
        "sufficient access to organization."
    ).format(org_name)
    with _convert_404_to_not_found_error(org_not_found_msg):
        org = g.get_organization(org_name)
    plug.echo("SUCCESS: found organization {}".format(org_name))

    plug.echo(
        "Verifying that user {} is an owner of organization {}".format(
            user, org_name
        )
    )
    if user in (m.login for m in org.get_members(role="admin")):
        plug.echo(
            "SUCCESS: user {} is an owner of organization {}".format(
                user, org_name
            )
        )
    else:
        plug.log.warning(
            f"{user} is not an owner of {org_name}. "
            "Some features may not be available."
        )
        if user not in (m.login for m in org.get_members()):
            raise plug.BadCredentials(
                f"user {user} is not a member of {org_name}"
            )
def migrate_repos(
    template_repo_urls: plug.types.SizedIterable[str],
    api: plug.PlatformAPI,
) -> None:
    """Migrate a repository from an arbitrary URL to the target organization.

    The new repository is added to the master_repos team, which is created if
    it does not already exist.

    Args:
        template_repo_urls: Local urls to repos to migrate.
        api: An implementation of :py:class:`repobee_plug.PlatformAPI` used to
            interface with the platform (e.g. GitHub or GitLab) instance.
    """
    local_templates = [
        plug.TemplateRepo(name=urlutil.extract_repo_name(url), url=url)
        for url in template_repo_urls
    ]
    # lazily create (or fetch) one remote repo per local template repo
    create_repo_it = plug.cli.io.progress_bar(
        (
            _create_or_fetch_repo(
                local.name, description="", private=True, api=api
            )
            for local in local_templates
        ),
        desc="Creating remote repos",
        total=len(template_repo_urls),
    )

    with tempfile.TemporaryDirectory() as tmpdir:
        workdir = pathlib.Path(tmpdir)
        _clone_all(local_templates, cwd=workdir, api=api)

        remote_templates = [
            plug.TemplateRepo(
                name=repo.name, url=repo.url, _path=workdir / repo.name
            )
            for _, repo in create_repo_it
        ]
        push_specs = [
            PushSpec(
                local_path=remote.path,
                repo_url=api.insert_auth(remote.url),
                branch=git.active_branch(remote.path),
            )
            for remote in remote_templates
        ]
        git.push(push_specs)

    plug.echo("Done!")
def _install_plugin_from_git_repo(
    repo_url: str, installed_plugins: dict
) -> None:
    """Install a plugin from a git repository URL and record it in the
    installed-plugins registry.

    Args:
        repo_url: URL to a git repository, optionally suffixed with a version
            specifier separated by ``PLUGIN_SPEC_SEP``.
        installed_plugins: Registry of installed plugins, updated in place.

    Raises:
        plug.PlugError: If pip fails to install the plugin.
    """
    # strip any version spec; only the bare URL identifies the plugin
    # (the original bound the spec to an unused `version` variable)
    url = repo_url.split(PLUGIN_SPEC_SEP)[0]
    plugin_name = _parse_plugin_name_from_git_url(url)
    install_url = f"git+{repo_url}"
    install_proc = _install_plugin_from_url_nocheck(install_url)
    if install_proc.returncode != 0:
        raise plug.PlugError(f"could not install plugin from {repo_url}")
    # NOTE(review): the registry stores the bare url as "name" and the full
    # spec as "version" — looks intentional, but verify against consumers
    install_info = dict(name=url, version=repo_url)
    installed_plugins[plugin_name] = install_info
    disthelpers.write_installed_plugins(installed_plugins)
    plug.echo(f"Installed {plugin_name} from {repo_url}")
def command(self) -> None:
    """Upgrade RepoBee to the latest version.

    Raises:
        plug.PlugError: If the pip upgrade subprocess fails.
    """
    plug.echo(f"Upgrading RepoBee from v{_installed_version()}...")
    # honor an explicit version spec if one was given, else latest
    repobee_requirement = f"repobee{self.version_spec or ''}"

    upgrade = disthelpers.pip(
        "install",
        repobee_requirement,
        upgrade=True,
        no_cache=True,
        force_reinstall=True,
    )
    if upgrade.returncode != 0:
        raise plug.PlugError("failed to upgrade RepoBee")

    # fix: user-facing message had a typo ("succesfully")
    plug.echo(f"RepoBee successfully upgraded to v{_installed_version()}!")
def _list_installed_plugins(
    installed_plugins: dict, active_plugins: List[str]
) -> None:
    """Echo a table of installed plugins, marking active ones with √."""
    headers = ["Name", "Installed version\n(√ = active)"]
    rows = []
    for plugin_name, attrs in installed_plugins.items():
        active_marker = " √" if plugin_name in active_plugins else ""
        rows.append([plugin_name, attrs["version"] + active_marker])
    pretty_table = _format_table(
        rows,
        headers,
        max_width=_get_terminal_width(),
        column_elim_order=[1, 0],
    )
    plug.echo(pretty_table)
def verify_settings(
    user: str,
    org_name: str,
    base_url: str,
    token: str,
    template_org_name: Optional[str] = None,
):
    """See :py:meth:`repobee_plug.PlatformAPI.verify_settings`."""
    plug.echo("GitLabAPI is verifying settings ...")
    plug.echo("Testing Internet connection")
    if not http.is_internet_connection_available():
        raise plug.InternetConnectionUnavailable()
    if not token:
        raise plug.BadCredentials(
            msg="Token is empty. Check that REPOBEE_TOKEN environment "
            "variable is properly set, or supply the `--token` option."
        )

    gl = gitlab.Gitlab(
        base_url, private_token=token, ssl_verify=GitLabAPI._ssl_verify()
    )

    plug.echo(f"Authenticating connection to {base_url}...")
    auth_error_handler = _convert_error(
        gitlab.exceptions.GitlabAuthenticationError,
        plug.BadCredentials,
        "Could not authenticate token",
    )
    connection_error_handler = _convert_error(
        requests.exceptions.ConnectionError,
        plug.PlatformError,
        f"Could not connect to {base_url}, please check the URL",
    )
    with auth_error_handler, connection_error_handler:
        gl.auth()
        authenticated_username = gl.user.username  # type: ignore
    plug.echo(
        f"SUCCESS: Authenticated as {authenticated_username} at {base_url}"
    )

    GitLabAPI._verify_group(org_name, gl)
    if template_org_name:
        GitLabAPI._verify_group(template_org_name, gl)

    plug.echo("GREAT SUCCESS: All settings check out!")
def _log_repo_issues(
    issues_per_repo: Iterable[Tuple[str, Iterable[plug.Issue]]],
    show_body: bool,
    title_alignment: int,
) -> List[Tuple[Any, list]]:
    """Log repo issues.

    Args:
        issues_per_repo: (repo_name, issue generator) pairs
        show_body: Include the body of the issue in the output.
        title_alignment: Where the issue title should start counting from the
            start of the line.

    Returns:
        The input pairs with each issue generator materialized into a list.
    """
    even = True
    persistent_issues_per_repo = []
    for repo_name, issue_gen in issues_per_repo:
        issues = list(issue_gen)
        persistent_issues_per_repo.append((repo_name, issues))
        if not issues:
            plug.log.warning("{}: No matching issues".format(repo_name))

        for issue in issues:
            # alternate row background colors for readability
            color = (bg("grey_30") if even else bg("grey_15")) + fg("white")
            even = not even
            # the color escape codes occupy character space in the string,
            # so widen the padded field accordingly
            adjusted_alignment = title_alignment + len(color)
            id_ = "{}{}/#{}:".format(color, repo_name, issue.number).ljust(
                adjusted_alignment
            )

            out = "{}{}{}{}created {!s} by {}".format(
                id_,
                issue.title,
                style.RESET,
                " ",
                issue.created_at,
                issue.author,
            )
            if show_body:
                out += os.linesep * 2 + _limit_line_length(issue.body)
            plug.echo(out)

    return persistent_issues_per_repo
def verify_settings(
    user: str,
    org_name: str,
    base_url: str,
    token: str,
    template_org_name: Optional[str] = None,
):
    """See :py:meth:`repobee_plug.PlatformAPI.verify_settings`."""
    target_api = GiteaAPI(
        user=user, org_name=org_name, base_url=base_url, token=token
    )
    # verify from the outside in: base url, then credentials, then orgs
    target_api._verify_base_url()
    target_api._verify_user()
    target_api._verify_org(org_name)
    if template_org_name:
        target_api._verify_org(template_org_name)
    plug.echo("GREAT SUCCESS: All settings check out!")