def run(self):
    """Push freshly generated clients, one git branch per selected language.

    :return: number of languages for which the git operations failed
    :rtype: ``int``
    """
    branches_by_repo = {}
    failures = 0
    selected = self.args.languages or self.config.languages
    # An explicit --push-commit-msg overrides the generated default.
    message = self.args.push_commit_msg or (
        "Regenerate client from commit {} of spec repo".format(
            get_current_commit(self.args.spec_repo_dir)
        )
    )
    for name, cfg in self.config.language_configs.items():
        # Only act on languages the user asked for.
        if name not in selected:
            continue
        out_dir = self.get_generated_lang_dir(name)
        # All generated changes live in out_dir; the `generate` command
        # writes there by default.
        with change_cwd(out_dir):
            repo_slug = "{}/{}".format(cfg.github_org, cfg.github_repo)
            branch = "{}/{}".format(name, time.time())
            try:
                for git_args in (
                    ["git", "checkout", "-b", branch],
                    ["git", "add", "-A"],
                    ["git", "commit", "-a", "-m", message],
                    ["git", "push", "origin", "HEAD"],
                ):
                    run_command(git_args)
            except subprocess.CalledProcessError as e:
                # A failed git step fails this language; continue with the rest.
                log.error("Error running git commands: {}".format(e))
                failures += 1
                continue
            branches_by_repo[repo_slug] = branch
    log.info("Apigentools created the following branches:")
    log.info(
        "\n".join(
            "{} : {}".format(key, value)
            for key, value in branches_by_repo.items()
        )
    )
    return failures
def run(self):
    """Commit and push each selected language's generated client to a new branch.

    For every configured language requested by the user, commits all changes
    found in the generated output directory and pushes them to a fresh branch
    on the language's GitHub repository.

    :return: number of languages for which the git operations failed
    :rtype: ``int``
    """
    created_branches = {}  # maps "org/repo" -> name of the branch pushed
    cmd_result = 0  # per-language failure count; doubles as exit status
    languages = self.args.languages or self.config.languages
    commit_msg = "Regenerate client from commit {} of spec repo".format(
        get_current_commit(self.args.spec_repo_dir))
    # A user-provided commit message takes precedence over the generated one
    commit_msg = self.args.push_commit_msg or commit_msg
    for lang_name, lang_config in self.config.language_configs.items():
        # Skip any languages not specified by the user
        if lang_name not in languages:
            continue
        log.info("Running push for language {}".format(lang_name))
        gen_dir = self.get_generated_lang_dir(lang_name)
        # Assumes all generated changes are in the gen_dir directory
        # This is done by default in the `generate` command.
        with change_cwd(gen_dir):
            repo = "{}/{}".format(lang_config.github_org,
                                  lang_config.github_repo)
            branch_name = self.get_push_branch(lang_name)
            try:
                # Nothing meaningful changed for this language; don't push
                if self.args.skip_if_no_changes and self.git_status_empty():
                    log.info(
                        "Only .apigentools file changed for language {}, skipping"
                        .format(lang_name))
                    continue
                # Update git config for this repository to use the provided author's email/name
                # If not specified, use the setup from the system/global
                if self.args.git_email:
                    run_command(
                        ['git', 'config', 'user.email', self.args.git_email],
                        dry_run=self.args.dry_run)
                if self.args.git_name:
                    run_command(
                        ['git', 'config', 'user.name', self.args.git_name],
                        dry_run=self.args.dry_run)
                run_command(['git', 'checkout', '-b', branch_name],
                            dry_run=self.args.dry_run)
                run_command(['git', 'add', '-A'], dry_run=self.args.dry_run)
                run_command(['git', 'commit', '-a', '-m', commit_msg],
                            dry_run=self.args.dry_run)
                run_command(['git', 'push', 'origin', 'HEAD'],
                            dry_run=self.args.dry_run)
                created_branches[repo] = branch_name
            except subprocess.CalledProcessError as e:
                # A failed git step fails this language; continue with the rest
                log.error("Error running git commands: {}".format(e))
                cmd_result += 1
                continue
    log.info('Apigentools created the following branches:')
    log.info('\n'.join('{} : {}'.format(key, value)
                       for key, value in created_branches.items()))
    return cmd_result
def run(self):
    """Commit and push each selected language's generated client to a new branch.

    :return: number of languages for which the git operations failed
    :rtype: ``int``
    """
    created_branches = {}  # maps "org/repo" -> name of the branch pushed
    cmd_result = 0  # per-language failure count; doubles as exit status
    languages = self.args.get("languages") or self.config.languages
    commit_msg = "Regenerate client from commit {} of spec repo".format(
        get_current_commit())
    # A user-provided commit message takes precedence over the generated one
    commit_msg = self.args.get("push_commit_msg") or commit_msg
    for lang_name, lang_config in self.config.languages.items():
        # Skip any languages not specified by the user
        if lang_name not in languages:
            continue
        log.info("Running push for language {}".format(lang_name))
        gen_dir = lang_config.generated_lang_dir
        # Assumes all generated changes are in the gen_dir directory
        # This is done by default in the `generate` command.
        with change_cwd(gen_dir):
            repo = "{}/{}".format(lang_config.github_org,
                                  lang_config.github_repo)
            branch_name = self.get_push_branch(lang_name)
            try:
                # Nothing meaningful changed for this language; don't push
                if self.args.get(
                        "skip_if_no_changes") and self.git_status_empty():
                    log.info(
                        "Only .apigentools file changed for language {}, skipping"
                        .format(lang_name))
                    continue
                # Presumably configures the git author identity used by the
                # commit below — confirm against setup_git_config's definition
                self.setup_git_config()
                run_command(
                    ["git", "checkout", "-b", branch_name],
                    dry_run=self.args.get("dry_run"),
                )
                run_command(["git", "add", "-A"],
                            dry_run=self.args.get("dry_run"))
                run_command(
                    ["git", "commit", "-a", "-m", commit_msg],
                    dry_run=self.args.get("dry_run"),
                )
                run_command(
                    ["git", "push", "origin", "HEAD"],
                    dry_run=self.args.get("dry_run"),
                )
                created_branches[repo] = branch_name
            except subprocess.CalledProcessError as e:
                # A failed git step fails this language; continue with the rest
                log.error("Error running git commands: {}".format(e))
                cmd_result += 1
                continue
    log.info("Apigentools created the following branches:")
    log.info("\n".join("{} : {}".format(key, value)
                       for key, value in created_branches.items()))
    return cmd_result
def run(self):
    """Scaffold a new spec-repo project directory.

    Creates the standard directory layout, seeds default config / header /
    shared spec files (only when absent), and optionally initializes a git
    repository with .gitkeep and .gitignore files.

    :return: 0 (the command's exit status)
    :rtype: ``int``
    """
    cmd_result = 0
    log.info("Initializing a new project directory")
    want_git = not self.args.get("no_git_repo")
    project_dir = self.args.get("projectdir")
    os.makedirs(project_dir, exist_ok=True)
    with change_cwd(project_dir):
        layout = {
            "config_dir": constants.SPEC_REPO_CONFIG_DIR,
            "languages_config_dir": os.path.join(
                constants.SPEC_REPO_CONFIG_DIR,
                constants.SPEC_REPO_LANGUAGES_CONFIG_DIR,
            ),
            "generated_dir": constants.SPEC_REPO_GENERATED_DIR,
            "spec_dir": constants.SPEC_REPO_SPEC_DIR,
            "spec_v1_dir": os.path.join(constants.SPEC_REPO_SPEC_DIR, "v1"),
            "templates_dir": constants.SPEC_REPO_TEMPLATES_DIR,
        }
        for path in layout.values():
            os.makedirs(path, exist_ok=True)

        # Seed the default apigentools config, unless one already exists.
        config_file = os.path.join(layout["config_dir"],
                                   constants.DEFAULT_CONFIG_FILE)
        if not os.path.exists(config_file):
            with open(config_file, "w") as f:
                yaml.dump(self.CONFIG_FILE_JSON, f, indent=2)

        # Seed v1 header/shared spec files, again only when absent.
        v1_header = os.path.join(layout["spec_v1_dir"],
                                 constants.HEADER_FILE_NAME)
        v1_shared = os.path.join(layout["spec_v1_dir"],
                                 constants.SHARED_FILE_NAME)
        for path, payload in (
            (v1_header, self.V1_HEADER_JSON),
            (v1_shared, self.V1_SHARED_JSON),
        ):
            if not os.path.exists(path):
                with open(path, "w") as f:
                    yaml.dump(payload, f)

        if want_git:
            log.info("Creating a git repo in the new spec project directory")
            run_command(["git", "init"], log_level=logging.DEBUG)
            # .gitkeep makes the (otherwise-ignored) dirs trackable by git.
            for d in (layout["generated_dir"], layout["templates_dir"]):
                with open(os.path.join(d, ".gitkeep"), "w"):
                    pass
            if not os.path.exists(".gitignore"):
                with open(".gitignore", "w") as f:
                    f.writelines(self.GITIGNORE)
    return cmd_result
def cli():
    """Entry point: configure logging, parse CLI args and dispatch a command."""
    root_logger = logging.getLogger(__name__.split(".")[0])
    set_log(root_logger)
    parsed = get_cli_parser().parse_args()
    if parsed.verbose:
        set_log_level(root_logger, logging.DEBUG)
    command = all_commands[parsed.action]({}, parsed)
    # `init` runs before a spec repo exists, so no config file is loaded.
    if parsed.action == "init":
        sys.exit(command.run())
    with change_cwd(parsed.spec_repo_dir):
        config_path = os.path.join(parsed.config_dir,
                                   constants.DEFAULT_CONFIG_FILE)
        command.config = Config.from_file(config_path)
        sys.exit(command.run())
def run_language_commands(self, language, phase, cwd):
    """
    Runs commands specified in language settings for given language and phase

    :param language: Language to run commands for
    :type language: ``str``
    :param phase: Phase to run commands for (either ``pre`` or ``post``)
    :type phase: ``str``
    :param cwd: Directory to change to while executing all commands
    :type cwd: ``str``
    """
    with change_cwd(cwd):
        lc = self.config.get_language_config(language)
        commands = lc.get_stage_commands(phase)
        if commands:
            log.info("Running '%s' commands for language '%s'", phase, language)
        else:
            log.info("No '%s' commands found for language '%s'", phase, language)

        # Whitelist of functions that command definitions may invoke.
        # Invariant across commands, so built once instead of per argument.
        allowed_functions = {"glob": glob.glob}
        for command in commands:
            log.info("Running command '%s'", command.description)
            to_run = []
            for part in command.commandline:
                if isinstance(part, dict):
                    # A dict part describes a function call:
                    # {"function": name, "args": [...], "kwargs": {...}}
                    function_name = part.get("function")
                    function = allowed_functions.get(function_name)
                    if function:
                        result = function(*part.get("args", []),
                                          **part.get("kwargs", {}))
                        # NOTE: we may need to improve this logic if/when we add more functions
                        if isinstance(result, list):
                            to_run.extend(result)
                        else:
                            to_run.append(result)
                    else:
                        # Fixed typo in message: "Unknow" -> "Unknown"
                        raise ValueError(
                            "Unknown function '{f}' in command '{d}' for language '{l}'"
                            .format(f=function_name,
                                    d=command.description,
                                    l=language))
                else:
                    to_run.append(str(part))

            run_command(to_run, additional_env=lc.command_env)
def run_command_with_config(command_class, click_ctx, **kwargs):
    """Instantiate *command_class*, load the spec-repo config, and run it.

    Extra CLI options from *kwargs* are merged into the click context object,
    the config file is loaded from inside the spec repo directory, and the
    command's return value (or 1 on a known failure) becomes the exit status
    passed to ``click_ctx.exit``.

    :param command_class: command class to run; constructed with an empty
        config dict and the merged click context object
    :param click_ctx: current click context
    :param kwargs: additional CLI arguments to merge into ``click_ctx.obj``
    """
    click_ctx.obj.update(kwargs)
    cmd = command_class({}, click_ctx.obj)
    with change_cwd(click_ctx.obj.get("spec_repo_dir")):
        # Fixed: removed a redundant outer os.path.join wrapping the real join
        configfile = os.path.join(constants.SPEC_REPO_CONFIG_DIR,
                                  constants.DEFAULT_CONFIG_FILE)
        try:
            cmd.config = Config.from_file(configfile)
        except OSError:
            # Config missing/unreadable: the repo may use the legacy layout
            check_for_legacy_config(click_ctx, configfile)
        try:
            click_ctx.exit(cmd.run())
        except errors.ApigentoolsError as e:
            log.error("Apigentools error: %s", e)
        except subprocess.CalledProcessError as e:
            log.error("Failed running subprocess: %s", e.cmd)
            log.error(fmt_cmd_out_for_log(e, False))
        # Only reached when cmd.run() raised; exit with failure status
        click_ctx.exit(1)
def remove_generated_files(self, language_config):
    """
    Remove all generated files from the generate output directory

    Files are deemed as "generated" if they match any regex in the
    .generated_files file at the root of the output repository.

    :param language_config: language configuration providing
        ``generated_lang_dir``, the output directory to clean
    """
    blacklist_regexes = set()
    output_dir = os.path.abspath(language_config.generated_lang_dir)
    # Fixed: path kept in its own variable so the open() below no longer
    # shadows it with the file object.
    blacklist_path = os.path.join(output_dir, GENERATION_BLACKLIST_FILENAME)
    log.info(
        f"Removing generated files from the output directory: {output_dir}"
    )
    if not os.path.exists(blacklist_path):
        log.warning(
            f"File: {blacklist_path} doesn't exist, skipping removal of generated files"
        )
        return
    # We should already be in this directory, but its explicit and safer since we're deleting
    with change_cwd(output_dir):
        # Read in and compile the regexes of files we want to delete
        with open(blacklist_path, "r") as blacklist_file:
            for line in blacklist_file:
                line = line.strip()
                # Fixed: skip blank lines — re.compile("") matches every
                # filename and would delete the entire output directory.
                if line:
                    blacklist_regexes.add(re.compile(line))
        # Get all files from current directory recursively
        all_files = [
            os.path.relpath(os.path.join(root, filename), start=output_dir)
            for root, _, files in os.walk(output_dir)
            for filename in files
        ]
        # Match the regex against the list of all files and delete
        for path in all_files:
            if any(
                    blacklist_regex.match(path)
                    for blacklist_regex in blacklist_regexes):
                log.debug(f"Removing generated file: {path}")
                os.remove(path)
def run(self):
    """Scaffold a new spec-repo project directory (legacy layout).

    Creates the default directory structure, writes starter config, header
    and shared-section spec files (only when they don't already exist), and
    optionally initializes a git repository.

    :return: 0 (the command's exit status)
    :rtype: ``int``
    """
    cmd_result = 0
    log.info("Initializing a new project directory")
    is_repo = not self.args.no_git_repo
    os.makedirs(self.args.projectdir, exist_ok=True)
    with change_cwd(self.args.projectdir):
        # Canonical spec-repo directory layout
        dirs = {
            "config_dir": constants.DEFAULT_CONFIG_DIR,
            "downstream_templates_dir":
                constants.DEFAULT_DOWNSTREAM_TEMPLATES_DIR,
            "languages_config_dir": os.path.join(
                constants.DEFAULT_CONFIG_DIR,
                constants.DEFAULT_LANGUAGES_CONFIG_DIR),
            "generated_dir": constants.DEFAULT_GENERATED_CODE_DIR,
            "spec_dir": constants.DEFAULT_SPEC_DIR,
            "spec_v1_dir": os.path.join(constants.DEFAULT_SPEC_DIR, "v1"),
            "template_patches_dir": constants.DEFAULT_TEMPLATE_PATCHES_DIR,
            "templates_dir": constants.DEFAULT_TEMPLATES_DIR,
        }
        for _, v in dirs.items():
            os.makedirs(v, exist_ok=True)

        # Write a starter apigentools config, unless one already exists
        config_file = os.path.join(dirs["config_dir"],
                                   constants.DEFAULT_CONFIG_FILE)
        if not os.path.exists(config_file):
            with open(config_file, "w") as f:
                json.dump(
                    {
                        "codegen_exec": "openapi-generator",
                        "languages": {},
                        "server_base_urls": {
                            "v1": "https://api.myserver.com/v1",
                        },
                        "spec_sections": {
                            "v1": [],
                        },
                        "spec_versions": ["v1"],
                    },
                    f,
                    indent=4,
                )

        # Write starter v1 header/shared spec files, only when absent
        v1_header = os.path.join(dirs["spec_v1_dir"],
                                 constants.HEADER_FILE_NAME)
        v1_shared = os.path.join(dirs["spec_v1_dir"],
                                 constants.SHARED_SECTION_NAME + ".yaml")
        if not os.path.exists(v1_header):
            with open(v1_header, "w") as f:
                yaml.dump(
                    {
                        "info": {
                            "contact": {},
                            "description":
                                "Collection of all public API endpoints.",
                            "title": "My API endpoints",
                            "version": "1.0"
                        },
                        "openapi": "3.0.0",
                    }, f)
        if not os.path.exists(v1_shared):
            with open(v1_shared, "w") as f:
                yaml.dump(
                    {
                        "components": {
                            "schemas": {},
                            "parameters": {},
                            "securitySchemes": {},
                            "requestBodies": {},
                            "responses": {},
                            "headers": {},
                            "examples": {},
                            "links": {},
                            "callbacks": {},
                        },
                        "security": [],
                        "tags": [],
                    }, f)

        if is_repo:
            log.info(
                "Creating a git repo in the new spec project directory")
            run_command(["git", "init"], log_level=logging.DEBUG)
            # .gitkeep lets git track the otherwise-ignored directories
            for d in [dirs["generated_dir"], dirs["templates_dir"]]:
                with open(os.path.join(d, ".gitkeep"), "w"):
                    pass
            if not os.path.exists(".gitignore"):
                with open(".gitignore", "w") as f:
                    # Adjacent string literals concatenate into one write
                    f.write("!generated\n"
                            "generated/*\n"
                            "!generated/.gitkeep\n"
                            "spec/*/full_spec.yaml\n"
                            "!templates\n"
                            "templates/*\n"
                            "!templates/.gitkeep")
    return cmd_result
def test_change_cwd():
    """change_cwd enters the target dir and restores the previous cwd on exit."""
    original = os.getcwd()
    with tempfile.TemporaryDirectory() as tmp:
        with change_cwd(tmp):
            # Inside the context we must be in the temp dir (realpath
            # normalizes symlinked temp locations, e.g. on macOS).
            assert os.getcwd() == os.path.realpath(tmp)
    assert os.getcwd() == original
def run_config_command(
    self,
    command,
    what_command,
    cwd=".",
    chevron_vars=None,
    additional_functions=None,
    env_override=None,
    docker_run_options=None,
):
    """Run a single configured command, either on the host or in docker.

    Renders the command's arguments with chevron (mustache) variables,
    resolves any function-call arguments (e.g. glob), then either executes
    directly (``container_opts.system``) or wraps the command in a
    ``docker run`` invocation, building the image first if needed.

    :param command: command object with ``commandline``, ``description``
        and ``container_opts`` attributes
    :param what_command: human-readable identifier used in errors and
        derived image names
    :param cwd: directory the command runs in (mounted into the container)
    :param chevron_vars: template variables for rendering arguments
    :param additional_functions: extra allowed argument functions
    :param env_override: environment entries overriding container env
    :param docker_run_options: extra options appended to ``docker run``
    :raises ValueError: when an argument references an unknown function
    """
    log.info("Running command '%s'", command.description)
    env_override = env_override or {}
    if chevron_vars is None:
        chevron_vars = {}
    chevron_vars["cwd"] = cwd

    to_run = []
    for part in self._render_command_args(command.commandline, chevron_vars):
        if isinstance(part, FunctionArgument):
            allowed_functions = {"glob": glob.glob, "glob_re": glob_re}
            allowed_functions.update(additional_functions or {})
            function_name = part.function
            function = allowed_functions.get(function_name)
            if function:
                # Run the function from cwd so relative globs resolve there
                with change_cwd(cwd):
                    result = function(*part.args, **part.kwargs)
                # NOTE: we may need to improve this logic if/when we add more functions
                result = self._render_command_args(result, chevron_vars)
                if isinstance(result, list):
                    to_run.extend(result)
                else:
                    to_run.append(result)
            else:
                # NOTE(review): "Unknow" typo in this message — fix separately
                raise ValueError(
                    "Unknow function '{f}' in command '{d}' for '{l}'".
                    format(f=function_name,
                           d=command.description,
                           l=what_command))
        else:
            to_run.append(str(part))

    additional_env = command.container_opts.environment
    additional_env.update(env_override)
    is_system = command.container_opts.system
    run_command_args = {}
    if is_system:
        # Host execution: pass env and cwd straight to run_command
        run_command_args.update({
            "additional_env": additional_env,
            "cwd": cwd
        })
    else:
        image = command.container_opts.image
        if isinstance(image, ContainerImageBuild):
            # Image is described by a Dockerfile: build it locally first
            image_name = "apigentools-test-{}".format(
                what_command.replace("/", "-"))
            dockerfile = self._render_command_args(image.dockerfile,
                                                   chevron_vars)
            context = self._render_command_args(image.context, chevron_vars)
            with change_cwd(cwd):
                run_command([
                    "docker", "build", context, "-t", image_name, "-f",
                    dockerfile
                ])
            image = image_name
        # dockerize
        # The host cwd is mounted at /tmp/spec-repo inside the container
        workdir = os.path.join(
            "/tmp/spec-repo",
            cwd,
            self._render_command_args(
                command.container_opts.workdir,
                chevron_vars,
            ),
        )
        dockerized = [
            "docker",
            "run",
            "--rm",
            "-v",
            "{}:{}".format(os.getcwd(), "/tmp/spec-repo"),
            "--workdir",
            workdir,
        ]
        # First rendered token becomes the container entrypoint; the rest
        # are passed as arguments after the image name
        if to_run:
            dockerized.extend(["--entrypoint", to_run[0]])
        for k, v in additional_env.items():
            dockerized.extend(["-e", "{}={}".format(k, v)])
        if docker_run_options:
            dockerized.extend(docker_run_options)
        dockerized.extend([image] + to_run[1:])
        to_run = dockerized

    run_command(to_run, **run_command_args)
def test_glob_re(tmpdir, glob_pattern, regex, expected):
    """glob_re returns exactly the files matching both the glob and the regex."""
    for fname in ("testx.go", "x_test.go"):
        tmpdir.join(fname).ensure(file=True)
    with change_cwd(str(tmpdir)):
        assert sorted(glob_re(glob_pattern, regex)) == expected
def test_change_cwd(tmpdir):
    """change_cwd enters the target dir and restores the previous cwd on exit."""
    before = os.getcwd()
    destination = tmpdir.mkdir("target_dir")
    with change_cwd(destination):
        # realpath normalizes symlinked temp locations (e.g. macOS /tmp)
        assert os.getcwd() == os.path.realpath(destination)
    assert os.getcwd() == before