def test_run_command(caplog):
    # Purpose: verify run_command's wrapping of subprocess.run — environment
    # merging, success/failure propagation, and redaction of secret command
    # parts from log output.
    cmd = ["run", "this"]
    log_level = logging.INFO
    additional_env = {"SOME_ADDITIONAL_ENV": "value"}
    # run_command is expected to merge additional_env on top of the current env
    env = copy.deepcopy(os.environ)
    env.update(additional_env)
    combine_out_err = False
    # success path: subprocess.run is mocked to return a completed process
    flexmock(subprocess).should_receive("run").\
        with_args(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                  check=True, text=True, env=env).\
        and_return(subprocess.CompletedProcess(cmd, 0, "stdout", "stderr"))
    res = run_command(cmd, log_level=log_level,
                      additional_env=additional_env,
                      combine_out_err=combine_out_err)
    assert res.returncode == 0
    assert res.stdout == "stdout"
    assert res.stderr == "stderr"
    # failure path: CalledProcessError raised by subprocess.run must propagate
    flexmock(subprocess).should_receive("run").\
        with_args(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                  check=True, text=True, env=env).\
        and_raise(subprocess.CalledProcessError(1, cmd, "stdout", "stderr"))
    with pytest.raises(subprocess.CalledProcessError):
        run_command(cmd, log_level=log_level,
                    additional_env=additional_env,
                    combine_out_err=combine_out_err)

    # test that secrets are not logged
    set_log(log)
    caplog.clear()
    caplog.set_level(logging.DEBUG, logger=log.name)
    secret = "abcdefg"
    # a dict command part with "secret": True must be redacted in the logs,
    # while the real value is still passed to subprocess.run
    cmd = ["run", {"secret": True, "item": secret}]
    flexmock(subprocess).should_receive("run").\
        with_args(["run", "abcdefg"], stdout=subprocess.PIPE,
                  stderr=subprocess.PIPE, check=True, text=True, env=env).\
        and_return(subprocess.CompletedProcess(cmd, 0, "stdout", "stderr"))
    res = run_command(cmd, log_level=log_level,
                      additional_env=additional_env,
                      combine_out_err=combine_out_err)
    assert secret not in caplog.text
    assert REDACTED_OUT_SECRET in caplog.text
def validate_spec(self, fs_path):
    """Validate the spec file at ``fs_path`` with the configured codegen executable.

    :param fs_path: Path to the (full) spec file to validate
    :type fs_path: ``str``
    :return: ``True`` if validation succeeded, ``False`` otherwise
    :rtype: ``bool``
    """
    try:
        run_command([self.config.codegen_exec, "validate", "-i", fs_path])
        log.info("Validation successful")
        return True
    # catch only the failures this call can legitimately produce (non-zero exit,
    # missing executable); a bare `except` would also swallow KeyboardInterrupt,
    # SystemExit and genuine programming errors
    except (subprocess.CalledProcessError, OSError):
        log.error("Validation failed, see the output above for errors")
        return False
def run(self):
    """Scaffold a new spec repo: directory layout, default config/spec files,
    and (unless disabled) a fresh git repository. Returns 0.
    """
    log.info("Initializing a new project directory")
    create_git_repo = not self.args.get("no_git_repo")
    project_dir = self.args.get("projectdir")
    os.makedirs(project_dir, exist_ok=True)
    with change_cwd(project_dir):
        config_dir = constants.SPEC_REPO_CONFIG_DIR
        spec_v1_dir = os.path.join(constants.SPEC_REPO_SPEC_DIR, "v1")
        layout = [
            config_dir,
            os.path.join(config_dir, constants.SPEC_REPO_LANGUAGES_CONFIG_DIR),
            constants.SPEC_REPO_GENERATED_DIR,
            constants.SPEC_REPO_SPEC_DIR,
            spec_v1_dir,
            constants.SPEC_REPO_TEMPLATES_DIR,
        ]
        for directory in layout:
            os.makedirs(directory, exist_ok=True)

        # seed default files, never overwriting anything that already exists
        config_file = os.path.join(config_dir, constants.DEFAULT_CONFIG_FILE)
        if not os.path.exists(config_file):
            with open(config_file, "w") as fp:
                yaml.dump(self.CONFIG_FILE_JSON, fp, indent=2)
        header_path = os.path.join(spec_v1_dir, constants.HEADER_FILE_NAME)
        shared_path = os.path.join(spec_v1_dir, constants.SHARED_FILE_NAME)
        if not os.path.exists(header_path):
            with open(header_path, "w") as fp:
                yaml.dump(self.V1_HEADER_JSON, fp)
        if not os.path.exists(shared_path):
            with open(shared_path, "w") as fp:
                yaml.dump(self.V1_SHARED_JSON, fp)

        if create_git_repo:
            log.info(
                "Creating a git repo in the new spec project directory")
            run_command(["git", "init"], log_level=logging.DEBUG)
            # .gitkeep makes git track the otherwise-empty directories
            for directory in (constants.SPEC_REPO_GENERATED_DIR,
                              constants.SPEC_REPO_TEMPLATES_DIR):
                with open(os.path.join(directory, ".gitkeep"), "w"):
                    pass
            if not os.path.exists(".gitignore"):
                with open(".gitignore", "w") as fp:
                    fp.writelines(self.GITIGNORE)
    return 0
def run(self):
    """For every selected language: create a branch in the generated repo,
    commit all changes and push upstream. Returns the number of failures.
    """
    branches_created = {}
    failures = 0
    selected_languages = self.args.languages or self.config.languages
    default_msg = "Regenerate client from commit {} of spec repo".format(
        get_current_commit(self.args.spec_repo_dir))
    # a user-provided commit message takes precedence over the generated one
    commit_msg = self.args.push_commit_msg or default_msg

    for lang_name, lang_config in self.config.language_configs.items():
        # Skip any languages not specified by the user
        if lang_name not in selected_languages:
            continue
        gen_dir = self.get_generated_lang_dir(lang_name)
        # Assumes all generated changes are in the gen_dir directory
        # (the `generate` command puts them there by default).
        with change_cwd(gen_dir):
            repo = "{}/{}".format(lang_config.github_org,
                                  lang_config.github_repo)
            branch_name = '{}/{}'.format(lang_name, time.time())
            try:
                for git_cmd in (
                    ['git', 'checkout', '-b', branch_name],
                    ['git', 'add', '-A'],
                    ['git', 'commit', '-a', '-m', commit_msg],
                    ['git', 'push', 'origin', 'HEAD'],
                ):
                    run_command(git_cmd)
                branches_created[repo] = branch_name
            except subprocess.CalledProcessError as e:
                # report and move on to the next language
                log.error("Error running git commands: {}".format(e))
                failures += 1
                continue
    log.info('Apigentools created the following branches:')
    log.info('\n'.join('{} : {}'.format(key, value)
                       for key, value in branches_created.items()))
    return failures
def run_test_image(self, img_name):
    """Run the tests baked into the given docker image, forwarding any
    user-supplied NAME=VALUE environment variables as secrets.
    """
    log.info("Running tests: %s", img_name)
    cmd = ["docker", "run"]
    for position, entry in enumerate(self.args.container_env):
        name_value = entry.split("=", 1)
        if len(name_value) != 2:
            # the entry may contain a secret, so report only its position
            raise ValueError("{} (passed in on position {})".format(
                REDACTED_OUT_SECRET, position))
        cmd.append("-e")
        # mark the env assignment as secret so run_command redacts it in logs
        cmd.append({
            "item": "{}={}".format(name_value[0], name_value[1]),
            "secret": True
        })
    cmd.append(img_name)
    run_command(cmd, combine_out_err=True)
def run(self):
    # Obtain upstream openapi-generator templates (from a jar, a local dir or a
    # git checkout), apply local patches, and copy them per language into the
    # output dir. Returns 0 on success, 1 on failure.
    with tempfile.TemporaryDirectory() as td:
        log.info("Obtaining upstream templates ...")
        # patch_in: dir patches are applied in; copy_from: dir templates are copied from
        patch_in = copy_from = td
        if self.args.templates_source == "openapi-jar":
            # templates are packaged inside the openapi-generator jar
            run_command(["unzip", "-q", self.args.jar_path, "-d", td])
        elif self.args.templates_source == "local-dir":
            for lang in self.config.languages:
                lang_upstream_templates_dir = self.config.get_language_config(lang).upstream_templates_dir
                local_lang_dir = os.path.join(self.args.local_path, lang_upstream_templates_dir)
                if not os.path.exists(local_lang_dir):
                    log.error(
                        "Directory %s doesn't contain '%s' directory with templates. " +
                        "Make sure %s contains directories with templates for all languages",
                        self.args.local_path,
                        lang_upstream_templates_dir,
                        self.args.local_path
                    )
                    return 1
                shutil.copytree(local_lang_dir, os.path.join(td, lang_upstream_templates_dir))
        else:
            # openapi-git source: clone the generator repo; templates live under
            # modules/openapi-generator/src/main/resources
            patch_in = copy_from = os.path.join(
                td, "modules", "openapi-generator", "src", "main", "resources"
            )
            run_command(["git", "clone", OPENAPI_GENERATOR_GIT, td])
            run_command(["git", "-C", td, "checkout", self.args.git_committish])

        if os.path.exists(self.args.template_patches_dir):
            log.info("Applying patches to upstream templates ...")
            patches = glob.glob(os.path.join(self.args.template_patches_dir, "*.patch"))
            # apply patches in deterministic (sorted) order
            for p in sorted(patches):
                try:
                    run_command([
                        "patch",
                        "--fuzz",
                        "0",
                        "--no-backup-if-mismatch",
                        "-p1",
                        "-i",
                        os.path.abspath(os.path.join(self.args.template_patches_dir, os.path.basename(p))),
                        "-d",
                        patch_in,
                    ])
                except subprocess.CalledProcessError:
                    # at this point, the stdout/stderr of the process have been printed by
                    # `run_command`, so the user should have sufficient info to about what went wrong
                    log.error(
                        "Failed to apply patch %s, exiting as templates can't be processed",
                        p
                    )
                    return 1

        # copy the processed templates from the temporary dir to templates dir
        languages = self.args.languages or self.config.languages
        for lang in languages:
            upstream_templatedir = self.config.get_language_config(lang).upstream_templates_dir
            outlang_dir = os.path.join(self.args.output_dir, lang)
            if os.path.exists(outlang_dir):
                shutil.rmtree(outlang_dir)
            shutil.copytree(os.path.join(copy_from, upstream_templatedir), outlang_dir)
    return 0
def build_test_image(self, df_path, img_name):
    """Build a test image from the Dockerfile at ``df_path`` if it exists.

    Returns the image name on success, or ``None`` when no Dockerfile is found.
    """
    # no Dockerfile for this language/version -> nothing to build
    if not os.path.exists(df_path):
        return None
    build_cmd = [
        "docker", "build", os.path.dirname(df_path),
        "-f", df_path, "-t", img_name,
    ]
    if self.args.no_cache:
        build_cmd.append("--no-cache")
    run_command(build_cmd, combine_out_err=True)
    return img_name
def run_language_commands(self, language, phase, cwd):
    """
    Runs commands specified in language settings for given language and phase

    :param language: Language to run commands for
    :type language: ``str``
    :param phase: Phase to run commands for (either ``pre`` or ``post``)
    :type phase: ``str``
    :param cwd: Directory to change to while executing all commands
    :type cwd: ``str``
    :raises ValueError: if a command references a function that isn't whitelisted
    """
    with change_cwd(cwd):
        lc = self.config.get_language_config(language)
        commands = lc.get_stage_commands(phase)
        if commands:
            log.info("Running '%s' commands for language '%s'", phase, language)
        else:
            log.info("No '%s' commands found for language '%s'", phase, language)

        for command in commands:
            log.info("Running command '%s'", command.description)
            to_run = []
            for part in command.commandline:
                if isinstance(part, dict):
                    # a dict part describes a function call, e.g.
                    # {"function": "glob", "args": [...]}; only whitelisted
                    # functions may be invoked
                    allowed_functions = {"glob": glob.glob}
                    function_name = part.get("function")
                    function = allowed_functions.get(function_name)
                    if function:
                        result = function(*part.get("args", []),
                                          **part.get("kwargs", {}))
                        # NOTE: we may need to improve this logic if/when we add more functions
                        if isinstance(result, list):
                            to_run.extend(result)
                        else:
                            to_run.append(result)
                    else:
                        # fixed typo in the error message ("Unknow" -> "Unknown")
                        raise ValueError(
                            "Unknown function '{f}' in command '{d}' for language '{l}'"
                            .format(f=function_name, d=command.description,
                                    l=language))
                else:
                    to_run.append(str(part))

            run_command(to_run, additional_env=lc.command_env)
def run(self):
    # Push generated client code for each selected language: create a branch,
    # commit all changes, and push. Returns the number of languages that failed.
    created_branches = {}
    cmd_result = 0
    languages = self.args.get("languages") or self.config.languages
    commit_msg = "Regenerate client from commit {} of spec repo".format(
        get_current_commit())
    # a user-provided commit message takes precedence over the generated one
    commit_msg = self.args.get("push_commit_msg") or commit_msg
    for lang_name, lang_config in self.config.languages.items():
        # Skip any languages not specified by the user
        if lang_name not in languages:
            continue
        log.info("Running push for language {}".format(lang_name))
        gen_dir = lang_config.generated_lang_dir
        # Assumes all generated changes are in the gen_dir directory
        # This is done by default in the `generate` command.
        with change_cwd(gen_dir):
            repo = "{}/{}".format(lang_config.github_org,
                                  lang_config.github_repo)
            branch_name = self.get_push_branch(lang_name)
            try:
                # skip languages whose working tree only differs in .apigentools-info
                if self.args.get(
                        "skip_if_no_changes") and self.git_status_empty():
                    log.info(
                        "Only .apigentools file changed for language {}, skipping"
                        .format(lang_name))
                    continue

                self.setup_git_config()
                run_command(
                    ["git", "checkout", "-b", branch_name],
                    dry_run=self.args.get("dry_run"),
                )
                run_command(["git", "add", "-A"],
                            dry_run=self.args.get("dry_run"))
                run_command(
                    ["git", "commit", "-a", "-m", commit_msg],
                    dry_run=self.args.get("dry_run"),
                )
                run_command(
                    ["git", "push", "origin", "HEAD"],
                    dry_run=self.args.get("dry_run"),
                )
                created_branches[repo] = branch_name
            except subprocess.CalledProcessError as e:
                # keep going with remaining languages, but count the failure
                log.error("Error running git commands: {}".format(e))
                cmd_result += 1
                continue
    log.info("Apigentools created the following branches:")
    log.info("\n".join("{} : {}".format(key, value)
                       for key, value in created_branches.items()))
    return cmd_result
def pull_repository(self, language):
    """Clone the GitHub repository for ``language`` into its generated-code dir.

    :param language: language config object (provides ``language``,
        ``github_org`` and ``github_repo``)
    :raises subprocess.CalledProcessError: if the clone fails (e.g. the
        target directory isn't empty)
    """
    output_dir = self.get_generated_lang_dir(language.language)
    if self.args.git_via_https:
        repo = REPO_HTTPS_URL.format(language.github_org,
                                     language.github_repo)
    else:
        repo = REPO_SSH_URL.format(language.github_org, language.github_repo)
    try:
        run_command(['git', 'clone', '--depth=2', repo, output_dir])
    except subprocess.CalledProcessError:
        # Git doesn't allow you to clone into a non empty dir
        # Throw a helpful error if this happens
        log.error(
            "Error cloning repo {0} into {1}. Make sure {1} is empty first"
            .format(repo, output_dir))
        # bare `raise` re-raises with the original traceback intact
        # (idiomatic; `raise e` appends an extra traceback frame)
        raise
def get_push_branch(self, lang_name):
    """
    Get name of branch to create and push.

    If the default branch doesn't exist, it will be returned, otherwise
    a new feature branch name will be returned.

    :param lang_name: Name of language to include in a new feature branch
    :type lang_name: ``str``
    :return: Name of the branch to create and push
    :rtype: ``str``
    """
    candidate = self.args.default_branch
    try:
        run_command(["git", "rev-parse", "--verify", candidate])
    except subprocess.CalledProcessError:
        # the default branch doesn't exist -> create and push it directly
        return candidate
    # the default branch exists -> push a fresh, uniquely-named feature branch
    return "{}/{}".format(lang_name, time.time())
def setup_git_config(self, cwd=None):
    """Update git config for this repository to use the provided author's
    email/name. If not specified, use the setup from the system/global
    """
    dry_run = self.args.get("dry_run", False)
    # only set the keys the user actually provided
    for key, value in (
        ("user.email", self.args.get("git_email")),
        ("user.name", self.args.get("git_name")),
    ):
        if value:
            run_command(["git", "config", key, value],
                        dry_run=dry_run, cwd=cwd)
def git_status_empty(self):
    # I hope that `--porcelain` doesn't mean this is fragile ¯\_(ツ)_/¯
    status = run_command(["git", "status", "--porcelain"])
    # bucket changed paths by their porcelain status flag (e.g. "M", "??")
    changes = {}
    for raw_line in status.stdout.splitlines():
        raw_line = raw_line.strip()
        if not raw_line:
            continue
        flag, path = raw_line.split(maxsplit=1)
        changes.setdefault(flag, []).append(path)
    # a lone modification of .apigentools-info counts as a clean tree
    return changes in ({}, {"M": [".apigentools-info"]})
def get_codegen_version(self):
    """
    Gets and caches version of the configured codegen_exec. Returns the cached
    result on subsequent invocations.

    :return: Codegen version, for example ``4.1.0``; ``None`` if getting
        the version failed
    :rtype: ``str``
    """
    if self.__cached_codegen_version is not None:
        return self.__cached_codegen_version
    try:
        version_result = run_command([self.config.codegen_exec, "version"])
    except subprocess.CalledProcessError:
        # leave the cache unset; a later call will retry
        return self.__cached_codegen_version
    self.__cached_codegen_version = version_result.stdout.strip()
    return self.__cached_codegen_version
def pull_repository(self, language):
    """Clone the GitHub repository for ``language``, optionally authenticating
    over HTTPS with an OAuth/installation token.

    When a token is used it is embedded in the clone URL, so the URL is marked
    secret and the command output is treated as sensitive to keep the token
    out of logs.

    :param language: language config object (provides ``language``,
        ``github_org`` and ``github_repo``)
    :raises subprocess.CalledProcessError: if the clone fails (e.g. the
        target directory isn't empty)
    """
    output_dir = self.get_generated_lang_dir(language.language)
    secret_repo_url = False
    if self.args.git_via_https:
        checkout_url = ""
        if self.args.git_via_https_oauth_token:
            checkout_url = "{}:x-oauth-basic@".format(
                self.args.git_via_https_oauth_token)
        elif self.args.git_via_https_installation_access_token:
            checkout_url = "x-access-token:{}@".format(
                self.args.git_via_https_installation_access_token)
        if checkout_url:
            # the URL now carries a credential and must never be logged verbatim
            secret_repo_url = True
        repo = REPO_HTTPS_URL.format(checkout_url, language.github_org,
                                     language.github_repo)
    else:
        repo = REPO_SSH_URL.format(language.github_org, language.github_repo)
    try:
        # log only org/repo when the full URL contains a token
        log_repo = "{}/{}".format(
            language.github_org,
            language.github_repo) if secret_repo_url else repo
        log.info("Pulling repository %s", log_repo)
        # only redact output when the URL actually contains a token
        # (unconditional `sensitive_output=True` made non-secret clone
        # failures needlessly hard to debug; matches the newer implementation)
        run_command([
            'git', 'clone', '--depth=2', {
                "item": repo,
                "secret": secret_repo_url
            }, output_dir
        ], sensitive_output=secret_repo_url)
    except subprocess.CalledProcessError:
        # Git doesn't allow you to clone into a non empty dir
        log.error(
            "Error cloning repo {0} into {1}. Make sure {1} is empty first"
            .format(log_repo, output_dir))
        # bare `raise` preserves the original traceback
        raise
def run(self):
    # Generate client code: build the full spec per API version, then run the
    # configured codegen for every selected language/version combination.
    # Returns 0 on success, 1 on fatal error.
    fs_paths = {}
    versions = self.args.api_versions or self.config.spec_versions
    languages = self.args.languages or self.config.languages
    pull_repo = self.args.clone_repo

    # first, generate full spec for all major versions of the API
    for version in versions:
        fs_paths[version] = write_full_spec(self.config, self.args.spec_dir,
                                            version, self.args.full_spec_file)

    missing_templates = self.get_missing_templates(languages)
    if missing_templates and not self.args.builtin_templates:
        log.error(
            "Missing templates for %s; please run `apigentools templates` first",
            ", ".join(missing_templates))
        return 1

    # cache codegen version
    if self.get_codegen_version() is None:
        log.error("Failed to get codegen version, exiting")
        return 1

    # now, for each language generate a client library for every major version that is explicitly
    # listed in its settings (meaning that we can have languages that don't support all major
    # API versions)
    for language in languages:
        language_config = self.config.get_language_config(language)

        # Clone the language target repo into the output directory
        if pull_repo:
            self.pull_repository(language_config)

        for version in language_config.spec_versions:
            log.info("Generation in %s, spec version %s", language, version)
            language_oapi_config_path = os.path.join(
                self.args.config_dir, LANGUAGE_OAPI_CONFIGS,
                "{lang}_{v}.json".format(lang=language, v=version))
            with open(language_oapi_config_path) as lcp:
                language_oapi_config = json.load(lcp)
            version_output_dir = self.get_generated_lang_version_dir(
                language, version)

            generate_cmd = [
                self.config.codegen_exec,
                "generate",
                "--http-user-agent",
                "{c}/{v}/{l}".format(
                    c=self.config.user_agent_client_name,
                    v=self.get_version_from_lang_oapi_config(
                        language_oapi_config),
                    l=language),
                "-g",
                language,
                "-c",
                language_oapi_config_path,
                "-i",
                fs_paths[version],
                "-o",
                version_output_dir,
                "--additional-properties",
                "apigentoolsStamp='{stamp}'".format(
                    stamp=self.get_stamp()),
            ]

            if not self.args.builtin_templates:
                # point codegen at the per-language template dir unless the
                # built-in templates were explicitly requested
                generate_cmd.extend(
                    ["-t", os.path.join(self.args.template_dir, language)])

            os.makedirs(version_output_dir, exist_ok=True)
            # run configured pre-commands, the generator itself, then post-commands
            self.run_language_commands(language, "pre", version_output_dir)
            run_command(generate_cmd,
                        additional_env=language_config.command_env)
            self.run_language_commands(language, "post", version_output_dir)

            self.render_downstream_templates(
                language,
                self.args.downstream_templates_dir,
            )

        # Write the apigentools.info file once per language
        # after each nested folder has been created
        self.write_dot_apigentools_info(language)
    return 0
def pull_repository(self, language, branch=None):
    # Clone the language's GitHub repo into its generated-code dir, optionally
    # checking out (and, with `is_ancestor`, updating) a specific branch.
    # HTTPS token auth embeds the token in the URL, so that URL is marked secret.
    if not language.github_repo:
        log.warning(
            "Skipping repository clone because github_repo is empty")
        return

    output_dir = language.generated_lang_dir
    secret_repo_url = False
    if self.args.get("git_via_https"):
        checkout_url = ""
        if self.args.get("git_via_https_oauth_token"):
            checkout_url = "{}:x-oauth-basic@".format(
                self.args.get("git_via_https_oauth_token"))
        elif self.args.get("git_via_https_installation_access_token"):
            checkout_url = "x-access-token:{}@".format(
                self.args.get("git_via_https_installation_access_token"))
        if checkout_url:
            # the URL now carries a credential and must never be logged verbatim
            secret_repo_url = True
        repo = REPO_HTTPS_URL.format(checkout_url, language.github_org,
                                     language.github_repo)
    else:
        repo = REPO_SSH_URL.format(language.github_org, language.github_repo)
    try:
        # log only org/repo when the full URL contains a token
        log_repo = ("{}/{}".format(language.github_org,
                                   language.github_repo)
                    if secret_repo_url else repo)
        log.info("Pulling repository %s", log_repo)
        run_command(
            [
                "git",
                "clone",
                "--depth=2",
                {
                    "item": repo,
                    "secret": secret_repo_url
                },
                output_dir,
            ],
            sensitive_output=secret_repo_url,
        )
    except subprocess.CalledProcessError as e:
        # Git refuses to clone into a non-empty directory
        log.error(
            "Error cloning repo {0} into {1}. Make sure {1} is empty first"
            .format(log_repo, output_dir))
        raise e
    if branch is not None:
        try:
            run_command(["git", "fetch", "origin", branch], cwd=output_dir)
            run_command(["git", "branch", branch, "FETCH_HEAD"],
                        cwd=output_dir)
            run_command(["git", "checkout", branch], cwd=output_dir)
        except subprocess.CalledProcessError:
            # if the branch doesn't exist, we stay in the default one
            branch = None
    if branch is not None and self.args.get("is_ancestor"):
        # ensure the given commit is already contained in the branch; if not,
        # try merging it in to bring the branch up to date
        try:
            run_command(
                [
                    "git",
                    "merge-base",
                    "--is-ancestor",
                    self.args.get("is_ancestor"),
                    branch,
                ],
                cwd=output_dir,
            )
        except subprocess.CalledProcessError:
            log.warning(
                f"{self.args.get('is_ancestor')} is not ancestor of branch {branch}, attempting to update branch"
            )
            try:
                self.setup_git_config(cwd=output_dir)
                run_command(
                    [
                        "git",
                        "merge",
                        "--no-ff",
                        "--allow-unrelated-histories",
                        self.args.get("is_ancestor"),
                    ],
                    cwd=output_dir,
                )
            except subprocess.CalledProcessError:
                log.error(
                    f"Could not merge {self.args.get('is_ancestor')} to {branch} to keep it up-to-date"
                )
                raise
def templates_for_language_spec_version(self, lc, spec_version):
    # TODO: select directory specified by "templates_dir" in "templates.source"
    # *before* applying patches
    # Obtain upstream templates for one language/spec-version — from a jar, a
    # directory, or an openapi-generator git checkout (possibly extracted out
    # of a container image) — apply configured patches, and install them under
    # SPEC_REPO_TEMPLATES_DIR. Returns 0 on success, 1 on failure.
    templates_cfg = lc.templates_config_for(spec_version)
    if templates_cfg:
        log.info(
            "Obtaining upstream templates for %s/%s ...", lc.language, spec_version
        )
    else:
        log.info(
            "No templates configured for %s/%s, skipping", lc.language, spec_version
        )
        return 0
    # sources marked "system" come from the host instead of a container image
    from_container = not templates_cfg.source.system
    source_type = templates_cfg.source.type
    with tempfile.TemporaryDirectory() as td:
        # patch_in: dir patches are applied in; copy_from: dir templates are copied from
        patch_in = copy_from = td
        image = lc.container_opts_for(spec_version).image
        if isinstance(templates_cfg.source, OpenapiJarTemplatesConfig):
            jar_path = templates_cfg.source.jar_path
            if from_container:
                # copy the generator jar out of a throwaway container first
                log.info("Extracting openapi-generator jar from image %s", image)
                new_jar_path = os.path.join(td, "openapi-generator.jar")
                with self.create_container(lc, spec_version) as container:
                    run_command(
                        [
                            "docker",
                            "cp",
                            "{}:{}".format(container, jar_path),
                            new_jar_path,
                        ]
                    )
                jar_path = new_jar_path
            run_command(["unzip", "-q", jar_path, "-d", td])
        elif isinstance(templates_cfg.source, DirectoryTemplatesConfig):
            lang_dir = os.path.join(
                templates_cfg.source.directory_path,
                templates_cfg.source.templates_dir,
            )
            output_dir = os.path.join(
                td,
                templates_cfg.source.templates_dir,
            )
            if from_container:
                log.info("Extracting templates directory from image %s", image)
                with self.create_container(lc, spec_version) as container:
                    run_command(
                        [
                            "docker",
                            "cp",
                            "{}:{}".format(container, lang_dir),
                            output_dir,
                        ]
                    )
            else:
                if not os.path.exists(lang_dir):
                    log.error(
                        "Directory %s doesn't contain '%s' subdirectory with templates",
                        templates_cfg.source.directory_path,
                        templates_cfg.source.templates_dir,
                    )
                    return 1
                shutil.copytree(
                    lang_dir,
                    os.path.join(td, templates_cfg.source.templates_dir),
                )
        elif isinstance(templates_cfg.source, OpenapiGitTemplatesConfig):
            if from_container:
                log.error(
                    "Templates with source 'openapi-git' must be used with '%s: true'",
                    COMMAND_SYSTEM_KEY,
                )
            # templates live under modules/openapi-generator/src/main/resources
            patch_in = copy_from = os.path.join(
                td, "modules", "openapi-generator", "src", "main", "resources"
            )
            run_command(["git", "clone", OPENAPI_GENERATOR_GIT, td])
            run_command(
                [
                    "git",
                    "-C",
                    td,
                    "checkout",
                    templates_cfg.source.git_committish,
                ]
            )
        else:
            log.error("Unknown templates source type {}".format(source_type))
            return 1

        patches = templates_cfg.patches
        if patches:
            log.info("Applying patches to upstream templates ...")
            for p in patches:
                try:
                    run_command(
                        [
                            "patch",
                            "--fuzz",
                            "0",
                            "--no-backup-if-mismatch",
                            "-p1",
                            "-i",
                            os.path.abspath(p),
                            "-d",
                            patch_in,
                        ]
                    )
                except subprocess.CalledProcessError:
                    # at this point, the stdout/stderr of the process have been printed by
                    # `run_command`, so the user should have sufficient info to about what went wrong
                    log.error(
                        "Failed to apply patch %s, exiting as templates can't be processed",
                        p,
                    )
                    return 1

        # copy the processed templates from the temporary dir to templates dir
        outdir = os.path.join(SPEC_REPO_TEMPLATES_DIR, lc.language, spec_version)
        if os.path.exists(outdir):
            shutil.rmtree(outdir)
        shutil.copytree(
            os.path.join(copy_from, templates_cfg.source.templates_dir),
            outdir,
        )
    return 0
def create_container(self, lc, spec_version):
    """Context-manager generator: create a throwaway container from the
    language/spec-version image and yield its name.

    :param lc: language config (provides ``container_opts_for``)
    :param spec_version: spec version whose container options to use
    :yield: the generated container name
    """
    image = lc.container_opts_for(spec_version).image
    cn = "apigentools-created-container-{}".format(time.time())
    run_command(["docker", "create", "--name", cn, image])
    try:
        yield cn
    finally:
        # always remove the container, even when the with-body raises;
        # the original version leaked the container on exceptions
        run_command(["docker", "rm", cn])
def run_config_command(
    self,
    command,
    what_command,
    cwd=".",
    chevron_vars=None,
    additional_functions=None,
    env_override=None,
    docker_run_options=None,
):
    """Run one configured command, either on the host system or dockerized.

    :param command: command config (commandline, container_opts, description)
    :param what_command: label for the command (used in errors and image names)
    :param cwd: directory to run in (also mounted/mapped into the container)
    :param chevron_vars: template variables for rendering command arguments
    :param additional_functions: extra whitelisted functions for dict parts
    :param env_override: env vars that take precedence over configured ones
    :param docker_run_options: extra options appended to ``docker run``
    :raises ValueError: if a command references a non-whitelisted function
    """
    log.info("Running command '%s'", command.description)
    env_override = env_override or {}
    if chevron_vars is None:
        chevron_vars = {}
    chevron_vars["cwd"] = cwd

    to_run = []
    for part in self._render_command_args(command.commandline, chevron_vars):
        if isinstance(part, FunctionArgument):
            # function parts are resolved through a whitelist only
            allowed_functions = {"glob": glob.glob, "glob_re": glob_re}
            allowed_functions.update(additional_functions or {})
            function_name = part.function
            function = allowed_functions.get(function_name)
            if function:
                with change_cwd(cwd):
                    result = function(*part.args, **part.kwargs)
                # NOTE: we may need to improve this logic if/when we add more functions
                result = self._render_command_args(result, chevron_vars)
                if isinstance(result, list):
                    to_run.extend(result)
                else:
                    to_run.append(result)
            else:
                # fixed typo in the error message ("Unknow" -> "Unknown")
                raise ValueError(
                    "Unknown function '{f}' in command '{d}' for '{l}'".
                    format(f=function_name,
                           d=command.description,
                           l=what_command))
        else:
            to_run.append(str(part))

    # copy before merging the override so the shared config object's
    # environment dict isn't mutated across calls
    additional_env = dict(command.container_opts.environment)
    additional_env.update(env_override)
    is_system = command.container_opts.system
    run_command_args = {}
    if is_system:
        # run directly on the host
        run_command_args.update({
            "additional_env": additional_env,
            "cwd": cwd
        })
    else:
        image = command.container_opts.image
        if isinstance(image, ContainerImageBuild):
            # the image is described by a Dockerfile -> build it first
            image_name = "apigentools-test-{}".format(
                what_command.replace("/", "-"))
            dockerfile = self._render_command_args(image.dockerfile,
                                                   chevron_vars)
            context = self._render_command_args(image.context, chevron_vars)
            with change_cwd(cwd):
                run_command([
                    "docker", "build", context, "-t", image_name, "-f",
                    dockerfile
                ])
            image = image_name
        # dockerize: mount the spec repo and run the command inside the image
        workdir = os.path.join(
            "/tmp/spec-repo",
            cwd,
            self._render_command_args(
                command.container_opts.workdir,
                chevron_vars,
            ),
        )
        dockerized = [
            "docker",
            "run",
            "--rm",
            "-v",
            "{}:{}".format(os.getcwd(), "/tmp/spec-repo"),
            "--workdir",
            workdir,
        ]
        if to_run:
            # first element becomes the entrypoint, the rest are its args
            dockerized.extend(["--entrypoint", to_run[0]])
        for k, v in additional_env.items():
            dockerized.extend(["-e", "{}={}".format(k, v)])
        if docker_run_options:
            dockerized.extend(docker_run_options)
        dockerized.extend([image] + to_run[1:])
        to_run = dockerized

    run_command(to_run, **run_command_args)
def run(self):
    # Scaffold a new apigentools spec repo: directory layout, default config,
    # v1 spec header/shared files, and (unless disabled) a fresh git repo.
    # Returns 0.
    cmd_result = 0
    log.info("Initializing a new project directory")
    is_repo = not self.args.no_git_repo
    os.makedirs(self.args.projectdir, exist_ok=True)
    with change_cwd(self.args.projectdir):
        dirs = {
            "config_dir": constants.DEFAULT_CONFIG_DIR,
            "downstream_templates_dir":
                constants.DEFAULT_DOWNSTREAM_TEMPLATES_DIR,
            "languages_config_dir":
                os.path.join(constants.DEFAULT_CONFIG_DIR,
                             constants.DEFAULT_LANGUAGES_CONFIG_DIR),
            "generated_dir": constants.DEFAULT_GENERATED_CODE_DIR,
            "spec_dir": constants.DEFAULT_SPEC_DIR,
            "spec_v1_dir": os.path.join(constants.DEFAULT_SPEC_DIR, "v1"),
            "template_patches_dir": constants.DEFAULT_TEMPLATE_PATCHES_DIR,
            "templates_dir": constants.DEFAULT_TEMPLATES_DIR,
        }
        for _, v in dirs.items():
            os.makedirs(v, exist_ok=True)

        # seed a minimal config file only if one doesn't exist yet
        config_file = os.path.join(dirs["config_dir"],
                                   constants.DEFAULT_CONFIG_FILE)
        if not os.path.exists(config_file):
            with open(config_file, "w") as f:
                json.dump(
                    {
                        "codegen_exec": "openapi-generator",
                        "languages": {},
                        "server_base_urls": {
                            "v1": "https://api.myserver.com/v1",
                        },
                        "spec_sections": {
                            "v1": [],
                        },
                        "spec_versions": ["v1"],
                    },
                    f,
                    indent=4,
                )

        # seed the v1 spec header and shared-section files if absent
        v1_header = os.path.join(dirs["spec_v1_dir"],
                                 constants.HEADER_FILE_NAME)
        v1_shared = os.path.join(dirs["spec_v1_dir"],
                                 constants.SHARED_SECTION_NAME + ".yaml")
        if not os.path.exists(v1_header):
            with open(v1_header, "w") as f:
                yaml.dump(
                    {
                        "info": {
                            "contact": {},
                            "description":
                                "Collection of all public API endpoints.",
                            "title": "My API endpoints",
                            "version": "1.0"
                        },
                        "openapi": "3.0.0",
                    }, f)
        if not os.path.exists(v1_shared):
            with open(v1_shared, "w") as f:
                yaml.dump(
                    {
                        "components": {
                            "schemas": {},
                            "parameters": {},
                            "securitySchemes": {},
                            "requestBodies": {},
                            "responses": {},
                            "headers": {},
                            "examples": {},
                            "links": {},
                            "callbacks": {},
                        },
                        "security": [],
                        "tags": [],
                    }, f)

        if is_repo:
            log.info(
                "Creating a git repo in the new spec project directory")
            run_command(["git", "init"], log_level=logging.DEBUG)
            # .gitkeep makes git track the otherwise-empty directories
            for d in [dirs["generated_dir"], dirs["templates_dir"]]:
                with open(os.path.join(d, ".gitkeep"), "w"):
                    pass
            if not os.path.exists(".gitignore"):
                with open(".gitignore", "w") as f:
                    f.write("!generated\n"
                            "generated/*\n"
                            "!generated/.gitkeep\n"
                            "spec/*/full_spec.yaml\n"
                            "!templates\n"
                            "templates/*\n"
                            "!templates/.gitkeep")
    return cmd_result
def run(self):
    # Push generated client code for each selected language: optionally set
    # the git author, create a branch, commit all changes, and push.
    # Returns the number of languages that failed.
    created_branches = {}
    cmd_result = 0
    languages = self.args.languages or self.config.languages
    commit_msg = "Regenerate client from commit {} of spec repo".format(
        get_current_commit(self.args.spec_repo_dir))
    # a user-provided commit message takes precedence over the generated one
    commit_msg = self.args.push_commit_msg or commit_msg
    for lang_name, lang_config in self.config.language_configs.items():
        # Skip any languages not specified by the user
        if lang_name not in languages:
            continue
        log.info("Running push for language {}".format(lang_name))
        gen_dir = self.get_generated_lang_dir(lang_name)
        # Assumes all generated changes are in the gen_dir directory
        # This is done by default in the `generate` command.
        with change_cwd(gen_dir):
            repo = "{}/{}".format(lang_config.github_org,
                                  lang_config.github_repo)
            branch_name = self.get_push_branch(lang_name)
            try:
                # skip languages whose working tree only differs in .apigentools-info
                if self.args.skip_if_no_changes and self.git_status_empty(
                ):
                    log.info(
                        "Only .apigentools file changed for language {}, skipping"
                        .format(lang_name))
                    continue
                # Update git config for this repository to use the provided author's email/name
                # If not specified, use the setup from the system/global
                if self.args.git_email:
                    run_command([
                        'git', 'config', 'user.email', self.args.git_email
                    ],
                                dry_run=self.args.dry_run)
                if self.args.git_name:
                    run_command(
                        ['git', 'config', 'user.name', self.args.git_name],
                        dry_run=self.args.dry_run)

                run_command(['git', 'checkout', '-b', branch_name],
                            dry_run=self.args.dry_run)
                run_command(['git', 'add', '-A'],
                            dry_run=self.args.dry_run)
                run_command(['git', 'commit', '-a', '-m', commit_msg],
                            dry_run=self.args.dry_run)
                run_command(['git', 'push', 'origin', 'HEAD'],
                            dry_run=self.args.dry_run)
                created_branches[repo] = branch_name
            except subprocess.CalledProcessError as e:
                # keep going with remaining languages, but count the failure
                log.error("Error running git commands: {}".format(e))
                cmd_result += 1
                continue
    log.info('Apigentools created the following branches:')
    log.info('\n'.join('{} : {}'.format(key, value)
                       for key, value in created_branches.items()))
    return cmd_result