def build_docker_image(
    name: str, version: str, build_args: str = "", exit_on_error: bool = False
) -> subprocess.CompletedProcess:
    """Build a docker image from a Dockerfile in the working directory.

    Args:
        name (str): Name of the docker image.
        version (str): Version to use as tag.
        build_args (str, optional): Add additional build arguments for docker build.
        exit_on_error (bool, optional): If `True`, exit process as soon as an error occurs.

    Returns:
        subprocess.CompletedProcess: Result of the `docker build ...` command.
    """
    tag_with_version = f"{name}:{version}"
    tag_latest = f"{name}:latest"

    # The image is tagged both with the concrete version and with "latest".
    completed_process = build_utils.run(
        f"docker build -t {tag_with_version} -t {tag_latest} {build_args} ./",
        exit_on_error=exit_on_error,
    )
    # TODO tag prefixed image names

    if completed_process.returncode > 0:
        build_utils.log(f"Failed to build Docker image {name}:{version}")
    return completed_process
def publish_pypi_distribution(
    pypi_token: str,
    pypi_user: str = "__token__",
    pypi_repository: Optional[str] = None,
    exit_on_error: bool = True,
) -> None:
    """Publish distribution to pypi.

    Args:
        pypi_token (str): Token of PyPi repository.
        pypi_user (str, optional): User of PyPi repository. Defaults to "__token__".
        pypi_repository (Optional[str], optional): PyPi repository. If `None` provided, use the production instance.
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    if not pypi_token:
        build_utils.log("PyPI token is required for release (--pypi-token=<TOKEN>)")
        if exit_on_error:
            build_utils.exit_process(1)
        return

    repository_args = f'--repository-url "{pypi_repository}"' if pypi_repository else ""

    # Make sure twine is available before attempting the upload.
    build_utils.command_exists("twine", exit_on_error=exit_on_error)

    build_utils.run(
        f'twine upload --non-interactive -u "{pypi_user}" -p "{pypi_token}" {repository_args} dist/*',
        exit_on_error=exit_on_error,
    )
def main(args: dict) -> None:
    """Build, check, and release this component's Docker image.

    Args:
        args (dict): Parsed build flags (version, make, check, release, force, ...).
    """
    # set current path as working dir
    os.chdir(HERE)

    version = args.get(build_utils.FLAG_VERSION)

    docker_image_prefix = DOCKER_IMAGE_PREFIX
    if args.get(build_docker.FLAG_DOCKER_IMAGE_PREFIX):
        docker_image_prefix = args.get(build_docker.FLAG_DOCKER_IMAGE_PREFIX)  # type: ignore

    if args.get(build_utils.FLAG_MAKE):
        build_docker.build_docker_image(COMPONENT_NAME, version, exit_on_error=True)

    if args.get(build_utils.FLAG_CHECK):
        build_docker.lint_dockerfile(exit_on_error=False)
        # Fix: the original tested the constant `build_utils.FLAG_FORCE` itself
        # (always truthy), so `exit_on_error` was always False and the security
        # check could never fail the build. The force flag must be read from
        # the parsed arguments.
        exit_on_error = False if args.get(build_utils.FLAG_FORCE) else True
        completed_process = build_docker.check_image(
            image=build_docker.get_image_name(name=COMPONENT_NAME, tag=version),
            exit_on_error=exit_on_error,
        )
        if completed_process and completed_process.returncode != 0:
            build_utils.log(
                f"The security check failed, but is ignored because {build_utils.FLAG_FORCE} flag is set."
            )

    if args.get(build_utils.FLAG_RELEASE):
        build_docker.release_docker_image(
            COMPONENT_NAME, version, docker_image_prefix, exit_on_error=True
        )
def update_version(module_path: str, version: str, exit_on_error: bool = True) -> None:
    """Update version in specified module.

    Args:
        module_path (str): Python module with a `__version__` attribute.
        version (str): New version number to write into `__version__` attribute.
        exit_on_error (bool, optional): If `True`, exit process as soon as error occures. Defaults to True.
    """
    if not version:
        build_utils.log("Cannot update version, no version provided")
        if exit_on_error:
            build_utils.exit_process(1)
        return

    if not os.path.exists(module_path):
        build_utils.log("Couldn't find file: " + module_path)
        if exit_on_error:
            build_utils.exit_process(1)
        return

    # Rewrite the file in place, replacing only the __version__ assignment.
    with open(module_path, "r+") as module_file:
        content = module_file.read()
        updated = re.sub(r"__version__ = \".+\"", f'__version__ = "{version}"', content)
        module_file.seek(0)
        module_file.write(updated)
        module_file.truncate()
def generate_and_copy_js_client() -> bool:
    """Generate the JavaScript client from the swagger spec and copy it into the webapp.

    Returns:
        bool: `True` when generation and copying succeeded, `False` otherwise.
    """
    temp_dir = "./temp"
    pathlib.Path(temp_dir).mkdir(exist_ok=True)

    swagger_codegen_cli = f"{temp_dir}/swagger-codegen-cli.jar"
    if not check_and_download_swagger_cli(swagger_codegen_cli):
        return False

    swagger_path = "./backend/lab-service/src/main/resources/swagger/swagger.json"
    output_path = f"{temp_dir}/client"
    build_utils.run(
        f"java -jar {swagger_codegen_cli} generate -i {swagger_path} -l javascript -o {output_path} --additional-properties useES6=true"
    )

    # shutil.move(f"{output_path}/src/", "./webapp/src/services/mllab-client")
    try:
        for generated_file in pathlib.Path(f"{output_path}/src/").iterdir():
            file_name = str(generated_file.parts[-1])
            # The generated entry point is renamed to the project's convention.
            new_file_name = "lab-api.js" if file_name == "index.js" else file_name
            target_file_path = f"./webapp/src/services/client/{new_file_name}"
            # Delete existing client files to be replaced with the new ones
            target = pathlib.Path(target_file_path)
            if target.is_file():
                target.unlink()
            elif target.is_dir():
                shutil.rmtree(target_file_path)
            shutil.move(str(generated_file), target_file_path)
    except FileNotFoundError as e:
        build_utils.log(str(e))
        return False
    return True
def build_docker_image(
    name: str,
    version: str,
    build_args: str = "",
    docker_image_prefix: str = "",
    dockerfile: Optional[str] = None,
    additional_build_args: str = "",
    exit_on_error: bool = True,
) -> subprocess.CompletedProcess:
    """Build a docker image from a Dockerfile in the working directory.

    Args:
        name (str): Name of the docker image.
        version (str): Version to use as tag.
        build_args (str, optional): Add additional build arguments for docker build.
        docker_image_prefix (str, optional): The prefix added to the name to indicate an organization on DockerHub or a completely different repository.
        dockerfile (str, optional): Specify a specific Dockerfile. If not specified, the default `Dockerfile` will be used.
        additional_build_args (str, optional): NOTE(review): currently unused by this function — confirm whether it should be appended to the build command.
        exit_on_error (bool, optional): If `True`, exit process as soon as an error occurs.

    Returns:
        subprocess.CompletedProcess: Returns the CompletedProcess object of the `docker build ...` command.
    """
    # Check if docker exists on the system
    build_utils.command_exists("docker", exit_on_error=exit_on_error)

    versioned_tag = get_image_name(name=name, tag=version)
    latest_tag = get_image_name(name=name, tag="latest")

    dockerfile_command = ""
    if dockerfile:
        # Fix: a trailing space is required here — without it the Dockerfile
        # path was glued directly to the following "-t " option, producing an
        # invalid command line like "-f Dockerfile.dev-t image:1.0".
        dockerfile_command = " -f " + dockerfile + " "

    completed_process = build_utils.run(
        "docker build "
        + dockerfile_command
        + "-t "
        + versioned_tag
        + " -t "
        + latest_tag
        + " "
        + build_args
        + " ./",
        exit_on_error=exit_on_error,
    )

    if completed_process.returncode > 0:
        build_utils.log(f"Failed to build Docker image {versioned_tag}")
        return completed_process

    if docker_image_prefix:
        # Also tag the image with the remote (prefixed) name for later pushes.
        remote_versioned_tag = get_image_name(
            name=name, tag=version, image_prefix=docker_image_prefix
        )
        build_utils.run(
            "docker tag " + versioned_tag + " " + remote_versioned_tag,
            exit_on_error=exit_on_error,
        )

    return completed_process
def lint_dockerfile() -> None:
    """Run hadolint on the Dockerfile."""
    build_utils.log("Run linters and style checks:")
    # Use the repo-local hadolint configuration when one exists.
    config_file_arg = "--config=.hadolint.yml" if os.path.exists(".hadolint.yml") else ""
    build_utils.run(f"hadolint {config_file_arg} Dockerfile", exit_on_error=True)
def run_dev_mode(port: int = 8001, command_prefix: str = PIPENV_RUN) -> None:
    """Run mkdocs development server.

    Args:
        port (int, optional): Port to use for mkdocs development server. Defaults to 8001.
        command_prefix (str, optional): Prefix to use for all commands. Defaults to `pipenv run`.
    """
    build_utils.log(f"Run docs in development mode (http://localhost:{port}):")
    serve_command = f"{command_prefix} mkdocs serve --dev-addr 0.0.0.0:{port}"
    build_utils.run(serve_command, exit_on_error=True)
def deploy_gh_pages(command_prefix: str = PIPENV_RUN) -> None:
    """Deploy mkdocs documentation to Github pages.

    Args:
        command_prefix (str, optional): Prefix to use for all commands. Defaults to `pipenv run`.
    """
    build_utils.log("Deploy documentation to Github pages:")
    deploy_command = f"{command_prefix} mkdocs gh-deploy --clean"
    # gh-deploy pushes to the remote, so cap the call with a timeout.
    build_utils.run(deploy_command, exit_on_error=True, timeout=120)
def lint_markdown() -> None:
    """Run markdownlint on markdown documentation."""
    build_utils.log("Run linters and style checks:")
    # Use the repo-local markdownlint configuration when one exists.
    config_file_arg = (
        "--config='.markdown-lint.yml'" if os.path.exists(".markdown-lint.yml") else ""
    )
    build_utils.run(f"markdownlint {config_file_arg} ./docs", exit_on_error=True)
def update_version(module_path: str, version: str) -> None:
    """Update version in specified module.

    Args:
        module_path (str): Python module with a `__version__` attribute.
        version (str): New version number to write into `__version__` attribute.
    """
    if not version:
        build_utils.log("Cannot update version, no version provided")
        return

    # Rewrite the file in place, replacing only the __version__ assignment.
    with open(module_path, "r+") as module_file:
        content = module_file.read()
        updated = re.sub(r"__version__ = \".+\"", f'__version__ = "{version}"', content)
        module_file.seek(0)
        module_file.write(updated)
        module_file.truncate()
def generate_openapi_client(
    openapi_spec_file: str,
    target_language: str,
    work_dir: str = DEFAULT_TEMP_DIR,
    client_generator: OpenApiGenerator = OpenApiGenerator.OPENAPI_CODEGEN,
    additional_properties: str = "",
    additional_flags: str = "",
) -> Union[str, None]:
    """Generate an open api client.

    The passed OpenAPI specification file is used to generate a client via the
    given openapi generator for the given programming language, with optional
    additional properties (see the respective openapi cli for details). The
    client is generated inside the `work_dir` directory.

    Args:
        openapi_spec_file (str): The OpenAPI specification for which the client will be generated.
        target_language (str): The client's programming language (e.g. `"javascript"`).
        work_dir (str, optional): The directory in which the generator cli will be looked for and also the generated client will be placed. If it does not exist, it will be created.
        client_generator (OpenApiGenerator, optional): The OpenApiGenerator which will be used to generate the client. It will check whether the cli can be found within the `work_dir` directory and if not it will try to download it according to the `_check_and_download_generator_cli` function.
        additional_properties (str, optional): Additional properties passed to the OpenAPI generator client (e.g. `"useES6=true"`).
        additional_flags (str, optional): Additional flags passed verbatim to the generator cli.

    Returns:
        Union[str, None]: Returns the output path if the client generation was successful and None otherwise.
    """
    pathlib.Path(work_dir).mkdir(exist_ok=True)

    codegen_cli_path = f"{work_dir}/{client_generator.cli_name}"
    # Download the generator cli into work_dir if it is not already present.
    if not _check_and_download_generator_cli(
        codegen_cli_path, client_generator=client_generator
    ):
        return None

    if not pathlib.Path(openapi_spec_file).is_file():
        build_utils.log(f"The OpenAPI spec file {openapi_spec_file} does not exist")
        return None

    build_utils.run(
        client_generator.get_generate_command(
            openapi_spec_file=openapi_spec_file,
            target_language=target_language,
            work_dir=work_dir,
            additional_properties=additional_properties,
            additional_flags=additional_flags,
        )
    )
    return client_generator.get_output_path(work_dir=work_dir)
def lint_markdown(markdownlint: bool = True, exit_on_error: bool = True) -> None:
    """Run markdownlint on markdown documentation.

    Args:
        markdownlint (bool, optional): Activate markdown linting via `markdownlint`. Defaults to `True`.
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    build_utils.log("Run linters and style checks:")

    # Only lint when enabled and the markdownlint binary is available.
    if not markdownlint or not build_utils.command_exists(
        "markdownlint", exit_on_error=exit_on_error
    ):
        return

    config_file_arg = (
        "--config='.markdown-lint.yml'" if os.path.exists(".markdown-lint.yml") else ""
    )
    build_utils.run(
        f"markdownlint {config_file_arg} ./docs", exit_on_error=exit_on_error
    )
def check_image(
    image: str, trivy: bool = True, exit_on_error: bool = True
) -> subprocess.CompletedProcess:
    """Run vulnerability checks on Dockerimage.

    Args:
        image (str): The name of the docker image to check.
        trivy (bool, optional): Activate trivy vulnerability check. Defaults to `True`.
        exit_on_error (bool, optional): If `True`, exit process as soon as an error occurs.
    """
    build_utils.log("Run vulnerability checks on docker image:")

    if not trivy or not build_utils.command_exists("trivy", exit_on_error=exit_on_error):
        # Nothing was checked: return a synthetic process with sentinel code -1.
        return subprocess.CompletedProcess(args="", returncode=-1, stdout="", stderr="")

    return build_utils.run(
        f"trivy image --timeout=20m0s --exit-code 1 --severity HIGH,CRITICAL {image}",
        exit_on_error=exit_on_error,
    )
def run_dev_mode(port: int = 8001, exit_on_error: bool = True) -> None:
    """Run mkdocs development server.

    Args:
        port (int, optional): Port to use for mkdocs development server. Defaults to 8001.
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    build_utils.log(f"Run docs in development mode (http://localhost:{port}):")

    if is_pipenv_environment():
        command_prefix = _PIPENV_RUN
    else:
        # Outside pipenv, mkdocs must be installed globally.
        build_utils.command_exists("mkdocs", exit_on_error=exit_on_error)
        command_prefix = ""

    build_utils.run(
        f"{command_prefix} mkdocs serve --dev-addr 0.0.0.0:{port}",
        exit_on_error=exit_on_error,
    )
def deploy_gh_pages(exit_on_error: bool = True) -> None:
    """Deploy mkdocs documentation to Github pages.

    Args:
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    build_utils.log("Deploy documentation to Github pages:")

    if is_pipenv_environment():
        command_prefix = _PIPENV_RUN
    else:
        # Outside pipenv, mkdocs must be installed globally.
        build_utils.command_exists("mkdocs", exit_on_error=exit_on_error)
        command_prefix = ""

    # gh-deploy pushes to the remote, so cap the call with a timeout.
    build_utils.run(
        f"{command_prefix} mkdocs gh-deploy --clean",
        exit_on_error=exit_on_error,
        timeout=120,
    )
def lint_dockerfile(
    hadolint: bool = True, dockerfile: str = "Dockerfile", exit_on_error: bool = True
) -> None:
    """Run hadolint on the Dockerfile.

    Args:
        hadolint (bool, optional): Activate hadolint dockerfile linter. Defaults to `True`.
        dockerfile (str, optional): Specify a specific Dockerfile. If not specified, the default `Dockerfile` will be used.
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    build_utils.log("Run linters and style checks:")

    # Only lint when enabled and the hadolint binary is available.
    if not hadolint or not build_utils.command_exists(
        "hadolint", exit_on_error=exit_on_error
    ):
        return

    config_file_arg = "--config=.hadolint.yml" if os.path.exists(".hadolint.yml") else ""
    build_utils.run(
        f"hadolint {config_file_arg} {dockerfile}", exit_on_error=exit_on_error
    )
def install_build_env(exit_on_error: bool = True) -> None:
    """Installs a new virtual environment via pipenv.

    Args:
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    # Check if pipenv exists
    build_utils.command_exists("pipenv", exit_on_error=exit_on_error)

    if not os.path.exists("Pipfile"):
        # Fix: corrected typo in the log message ("environemnt" -> "environment").
        build_utils.log("No Pipfile discovered, cannot install pipenv environment")
        if exit_on_error:
            build_utils.exit_process(1)
        return

    # Remove any pre-existing environment first (best effort, errors ignored).
    build_utils.run("pipenv --rm", exit_on_error=False)
    build_utils.run(
        f"pipenv install --dev --python={sys.executable} --skip-lock --site-packages",
        exit_on_error=exit_on_error,
    )
def test_with_py_version(python_version: str, exit_on_error: bool = True) -> None:
    """Run pytest in an environment with the specified python version.

    Args:
        python_version (str): Python version to use inside the virtual environment.
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.
    """
    if not os.path.exists("Pipfile"):
        build_utils.log(
            "No Pipfile discovered. Testing with specific python version only works with pipenv."
        )
        return

    # Both pyenv (provides the interpreter) and pipenv (runs the tests)
    # must be available on the system.
    build_utils.command_exists("pyenv", exit_on_error=exit_on_error)
    build_utils.command_exists("pipenv", exit_on_error=exit_on_error)

    # Install the requested interpreter and select it for this directory.
    build_utils.run(
        f"pyenv install --skip-existing {python_version} && pyenv local {python_version}",
        exit_on_error=exit_on_error,
    )

    # Create a pipenv environment pinned to the requested python version.
    build_utils.run(
        f"pipenv install --dev --python={python_version} --skip-lock",
        exit_on_error=exit_on_error,
    )

    # Run pytest inside the pipenv environment.
    build_utils.run("pipenv run pytest", exit_on_error=exit_on_error)

    # Clean up: drop the environment and the pyenv interpreter again
    # (best effort, errors ignored).
    build_utils.run("pipenv --rm", exit_on_error=False)
    build_utils.run(
        f"pyenv local --unset && pyenv uninstall -f {python_version}",
        exit_on_error=False,
    )
def main(args: dict) -> None:
    """Execute all component builds."""
    # set script path as working dir
    os.chdir(HERE)

    # Build react webapp
    build_utils.build(REACT_WEBAPP_COMPONENT, args)

    # Build python lib
    build_utils.build(PYTHON_LIB_COMPONENT, args)

    if args.get(build_utils.FLAG_MAKE):
        # Duplicate api docs into the mkdocs documentation
        build_utils.duplicate_folder(
            f"./{PYTHON_LIB_COMPONENT}/docs/", f"./{DOCS_COMPONENT}/docs/api-docs/"
        )

        # Copy python lib distribution to docker container
        try:
            dest_path = os.path.join(
                "./", DOCKER_COMPONENT, "resources", PYTHON_LIB_COMPONENT + ".tar.gz"
            )
            os.makedirs(os.path.dirname(dest_path), exist_ok=True)
            source_dist = glob.glob(
                f"./{PYTHON_LIB_COMPONENT}/dist/{PYTHON_LIB_COMPONENT}-*.tar.gz"
            )[0]
            shutil.copy(source_dist, os.path.join(dest_path))
        except Exception as ex:
            build_utils.log(
                f"Failed to copy {PYTHON_LIB_COMPONENT} distribution to {DOCKER_COMPONENT} component: "
                + str(ex)
            )
            build_utils.exit_process(1)

    # Build docker container
    build_utils.build(DOCKER_COMPONENT, args)

    # Build mkdocs documentation
    build_utils.build(DOCS_COMPONENT, args)
def publish_pypi_distribution(
    pypi_token: str, pypi_user: str = "__token__", pypi_repository: Optional[str] = None
) -> None:
    """Publish distribution to pypi.

    Args:
        pypi_token (str): Token of PyPi repository.
        pypi_user (str, optional): User of PyPi repository. Defaults to "__token__".
        pypi_repository (Optional[str], optional): PyPi repository. If `None` provided, use the production instance.
    """
    if not pypi_token:
        build_utils.log("PyPI token is required for release (--pypi-token=<TOKEN>)")
        build_utils.exit_process(1)

    repository_args = ""
    if pypi_repository:
        repository_args = f'--repository-url "{pypi_repository}"'

    # Publish on pypi
    build_utils.run(
        f'twine upload --non-interactive -u "{pypi_user}" -p "{pypi_token}" {repository_args} dist/*',
        exit_on_error=True,
    )
def release_docker_image(
    name: str, version: str, docker_image_prefix: str, exit_on_error: bool = True
) -> subprocess.CompletedProcess:
    """Push a Docker image to a repository.

    Args:
        name (str): The name of the image. Must not be prefixed!
        version (str): The tag used for the image.
        docker_image_prefix (str): The prefix added to the name to indicate an organization on DockerHub or a completely different repository.
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `True`.

    Returns:
        subprocess.CompletedProcess: Returns the CompletedProcess object of the `docker push ...` command.
    """
    # Check if docker exists on the system
    build_utils.command_exists("docker", exit_on_error=exit_on_error)

    if not docker_image_prefix:
        build_utils.log(
            "The flag --docker-image-prefix cannot be blank when pushing a Docker image."
        )
        build_utils.exit_process(build_utils.EXIT_CODE_GENERAL)

    local_tag = get_image_name(name=name, tag=version)
    remote_tag = get_image_name(name=name, tag=version, image_prefix=docker_image_prefix)

    build_utils.run(
        "docker tag " + local_tag + " " + remote_tag, exit_on_error=exit_on_error
    )
    push_result = build_utils.run(
        "docker push " + remote_tag, exit_on_error=exit_on_error
    )
    if push_result.returncode > 0:
        build_utils.log(f"Failed to release Docker image {name}:{version}")

    # Only push version with latest tag if no suffix is added (pre-release)
    if "-" not in version:
        remote_latest = get_image_name(
            name=name, tag="latest", image_prefix=docker_image_prefix
        )
        build_utils.log(
            "Release Docker image with latest tag as well: " + remote_latest
        )
        build_utils.run(
            "docker tag " + local_tag + " " + remote_latest,
            exit_on_error=exit_on_error,
        )
        build_utils.run("docker push " + remote_latest, exit_on_error=exit_on_error)

    return push_result
def release_docker_image(
    name: str, version: str, docker_image_prefix: str = "", exit_on_error: bool = False
) -> subprocess.CompletedProcess:
    """Push a Docker image to a repository.

    Args:
        name (str): The name of the image. Must not be prefixed!
        version (str): The tag used for the image.
        docker_image_prefix (str, optional): The prefix added to the name to indicate an organization on DockerHub or a completely different repository. Defaults to "".
        exit_on_error (bool, optional): Exit process if an error occurs. Defaults to `False`.

    Returns:
        subprocess.CompletedProcess: Returns the CompletedProcess object of the `docker push ...` command.
    """
    if not docker_image_prefix:
        build_utils.log(
            "The flag --docker-image-prefix cannot be blank when pushing a Docker image."
        )
        build_utils.exit_process(build_utils.EXIT_CODE_GENERAL)

    # Normalize the prefix so exactly one "/" separates prefix and image name.
    docker_image_prefix = docker_image_prefix.rstrip("/") + "/"

    versioned_image = name + ":" + version
    remote_versioned_image = docker_image_prefix + versioned_image
    build_utils.run(
        "docker tag " + versioned_image + " " + remote_versioned_image,
        exit_on_error=exit_on_error,
    )
    completed_process = build_utils.run(
        "docker push " + remote_versioned_image, exit_on_error=exit_on_error
    )
    if completed_process.returncode > 0:
        build_utils.log(f"Failed to release Docker image {name}:{version}")

    # Only push the latest tag for non-dev releases.
    if "-dev" not in version:
        build_utils.log("Release Docker image with latest tag as well.")
        latest_image = name + ":latest"
        remote_latest_image = docker_image_prefix + latest_image
        # Fix: tag "latest" from the versioned image we just pushed, instead of
        # from the local `<name>:latest` tag, which may point at a stale build.
        # This also matches the behavior of the sibling release helper.
        build_utils.run(
            "docker tag " + versioned_image + " " + remote_latest_image,
            exit_on_error=exit_on_error,
        )
        build_utils.run(
            "docker push " + remote_latest_image, exit_on_error=exit_on_error
        )
    return completed_process
def main(args: Dict[str, Union[bool, str]]):
    """Build all ML Lab components in dependency order."""
    # Move libraries into the workspace resources (replace any previous copy).
    build_utils.run(
        "rm -r -f services/lab-workspace/docker-res/duplicated-resources/"
    )
    build_utils.run("mkdir services/lab-workspace/docker-res/duplicated-resources/")
    build_utils.run(
        "cp -R libraries/* services/lab-workspace/docker-res/duplicated-resources/"
    )

    # build base images
    # For just testing, the lab-workspace does not have to be built as it is not covered by tests yet
    # TODO: in GitHub actions add workspace to --skip-path to ignore it
    build_utils.build("services/lab-workspace", args)
    build_utils.build("services/simple-workspace-service", args)
    build_utils.build("services/lab-model-service", args)

    # build demo services/jobs
    build_utils.build("services/simple-demo-job", args)
    build_utils.build("services/simple-demo-service", args)
    build_utils.build("services/simple-fastapi-service", args)

    # build webapp and move build into backend service
    # TODO: MOVE SWAGGER API TO WEB APP
    # build main application first time to generate swagger config
    backend_args = {**args, build_utils.FLAG_TEST: False}
    build_utils.build("backend", backend_args)

    if args[build_utils.FLAG_MAKE]:
        if not generate_and_copy_js_client():
            build_utils.log("Error in generating the JavaScript client library")
            build_utils.exit_process(1)
        # format the just generated JavaScript client to make it conform with the project and prevent showing format-related changes in Git
        build_utils.run("cd webapp; npm run prettier ./src/services/client/; cd ..")

    build_utils.build("webapp", args)

    if args[build_utils.FLAG_MAKE]:
        # Move webapp build into resources
        build_utils.run("rm -r -f backend/lab-service/src/main/resources/app/")
        build_utils.run("mkdir backend/lab-service/src/main/resources/app/")
        build_utils.run(
            "cp -R webapp/build/* backend/lab-service/src/main/resources/app/",
            exit_on_error=True,
        )

    # build documentation
    build_utils.build("docs", args)

    if args[build_utils.FLAG_MAKE]:
        # Move documentation build into resources
        build_utils.run("rm -r -f backend/lab-service/src/main/resources/docs/")
        build_utils.run("mkdir backend/lab-service/src/main/resources/docs/")
        build_utils.run(
            "cp -R docs/site/* backend/lab-service/src/main/resources/docs/"
        )

    # build main application second time to bundle webapp
    build_utils.build("backend", args)
from universal_build import build_utils

COMPONENT_NAME = "unified-model-lib"

args = build_utils.parse_arguments()


def _run_and_report(command: str, error_message: str) -> None:
    # Run a build step; on failure log the message but keep going.
    if build_utils.run(command).returncode > 0:
        build_utils.log(error_message)


if args[build_utils.FLAG_MAKE]:
    _run_and_report(
        "python setup.py develop",
        f"Error in building component {COMPONENT_NAME}",
    )
    _run_and_report(
        "python generate_docs.py",
        f"Error in generating docs for component {COMPONENT_NAME}",
    )

build_utils.build("docker", args)
""" Build ML Lab React Webapp """ from universal_build import build_utils COMPONENT_NAME = "ml-lab-webapp" args = build_utils.parse_arguments() build_utils.log("Install essentials") build_utils.run("npm install") if args[build_utils.FLAG_CHECK]: build_utils.log("Run linters:") build_utils.run("npm run lint:js", exit_on_error=False) build_utils.run("npm run lint:css", exit_on_error=False) build_utils.log("No linter problems") if args[build_utils.FLAG_MAKE]: completed_process = build_utils.run( "npm --allow-same-version --no-git-tag-version version " + args[build_utils.FLAG_VERSION], exit_on_error=True, ) completed_process = build_utils.run("npm run setup", exit_on_error=True) if args[build_utils.FLAG_TEST]: build_utils.log("Test the webapp:") build_utils.run("npm run test", exit_on_error=True)
from universal_build import build_utils

args = build_utils.parse_arguments()

if args[build_utils.FLAG_VERSION]:
    # The version is also needed when just tests are executed, so set it independent of the passed flags
    set_version_result = build_utils.run(
        "mvn versions:set -DnewVersion=" + args[build_utils.FLAG_VERSION]
    )
    if set_version_result.returncode > 0:
        build_utils.log("Failed to apply version " + args[build_utils.FLAG_VERSION])
        build_utils.run("mvn versions:revert")
        build_utils.exit_process(1)
    build_utils.run("mvn versions:commit")

if args[build_utils.FLAG_MAKE]:
    # Check if all project can be build, otherwise exit build script
    if build_utils.run("mvn clean package").returncode > 0:
        build_utils.log("Failed to build project")
        build_utils.exit_process(1)
    build_utils.run("mvn -N clean install")

# libraries
build_utils.build("environment-lib", args)
build_utils.build("service-lib", args)

# services
build_utils.build("lab-service", args)
from universal_build import build_utils

args = build_utils.parse_arguments()

build_utils.log("Install essentials")
build_utils.run("yarn install", exit_on_error=False)

if args.get(build_utils.FLAG_CHECK):
    build_utils.log("Run prettier:")
    build_utils.run("yarn run prettier src/", exit_on_error=True)
    build_utils.log("Run linters:")
    for lint_command in ("yarn run lint:js", "yarn run lint:css"):
        build_utils.run(lint_command, exit_on_error=True)

if args.get(build_utils.FLAG_MAKE):
    build_utils.log("Build the webapp:")
    build_utils.run("yarn build", exit_on_error=True)

if args.get(build_utils.FLAG_TEST):
    build_utils.log("Test the webapp:")
    build_utils.run("yarn test", exit_on_error=True)
def main(args: dict) -> None:
    """Build, check, test, and release the library and its sub-components."""
    # set current path as working dir
    os.chdir(HERE)

    version = args.get(build_utils.FLAG_VERSION)

    if version:
        # Update version in _about.py
        build_python.update_version(
            os.path.join(HERE, f"src/{MAIN_PACKAGE}/_about.py"),
            build_utils._Version.get_pip_compatible_string(str(version)),
        )

    if args.get(build_utils.FLAG_MAKE):
        # Install pipenv dev requirements
        build_python.install_build_env()
        # Create API documentation via lazydocs
        build_python.generate_api_docs(
            github_url=GITHUB_URL, main_package=MAIN_PACKAGE
        )
        # Build distribution via setuptools
        build_python.build_distribution()

        try:
            dist_name = MAIN_PACKAGE.replace("_", "-")
            dist_archive = glob.glob(f"./dist/{dist_name}-*.tar.gz")[0]
            shutil.copy(
                dist_archive,
                os.path.join(
                    HERE, "build-environment", "resources", dist_name + ".tar.gz"
                ),
            )
        except Exception:
            build_utils.log("Failed to copy distribution to build container.")
            build_utils.exit_process(1)

    if args.get(build_utils.FLAG_CHECK):
        build_python.code_checks(exit_on_error=True, safety=False)

    if args.get(build_utils.FLAG_TEST):
        # Remove coverage files
        build_utils.run("pipenv run coverage erase", exit_on_error=False)

        test_markers = args.get(build_utils.FLAG_TEST_MARKER)
        if build_utils.TEST_MARKER_SLOW in test_markers:  # type: ignore
            # Slow test marker set: test in multiple python environments.
            # Python 3.6
            build_python.test_with_py_version(python_version="3.6.12")
            # Python 3.7
            build_python.test_with_py_version(python_version="3.7.9")
            # Activated Python Environment (3.8)
            build_python.install_build_env()
            # Run pytest in pipenv environment
            build_utils.run("pipenv run pytest", exit_on_error=True)
            # Update pipfile.lock when all tests are successful (lock environment)
            build_utils.run("pipenv lock", exit_on_error=True)
        else:
            # Run fast tests only
            build_utils.run('pipenv run pytest -m "not slow"', exit_on_error=True)

    if args.get(build_utils.FLAG_RELEASE):
        # Bump all versions referenced in pipeline/readme files
        previous_version = build_utils.get_latest_version()
        if previous_version:
            build_utils.replace_in_files(
                previous_version,
                version,
                file_paths=[
                    "./actions/build-environment/Dockerfile",
                    "./README.md",
                    "./workflows/build-pipeline.yml",
                    "./workflows/release-pipeline.yml",
                ],
                regex=False,
                exit_on_error=True,
            )

        # Publish distribution on pypi
        build_python.publish_pypi_distribution(
            pypi_token=args.get(build_python.FLAG_PYPI_TOKEN),
            pypi_repository=args.get(build_python.FLAG_PYPI_REPOSITORY),
        )

        # TODO: Publish coverage report: if private repo set CODECOV_TOKEN="token" or use -t
        # build_utils.run("curl -s https://codecov.io/bash | bash -s", exit_on_error=False)

    # Build the build-environment component
    build_utils.build("build-environment", args)

    # Build all examples components
    build_utils.build("examples", args)
def code_checks(
    black: bool = True,
    isort: bool = True,
    pydocstyle: bool = True,
    mypy: bool = True,
    flake8: bool = True,
    safety: bool = False,
    exit_on_error: bool = True,
) -> None:
    """Run linting and style checks.

    Args:
        black (bool, optional): Activate black formatting check. Defaults to True.
        isort (bool, optional): Activate isort import sorting check. Defaults to True.
        pydocstyle (bool, optional): Activate pydocstyle docstring check. Defaults to True.
        mypy (bool, optional): Activate mypy typing check. Defaults to True.
        flake8 (bool, optional): Activate flake8 linting check. Defaults to True.
        safety (bool, optional): Activate safety check via pipenv. Defaults to False.
        exit_on_error (bool, optional): If `True`, exit process as soon as error occures. Defaults to True.
    """
    command_prefix = ""
    if is_pipenv_environment():
        command_prefix = "pipenv run"

    successful: bool = True

    def _ensure_tool(tool: str) -> None:
        # Fix: the original had a copy-pasted "# Check twine command" comment
        # before every tool check. Outside a pipenv environment the named tool
        # itself must be available globally.
        if not command_prefix:
            build_utils.command_exists(tool, exit_on_error=exit_on_error)

    def _run_check(command: str) -> None:
        # Run a single check command; a non-zero exit marks the whole run as
        # failed but does not abort the remaining checks.
        nonlocal successful
        if (
            build_utils.run(
                f"{command_prefix} {command}", exit_on_error=False
            ).returncode
            > 0
        ):
            successful = False

    if black:
        _ensure_tool("black")
        _run_check("black --check src")
        _run_check("black --check tests")

    if isort:
        _ensure_tool("isort")
        _run_check("isort --profile black --check-only src")
        _run_check("isort --profile black --check-only tests")

    if pydocstyle:
        _ensure_tool("pydocstyle")
        _run_check("pydocstyle src")

    if mypy:
        _ensure_tool("mypy")
        _run_check("mypy src")

    if flake8:
        _ensure_tool("flake8")
        _run_check("flake8 --show-source --statistics src")
        _run_check("flake8 --show-source --statistics tests")

    if safety:
        # Check pipenv command (the safety check runs through pipenv itself)
        build_utils.command_exists("pipenv", exit_on_error=exit_on_error)
        # Check using pipenv (runs safety check)
        if build_utils.run("pipenv check", exit_on_error=False).returncode > 0:
            successful = False

    if not successful:
        build_utils.log(
            "Code checks (style, linting, safety, ...) failed. Please check the logs and fix the issues."
        )
        build_utils.exit_process(1)