def main(args: dict) -> None: """Execute all component builds.""" # set script path as working dir os.chdir(HERE) # Build react webapp build_utils.build(REACT_WEBAPP_COMPONENT, args) # Build python lib build_utils.build(PYTHON_LIB_COMPONENT, args) if args.get(build_utils.FLAG_MAKE): # Duplicate api docs into the mkdocs documentation build_utils.duplicate_folder(f"./{PYTHON_LIB_COMPONENT}/docs/", f"./{DOCS_COMPONENT}/docs/api-docs/") # Copy python lib distribution to docker container try: dest_path = os.path.join("./", DOCKER_COMPONENT, "resources", PYTHON_LIB_COMPONENT + ".tar.gz") os.makedirs(os.path.dirname(dest_path), exist_ok=True) shutil.copy( glob.glob( f"./{PYTHON_LIB_COMPONENT}/dist/{PYTHON_LIB_COMPONENT}-*.tar.gz" )[0], os.path.join(dest_path), ) except Exception as ex: build_utils.log( f"Failed to copy {PYTHON_LIB_COMPONENT} distribution to {DOCKER_COMPONENT} component: " + str(ex)) build_utils.exit_process(1) # Build docker container build_utils.build(DOCKER_COMPONENT, args) # Build mkdocs documentation build_utils.build(DOCS_COMPONENT, args)
# NOTE(review): this chunk appears truncated — the `try` at the bottom has no
# matching `except` in view; the error handling is outside this chunk.
args = build_utils.parse_arguments(argument_parser=parser)
VERSION = str(args.get(build_utils.FLAG_VERSION))

docker_image_prefix = args.get(build_docker.FLAG_DOCKER_IMAGE_PREFIX)
if not docker_image_prefix:
    docker_image_prefix = REMOTE_IMAGE_PREFIX

# Default to "all" when no flavor was requested
if not args.get(FLAG_FLAVOR):
    args[FLAG_FLAVOR] = "all"

flavor = str(args[FLAG_FLAVOR]).lower().strip()

if flavor == "all":
    # "all" currently maps to the single supported flavor
    args[FLAG_FLAVOR] = "gpu-11.3"
    build_utils.build(".", args)
    build_utils.exit_process(0)

# unknown flavor -> try to build from subdirectory
if flavor not in ["gpu-11.3"]:
    # assume that flavor has its own directory with build.py
    build_utils.build(flavor + "-flavor", args)
    build_utils.exit_process(0)

docker_image_name = IMAGE_NAME + "-" + flavor

# docker build
git_rev = "unknown"
try:
    # Short git commit hash — presumably used to label the docker image;
    # TODO confirm against the code following this chunk.
    git_rev = (
        subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
        .decode("ascii")
        .strip()
    )
# NOTE(review): this chunk appears truncated — statements likely follow the
# "all" branch below (e.g. an exit_process call); do not assume fall-through.
args = build_utils.parse_arguments(argument_parser=parser)
VERSION = str(args.get(build_utils.FLAG_VERSION))

docker_image_prefix = args.get(build_docker.FLAG_DOCKER_IMAGE_PREFIX)
if not docker_image_prefix:
    docker_image_prefix = REMOTE_IMAGE_PREFIX

# Default to building every flavor when none was requested
if not args.get(FLAG_FLAVOR):
    args[FLAG_FLAVOR] = "all"

flavor = str(args[FLAG_FLAVOR]).lower().strip()

if flavor == "all":
    # Build each flavor in sequence. The "minimal", "light", and "full"
    # flavors build from the current directory; "r", "spark", and "gpu"
    # build from their own <flavor>-flavor subdirectories.
    args[FLAG_FLAVOR] = "minimal"
    build_utils.build(".", args)

    args[FLAG_FLAVOR] = "light"
    build_utils.build(".", args)

    args[FLAG_FLAVOR] = "full"
    build_utils.build(".", args)

    args[FLAG_FLAVOR] = "r"
    build_utils.build("r-flavor", args)

    args[FLAG_FLAVOR] = "spark"
    build_utils.build("spark-flavor", args)

    args[FLAG_FLAVOR] = "gpu"
    build_utils.build("gpu-flavor", args)
"""Build script for the unified-model-lib component."""
from universal_build import build_utils

COMPONENT_NAME = "unified-model-lib"

args = build_utils.parse_arguments()

if args[build_utils.FLAG_MAKE]:
    # Install the library in development mode so the docs generator can import it
    completed_process = build_utils.run("python setup.py develop")
    if completed_process.returncode > 0:
        build_utils.log(f"Error in building component {COMPONENT_NAME}")
        # fix: fail the pipeline instead of silently continuing to the
        # docker build with a broken library (matches the sibling build
        # scripts, which exit on a failed step)
        build_utils.exit_process(1)

    completed_process = build_utils.run("python generate_docs.py")
    if completed_process.returncode > 0:
        build_utils.log(
            f"Error in generating docs for component {COMPONENT_NAME}")
        build_utils.exit_process(1)

# Build the docker subcomponent
build_utils.build("docker", args)
def main(args: dict) -> None:
    """Build, check, test, and release the python library.

    Version bump, API-doc generation, sdist build, code checks, pytest
    runs (full matrix on the slow marker, fast subset otherwise), pypi
    release, and finally the build-environment and examples components.

    Args:
        args: Parsed build flags (flag name -> value), as returned by
            ``build_utils.parse_arguments``.
    """
    # set current path as working dir
    os.chdir(HERE)

    version = args.get(build_utils.FLAG_VERSION)
    if version:
        # Update version in _about.py
        build_python.update_version(
            os.path.join(HERE, f"src/{MAIN_PACKAGE}/_about.py"),
            build_utils._Version.get_pip_compatible_string(str(version)),
        )

    if args.get(build_utils.FLAG_MAKE):
        # Install pipenv dev requirements
        build_python.install_build_env()
        # Create API documentation via lazydocs
        build_python.generate_api_docs(
            github_url=GITHUB_URL, main_package=MAIN_PACKAGE
        )
        # Build distribution via setuptools
        build_python.build_distribution()

        # Copy the sdist into the build container's resources
        try:
            dist_name = MAIN_PACKAGE.replace("_", "-")
            dist_file = glob.glob(f"./dist/{dist_name}-*.tar.gz")[0]
            shutil.copy(
                dist_file,
                os.path.join(
                    HERE, "build-environment", "resources", dist_name + ".tar.gz"
                ),
            )
        except Exception:
            build_utils.log("Failed to copy distribution to build container.")
            build_utils.exit_process(1)

    if args.get(build_utils.FLAG_CHECK):
        build_python.code_checks(exit_on_error=True, safety=False)

    if args.get(build_utils.FLAG_TEST):
        # Remove coverage files
        build_utils.run("pipenv run coverage erase", exit_on_error=False)

        test_markers = args.get(build_utils.FLAG_TEST_MARKER)
        # fix: guard against test_markers being None (the previous
        # `TEST_MARKER_SLOW in test_markers  # type: ignore` raised TypeError
        # when no marker was passed; this mirrors the sibling build script)
        if (
            isinstance(test_markers, list)
            and build_utils.TEST_MARKER_SLOW in test_markers
        ):
            # Run if slow test marker is set: test in multiple environments
            # Python 3.6
            build_python.test_with_py_version(python_version="3.6.12")
            # Python 3.7
            build_python.test_with_py_version(python_version="3.7.9")

            # Activated Python Environment (3.8)
            build_python.install_build_env()
            # Run pytest in pipenv environment
            build_utils.run("pipenv run pytest", exit_on_error=True)

            # Update pipfile.lock when all tests are successful (lock environment)
            build_utils.run("pipenv lock", exit_on_error=True)
        else:
            # Run fast tests
            build_utils.run('pipenv run pytest -m "not slow"', exit_on_error=True)

    if args.get(build_utils.FLAG_RELEASE):
        # Bump all versions in some files
        previous_version = build_utils.get_latest_version()
        if previous_version:
            build_utils.replace_in_files(
                previous_version,
                version,
                file_paths=[
                    "./actions/build-environment/Dockerfile",
                    "./README.md",
                    "./workflows/build-pipeline.yml",
                    "./workflows/release-pipeline.yml",
                ],
                regex=False,
                exit_on_error=True,
            )

        # Publish distribution on pypi
        build_python.publish_pypi_distribution(
            pypi_token=args.get(build_python.FLAG_PYPI_TOKEN),
            pypi_repository=args.get(build_python.FLAG_PYPI_REPOSITORY),
        )

        # TODO: Publish coverage report: if private repo set CODECOV_TOKEN="token" or use -t
        # build_utils.run("curl -s https://codecov.io/bash | bash -s", exit_on_error=False)

    # Build the build-environment component
    build_utils.build("build-environment", args)

    # Build all examples components
    build_utils.build("examples", args)
"""Build script for the maven-based backend project and its subcomponents."""
from universal_build import build_utils

args = build_utils.parse_arguments()

requested_version = args[build_utils.FLAG_VERSION]
if requested_version:
    # The version is needed even for test-only runs, so apply it regardless
    # of which other flags were passed.
    versioning_result = build_utils.run(
        "mvn versions:set -DnewVersion=" + requested_version
    )
    if versioning_result.returncode > 0:
        build_utils.log("Failed to apply version " + requested_version)
        # Roll back the partially applied version change before aborting.
        build_utils.run("mvn versions:revert")
        build_utils.exit_process(1)
    build_utils.run("mvn versions:commit")

if args[build_utils.FLAG_MAKE]:
    # Verify that the whole project builds; abort the build script otherwise.
    package_result = build_utils.run("mvn clean package")
    if package_result.returncode > 0:
        build_utils.log("Failed to build project")
        build_utils.exit_process(1)
    # Install only the parent pom (-N = non-recursive).
    build_utils.run("mvn -N clean install")

# libraries
build_utils.build("environment-lib", args)
build_utils.build("service-lib", args)

# services
build_utils.build("lab-service", args)
def main(args: Dict[str, Union[bool, str]]) -> None:
    """Build every component of the project in dependency order.

    Copies shared libraries into the workspace image resources, builds the
    base service images, builds the backend twice (first to generate the
    swagger config, then again to bundle the webapp), and moves the webapp
    and docs builds into the backend's resource folders.

    Args:
        args: Parsed build flags (flag name -> value).
    """
    # Move libraries to the workspace image resources; recreate the target
    # folder so stale files from a previous build are removed first.
    build_utils.run(
        "rm -r -f services/lab-workspace/docker-res/duplicated-resources/")
    build_utils.run(
        "mkdir services/lab-workspace/docker-res/duplicated-resources/")
    build_utils.run(
        "cp -R libraries/* services/lab-workspace/docker-res/duplicated-resources/"
    )

    # build base images
    # For just testing, the lab-workspace does not have to be built as it is not covered by tests yet
    # TODO: in GitHub actions add workspace to --skip-path to ignore it
    build_utils.build("services/lab-workspace", args)
    build_utils.build("services/simple-workspace-service", args)
    build_utils.build("services/lab-model-service", args)

    # build demo services/jobs
    build_utils.build("services/simple-demo-job", args)
    build_utils.build("services/simple-demo-service", args)
    build_utils.build("services/simple-fastapi-service", args)

    # build webapp and move build into backend service
    # TODO: MOVE SWAGGER API TO WEB APP
    # build main application first time to generate swagger config;
    # tests are disabled here because this pass only exists to produce
    # the swagger config consumed by the client generation below.
    backend_args = {**args}
    backend_args[build_utils.FLAG_TEST] = False
    build_utils.build("backend", backend_args)

    if args[build_utils.FLAG_MAKE]:
        # Regenerate the JavaScript client from the swagger config produced
        # by the backend build above.
        is_successful = generate_and_copy_js_client()
        if not is_successful:
            build_utils.log(
                "Error in generating the JavaScript client library")
            build_utils.exit_process(1)
        # format the just generated JavaScript client to make it conform with the project and prevent showing format-related changes in Git
        build_utils.run(
            "cd webapp; npm run prettier ./src/services/client/; cd ..")

    build_utils.build("webapp", args)

    if args[build_utils.FLAG_MAKE]:
        # Move webapp build into resources (recreate the folder to drop
        # leftovers from an earlier build).
        build_utils.run("rm -r -f backend/lab-service/src/main/resources/app/")
        build_utils.run("mkdir backend/lab-service/src/main/resources/app/")
        build_utils.run(
            "cp -R webapp/build/* backend/lab-service/src/main/resources/app/",
            exit_on_error=True,
        )

    # build documentation
    build_utils.build("docs", args)

    if args[build_utils.FLAG_MAKE]:
        # Move documentation build into resources
        build_utils.run(
            "rm -r -f backend/lab-service/src/main/resources/docs/")
        build_utils.run("mkdir backend/lab-service/src/main/resources/docs/")
        build_utils.run(
            "cp -R docs/site/* backend/lab-service/src/main/resources/docs/")

    # build main application second time to bundle webapp
    build_utils.build("backend", args)
def main(args: dict) -> None:
    """Build, check, test, and release the library and its playground.

    Updates the package version, generates API docs, builds the sdist,
    copies the sdist and the example demos into the playground component's
    resources, runs code checks and tests, optionally publishes to pypi,
    and finally builds the playground component.

    Args:
        args: Parsed build flags (flag name -> value), as returned by
            ``build_utils.parse_arguments``.
    """
    # set current path as working dir
    os.chdir(HERE)

    version = args.get(build_utils.FLAG_VERSION)
    if version:
        # Update version in _about.py
        build_python.update_version(
            os.path.join(HERE, f"src/{MAIN_PACKAGE}/_about.py"),
            build_utils._Version.get_pip_compatible_string(str(version)),
            exit_on_error=True,
        )

    if args.get(build_utils.FLAG_MAKE):
        # Install pipenv dev requirements
        build_python.install_build_env(exit_on_error=True)
        # Create API documentation via lazydocs
        build_python.generate_api_docs(
            github_url=GITHUB_URL, main_package=MAIN_PACKAGE, exit_on_error=True
        )
        # Build distribution via setuptools
        build_python.build_distribution(exit_on_error=True)

        # Copy distribution to playground
        try:
            dist_name = MAIN_PACKAGE.replace("_", "-")
            dist_file = glob.glob(f"./dist/{dist_name}-*.tar.gz")[0]
            shutil.copy(
                dist_file,
                os.path.join(HERE, "playground", "resources", dist_name + ".tar.gz"),
            )
        except Exception as ex:
            build_utils.log(
                "Failed to copy distribution to playground container " + str(ex))
            build_utils.exit_process(1)

        # Copy all Demo artifacts (only py and txt files for now)
        files = []
        for ext in ("*.py", "*.txt"):
            files.extend(
                glob.glob(os.path.join(HERE, "examples", "**", ext), recursive=True))

        DEMO_PATH = os.path.join(HERE, "playground", "resources", "demos")
        if os.path.exists(DEMO_PATH):
            # Drop stale demos from a previous build
            shutil.rmtree(DEMO_PATH)

        for file in files:
            # Mirror the examples/ directory structure under DEMO_PATH
            new_path = os.path.join(
                DEMO_PATH,
                os.path.relpath(file, os.path.join(HERE, "examples")),
            )
            os.makedirs(os.path.dirname(new_path), exist_ok=True)
            shutil.copy(file, new_path)

    if args.get(build_utils.FLAG_CHECK):
        build_python.code_checks(exit_on_error=True, safety=False)

    if args.get(build_utils.FLAG_TEST):
        # Remove coverage files
        build_utils.run("pipenv run coverage erase", exit_on_error=False)

        test_markers = args.get(build_utils.FLAG_TEST_MARKER)
        if (isinstance(test_markers, list)
                and build_utils.TEST_MARKER_SLOW in test_markers):
            # Run if slow test marker is set: test in multiple environments
            # Python 3.6
            build_python.test_with_py_version(python_version="3.6.12",
                                              exit_on_error=True)
            # Python 3.7
            # build_python.test_with_py_version(
            #     python_version="3.7.9", exit_on_error=True
            # )

            # Activated Python Environment (3.8)
            # fix: pass exit_on_error=True for consistency with every other
            # call in this block, so a broken env install fails the build
            build_python.install_build_env(exit_on_error=True)
            # Run pytest in pipenv environment
            build_utils.run("pipenv run pytest", exit_on_error=True)

            # Update pipfile.lock when all tests are successful (lock environment)
            build_utils.run("pipenv lock", exit_on_error=True)
        else:
            # Run fast tests
            build_utils.run('pipenv run pytest -m "not slow"', exit_on_error=True)

    if args.get(build_utils.FLAG_RELEASE):
        # Publish distribution on pypi
        build_python.publish_pypi_distribution(
            pypi_token=args.get(build_python.FLAG_PYPI_TOKEN),
            pypi_repository=args.get(build_python.FLAG_PYPI_REPOSITORY),
        )

        # TODO: Publish coverage report: if private repo set CODECOV_TOKEN="token" or use -t
        # build_utils.run("curl -s https://codecov.io/bash | bash -s", exit_on_error=False)
        # (fix: removed a dead `pass` statement that followed the TODO)

    # Build the opyrator playground component
    build_utils.build("playground", args)
help="flavor (lab, lab-gpu) used for docker container", default="lab") COMPONENT_NAME = "ml-workspace" FLAG_FLAVOR = "flavor" args = build_utils.parse_arguments(argument_parser=parser) if not args[FLAG_FLAVOR]: args[FLAG_FLAVOR] = "lab" args[FLAG_FLAVOR] = str(args[FLAG_FLAVOR]).lower() if args[FLAG_FLAVOR] == "all": args[FLAG_FLAVOR] = "lab" build_utils.build(".", args) args[FLAG_FLAVOR] = "lab-gpu" build_utils.build(".", args) build_utils.exit_process(0) # unknown flavor -> try to build from subdirectory if args[FLAG_FLAVOR] not in ["lab", "lab-gpu"]: # assume that flavor has its own directory with build.py build_utils.build(args[FLAG_FLAVOR], args) build_utils.exit_process(0) # Add flavor suffix to image name # service_name += "-" + args.flavor COMPONENT_NAME = f"{COMPONENT_NAME}-{args[FLAG_FLAVOR]}" # Set base workspace image
"""Build script that forwards custom CLI arguments to the submodule build."""
import argparse
import os

from universal_build import build_utils

HERE = os.path.abspath(os.path.dirname(__file__))
os.chdir(HERE)

if __name__ == "__main__":
    # Custom CLI arguments on top of the standard build flags.
    cli_parser = argparse.ArgumentParser()
    cli_parser.add_argument(
        "--deployment-token", help="This is a custom token.", default=""
    )
    cli_parser.add_argument(
        "--my_token", help="This is a custom token.", default=""
    )
    cli_parser.add_argument(
        "--my_bool", help="This is a custom bool.", action="store_true"
    )

    parsed_args = build_utils.parse_arguments(argument_parser=cli_parser)
    build_utils.build("submodule", parsed_args)