def _build_save_container(platform, bucket) -> Optional[str]:
    """
    Build image for passed platform and upload the cache to the specified S3 bucket

    :param platform: Platform
    :param bucket: Target s3 bucket
    :return: Platform if failed, None otherwise
    """
    docker_tag = build_util.get_docker_tag(platform)

    # Preload cache
    # TODO: Allow to disable this in order to allow clean rebuilds
    load_docker_cache(bucket_name=bucket, docker_tag=docker_tag)

    # Start building
    # Lazy %-style args so the message is only formatted when the level is enabled.
    logging.debug('Building %s as %s', platform, docker_tag)
    try:
        image_id = build_util.build_docker(docker_binary='docker', platform=platform)
        logging.info('Built %s as %s', docker_tag, image_id)

        # Compile and upload tarfile
        _compile_upload_cache_file(bucket_name=bucket, docker_tag=docker_tag, image_id=image_id)
        return None
    except Exception:
        # Broad catch is deliberate: one failed platform must not abort the batch.
        # The platform name is returned so the caller can report the failure.
        logging.exception('Unexpected exception during build of %s', docker_tag)
        return platform
def _build_save_container(platform, registry, load_cache) -> Optional[str]:
    """
    Build image for passed platform and upload the cache to the specified docker registry

    :param platform: Platform
    :param registry: Docker registry name
    :param load_cache: Load cache before building
    :return: Platform if failed, None otherwise
    """
    docker_tag = build_util.get_docker_tag(platform=platform, registry=registry)

    # Preload cache
    if load_cache:
        load_docker_cache(registry=registry, docker_tag=docker_tag)

    # Start building
    logging.debug('Building %s as %s', platform, docker_tag)
    try:
        # Increase the number of retries for building the cache.
        image_id = build_util.build_docker(docker_binary='docker', platform=platform,
                                           registry=registry, num_retries=10)
        logging.info('Built %s as %s', docker_tag, image_id)

        # Push cache to registry
        _upload_image(registry=registry, docker_tag=docker_tag, image_id=image_id)
        return None
    except Exception:
        # Broad catch is deliberate: one failed platform must not abort the batch.
        # The platform name is returned so the caller can report the failure.
        logging.exception('Unexpected exception during build of %s', docker_tag)
        return platform
def _build_save_container(platform, registry, load_cache) -> Optional[str]:
    """
    Build the docker image for a single platform and push its cache to the registry.

    :param platform: Platform
    :param registry: Docker registry name
    :param load_cache: Load cache before building
    :return: Platform if failed, None otherwise
    """
    tag = build_util.get_docker_tag(platform=platform, registry=registry)

    # Optionally prime the local docker cache from the registry before building.
    if load_cache:
        load_docker_cache(registry=registry, docker_tag=tag)

    logging.debug('Building %s as %s', platform, tag)
    try:
        # Retries are raised for cache builds so transient failures do not
        # invalidate the whole run.
        built_image_id = build_util.build_docker(docker_binary='docker',
                                                 platform=platform,
                                                 registry=registry,
                                                 num_retries=10,
                                                 no_cache=False)
        logging.info('Built %s as %s', tag, built_image_id)

        # Publish the freshly built layers so later builds can reuse them.
        _upload_image(registry=registry, docker_tag=tag, image_id=built_image_id)
    except Exception:
        logging.exception('Unexpected exception during build of %s', tag)
        return platform
    return None
def _build_save_container(platform, bucket) -> Optional[str]:
    """
    Build image for passed platform and upload the cache to the specified S3 bucket

    :param platform: Platform
    :param bucket: Target s3 bucket
    :return: Platform if failed, None otherwise
    """
    docker_tag = build_util.get_docker_tag(platform)

    # Preload cache
    # TODO: Allow to disable this in order to allow clean rebuilds
    load_docker_cache(bucket_name=bucket, docker_tag=docker_tag)

    # Start building
    # Lazy %-style args so the message is only formatted when the level is enabled.
    logging.debug('Building %s as %s', platform, docker_tag)
    try:
        image_id = build_util.build_docker(docker_binary='docker', platform=platform)
        logging.info('Built %s as %s', docker_tag, image_id)

        # Compile and upload tarfile
        _compile_upload_cache_file(bucket_name=bucket, docker_tag=docker_tag, image_id=image_id)
        return None
    except Exception:
        # Broad catch is deliberate: one failed platform must not abort the batch.
        # The platform name is returned so the caller can report the failure.
        logging.exception('Unexpected exception during build of %s', docker_tag)
        return platform
def test_full_cache(self):
    """
    Test whether it's possible to restore cache entirely.

    Builds a throwaway Dockerfile with four RUN steps, primes and uploads the
    cache, wipes the local cache, then rebuilds with cache loading enabled and
    asserts all four steps are cache hits.
    :return:
    """
    # Each RUN line is its own docker layer: four layers => the expected
    # hit/miss counts of 4 below.
    dockerfile_content = """
                FROM busybox
                RUN touch ~/file1
                RUN touch ~/file2
                RUN touch ~/file3
                RUN touch ~/file4
                """
    platform = 'test_full_cache'
    docker_tag = build_util.get_docker_tag(platform=platform, registry=DOCKER_REGISTRY_PATH)
    # NOTE(review): build_docker presumably discovers Dockerfiles by the
    # 'Dockerfile.build.<platform>' naming convention — confirm in build_util.
    dockerfile_path = os.path.join(DOCKERFILE_DIR, 'Dockerfile.build.' + platform)
    try:
        with open(dockerfile_path, 'w') as dockerfile_handle:
            dockerfile_handle.write(dockerfile_content)

        # Warm up: start from an empty local cache, so every layer is a miss.
        docker_cache.delete_local_docker_cache(docker_tag=docker_tag)

        def warm_up_lambda_func():
            build_util.build_docker(
                docker_binary='docker',
                platform=platform,
                registry=DOCKER_REGISTRY_PATH
            )
        _assert_docker_build(lambda_func=warm_up_lambda_func, expected_cache_hit_count=0,
                             expected_cache_miss_count=4)

        # Assert local cache is properly primed: an immediate rebuild hits all layers.
        def primed_cache_lambda_func():
            build_util.build_docker(
                docker_binary='docker',
                platform=platform,
                registry=DOCKER_REGISTRY_PATH
            )
        _assert_docker_build(lambda_func=primed_cache_lambda_func, expected_cache_hit_count=4,
                             expected_cache_miss_count=0)

        # Upload and clean local cache
        docker_cache.build_save_containers(platforms=[platform], registry=DOCKER_REGISTRY_PATH,
                                           load_cache=False)
        docker_cache.delete_local_docker_cache(docker_tag=docker_tag)

        # Build with clean local cache and cache loading enabled: the restored
        # remote cache must make every layer a hit again.
        def clean_cache_lambda_func():
            docker_cache.build_save_containers(
                platforms=[platform], registry=DOCKER_REGISTRY_PATH, load_cache=True)
        _assert_docker_build(lambda_func=clean_cache_lambda_func, expected_cache_hit_count=4,
                             expected_cache_miss_count=0)
    finally:
        # Delete dockerfile and clean up the local cache regardless of outcome.
        os.remove(dockerfile_path)
        docker_cache.delete_local_docker_cache(docker_tag=docker_tag)
def _build_save_container(platform, registry, load_cache) -> Optional[str]:
    """
    Build image for passed platform and push it to the specified docker registry

    :param platform: Platform
    :param registry: Docker registry name
    :param load_cache: Load cache before building
    :return: Platform if failed, None otherwise
    """
    # Case 1: docker-compose managed platforms
    if platform in build_util.DOCKER_COMPOSE_WHITELIST:
        try:
            build_util.build_docker(platform=platform, registry=registry,
                                    num_retries=10, no_cache=False)
            push_cmd = ['docker-compose', 'push', platform]
            subprocess.check_call(push_cmd)
            return None
        except Exception:
            # Keep the same error contract as the legacy path below: report the
            # failed platform instead of letting the exception propagate and
            # abort the whole batch.
            logging.exception('Unexpected exception during build of %s', platform)
            return platform

    # Case 2: Deprecated way, will be removed
    docker_tag = build_util.get_docker_tag(platform=platform, registry=registry)

    # Preload cache
    if load_cache:
        load_docker_cache(registry=registry, docker_tag=docker_tag)

    # Start building
    logging.debug('Building %s as %s', platform, docker_tag)
    try:
        # Increase the number of retries for building the cache.
        image_id = build_util.build_docker(platform=platform, registry=registry,
                                           num_retries=10, no_cache=False)
        logging.info('Built %s as %s', docker_tag, image_id)

        # Push cache to registry
        _upload_image(registry=registry, docker_tag=docker_tag, image_id=image_id)
        return None
    except Exception:
        logging.exception('Unexpected exception during build of %s', docker_tag)
        return platform
def test_partial_cache(self):
    """
    Test whether it's possible to restore cache and then build on top of it
    partially by using a Dockerfile which shares some parts.
    :return:
    """
    # These two dockerfiles diverge at the fourth RUN statement. Their common
    # parts (1-3) should be re-used; docker layer caching stops matching at the
    # first differing instruction, so everything after the divergence is a miss.
    dockerfile_content_1 = """
                FROM busybox
                RUN touch ~/file1
                RUN touch ~/file2
                RUN touch ~/file3
                RUN touch ~/file4
                """
    dockerfile_content_2 = """
                FROM busybox
                RUN touch ~/file1
                RUN touch ~/file2
                RUN touch ~/file3
                RUN touch ~/file5
                RUN touch ~/file4
                RUN touch ~/file6
                """
    platform = 'test_partial_cache'
    docker_tag = build_util.get_docker_tag(platform=platform, registry=DOCKER_REGISTRY_PATH)
    # NOTE(review): build_docker presumably discovers Dockerfiles by the
    # 'Dockerfile.build.<platform>' naming convention — confirm in build_util.
    dockerfile_path = os.path.join(DOCKERFILE_DIR, 'Dockerfile.build.' + platform)
    try:
        # Write initial Dockerfile
        with open(dockerfile_path, 'w') as dockerfile_handle:
            dockerfile_handle.write(dockerfile_content_1)

        # Warm up: start from an empty local cache, so every layer is a miss.
        docker_cache.delete_local_docker_cache(docker_tag=docker_tag)

        def warm_up_lambda_func():
            build_util.build_docker(
                docker_binary='docker',
                platform=platform,
                registry=DOCKER_REGISTRY_PATH
            )
        _assert_docker_build(lambda_func=warm_up_lambda_func, expected_cache_hit_count=0,
                             expected_cache_miss_count=4)

        # Assert local cache is properly primed: an immediate rebuild hits all layers.
        def primed_cache_lambda_func():
            build_util.build_docker(
                docker_binary='docker',
                platform=platform,
                registry=DOCKER_REGISTRY_PATH
            )
        _assert_docker_build(lambda_func=primed_cache_lambda_func, expected_cache_hit_count=4,
                             expected_cache_miss_count=0)

        # Upload and clean local cache
        docker_cache.build_save_containers(platforms=[platform], registry=DOCKER_REGISTRY_PATH,
                                           load_cache=False)
        docker_cache.delete_local_docker_cache(docker_tag=docker_tag)

        # Replace Dockerfile with the second one, resulting in a partial cache hit
        with open(dockerfile_path, 'w') as dockerfile_handle:
            dockerfile_handle.write(dockerfile_content_2)

        # Test if partial cache is properly hit. It will attempt to load the
        # cache from the first Dockerfile, resulting in a partial hit:
        # 3 shared layers hit, 3 new/divergent layers miss.
        def partial_cache_lambda_func():
            docker_cache.build_save_containers(
                platforms=[platform], registry=DOCKER_REGISTRY_PATH, load_cache=True)
        _assert_docker_build(lambda_func=partial_cache_lambda_func, expected_cache_hit_count=3,
                             expected_cache_miss_count=3)
    finally:
        # Delete dockerfile and clean up the local cache regardless of outcome.
        os.remove(dockerfile_path)
        docker_cache.delete_local_docker_cache(docker_tag=docker_tag)