Example #1
def create_udocker_container(image_id):
    # Build the udocker container layout only if the extracted image still
    # fits within half of the S3 payload limit.
    if utils.get_tree_size(scar_temporal_folder) < MAX_S3_PAYLOAD_SIZE / 2:
        execute_command(
            ["python3", udocker_exec, "create", "--name=lambda_cont", image_id],
            cli_msg="Creating container structure")
    # Discard the container files if the resulting tree exceeds the limit.
    if utils.get_tree_size(scar_temporal_folder) > MAX_S3_PAYLOAD_SIZE:
        shutil.rmtree(scar_temporal_folder + "/udocker/containers/")
Example #2
def create_udocker_container(self, image_id):
    if (utils.get_tree_size(self.scar_temporal_folder) <
            MAX_S3_PAYLOAD_SIZE / 2):
        self.execute_command(self.udocker_exec +
                             ["create", "--name=lambda_cont", image_id],
                             cli_msg="Creating container structure")
    if (utils.get_tree_size(self.scar_temporal_folder) >
            MAX_S3_PAYLOAD_SIZE):
        shutil.rmtree(
            utils.join_paths(self.scar_temporal_folder,
                             "udocker/containers/"))
Example #3
def upload_file_to_S3_bucket(image_file, deployment_bucket, file_key):
    if (utils.get_tree_size(scar_temporal_folder) > MAX_S3_PAYLOAD_SIZE):
        error_msg = "Uncompressed image size greater than 250MB.\nPlease reduce the uncompressed image and try again."
        logger.error(error_msg)
        utils.delete_file(zip_file_path)
        exit(1)
    
    logger.info("Uploading '%s' to the '%s' S3 bucket" % (image_file, deployment_bucket))
    file_data = utils.read_file(image_file, 'rb')
    S3().upload_file(deployment_bucket, file_key, file_data)
Example #4
def upload_file_to_S3_bucket(image_file, deployment_bucket, file_key):
    if (utils.get_tree_size(scar_temporal_folder) > MAX_S3_PAYLOAD_SIZE):
        error_msg = "Uncompressed image size greater than 250MB.\nPlease reduce the uncompressed image and try again."
        logger.error(error_msg)
        utils.finish_failed_execution()

    logger.info("Uploading '%s' to the '%s' S3 bucket" %
                (image_file, deployment_bucket))
    file_data = utils.get_file_as_byte_array(image_file)
    S3().upload_file(deployment_bucket, file_key, file_data)
Example #5
def is_large_dir(app_dir, size_in_bytes=1e8):
    if utils.get_tree_size(app_dir) > size_in_bytes:
        return True
    return False
Example #6
def validate_s3_code_size(scar_folder, MAX_S3_PAYLOAD_SIZE):
    if utils.get_tree_size(scar_folder) > MAX_S3_PAYLOAD_SIZE:
        raise S3CodeSizeError(code_size='250MB')
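
All of the examples above call a project-specific utils.get_tree_size helper that is not shown on this page. For reference, a minimal sketch of such a helper, based on the recursive os.scandir recipe from the Python documentation, could look like the following; the name and exact behavior are assumptions, and the utils modules in the projects above may differ.

import os

def get_tree_size(path):
    # Recursively sum the size in bytes of all regular files under 'path',
    # without following symbolic links.
    total = 0
    for entry in os.scandir(path):
        if entry.is_dir(follow_symlinks=False):
            total += get_tree_size(entry.path)
        else:
            total += entry.stat(follow_symlinks=False).st_size
    return total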