def _copy_supervisor_files(parent_folder: str, tmp_zip_path: str, layer_code_path: str) -> None:
    """Move the extracted 'faassupervisor' package into the layer's 'python' folder."""
    src = FileUtils.join_paths(tmp_zip_path, parent_folder, 'faassupervisor')
    dst = FileUtils.join_paths(layer_code_path, 'python', 'faassupervisor')
    shutil.move(src, dst)
def _copy_extra_files(parent_folder: str, tmp_zip_path: str, layer_code_path: str) -> None:
    """Unzip every archive found in the supervisor's 'extra' folder into the layer code path."""
    extra_dir = FileUtils.join_paths(tmp_zip_path, parent_folder, 'extra')
    for zip_file in FileUtils.get_all_files_in_directory(extra_dir):
        FileUtils.unzip_folder(zip_file, layer_code_path)
def create_function(self):
    """Create the AWS Lambda function described in self.function.

    For the 'image' runtime the container image is pushed to ECR; otherwise
    the FaaS supervisor is obtained (from cache or downloaded), published as
    a layer, and the function code is packaged as a zip payload.
    Returns the boto3 'create_function' response (ARN stored in self.function['arn']).
    """
    # Create tmp folders
    zip_payload_path = None
    supervisor_zip_path = None
    if self.function.get('runtime') == "image":
        # Create docker image in ECR
        self.function['container']['image'] = ContainerImage.create_ecr_image(self.resources_info,
                                                                              self.supervisor_version)
    else:
        # Check if supervisor's source is already cached
        cached, supervisor_zip_path = SupervisorUtils.is_supervisor_cached(self.supervisor_version)
        if not cached:
            # Download supervisor
            supervisor_zip_path = SupervisorUtils.download_supervisor(self.supervisor_version)
        # Manage supervisor layer
        self._manage_supervisor_layer(supervisor_zip_path)
        # Create function
        tmp_folder = FileUtils.create_tmp_dir()
        zip_payload_path = FileUtils.join_paths(tmp_folder.name, 'function.zip')
    self._set_image_id()
    self._set_fdl()
    creation_args = self._get_creations_args(zip_payload_path, supervisor_zip_path)
    response = self.client.create_function(**creation_args)
    if response and "FunctionArn" in response:
        self.function['arn'] = response.get('FunctionArn', "")
    return response
def _add_init_script(self) -> None:
    """Copy the init script defined by the user to the payload folder."""
    script_path = self.resources_info.get('lambda').get('init_script', False)
    if script_path:
        dest = FileUtils.join_paths(self.tmp_payload_folder.name,
                                    FileUtils.get_file_name(script_path))
        FileUtils.copy_file(script_path, dest)
def __init__(self, aws_properties, supervisor_version):
    """Set up the temporary packaging folder and supervisor zip location.

    :param aws_properties: AWS configuration object (stored as self.aws).
    :param supervisor_version: FaaS supervisor version string to package.
    """
    self.aws = aws_properties
    self.supervisor_version = supervisor_version
    # Temporary directory that holds the function payload while packaging
    self.scar_tmp_function_folder = FileUtils.create_tmp_dir()
    self.scar_tmp_function_folder_path = self.scar_tmp_function_folder.name
    # Destination path of the supervisor zip inside the function tmp folder
    self._supervisor_zip_path = FileUtils.join_paths(self.aws.lambdaf.tmp_folder_path, 'faas.zip')
    # Keyword arguments accumulated for the packaging step
    self.package_args = {}
def __init__(self, resources_info: dict, tmp_payload_folder_path: str, supervisor_zip_path: str):
    """Prepare the udocker working directory and extract udocker from the supervisor zip.

    :param resources_info: Resource configuration dictionary (annotation fixed:
        this is used as a dict, not a str).
    :param tmp_payload_folder_path: Path of the function payload folder.
    :param supervisor_zip_path: Path of the downloaded supervisor zip.
    """
    self.resources_info = resources_info
    self._tmp_payload_folder_path = tmp_payload_folder_path
    # udocker lives inside the payload so it ships with the function
    self._udocker_dir = FileUtils.join_paths(self._tmp_payload_folder_path, "udocker")
    # NOTE(review): appears to cache a previous udocker dir setting for later
    # restore — confirm against the save/restore helpers.
    self._udocker_dir_orig = ""
    self._udocker_code = FileUtils.join_paths(self._udocker_dir, "udocker.py")
    self._udocker_exec = ['python3', self._udocker_code]
    self._install_udocker(supervisor_zip_path)
def _extract_udocker_zip(supervisor_zip_path) -> str:
    """Extract 'udocker.zip' from the supervisor zip into the system tmp dir.

    Fix: the original declared '-> None' but the function returns the path
    of the extracted file (an empty string when no 'udocker.zip' entry exists).

    :param supervisor_zip_path: Path of the downloaded supervisor zip.
    :return: Path of the extracted 'udocker.zip', or "" if not found.
    """
    file_path = ""
    with ZipFile(supervisor_zip_path) as thezip:
        for file in thezip.namelist():
            if file.endswith("udocker.zip"):
                file_path = FileUtils.join_paths(FileUtils.get_tmp_dir(), file)
                thezip.extract(file, FileUtils.get_tmp_dir())
                break
    return file_path
def _update_config_file(self):
    """Back up the deprecated configuration file, write a fresh default one,
    and terminate execution so the user can fill in the new file."""
    logger.info(("SCAR configuration file deprecated.\n"
                 "Updating your SCAR configuration file."))
    # Preserve the old configuration before overwriting it
    FileUtils.copy_file(self.config_file_path, self.backup_file_path)
    logger.info(f"Old configuration file saved in '{self.backup_file_path}'.")
    self._create_new_config_file()
    logger.info((f"New configuration file saved in '{self.config_file_path}'.\n"
                 "Please fill your new configuration file with your account information."))
    # Stop here: the new file needs user-provided credentials before SCAR can run
    SysUtils.finish_scar_execution()
def _add_init_script(self):
    """Copy the user's init script into the payload and expose its path to the supervisor."""
    if not hasattr(self.aws.lambdaf, "init_script"):
        return
    if hasattr(self.aws, "config_path"):
        # Resolve the script path relative to the configuration file folder
        self.aws.lambdaf.init_script = FileUtils.join_paths(self.aws.config_path,
                                                            self.aws.lambdaf.init_script)
    FileUtils.copy_file(self.aws.lambdaf.init_script,
                        FileUtils.join_paths(self.scar_tmp_function_folder_path, _INIT_SCRIPT_NAME))
    # The supervisor reads this variable to locate the script at runtime
    self.aws.lambdaf.environment['Variables']['INIT_SCRIPT_PATH'] = \
        f"/var/task/{_INIT_SCRIPT_NAME}"
def _extract_handler_code(self) -> None:
    """Extract 'function_handler.py' from the supervisor zip and install it as
    the Lambda handler module named after the function."""
    function_handler_dest = FileUtils.join_paths(self.scar_tmp_function_folder_path,
                                                 f"{self.aws.lambdaf.name}.py")
    file_path = ""
    with ZipFile(self._supervisor_zip_path) as thezip:
        for file in thezip.namelist():
            if file.endswith("function_handler.py"):
                file_path = FileUtils.join_paths(self.aws.lambdaf.tmp_folder_path, file)
                thezip.extract(file, self.aws.lambdaf.tmp_folder_path)
                break
    # NOTE(review): if the zip contains no 'function_handler.py', file_path stays ""
    # and the copy below will fail — confirm the supervisor zip always ships it.
    FileUtils.copy_file(file_path, function_handler_dest)
def _add_extra_payload(self) -> None:
    """Copy the user's extra payload (file or directory) into the payload folder."""
    payload_path = self.resources_info.get('lambda').get('extra_payload', False)
    if payload_path:
        logger.info(f"Adding extra payload '{payload_path}'")
        # Files and directories need different copy helpers
        copy = FileUtils.copy_file if FileUtils.is_file(payload_path) else FileUtils.copy_dir
        copy(payload_path, self.tmp_payload_folder.name)
        # Drop the entry so it is not serialized with the function configuration
        del self.resources_info['lambda']['extra_payload']
def _create_layer(self) -> None:
    """Build the supervisor layer zip and publish it as a Lambda layer."""
    tmp_zip_path, layer_code_path = _create_tmp_folders()
    layer_zip_path = FileUtils.join_paths(FileUtils.get_tmp_dir(),
                                          f"{self._SUPERVISOR_LAYER_NAME}.zip")
    # Download the supervisor release and lay out the layer contents
    parent_folder = _download_supervisor(self.supervisor_version, tmp_zip_path)
    _copy_supervisor_files(parent_folder, tmp_zip_path, layer_code_path)
    _copy_extra_files(parent_folder, tmp_zip_path, layer_code_path)
    _create_layer_zip(layer_zip_path, layer_code_path)
    # Publish the layer, then drop the local zip — it is no longer needed
    self.layer.create(**self._get_supervisor_layer_props(layer_zip_path))
    FileUtils.delete_file(layer_zip_path)
class ConfigFileParser():
    """Class to manage the SCAR configuration file creation, update and load."""

    _CONFIG_FOLDER_PATH = ".scar"
    _CONFIG_FILE_PATH = "scar.cfg"
    _CONFIG_FILE_NAME_BCK = "scar.cfg_old"
    # Resolved locations of the config file and its backup under the user's home
    config_file_folder = FileUtils.join_paths(SysUtils.get_user_home_path(), _CONFIG_FOLDER_PATH)
    config_file_path = FileUtils.join_paths(config_file_folder, _CONFIG_FILE_PATH)
    backup_file_path = FileUtils.join_paths(config_file_folder, _CONFIG_FILE_NAME_BCK)

    @exception(logger)
    def __init__(self):
        """Load the config file, migrating or creating it when needed."""
        # Check if the config file exists
        if FileUtils.is_file(self.config_file_path):
            with open(self.config_file_path) as cfg_file:
                self.cfg_data = json.load(cfg_file)
            if not self._is_config_file_updated():
                self._update_config_file()
        else:
            self._create_scar_config_folder_and_file()

    def _is_config_file_updated(self):
        """Return True when the stored config_version is current."""
        # A file without 'config_version' predates versioned configurations
        if 'config_version' not in self.cfg_data['scar']:
            return False
        return StrUtils.compare_versions(self.cfg_data.get('scar', {}).get("config_version", ""),
                                         _DEFAULT_CFG['scar']["config_version"]) >= 0

    def get_properties(self):
        """Returns the configuration data of the configuration file."""
        return self.cfg_data

    def get_udocker_zip_url(self):
        """Returns the url where the udocker zip is stored."""
        return self.cfg_data['scar']['udocker_info']['zip_url']

    def _create_scar_config_folder_and_file(self):
        """Create the config folder and a default file, then abort execution."""
        FileUtils.create_folder(self.config_file_folder)
        self._create_new_config_file()
        # Abort: the user must fill in the freshly created file first
        raise ScarConfigFileError(file_path=self.config_file_path)

    def _create_new_config_file(self):
        """Write the default configuration to the config file path."""
        FileUtils.create_file_with_content(self.config_file_path,
                                           json.dumps(_DEFAULT_CFG, indent=2))

    def _update_config_file(self):
        """Back up the deprecated config file, write a fresh default and exit."""
        logger.info(("SCAR configuration file deprecated.\n"
                     "Updating your SCAR configuration file."))
        FileUtils.copy_file(self.config_file_path, self.backup_file_path)
        logger.info(f"Old configuration file saved in '{self.backup_file_path}'.")
        self._create_new_config_file()
        # The literal below was split mid-string by the source mangling; the intact
        # duplicate of this method elsewhere in the project confirms the text.
        logger.info((f"New configuration file saved in '{self.config_file_path}'.\n"
                     "Please fill your new configuration file with your account information."))
        SysUtils.finish_scar_execution()
def _initialize_properties(self, aws_properties):
    """Initialise default Lambda properties and the temporary packaging folder.

    NOTE(review): 'aws_properties' is not read here; all state comes from
    self.aws — confirm whether the parameter is kept for interface reasons.
    """
    lambdaf = self.aws.lambdaf
    lambdaf.environment = {'Variables': {}}
    lambdaf.invocation_type = "RequestResponse"
    lambdaf.log_type = "Tail"
    lambdaf.layers = []
    lambdaf.tmp_folder = FileUtils.create_tmp_dir()
    lambdaf.tmp_folder_path = lambdaf.tmp_folder.name
    lambdaf.zip_file_path = FileUtils.join_paths(lambdaf.tmp_folder_path, 'function.zip')
    if hasattr(lambdaf, "name"):
        # Handler follows the '<function_name>.lambda_handler' convention
        lambdaf.handler = "{0}.lambda_handler".format(lambdaf.name)
    if not hasattr(lambdaf, "asynchronous"):
        lambdaf.asynchronous = False
    self._set_default_call_parameters()
def _add_config_file_path(scar_info: Dict, resources_info: Dict):
    """Record the yaml config folder and make user-supplied file paths relative to it."""
    if scar_info.get("conf_file", False):
        config_path = os.path.dirname(scar_info.get("conf_file"))
        resources_info['lambda']['config_path'] = config_path
        # Update the path of the files based on the path of the yaml (if any)
        for key in ('init_script', 'image_file', 'run_script'):
            if resources_info['lambda'].get(key, False):
                resources_info['lambda'][key] = FileUtils.join_paths(config_path,
                                                                     resources_info['lambda'][key])
def prepare_udocker_image(self):
    """Load the user's container image file into udocker and register it for Lambda."""
    # Temporarily switch the udocker environment to our working dir
    self.save_tmp_udocker_env()
    image_path = FileUtils.join_paths(FileUtils.get_tmp_dir(), "udocker_image.tar.gz")
    FileUtils.copy_file(self.aws.lambdaf.image_file, image_path)
    cmd_out = SysUtils.execute_command_with_msg(
        self.udocker_exec + ["load", "-i", image_path],
        cli_msg="Loading image file")
    # Get the image name from the command output
    # NOTE(review): assumes the image name is always on the second output line —
    # confirm against the udocker 'load' output format.
    self.aws.lambdaf.image = cmd_out.split('\n')[1]
    self._create_udocker_container()
    self.aws.lambdaf.environment['Variables'][
        'IMAGE_ID'] = self.aws.lambdaf.image
    self._set_udocker_local_registry()
    # Restore the environment saved above
    self.restore_udocker_env()
def _validate_container_size(self, max_payload_size):
    """Adjust what is shipped in the payload based on the udocker tree size.

    Three cases:
    - under half the limit: create the container structure inside the payload;
    - over the limit: remove the 'containers' folder from the payload;
    - in between: keep the layers and point the supervisor at them via env var.
    """
    if FileUtils.get_tree_size(self.udocker_dir) < (max_payload_size / 2):
        ucmd = self.udocker_exec + [
            "create", "--name=lambda_cont", self.aws.lambdaf.image
        ]
        SysUtils.execute_command_with_msg(
            ucmd, cli_msg="Creating container structure")
    elif FileUtils.get_tree_size(self.udocker_dir) > max_payload_size:
        # Payload too big: the container folder cannot be shipped at all
        FileUtils.delete_folder(
            FileUtils.join_paths(self.udocker_dir, "containers"))
    else:
        # Ship layers only; the supervisor locates them through this variable
        self.aws.lambdaf.environment['Variables']['UDOCKER_LAYERS'] = \
            '/var/task/udocker/containers/'
def _get_user_script(self) -> str:
    """Return the user's init script base64-encoded, or '' when none is defined."""
    init_script = self.resources_info.get('lambda').get('init_script', False)
    if not init_script:
        return ''
    content = FileUtils.read_file(init_script)
    return StrUtils.utf8_to_base64_string(content)
def create_function_config(resources_info):
    """Build the function configuration: storage providers plus the lambda section."""
    providers = FileUtils.load_tmp_config_file().get('storage_providers', {})
    function_cfg = {'storage_providers': providers}
    function_cfg.update(resources_info.get('lambda'))
    return function_cfg
def create_function_config(resources_info):
    """Build the function configuration, adding Batch and ECR sections when relevant."""
    lambda_info = resources_info.get('lambda')
    function_cfg = {
        'storage_providers': FileUtils.load_tmp_config_file().get('storage_providers', {})
    }
    function_cfg.update(lambda_info)
    clean_function_config(function_cfg)
    # Add Batch specific info
    if lambda_info.get("execution_mode") == "batch":
        function_cfg["batch"] = {
            "multi_node_parallel": resources_info.get('batch').get("multi_node_parallel")
        }
    # Add ECR specific info
    delete_image = resources_info.get('ecr', {}).get("delete_image")
    if lambda_info.get('runtime') == "image" and delete_image is not None:
        function_cfg["ecr"] = {"delete_image": delete_image}
    return function_cfg
def _get_invocation_payload(self):
    """Build the JSON payload for the function invocation.

    Priority: an explicit payload wins; otherwise a configured run script is
    sent base64-encoded; explicit command args override the script.
    Returns the payload serialized as a JSON string.
    """
    # Default payload
    payload = self.aws.lambdaf.payload if hasattr(self.aws.lambdaf, 'payload') else {}
    if not payload:
        # Check for defined run script
        if hasattr(self.aws.lambdaf, "run_script"):
            script_path = self.aws.lambdaf.run_script
            if hasattr(self.aws, "config_path"):
                # Resolve the script relative to the yaml configuration folder
                script_path = FileUtils.join_paths(self.aws.config_path, script_path)
            # We first code to base64 in bytes and then decode those bytes to allow the json lib to parse the data
            # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382
            payload = {"script": StrUtils.bytes_to_base64str(FileUtils.read_file(script_path, 'rb'))}
        # Check for defined commands
        # This overrides any other function payload
        if hasattr(self.aws.lambdaf, "c_args"):
            payload = {"cmd_args": json.dumps(self.aws.lambdaf.c_args)}
    return json.dumps(payload)
def _set_function_code(self):
    """Package the function and attach its code (S3 reference or inline zip bytes)."""
    # Zip all the files and folders needed
    FunctionPackager(self.aws, self.supervisor_version).create_zip()
    use_s3 = hasattr(self.aws, "s3") and hasattr(self.aws.s3, 'deployment_bucket')
    if use_s3:
        self._upload_to_S3()
        self.aws.lambdaf.code = {"S3Bucket": self.aws.s3.deployment_bucket,
                                 "S3Key": self.aws.s3.file_key}
    else:
        self.aws.lambdaf.code = {
            "ZipFile": FileUtils.read_file(self.aws.lambdaf.zip_file_path, mode="rb")
        }
def __init__(self, aws_properties, function_tmp_folder, supervisor_zip_path):
    """Prepare the udocker working directory inside the function's tmp folder.

    :param aws_properties: AWS configuration object (stored as self.aws).
    :param function_tmp_folder: Path of the function packaging folder.
    :param supervisor_zip_path: Path of the downloaded supervisor zip.
    """
    self.aws = aws_properties
    self.function_tmp_folder = function_tmp_folder
    # udocker lives inside the packaging folder so it ships with the function
    self.udocker_dir = FileUtils.join_paths(self.function_tmp_folder, "udocker")
    # NOTE(review): appears to cache a previous udocker dir setting for later
    # restore — confirm against the save/restore helpers.
    self.udocker_dir_orig = ""
    self._initialize_udocker(supervisor_zip_path)
def _get_supervisor_layer_props(self, layer_zip_path: str) -> Dict:
    """Return the layer-creation arguments for the supervisor layer."""
    zip_bytes = FileUtils.read_file(layer_zip_path, mode="rb")
    return {
        'LayerName': self._SUPERVISOR_LAYER_NAME,
        # The supervisor version doubles as the layer description
        'Description': self.supervisor_version,
        'Content': {'ZipFile': zip_bytes},
        'LicenseInfo': 'Apache 2.0',
    }
def upload_file_or_folder_to_s3(self):
    """Upload a file, or every file inside a directory, to the S3 input bucket."""
    path_to_upload = self.scar_properties.path
    self.aws_s3.create_input_bucket()
    if os.path.isdir(path_to_upload):
        files = FileUtils.get_all_files_in_directory(path_to_upload)
    else:
        files = [path_to_upload]
    for file_path in files:
        self.aws_s3.upload_file(folder_name=self.aws_properties.s3.input_folder,
                                file_path=file_path)
def __init__(self, func_call):
    """Load and validate the SCAR configuration, then dispatch the requested command."""
    self.raw_args = FileUtils.load_tmp_config_file()
    AWSValidator.validate_kwargs(self.raw_args)
    functions = self.raw_args.get('functions', {})
    self.aws_resources = functions.get('aws', {})
    self.storage_providers = self.raw_args.get('storage_providers', {})
    self.scar_info = self.raw_args.get('scar', {})
    _add_extra_aws_properties(self.scar_info, self.aws_resources)
    # Call the user's command: invoke the method named by the CLI
    getattr(self, func_call)()
def __init__(self):
    """Load the SCAR config file, creating or migrating it when needed."""
    if not FileUtils.is_file(self.config_file_path):
        # First run: create the folder/default file (raises to instruct the user)
        self._create_scar_config_folder_and_file()
    else:
        with open(self.config_file_path) as cfg_file:
            self.cfg_data = json.load(cfg_file)
        if not self._is_config_file_updated():
            self._update_config_file()
def _manage_udocker_images(self):
    """Fetch or prepare the udocker image required by the function."""
    lambdaf = self.aws.lambdaf
    deploys_via_s3 = (hasattr(lambdaf, "image")
                      and hasattr(self.aws, "s3")
                      and hasattr(self.aws.s3, "deployment_bucket"))
    if deploys_via_s3:
        self.udocker.download_udocker_image()
    if hasattr(lambdaf, "image_file"):
        if hasattr(self.aws, "config_path"):
            # Resolve the image file relative to the yaml configuration folder
            lambdaf.image_file = FileUtils.join_paths(self.aws.config_path,
                                                      lambdaf.image_file)
        self.udocker.prepare_udocker_image()
def _upload_file_or_folder_to_s3(self, resources_info: Dict) -> None:
    """Upload the path given on the SCAR command line to the function's input bucket."""
    path_to_upload = self.scar_info.get('path')
    if os.path.isdir(path_to_upload):
        files = FileUtils.get_all_files_in_directory(path_to_upload)
    else:
        files = [path_to_upload]
    s3_service = S3(resources_info)
    # The first 'input' entry defines the bucket/folder to upload into
    storage_path = resources_info.get('lambda').get('input')[0].get('path')
    bucket, folder = s3_service.create_bucket_and_folders(storage_path)
    for file_path in files:
        s3_service.upload_file(bucket=bucket, folder_name=folder, file_path=file_path)
def validate_http_payload_size(file_path, async_call=False):
    """Raise InvocationPayloadError when the file exceeds the HTTP POST body limits.

    Synchronous calls are bounded by MAX_POST_BODY_SIZE; asynchronous calls are
    additionally bounded by the smaller MAX_POST_BODY_SIZE_ASYNC.
    """
    file_size = FileUtils.get_file_size(file_path)
    if file_size > MAX_POST_BODY_SIZE:
        raise InvocationPayloadError(
            file_size='{0:.2f}MB'.format(file_size / MB),
            max_size='{0:.2f}MB'.format(MAX_POST_BODY_SIZE / MB))
    if async_call and file_size > MAX_POST_BODY_SIZE_ASYNC:
        raise InvocationPayloadError(
            file_size='{0:.2f}KB'.format(file_size / KB),
            max_size='{0:.2f}KB'.format(MAX_POST_BODY_SIZE_ASYNC / KB))