def _get_user_script(self) -> str: script = '' if self.resources_info.get('lambda').get('init_script', False): file_content = FileUtils.read_file( self.resources_info.get('lambda').get('init_script')) script = StrUtils.utf8_to_base64_string(file_content) return script
def _set_function_code(self):
    """Package the function and store the deployment payload on ``self.aws.lambdaf.code``."""
    # Bundle every required file and folder into the deployment zip.
    packager = FunctionPackager(self.aws, self.supervisor_version)
    packager.create_zip()
    deploys_through_s3 = hasattr(self.aws, "s3") and hasattr(self.aws.s3, 'deployment_bucket')
    if deploys_through_s3:
        # A deployment bucket is configured: push the zip and reference it from S3.
        self._upload_to_S3()
        self.aws.lambdaf.code = {"S3Bucket": self.aws.s3.deployment_bucket,
                                 "S3Key": self.aws.s3.file_key}
    else:
        # No bucket configured: embed the zip bytes directly in the request.
        zip_bytes = FileUtils.read_file(self.aws.lambdaf.zip_file_path, mode="rb")
        self.aws.lambdaf.code = {"ZipFile": zip_bytes}
def _get_supervisor_layer_props(self, layer_zip_path: str) -> Dict:
    """Build the property dict used to publish the supervisor Lambda layer.

    The description carries the supervisor version so deployed layers are traceable.
    """
    props = {}
    props['LayerName'] = self._SUPERVISOR_LAYER_NAME
    props['Description'] = self.supervisor_version
    props['Content'] = {'ZipFile': FileUtils.read_file(layer_zip_path, mode="rb")}
    props['LicenseInfo'] = 'Apache 2.0'
    return props
def _get_creation_args(resources_info: Dict, storage_providers: Dict) -> Dict:
    """Assemble the creation arguments from the resource info and storage providers.

    NOTE(review): this drops every falsy value (None, '', 0, False), not only None
    — preserved as-is since callers may rely on it.
    """
    creation_args = {key: value for key, value in resources_info.items() if value}
    # Replace the script path with the script's actual file content.
    creation_args['script'] = FileUtils.read_file(creation_args['script'])
    creation_args['storage_providers'] = storage_providers
    return creation_args
def upload_file(self, folder_name=None, file_path=None, file_key=None):
    """Upload a local file to the input bucket, or register a folder key.

    Raises UploadFileNotFoundError when ``file_path`` cannot be read.
    """
    upload_args = {'Bucket': self.aws.s3.input_bucket}
    upload_args['Key'] = self.get_file_key(folder_name, file_path, file_key)
    if file_path:
        # Read the whole payload up front so a missing file fails fast.
        try:
            upload_args['Body'] = FileUtils.read_file(file_path, 'rb')
        except FileNotFoundError:
            raise excp.UploadFileNotFoundError(file_path=file_path)
    creating_folder = bool(folder_name) and not file_path
    if creating_folder:
        logger.info("Folder '{0}' created in bucket '{1}'".format(upload_args['Key'], upload_args['Bucket']))
    else:
        logger.info("Uploading file '{0}' to bucket '{1}' with key '{2}'".format(file_path, upload_args['Bucket'], upload_args['Key']))
    self.client.upload_file(**upload_args)
def _get_supervisor_layer_props(self, layer_zip_path: str) -> Dict:
    """Compose the publish arguments for the FaaS supervisor Lambda layer."""
    layer_content = {'ZipFile': FileUtils.read_file(layer_zip_path, mode="rb")}
    # License text is taken from the lambda/supervisor section of the resources info.
    license_info = self.resources_info.get('lambda').get('supervisor').get('license_info')
    return {'LayerName': self.layer_name,
            'Description': self.supervisor_version,
            'Content': layer_content,
            'CompatibleRuntimes': ['python3.8', 'python3.7'],
            'LicenseInfo': license_info}
def _get_invocation_payload(self): # Default payload payload = self.function.get('payload', {}) if not payload: # Check for defined run script if self.function.get("run_script", False): script_path = self.function.get("run_script") # We first code to base64 in bytes and then decode those bytes to allow the json lib to parse the data # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382 payload = {"script": StrUtils.bytes_to_base64str(FileUtils.read_file(script_path, 'rb'))} # Check for defined commands # This overrides any other function payload if self.function.get("c_args", False): payload = {"cmd_args": json.dumps(self.function.get("c_args"))} return json.dumps(payload)
def _get_function_code(self, zip_payload_path: str, supervisor_zip_path: str) -> Dict:
    """Zip all the files and folders needed and return the Lambda 'Code' argument."""
    FunctionPackager(self.resources_info, supervisor_zip_path).create_zip(zip_payload_path)
    deployment_bucket = self.function.get('deployment').get('bucket', False)
    if not deployment_bucket:
        # Without a deployment bucket the zip bytes are embedded in the request itself.
        return {"ZipFile": FileUtils.read_file(zip_payload_path, mode="rb")}
    # A bucket is configured: upload the package and reference it from S3.
    file_key = f"lambda/{self.function.get('name')}.zip"
    s3_client = S3(self.resources_info)
    s3_client.create_bucket(deployment_bucket)
    s3_client.upload_file(bucket=deployment_bucket,
                          file_path=zip_payload_path,
                          file_key=file_key)
    return {"S3Bucket": deployment_bucket, "S3Key": file_key}
def _get_invocation_payload(self): # Default payload payload = self.aws.lambdaf.payload if hasattr(self.aws.lambdaf, 'payload') else {} if not payload: # Check for defined run script if hasattr(self.aws.lambdaf, "run_script"): script_path = self.aws.lambdaf.run_script if hasattr(self.aws, "config_path"): script_path = FileUtils.join_paths(self.aws.config_path, script_path) # We first code to base64 in bytes and then decode those bytes to allow the json lib to parse the data # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382 payload = { "script" : StrUtils.bytes_to_base64str(FileUtils.read_file(script_path, 'rb')) } # Check for defined commands # This overrides any other function payload if hasattr(self.aws.lambdaf, "c_args"): payload = {"cmd_args" : json.dumps(self.aws.lambdaf.c_args)} return json.dumps(payload)
def upload_file(self, bucket: str, folder_name: str = None, file_path: str = None, file_key: str = None) -> None:
    """Upload a local file or create a folder placeholder inside ``bucket``.

    Raises UploadFileNotFoundError when ``file_path`` cannot be read.
    """
    kwargs = {'Bucket': bucket}
    kwargs['Key'] = self.get_file_key(folder_name, file_path, file_key)
    if file_path:
        # Read the whole payload up front so a missing file fails fast.
        try:
            kwargs['Body'] = FileUtils.read_file(file_path, 'rb')
        except FileNotFoundError:
            raise excp.UploadFileNotFoundError(file_path=file_path)
    if folder_name and not file_path:
        # S3 has no real folders; an explicit directory content type marks one.
        kwargs['ContentType'] = 'application/x-directory'
        message = f"Folder '{kwargs['Key']}' created in bucket '{kwargs['Bucket']}'."
    else:
        message = f"Uploading file '{file_path}' to bucket '{kwargs['Bucket']}' with key '{kwargs['Key']}'."
    logger.info(message)
    self.client.upload_file(**kwargs)
def _get_user_script(self): script = '' if hasattr(self.aws.lambdaf, "init_script"): file_content = FileUtils.read_file(self.aws.lambdaf.init_script) script = StrUtils.utf8_to_base64_string(file_content) return script