def parse_http_response(response: Response, resources_info: Dict, scar_info: Dict) -> None:
    """Process the response generated by an API Gateway invocation.

    Logs a human-readable summary of the invocation; for binary output the
    decoded body is also written to the configured output file.
    """
    lambda_info = resources_info.get('lambda')
    function_name = lambda_info.get('name')
    asynch = lambda_info.get('asynchronous')
    output_type = scar_info.get('cli_output')
    if not response.ok:
        if asynch and response.status_code == 502:
            # An async launch answers with 502 once the request is accepted.
            message = f"Function '{function_name}' launched successfully."
        else:
            error = json.loads(response.text)
            detail = error['message'] if 'message' in error else error['exception']
            message = f"Error ({response.reason}): {detail}"
    elif output_type == OutputType.BINARY.value:
        output_file = scar_info.get('output_file', '')
        with open(output_file, "wb") as out:
            out.write(StrUtils.decode_base64(response.text))
        message = f"Output saved in file '{output_file}'"
    else:
        parts = [f"Request Id: {response.headers['amz-lambda-request-id']}"]
        if asynch:
            parts.append(f"\nFunction '{function_name}' launched correctly")
        else:
            parts.append(f"\nLog Group Name: {response.headers['amz-log-group-name']}\n")
            parts.append(f"Log Stream Name: {response.headers['amz-log-stream-name']}\n")
            parts.append(StrUtils.base64_to_utf8_string(response.text))
        message = ''.join(parts)
    logger.info(message)
def _print_generic_response(response, output_type, aws_output, text_message=None,
                            json_output=None, verbose_output=None, output_file=None):
    """Print (or save) an invocation response according to the output type.

    BINARY writes the decoded payload body to 'output_file'; PLAIN_TEXT logs
    'text_message'; JSON/VERBOSE log a JSON document ('json_output' /
    'verbose_output' when given, otherwise a default one built here).
    """
    if output_type == OutputType.BINARY.value:
        with open(output_file, "wb") as out:
            out.write(StrUtils.decode_base64(response['Payload']['body']))
        return
    if output_type == OutputType.PLAIN_TEXT.value:
        logger.info(text_message)
        return
    if output_type == OutputType.JSON.value:
        output = json_output or {aws_output: {'RequestId': response['ResponseMetadata']['RequestId'],
                                              'HTTPStatusCode': response['ResponseMetadata']['HTTPStatusCode']}}
    elif output_type == OutputType.VERBOSE.value:
        output = verbose_output or {aws_output: response}
    logger.info_json(output)
def _parse_requestresponse_invocation_response(**kwargs):
    """Build text and JSON representations of a synchronous ('RequestResponse')
    Lambda invocation result and delegate printing to _print_generic_response.

    Expected kwargs:
        Response   -- the invocation response dict (required).
        OutputType -- output type value forwarded to the printer (required).
        OutputFile -- optional path used when the output is binary.
    """
    response = kwargs['Response']
    # Hoist the repeated 'response['Payload']' lookups into one local.
    payload = response['Payload']
    if 'errorMessage' in payload:
        # The function itself failed: show the raw error payload.
        json_message = payload
        text_message = payload['errorMessage']
    else:
        request_id = response['ResponseMetadata']['RequestId']
        log_group_name = payload['headers']['amz-log-group-name']
        log_stream_name = payload['headers']['amz-log-stream-name']
        if "exception" in payload['body']:
            # The supervisor reported a container-level error.
            body = ("ERROR launching udocker container: \n "
                    f"{json.loads(payload['body'])['exception']}")
        elif payload['isBase64Encoded']:
            body = StrUtils.base64_to_utf8_string(payload['body'])
        else:
            body = payload['body']
        text_message = (f"Request Id: {request_id}\n"
                        f"Log Group Name: {log_group_name}\n"
                        f"Log Stream Name: {log_stream_name}\n")
        text_message += body
        json_message = {'LambdaOutput': {'StatusCode': response['StatusCode'],
                                         'Payload': body,
                                         'LogGroupName': log_group_name,
                                         'LogStreamName': log_stream_name,
                                         'RequestId': request_id}}
    # Idiomatic: .get() replaces the original membership check + index;
    # falsy values (None, '') still yield None, as before.
    output_file = kwargs.get('OutputFile') or None
    _print_generic_response(response, kwargs['OutputType'], 'LambdaOutput', text_message,
                            json_output=json_message, output_file=output_file)
def _get_user_script(self) -> str:
    """Return the user's init script base64-encoded, or '' when none is set."""
    init_script = self.resources_info.get('lambda').get('init_script', False)
    if not init_script:
        return ''
    return StrUtils.utf8_to_base64_string(FileUtils.read_file(init_script))
def _create_supervisor_user_data(self) -> str:
    """Build the user_data that installs faas-supervisor, base64-encoded.

    The returned value is a mime-multipart document containing a single
    shell script that downloads the configured faas-supervisor release.
    Generic mime-multipart file:

    Content-Type: multipart/mixed; boundary="===============3595946014116037730=="
    MIME-Version: 1.0

    --===============3595946014116037730==
    Content-Type: text/x-shellscript; charset="us-ascii"
    MIME-Version: 1.0
    Content-Transfer-Encoding: 7bit

    #!/bin/bash
    mkdir -p /opt/faas-supervisor/bin
    curl https://github.com/grycap/faas-supervisor/releases/download/1.0.11/supervisor \
        -L -o /opt/faas-supervisor/bin/supervisor
    chmod +x /opt/faas-supervisor/bin/supervisor
    --===============3595946014116037730==--
    """
    supervisor_url = GitHubUtils.get_asset_url(self._SUPERVISOR_GITHUB_USER,
                                               self._SUPERVISOR_GITHUB_REPO,
                                               self._SUPERVISOR_GITHUB_ASSET_NAME,
                                               self.supervisor_version)
    shell_script = self._LAUNCH_TEMPLATE_SCRIPT.substitute(supervisor_binary_url=supervisor_url)
    user_data = MIMEMultipart()
    user_data.attach(MIMEText(shell_script, 'x-shellscript'))
    return StrUtils.utf8_to_base64_string(str(user_data))
def parse_http_response(response, function_name, asynch, output_type, output_file):
    """Process the response generated by an API Gateway invocation and log
    a human-readable summary.

    Args:
        response: HTTP response object of the API Gateway call.
        function_name: name of the invoked Lambda function.
        asynch: truthy when the function was launched asynchronously.
        output_type: OutputType value selecting how to render the body.
        output_file: path used to store the body when output is binary.
    """
    if response.ok:
        if output_type == OutputType.BINARY:
            with open(output_file, "wb") as out:
                out.write(StrUtils.decode_base64(response.text))
            text_message = f"Output saved in file '{output_file}'"
        else:
            text_message = f"Request Id: {response.headers['amz-lambda-request-id']}"
            if asynch:
                text_message += f"\nFunction '{function_name}' launched correctly"
            else:
                text_message += f"\nLog Group Name: {response.headers['amz-log-group-name']}\n"
                text_message += f"Log Stream Name: {response.headers['amz-log-stream-name']}\n"
                text_message += json.loads(response.text)["udocker_output"]
    else:
        if asynch and response.status_code == 502:
            # Fixed typo in the user-facing message: 'sucessfully' ->
            # 'successfully' (now consistent with the other implementation
            # of parse_http_response in this codebase).
            text_message = f"Function '{function_name}' launched successfully."
        else:
            error = json.loads(response.text)
            if 'message' in error:
                text_message = f"Error ({response.reason}): {error['message']}"
            else:
                text_message = f"Error ({response.reason}): {error['exception']}"
    logger.info(text_message)
def validate_function_name(function_name):
    """Raise ValidatorError when the name violates AWS Lambda naming rules."""
    if StrUtils.find_expression(function_name, VALID_LAMBDA_NAME_REGEX):
        return
    error_msg = ("Find name restrictions in: https://docs.aws.amazon.com/lambda/latest/"
                 "dg/API_CreateFunction.html#SSS-CreateFunction-request-FunctionName")
    raise ValidatorError(parameter='function_name',
                         parameter_value=function_name,
                         error_msg=error_msg)
def __init__(self, resources_info: Dict) -> None:
    """Initialize the Lambda wrapper from the resources configuration.

    Args:
        resources_info: full resources configuration; the 'lambda' section
            must contain at least 'supervisor.version'.

    Raises:
        Exception: when the 'image' runtime is used with a faas-supervisor
            version older than 1.5.0-beta3.
    """
    super().__init__(resources_info.get('lambda', {}))
    self.resources_info = resources_info
    self.function = resources_info.get('lambda', {})
    self.supervisor_version = resources_info.get('lambda').get('supervisor').get('version')
    # The container image runtime is only supported by faas-supervisor
    # 1.5.0-beta3 ('1.5.0b3') or newer.
    if (self.function.get('runtime') == "image"
            and StrUtils.compare_versions(self.supervisor_version, "1.5.0b3") < 0):
        # Message fixed to match the version actually checked above
        # (it previously claimed plain '1.5.0').
        raise Exception("Supervisor version must be 1.5.0-beta3 or higher for image runtime.")
def _parse_error_invocation_response(response, function_name):
    """Log a readable error for a failed invocation; no-op for a falsy response."""
    if not response:
        return
    payload = response['Payload']
    if "Task timed out" in payload:
        # Find the timeout time and name the function in the message
        # to ease the error readability.
        timeout_msg = StrUtils.find_expression(str(payload), '(Task timed out .* seconds)')
        error_msg = timeout_msg.replace("Task", "Function '%s'" % function_name)
        error_log = f"Error in function response: {error_msg}"
    else:
        error_msg = "Error in function response."
        error_log = f"Error in function response: {payload}"
    logger.error(error_msg, error_log)
def _get_invocation_payload(self): # Default payload payload = self.function.get('payload', {}) if not payload: # Check for defined run script if self.function.get("run_script", False): script_path = self.function.get("run_script") # We first code to base64 in bytes and then decode those bytes to allow the json lib to parse the data # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382 payload = {"script": StrUtils.bytes_to_base64str(FileUtils.read_file(script_path, 'rb'))} # Check for defined commands # This overrides any other function payload if self.function.get("c_args", False): payload = {"cmd_args": json.dumps(self.function.get("c_args"))} return json.dumps(payload)
def _get_invocation_payload(self): # Default payload payload = self.aws.lambdaf.payload if hasattr(self.aws.lambdaf, 'payload') else {} if not payload: # Check for defined run script if hasattr(self.aws.lambdaf, "run_script"): script_path = self.aws.lambdaf.run_script if hasattr(self.aws, "config_path"): script_path = FileUtils.join_paths(self.aws.config_path, script_path) # We first code to base64 in bytes and then decode those bytes to allow the json lib to parse the data # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382 payload = { "script" : StrUtils.bytes_to_base64str(FileUtils.read_file(script_path, 'rb')) } # Check for defined commands # This overrides any other function payload if hasattr(self.aws.lambdaf, "c_args"): payload = {"cmd_args" : json.dumps(self.aws.lambdaf.c_args)} return json.dumps(payload)
def check_faas_supervisor_layer(self):
    """Ensure the faas-supervisor layer exists and is up to date.

    Creates the layer when missing; updates it when the version recorded in
    the layer 'Description' is older than the configured supervisor version.
    """
    layer_info = self.layer.get_latest_layer_info(self._SUPERVISOR_LAYER_NAME)
    if not (layer_info and 'Description' in layer_info):
        # Layer not found, we have to create it.
        self._create_supervisor_layer()
        return
    # Compare supervisor versions; the deployed one is stored in 'Description'.
    deployed_version = layer_info.get('Description', '')
    if StrUtils.compare_versions(deployed_version, self.supervisor_version) < 0:
        # Deployed supervisor layer is older than the requested version.
        self._update_supervisor_layer()
    else:
        logger.info("Using existent 'faas-supervisor' layer")
def get_fdl_config(self, arn: str = None) -> Dict:
    """Return the FDL configuration of a deployed function as a dict.

    Reads the base64-encoded 'FDL' environment variable first; for functions
    deployed by older versions it falls back to extracting
    'function_config.yaml' from the deployment package. Returns {} when no
    configuration can be found.

    Args:
        arn: function ARN; when omitted, the configured function name is used.
    """
    function_id = arn or self.function.get('name')
    function_info = self.client.get_function(function_id)
    # Preferred source: the FDL environment variable.
    encoded_fdl = (function_info.get('Configuration', {})
                   .get('Environment', {})
                   .get('Variables', {})
                   .get('FDL'))
    if encoded_fdl:
        return yaml.safe_load(StrUtils.decode_base64(encoded_fdl))
    # Legacy fallback — in the future this part can be removed.
    code_info = function_info.get('Code')
    if 'Location' not in code_info:
        return {}
    package_bytes = get_file(code_info.get('Location'))
    # Extract function_config.yaml from the deployment package.
    try:
        with ZipFile(io.BytesIO(package_bytes)) as package:
            with package.open('function_config.yaml') as cfg_yaml:
                return yaml.safe_load(cfg_yaml)
    except (KeyError, BadZipfile):
        return {}
def get_user_info(self) -> Dict:
    """Retrieve information about the current IAM user, including the user's
    creation date, path, unique ID, and ARN.

    When the caller lacks IAM read permissions ('AccessDenied'), the user
    name is recovered from the error message itself and a minimal dict is
    returned instead.

    Raises:
        ClientError: for IAM errors other than 'AccessDenied'.
        GetUserInfoError: for any other unexpected failure.
    """
    try:
        return self.client.get_user()
    except ClientError as cerr:
        if cerr.response['Error']['Code'] == 'AccessDenied':
            # Without IAM access rights we can still find the user name
            # inside the error response text.
            user_name = StrUtils.find_expression(str(cerr), self._USER_NAME_REGEX)
            return {'UserName': user_name,
                    'User': {'UserName': user_name, 'UserId': ''}}
        # Bare 'raise' re-raises with the original traceback intact
        # (idiomatic replacement for 'raise cerr').
        raise
    except Exception as ex:
        raise GetUserInfoError(error_msg=ex)
def _add_account_id(resources_info: Dict):
    """Extract the AWS account id from the IAM role ARN and store it in the
    'iam' section of the resources configuration."""
    iam_info = resources_info['iam']
    iam_info['account_id'] = StrUtils.find_expression(iam_info['role'], _ACCOUNT_ID_REGEX)
def add_invocation_permission(self, **kwargs: Dict) -> Dict:
    """Add an invocation permission to the resource policy of the given
    AWS Lambda function and return the service response."""
    permission_args = dict(kwargs)
    permission_args['StatementId'] = StrUtils.get_random_uuid4_str()
    permission_args['Action'] = "lambda:InvokeFunction"
    return self.client.add_permission(**permission_args)
def _is_config_file_updated(self):
    """Return True when the stored config version is at least the default
    config version (False when no version is recorded at all)."""
    if 'config_version' not in self.cfg_data['scar']:
        return False
    current_version = self.cfg_data.get('scar', {}).get("config_version", "")
    required_version = _DEFAULT_CFG['scar']["config_version"]
    return StrUtils.compare_versions(current_version, required_version) >= 0
def test_create_function(self, load_tmp_config_file, prepare_udocker_image, download_supervisor, boto_session):
    """End-to-end test of create_function for a zip-deployed function:
    checks the exact arguments passed to the AWS 'create_function' call
    (including the base64-encoded FDL) and the published supervisor layer
    name, description and content size."""
    session, lam, _ = self._init_mocks([
        'list_layers', 'publish_layer_version', 'get_bucket_location',
        'put_object', 'create_function', 'list_layer_versions'
    ])
    boto_session.return_value = session
    load_tmp_config_file.return_value = {}
    tests_path = os.path.dirname(os.path.abspath(__file__))
    # Use the bundled supervisor package instead of downloading it.
    download_supervisor.return_value = os.path.join(
        tests_path, "../../files/supervisor.zip")
    # Canned AWS responses for the mocked boto client.
    lam.client.client.list_layers.return_value = {
        'Layers': [{
            'LayerName': 'layername'
        }]
    }
    lam.client.client.publish_layer_version.return_value = {
        'LayerVersionArn': '1'
    }
    lam.client.client.create_function.return_value = {
        'FunctionArn': 'farn'
    }
    lam.client.client.list_layer_versions.return_value = {
        'LayerVersions': []
    }
    lam.create_function()
    # Expected FDL document embedded (base64-encoded) in the environment.
    fdl = {
        "storage_providers": {},
        "name": "fname",
        "runtime": "python3.7",
        "timeout": 300,
        "memory": 512,
        "layers": ["1"],
        "log_type": "Tail",
        "tags": {
            "createdby": "scar"
        },
        "handler": "some.handler",
        "description": "desc",
        "deployment": {
            "bucket": "someb",
            "max_s3_payload_size": 262144000
        },
        "environment": {
            "Variables": {
                "IMAGE_ID": "some/image:tag"
            }
        },
        "container": {
            "image": "some/image:tag",
            "image_file": "some.tgz",
            "environment": {
                "Variables": {}
            }
        },
        "supervisor": {
            "version": "1.4.2",
            "layer_name": "layername"
        }
    }
    # Expected keyword arguments of the AWS create_function call.
    res = {
        'FunctionName': 'fname',
        'Role': 'iamrole',
        'Environment': {
            'Variables': {
                'IMAGE_ID': 'some/image:tag',
                'FDL': StrUtils.dict_to_base64_string(fdl)
            }
        },
        'Description': 'desc',
        'Timeout': 300,
        'MemorySize': 512,
        'Tags': {
            'createdby': 'scar'
        },
        'Code': {
            'S3Bucket': 'someb',
            'S3Key': 'lambda/fname.zip'
        },
        'Runtime': 'python3.7',
        'Handler': 'some.handler',
        'Architectures': ['x86_64'],
        'Layers': ['1']
    }
    self.assertEqual(
        lam.client.client.create_function.call_args_list[0][1], res)
    self.assertEqual(
        lam.client.client.publish_layer_version.call_args_list[0][1]
        ['LayerName'], "layername")
    self.assertEqual(
        lam.client.client.publish_layer_version.call_args_list[0][1]
        ['Description'], "1.4.2")
    # The layer zip must be exactly the bundled supervisor package.
    self.assertEqual(
        len(lam.client.client.publish_layer_version.call_args_list[0][1]
            ['Content']['ZipFile']), 99662)
def test_create_function_image(self, from_env, unzip_folder, load_tmp_config_file, download_supervisor_asset, boto_session):
    """End-to-end test of create_function for a container-image function:
    checks the AWS 'create_function' arguments (image code, VPC, EFS and
    base64-encoded FDL) and the docker build/push calls against the ECR
    repository URI."""
    session, lam, client = self._init_mocks([
        'create_function', 'create_repository', 'describe_registry',
        'get_authorization_token'
    ])
    boto_session.return_value = session
    load_tmp_config_file.return_value = {}
    tests_path = os.path.dirname(os.path.abspath(__file__))
    # Use the bundled supervisor package instead of downloading it.
    download_supervisor_asset.return_value = os.path.join(
        tests_path, "../../files/supervisor.zip")
    # Mocked docker client (login + image build/push).
    docker = MagicMock(['login', 'images'])
    docker.images = MagicMock(['build', 'push'])
    from_env.return_value = docker
    # Canned ECR responses.
    client.create_repository.return_value = {
        "repository": {
            "repositoryUri": "repouri"
        }
    }
    client.describe_registry.return_value = {'registryId': 'regid'}
    client.get_authorization_token.return_value = {
        'authorizationData': [{
            'authorizationToken': 'QVdTOnRva2Vu'
        }]
    }
    # Switch the fixture to the image runtime with VPC and EFS settings.
    lam.resources_info['lambda']['runtime'] = 'image'
    lam.resources_info['lambda']['supervisor'][
        'version'] = lam.supervisor_version = '1.5.0'
    lam.resources_info['lambda']['vpc'] = {
        'SubnetIds': ['subnet'],
        'SecurityGroupIds': ['sg']
    }
    lam.resources_info['lambda']['file_system'] = [{
        'Arn': 'efsaparn',
        '': '/mnt'
    }]
    lam.create_function()
    # Expected FDL document embedded (base64-encoded) in the environment.
    fdl = {
        "storage_providers": {},
        "name": "fname",
        "runtime": "image",
        "timeout": 300,
        "memory": 512,
        "layers": [],
        "log_type": "Tail",
        "tags": {
            "createdby": "scar"
        },
        "handler": "some.handler",
        "description": "desc",
        "deployment": {
            "bucket": "someb",
            "max_s3_payload_size": 262144000
        },
        "environment": {
            "Variables": {
                "IMAGE_ID": "repouri:latest"
            }
        },
        "container": {
            "image": "repouri:latest",
            "image_file": "some.tgz",
            "environment": {
                "Variables": {}
            }
        },
        "supervisor": {
            "version": "1.5.0",
            "layer_name": "layername"
        },
        "vpc": {
            "SubnetIds": ["subnet"],
            "SecurityGroupIds": ["sg"]
        },
        "file_system": [{
            'Arn': 'efsaparn',
            '': '/mnt'
        }],
        "ecr": {
            "delete_image": True
        }
    }
    # Expected keyword arguments of the AWS create_function call.
    res = {
        'FunctionName': 'fname',
        'Role': 'iamrole',
        'Environment': {
            'Variables': {
                'IMAGE_ID': 'repouri:latest',
                'FDL': StrUtils.dict_to_base64_string(fdl)
            }
        },
        'Description': 'desc',
        'Timeout': 300,
        'MemorySize': 512,
        'PackageType': 'Image',
        'Tags': {
            'createdby': 'scar'
        },
        'Architectures': ['x86_64'],
        'VpcConfig': {
            'SubnetIds': ['subnet'],
            'SecurityGroupIds': ['sg']
        },
        'FileSystemConfigs': [{
            'Arn': 'efsaparn',
            '': '/mnt'
        }],
        'Code': {
            'ImageUri': 'repouri:latest'
        }
    }
    self.assertEqual(
        lam.client.client.create_function.call_args_list[0][1], res)
    # The image must be built and pushed to the ECR repository URI.
    self.assertEqual(docker.images.push.call_args_list[0][0][0], "repouri")
    self.assertEqual(docker.images.build.call_args_list[0][1]['tag'],
                     "repouri")
def _set_fdl(self):
    """Store the function's FDL, base64-encoded, in the 'FDL' env variable."""
    function_config = create_function_config(self.resources_info)
    encoded_fdl = StrUtils.dict_to_base64_string(function_config)
    self.function['environment']['Variables']['FDL'] = encoded_fdl
def _add_account_id(self):
    """Extract the AWS account id from the IAM role ARN and store it in
    the aws properties."""
    role_arn = self.aws_properties.iam.role
    self.aws_properties.account_id = StrUtils.find_expression(role_arn, _ACCOUNT_ID_REGEX)
def _parse_base64_response_values(value):
    """Decode, in place, the base64-encoded log fields of an invocation
    response ('LogResult' and the 'x-amz-log-result' HTTP header)."""
    headers = value['ResponseMetadata']['HTTPHeaders']
    value['LogResult'] = StrUtils.base64_to_utf8_string(value['LogResult'])
    headers['x-amz-log-result'] = StrUtils.base64_to_utf8_string(headers['x-amz-log-result'])
def _get_config_file(self) -> str:
    """Return the function configuration serialized as YAML and encoded
    in base64.

    Removed the dead initial assignment (the variable was unconditionally
    overwritten before use).
    """
    config = create_function_config(self.resources_info)
    yaml_str = yaml.safe_dump(config)
    return StrUtils.utf8_to_base64_string(yaml_str)
def _get_user_script(self):
    """Return the user's init script base64-encoded, or '' when no
    'init_script' attribute is configured."""
    if not hasattr(self.aws.lambdaf, "init_script"):
        return ''
    file_content = FileUtils.read_file(self.aws.lambdaf.init_script)
    return StrUtils.utf8_to_base64_string(file_content)