def _create_s3_buckets(self, resources_info: Dict) -> None:
    """Create the S3 input and output buckets declared in *resources_info*.

    For every 's3' input storage: create the bucket (and folders), link it to
    the Lambda function, wait for the function to become 'Active' and then
    register the bucket notification. Output storages only need the bucket
    and folders to exist.
    """
    if resources_info.get('lambda').get('input', False):
        s3_service = S3(resources_info)
        for storage in resources_info.get('lambda').get('input'):
            if storage.get('storage_provider') != 's3':
                continue
            bucket_name, folders = s3_service.create_bucket_and_folders(storage.get('path'))
            lambda_client = Lambda(resources_info)
            lambda_client.link_function_and_bucket(bucket_name)
            # Check if function is already available
            logger.info("Wait function to be 'Active'")
            if not lambda_client.wait_function_active(resources_info.get('lambda').get('arn')):
                logger.error("Timeout waiting function.")
            else:
                logger.info("Function 'Active'")
                # Only hook up notifications once the function can receive events
                s3_service.set_input_bucket_notification(bucket_name, folders)
            if not folders:
                logger.info(f'Input bucket "{bucket_name}" successfully created')
    if resources_info.get('lambda').get('output', False):
        s3_service = S3(resources_info)
        for storage in resources_info.get('lambda').get('output'):
            if storage.get('storage_provider') != 's3':
                continue
            bucket_name, folders = s3_service.create_bucket_and_folders(storage.get('path'))
            if not folders:
                logger.info(f'Output bucket "{bucket_name}" successfully created')
def _create_s3_buckets(self, resources_info: Dict) -> None:
    """Create the S3 input and output buckets declared in *resources_info*.

    Input 's3' storages get a bucket, a function link and a bucket
    notification; output 's3' storages only get the bucket and folders.
    """
    lambda_cfg = resources_info.get('lambda')
    if lambda_cfg.get('input', False):
        s3_service = S3(resources_info)
        for storage in lambda_cfg.get('input'):
            if storage.get('storage_provider') != 's3':
                continue
            bucket_name, folders = s3_service.create_bucket_and_folders(storage.get('path'))
            Lambda(resources_info).link_function_and_bucket(bucket_name)
            s3_service.set_input_bucket_notification(bucket_name, folders)
            if not folders:
                logger.info(f'Input bucket "{bucket_name}" successfully created')
    if lambda_cfg.get('output', False):
        s3_service = S3(resources_info)
        for storage in lambda_cfg.get('output'):
            if storage.get('storage_provider') != 's3':
                continue
            bucket_name, folders = s3_service.create_bucket_and_folders(storage.get('path'))
            if not folders:
                logger.info(f'Output bucket "{bucket_name}" successfully created')
def test_set_input_bucket_notification(self, boto_session):
    """set_input_bucket_notification must register the function ARN for
    's3:ObjectCreated:*' events filtered by the given folder prefix."""
    boto_session.return_value = self._init_mocks([
        'put_bucket_notification_configuration',
        'get_bucket_notification_configuration'
    ])
    s3 = S3({'lambda': {'arn': 'arn'}})
    # The bucket starts with no lambda notifications configured
    s3.client.client.get_bucket_notification_configuration.return_value = {
        'LambdaFunctionConfigurations': []
    }
    s3.client.client.put_bucket_notification_configuration.return_value = {}
    s3.set_input_bucket_notification('bucket', 'folders')
    # Expect a single new notification entry with the 'folders/' key prefix
    expected_res = call(Bucket='bucket',
                        NotificationConfiguration={
                            'LambdaFunctionConfigurations': [{
                                'LambdaFunctionArn': 'arn',
                                'Events': ['s3:ObjectCreated:*'],
                                'Filter': {
                                    'Key': {
                                        'FilterRules': [{
                                            'Name': 'prefix',
                                            'Value': 'folders/'
                                        }]
                                    }
                                }
                            }]
                        })
    self.assertEqual(
        s3.client.client.put_bucket_notification_configuration.
        call_args_list[0], expected_res)
def _delete_bucket_notifications(self, resources_info: Dict) -> None:
    """Remove the S3 event notifications attached to the function's input buckets.

    The function ARN and FDL 'input' section are fetched from AWS and cached
    back into *resources_info* before the buckets are processed.
    """
    lambda_client = Lambda(resources_info)
    function_name = resources_info.get('lambda').get('name')
    resources_info['lambda']['arn'] = \
        lambda_client.get_function_configuration(function_name).get('FunctionArn')
    resources_info['lambda']['input'] = \
        lambda_client.get_fdl_config(function_name).get('input', False)
    # 'input' may be False when the FDL defines no input storages
    for storage in resources_info.get('lambda').get('input') or []:
        if storage.get('storage_provider') != 's3':
            continue
        # The bucket is the first path component of 'bucket/folder/...'
        bucket_name = storage.get('path').split("/", 1)[0]
        S3(resources_info).delete_bucket_notification(bucket_name)
def _upload_file_or_folder_to_s3(self, resources_info: Dict) -> None:
    """Upload the path stored in scar_info (file or whole directory) to the
    function's first S3 input bucket, creating the bucket/folders if needed."""
    path_to_upload = self.scar_info.get('path')
    # A directory expands to all the files it contains; a file uploads as-is
    files = (FileUtils.get_all_files_in_directory(path_to_upload)
             if os.path.isdir(path_to_upload) else [path_to_upload])
    s3_service = S3(resources_info)
    storage_path = resources_info.get('lambda').get('input')[0].get('path')
    bucket, folder = s3_service.create_bucket_and_folders(storage_path)
    for file_path in files:
        s3_service.upload_file(bucket=bucket, folder_name=folder, file_path=file_path)
def ls(self):
    """List the files of the defined input bucket, or — when no input bucket
    is configured — the functions deployed in the configured region."""
    resources_info = self.aws_resources[0]
    if not resources_info.get('lambda').get('input', False):
        # Return the resources of the region in the scar's configuration file
        aws_resources = _get_all_functions(self.aws_resources[0])
        response_parser.parse_ls_response(aws_resources, self.scar_info.get('cli_output'))
        return
    # If a bucket is defined, then we list their files
    for file_info in S3(resources_info).get_bucket_file_list():
        logger.info(file_info)
def test_get_bucket_file_list(self, boto_session):
    """get_bucket_file_list should return the keys reported by list_objects_v2."""
    boto_session.return_value = self._init_mocks(
        ['get_bucket_location', 'list_objects_v2'])
    s3 = S3({})
    listing = {'IsTruncated': False, 'Contents': [{'Key': 'key1'}]}
    s3.client.client.list_objects_v2.return_value = listing
    self.assertEqual(s3.get_bucket_file_list({'path': '/'}), ['key1'])
def test_create_bucket(self, boto_session):
    """create_bucket should create a private bucket when the bucket is missing
    (get_bucket_location raises 'NoSuchBucket')."""
    boto_session.return_value = self._init_mocks(
        ['get_bucket_location', 'create_bucket'])
    s3 = S3({})
    # Simulate a non-existent bucket so the creation path is taken
    missing = ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'op')
    s3.client.client.get_bucket_location.side_effect = missing
    s3.client.client.create_bucket.return_value = {}
    s3.create_bucket('bname')
    self.assertEqual(s3.client.client.create_bucket.call_args_list[0],
                     call(ACL='private', Bucket='bname'))
def test_download_file(self, boto_session):
    """download_file should call download_fileobj with the bucket, the key and
    an open file object for the destination path."""
    boto_session.return_value = self._init_mocks(['download_fileobj'])
    s3 = S3({})
    s3.client.client.download_fileobj.return_value = {}
    s3.download_file('bucket', 'key', 'path')
    kwargs = s3.client.client.download_fileobj.call_args_list[0][1]
    self.assertEqual(kwargs['Bucket'], 'bucket')
    self.assertEqual(kwargs['Key'], 'key')
    self.assertIn('Fileobj', kwargs)
    # download_file created the local file 'path'; clean it up
    os.unlink('path')
def _process_s3_input_bucket_calls(self, resources_info: Dict, storage: Dict) -> None:
    """Invoke the function once per file found in the S3 input storage.

    The first file is sent as a synchronous (request/response) invocation so
    the Lambda environment warms up; the remaining files are sent as
    asynchronous invocations.
    """
    s3_service = S3(resources_info)
    lambda_service = Lambda(resources_info)
    s3_file_list = s3_service.get_bucket_file_list(storage)
    bucket_name, _ = get_bucket_and_folders(storage.get('path'))
    logger.info(f"Files found: '{s3_file_list}'")
    if s3_file_list:
        first_key, remaining = s3_file_list[0], s3_file_list[1:]
        # Warm up the lambda environment with a request/response invocation
        lambda_service.launch_request_response_event(
            s3_service.get_s3_event(bucket_name, first_key))
        if remaining:
            # Fan out the rest asynchronously
            s3_event_list = s3_service.get_s3_event_list(bucket_name, remaining)
            lambda_service.process_asynchronous_lambda_invocations(s3_event_list)
def test_upload_file(self, boto_session):
    """upload_file should put the file's bytes under a key equal to its basename."""
    boto_session.return_value = self._init_mocks(['put_object'])
    s3 = S3({})
    s3.client.client.put_object.return_value = {}
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        tmpfile.write(b'Hello world!')
    s3.upload_file('bname', file_path=tmpfile.name)
    os.unlink(tmpfile.name)
    expected = call(Bucket='bname',
                    Key=os.path.basename(tmpfile.name),
                    Body=b'Hello world!')
    self.assertEqual(s3.client.client.put_object.call_args_list[0], expected)
def _download_file_or_folder_from_s3(self, resources_info: Dict) -> None:
    """Download every file of the function's S3 input bucket to local paths.

    S3 'folder' placeholder keys (ending in '/') are skipped; missing local
    directories are created before each download.
    """
    s3_service = S3(resources_info)
    # The source bucket comes from the first input path and never changes,
    # so compute it once instead of re-parsing it for every file
    bucket, _ = get_bucket_and_folders(
        resources_info.get('lambda').get('input')[0].get('path'))
    s3_file_list = s3_service.get_bucket_file_list()
    for s3_file in s3_file_list:
        # Avoid download s3 'folders'
        if s3_file.endswith('/'):
            continue
        file_path = self._get_download_file_path(file_key=s3_file)
        # make sure the path folders are created
        dir_path = os.path.dirname(file_path)
        if dir_path and not os.path.isdir(dir_path):
            os.makedirs(dir_path, exist_ok=True)
        s3_service.download_file(bucket, s3_file, file_path)
def test_create_bucket_and_folders(self, boto_session):
    """create_bucket_and_folders should split 'storage/path' into the bucket
    name and the folder part, creating both when the bucket is missing."""
    boto_session.return_value = self._init_mocks([
        'get_bucket_location', 'create_bucket', 'put_object', 'get_object'
    ])
    s3 = S3({})
    # Force the 'bucket does not exist' branch
    s3.client.client.get_bucket_location.side_effect = ClientError(
        {'Error': {'Code': 'NoSuchBucket'}}, 'op')
    for api in ('create_bucket', 'put_object', 'get_object'):
        getattr(s3.client.client, api).return_value = {}
    self.assertEqual(s3.create_bucket_and_folders('storage/path'),
                     ('storage', 'path'))
def _get_function_code(self, zip_payload_path: str, supervisor_zip_path: str) -> Dict:
    """Zip all the files and folders needed and return the Lambda 'Code' argument.

    When a deployment bucket is configured the package is uploaded to S3 and
    referenced by bucket/key; otherwise the zip bytes are embedded directly.

    Args:
        zip_payload_path: Local path where the deployment zip is written.
        supervisor_zip_path: Local path of the faas-supervisor zip to bundle.

    Returns:
        A dict suitable for the Lambda 'Code' parameter: either
        {'S3Bucket': ..., 'S3Key': ...} or {'ZipFile': <bytes>}.
    """
    FunctionPackager(self.resources_info, supervisor_zip_path).create_zip(zip_payload_path)
    # Hoist the repeated nested lookup into a single local
    deployment_bucket = self.function.get('deployment').get('bucket', False)
    if deployment_bucket:
        file_key = f"lambda/{self.function.get('name')}.zip"
        s3_client = S3(self.resources_info)
        s3_client.create_bucket(deployment_bucket)
        s3_client.upload_file(bucket=deployment_bucket,
                              file_path=zip_payload_path,
                              file_key=file_key)
        return {"S3Bucket": deployment_bucket, "S3Key": file_key}
    return {"ZipFile": FileUtils.read_file(zip_payload_path, mode="rb")}
def test_delete_bucket_notification(self, boto_session):
    """delete_bucket_notification should rewrite the bucket configuration with
    an empty LambdaFunctionConfigurations list."""
    boto_session.return_value = self._init_mocks([
        'put_bucket_notification_configuration',
        'get_bucket_notification_configuration'
    ])
    s3 = S3({'lambda': {'arn': 'arn'}})
    mock_client = s3.client.client
    mock_client.get_bucket_notification_configuration.return_value = {
        'LambdaFunctionConfigurations': []
    }
    mock_client.put_bucket_notification_configuration.return_value = {}
    s3.delete_bucket_notification('bucket')
    expected = call(Bucket='bucket',
                    NotificationConfiguration={'LambdaFunctionConfigurations': []})
    self.assertEqual(
        mock_client.put_bucket_notification_configuration.call_args_list[0],
        expected)
def s3(self):
    """Return an S3 service wrapper built from the stored AWS properties."""
    return S3(self.aws)
def test_init(self):
    """Constructing the S3 wrapper should create a boto3 'S3' client."""
    boto_client = S3({}).client.client
    self.assertEqual(type(boto_client).__name__, "S3")
def aws_s3(self):
    """Return an S3 service wrapper built from the stored AWS properties."""
    return S3(self.aws_properties)