def api_bucket_activate(bucket_id):
    error = None

    # Get bucket
    try:
        bucket = (database['session'].query(S3Bucket).filter(
            S3Bucket.users.any(id=current_user.id)).filter(
            S3Bucket.id == bucket_id).one())
    except Exception as exception:
        log.exception(exception)
        abort(403)

    # Setup bucket on aws
    if not error:
        success, message = set_up_bucket_on_aws(
            bucket.access_key_id, bucket.secret_access_key, bucket.name)
        if not success:
            error = 'Error setting up bucket integration. {0}'.format(message)

    # Activate bucket in db
    if not error:
        # TODO:
        # here we have additional select query because
        # we use procedural helper instead of model helper
        activate_bucket(bucket.name)

    return jsonify({
        'error': error,
    })
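# The TODO above refers to the procedural helper activate_bucket(name), which has to
# look the bucket up again by name before flipping its flag; that lookup is the
# "additional select query" the comment mentions. A minimal sketch of such a helper,
# assuming a plain `active` column and session commit here; this is an illustration,
# not the project's actual implementation.
def activate_bucket(name):
    bucket = database['session'].query(S3Bucket).filter(
        S3Bucket.name == name).one()
    bucket.active = True
    database['session'].commit()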
def test_set_up_bucket_on_aws_lambda_connection_error(mock_s3_client):
    args = ('mock_access_key_id', 'mock_secret_access_key', 'test_bucket')
    with mock.patch.object(LambdaClient, 'check_connection') as mock_call:
        mock_call.side_effect = S3Exception(
            'Could not connect to the Lambda endpoint', 's3-connection-error')
        assert set_up_bucket_on_aws(
            *args) == (False, 'Could not connect to the Lambda endpoint')
def test_set_up_bucket_on_aws_lambda_permission_already_exists_passes():
    args = ('mock_access_key_id', 'mock_secret_access_key', 'test_bucket')
    with mock.patch('goodtablesio.integrations.s3.utils.bucket._check_connection'), \
            mock.patch('goodtablesio.integrations.s3.utils.bucket._add_policy'), \
            mock.patch.object(LambdaClient, 'add_permission_to_bucket') as mock_call, \
            mock.patch('goodtablesio.integrations.s3.utils.bucket._add_notification'):
        mock_call.side_effect = S3Exception(
            'Permission already exists', 's3-bucket-has-already-perm-on-lambda')
        assert set_up_bucket_on_aws(*args) == (True, '')
def test_set_up_bucket_on_aws_s3_add_policy_access_denied():
    args = ('mock_access_key_id', 'mock_secret_access_key', 'test_bucket')
    with mock.patch('goodtablesio.integrations.s3.utils.bucket._check_connection'), \
            mock.patch.object(S3Client, 'add_policy_for_lambda') as mock_call:
        mock_call.side_effect = S3Exception(
            'Access denied', 's3-access-denied', 'get-bucket-policy')
        assert set_up_bucket_on_aws(
            *args) == (False, 'Access denied (get-bucket-policy)')
def test_set_up_bucket_on_aws_lambda_add_permission_fails():
    args = ('mock_access_key_id', 'mock_secret_access_key', 'test_bucket')
    with mock.patch('goodtablesio.integrations.s3.utils.bucket._check_connection'), \
            mock.patch('goodtablesio.integrations.s3.utils.bucket._add_policy'), \
            mock.patch.object(LambdaClient, 'add_permission_to_bucket') as mock_add_permission, \
            mock.patch.object(S3Client, 'remove_policy_for_lambda') as mock_remove_policy:
        mock_add_permission.side_effect = S3Exception(
            'Access denied', 's3-access-denied', 'add-permission')
        assert set_up_bucket_on_aws(
            *args) == (False, 'Access denied (add-permission)')
        # The policy added earlier in the setup flow should be rolled back
        mock_remove_policy.assert_called_once_with('test_bucket')
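# A speculative sketch of set_up_bucket_on_aws, pieced together from the view code
# and the tests in this section. Only the helper names (_check_connection,
# _add_policy, _add_notification), the patched client methods, and the error codes
# come from the source; the constructors, the attribute names on S3Exception, and the
# control flow below are assumptions, not the project's actual implementation.
def set_up_bucket_on_aws(access_key_id, secret_access_key, bucket_name):
    s3_client = S3Client(access_key_id, secret_access_key)          # assumed signature
    lambda_client = LambdaClient(access_key_id, secret_access_key)  # assumed signature
    try:
        _check_connection(s3_client, lambda_client, bucket_name)
        _add_policy(s3_client, bucket_name)
        try:
            lambda_client.add_permission_to_bucket(bucket_name)
        except S3Exception as exception:
            # An already-granted permission is tolerated (see the test above)
            if exception.code != 's3-bucket-has-already-perm-on-lambda':
                # Roll back the policy added just before failing (see the test above)
                s3_client.remove_policy_for_lambda(bucket_name)
                raise
        _add_notification(s3_client, lambda_client, bucket_name)
        return True, ''
    except S3Exception as exception:
        # Failure messages carry the failing AWS operation in parentheses when known
        message = exception.message
        if exception.operation:
            message = '{0} ({1})'.format(message, exception.operation)
        return False, message


def _check_connection(s3_client, lambda_client, bucket_name):
    # Assumed to delegate to the clients so tests can patch either layer
    lambda_client.check_connection()


def _add_policy(s3_client, bucket_name):
    s3_client.add_policy_for_lambda(bucket_name)


def _add_notification(s3_client, lambda_client, bucket_name):
    s3_client.add_notification_for_lambda(bucket_name)  # hypothetical method name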
def api_bucket_add():
    error = None
    bucket_data = None

    # Check current number of buckets for this user
    if not _check_number_of_buckets(current_user):
        error = 'Free plan users can only have {} active buckets'.format(
            settings.MAX_S3_BUCKETS_ON_FREE_PLAN)

    # Get input fields
    if not error:
        payload = request.get_json()
        access_key_id = payload.get('access-key-id')
        secret_access_key = payload.get('secret-access-key')
        bucket_name = payload.get('bucket-name')

        # Check input fields
        if not access_key_id or not secret_access_key or not bucket_name:
            error = 'Missing fields'

    # Get bucket
    if not error:
        bucket = database['session'].query(S3Bucket).filter(
            S3Bucket.name == bucket_name).one_or_none()
        if bucket and bucket.active:
            error = 'Bucket already exists'

    # Setup bucket on aws
    if not error:
        success, message = set_up_bucket_on_aws(
            access_key_id, secret_access_key, bucket_name)
        if not success:
            error = 'Error setting up bucket integration. {0}'.format(message)

    # Create bucket in db
    if not error:
        bucket = create_bucket(
            bucket_name, access_key_id, secret_access_key, user=current_user)
        bucket_data = bucket.to_api()

    return jsonify({
        'bucket': bucket_data,
        'error': error,
    })
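# The _check_number_of_buckets helper used above is not shown in this section. A
# plausible sketch, assuming it counts the user's active buckets against
# settings.MAX_S3_BUCKETS_ON_FREE_PLAN; the body is an illustration only, not the
# project's actual code.
def _check_number_of_buckets(user):
    count = (database['session'].query(S3Bucket)
             .filter(S3Bucket.users.any(id=user.id))
             .filter(S3Bucket.active.is_(True))
             .count())
    return count < settings.MAX_S3_BUCKETS_ON_FREE_PLAN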
def api_bucket_add():
    error = None
    bucket_data = None

    # Get input fields
    payload = request.get_json()
    access_key_id = payload.get('access-key-id')
    secret_access_key = payload.get('secret-access-key')
    bucket_name = payload.get('bucket-name')

    # Check input fields
    if not access_key_id or not secret_access_key or not bucket_name:
        error = 'Missing fields'

    # Get bucket
    if not error:
        bucket = database['session'].query(S3Bucket).filter(
            S3Bucket.name == bucket_name).one_or_none()
        if bucket and bucket.active:
            error = 'Bucket already exists'

    # Setup bucket on aws
    if not error:
        success, message = set_up_bucket_on_aws(
            access_key_id, secret_access_key, bucket_name)
        if not success:
            error = 'Error setting up bucket integration. {0}'.format(message)

    # Create bucket in db
    if not error:
        bucket = create_bucket(
            bucket_name, access_key_id, secret_access_key, user=current_user)
        bucket_data = bucket.to_api()

    return jsonify({
        'bucket': bucket_data,
        'error': error,
    })
def test_set_up_bucket_on_aws(mock_s3_client, mock_lambda_client):
    args = ('mock_access_key_id', 'mock_secret_access_key', 'test_bucket')
    assert set_up_bucket_on_aws(*args) == (True, '')
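# The mock_s3_client and mock_lambda_client fixtures used by the tests above are
# defined elsewhere (a conftest.py, presumably). A minimal sketch of what they might
# do, assuming the client classes can simply be replaced wholesale in the module
# under test; the patch targets and behaviour are assumptions, not the real fixtures.
import pytest
from unittest import mock


@pytest.fixture
def mock_s3_client():
    with mock.patch(
            'goodtablesio.integrations.s3.utils.bucket.S3Client') as patched:
        yield patched


@pytest.fixture
def mock_lambda_client():
    with mock.patch(
            'goodtablesio.integrations.s3.utils.bucket.LambdaClient') as patched:
        yield patched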