def test_save_empty_file(s3_storage_adapter_instance: S3StorageAdapter):
    """Saving an empty ledger must create only the completion flag.

    No ledger file should appear on S3 when there are no operations —
    just the COMPLETE_INDICATION marker and the last-file-sequence update.
    """
    # Test Setup: start from a clean slate for this ledger name
    ledger_name = 'test_commit'
    s3_storage_adapter_instance.operations_to_save = []
    s3_storage_adapter_instance.file_name = ledger_name
    s3_storage_adapter_instance._rollback()

    # Test: save with no payments and no creations
    s3_storage_adapter_instance.save([], [], ledger_name)
    assert s3_storage_adapter_instance.get_last_file_sequence() == ledger_name

    # No ledger file should have been written
    ledger_key = '{}{}'.format(s3_storage_adapter_instance.ledgers_prefix, ledger_name)
    assert len(__get_files_in_key(s3_storage_adapter_instance, ledger_key)) == 0

    # Making sure COMPLETE_INDICATION exists
    completion_key = '{}{}'.format(
        s3_storage_adapter_instance.completion_indication_path, ledger_name)
    assert len(__get_files_in_key(s3_storage_adapter_instance, completion_key)) == 1

    # Test Cleanup
    s3_storage_adapter_instance.operations_to_save = []
    s3_storage_adapter_instance._rollback()
def test_convert_creations(s3_storage_adapter_instance: S3StorageAdapter):
    """convert_creation must parse the epoch timestamp and set type='creation'."""
    row = __generate_row_based_on_schema(
        s3_storage_adapter_instance.creations_output_schema())
    row['timestamp'] = 1535594286
    del row['type']

    converted = s3_storage_adapter_instance.convert_creation(*row.values())

    expected = dict(row)
    expected['timestamp'] = datetime.strptime('2018-08-30 01:58:06', '%Y-%m-%d %H:%M:%S')
    expected['type'] = 'creation'
    assert converted == expected
def test_convert_payment(s3_storage_adapter_instance: S3StorageAdapter):
    """convert_payment must parse the epoch timestamp and set type='payment'."""
    row = __generate_row_based_on_schema(
        s3_storage_adapter_instance.payments_output_schema())
    row['timestamp'] = 1535594286
    del row['type']

    converted = s3_storage_adapter_instance.convert_payment(*row.values())

    expected = dict(row)
    expected['timestamp'] = datetime.strptime('2018-08-30 01:58:06', '%Y-%m-%d %H:%M:%S')
    expected['type'] = 'payment'
    assert converted == expected
def main():
    """Main entry point: ensure an S3 storage exists for this run.

    Exits 0 when S3 is not configured for this run, or when an existing
    storage is found; exits 1 when the configured first file sequence is
    invalid; otherwise creates the storage via setup_s3_storage().
    """
    logging.basicConfig(level='INFO', format='%(asctime)s | %(levelname)s | %(message)s')

    if not S3_STORAGE_BUCKET:
        logging.info(
            'S3 is not being used as a storage for this run, skipping this script'
        )
        sys.exit(0)

    # Check if the storage already exists: constructing the adapter only
    # succeeds against a previously initialized bucket/prefix. The instance
    # itself is not needed, so it is not kept (the original bound it to an
    # unused local).
    try:
        S3StorageAdapter(S3_STORAGE_BUCKET, S3_STORAGE_KEY_PREFIX,
                         S3_STORAGE_AWS_ACCESS_KEY, S3_STORAGE_AWS_SECRET_KEY,
                         S3_STORAGE_REGION)
        logging.info('Using existing S3 storage instead of creating a new one')
        sys.exit(0)
    except HistoryCollectorStorageError:
        # Storage not initialized yet — validate configuration, then create it
        if verify_file_sequence() != 0:
            logging.error('First file selected is invalid')
            sys.exit(1)
        setup_s3_storage()
def test_get_last_file_sequence_file_not_found(test_bucket, aws_access_key_id,
                                               aws_secret_access_key, test_region):
    """Constructing the adapter over a missing key must surface NoSuchKey."""
    # pytest.raises fails the test when no exception is raised, replacing the
    # manual try/except/else `assert False` pattern.
    with pytest.raises(Exception) as exc_info:
        S3StorageAdapter(test_bucket, 'key_that_should_not_exist',
                         aws_access_key_id, aws_secret_access_key, test_region)
    assert 'NoSuchKey' in str(exc_info.value)
def test_constructor_with_wrong_credentials(test_bucket, test_prefix):
    """Bad AWS credentials must raise botocore's ClientError.

    The original used try/except/else with a no-op `assert True` on the
    success path; pytest.raises expresses the same contract directly and
    fails the test when no exception is raised.
    """
    from botocore.errorfactory import ClientError

    with pytest.raises(ClientError):
        S3StorageAdapter(test_bucket, test_prefix, 'foo', 'goo')
def test_rollback(s3_storage_adapter_instance: S3StorageAdapter):
    """A failing save must roll back any files already written for the ledger."""
    # Test Setup
    ledger_name = 'test_commit'
    s3_storage_adapter_instance.operations_to_save = []
    s3_storage_adapter_instance.file_name = ledger_name
    ledger_key_location_on_s3 = '{}{}/{}'.format(
        s3_storage_adapter_instance.ledgers_prefix, ledger_name, 'test.csv')

    # Putting another manual file in the designated directory, to be sure that
    # there is a file that should be deleted
    __put_file_on_s3(s3_storage_adapter_instance, ledger_key_location_on_s3, 'test')
    assert len(__get_files_in_key(s3_storage_adapter_instance,
                                  ledger_key_location_on_s3)) == 1

    # Test: make the completion-flag upload fail mid-save, which must trigger
    # a rollback inside save()
    with patch('botocore.client.BaseClient._make_api_call',
               new=__mock_make_api_call_fail_when_posting_complete), \
            pytest.raises(Exception):
        s3_storage_adapter_instance.save([{'type': 'payment'}],
                                         [{'type': 'creation'}], ledger_name)

    # The rollback should have removed every file in the ledger folder
    assert len(__get_files_in_key(s3_storage_adapter_instance,
                                  ledger_key_location_on_s3)) == 0

    # Test Cleanup
    s3_storage_adapter_instance.operations_to_save = []
def test_save(s3_storage_adapter_instance: S3StorageAdapter):
    """A successful save writes one ledger file plus the completion flag."""
    # Test Setup: clean out any leftovers for this ledger name
    ledger_name = 'test_commit'
    s3_storage_adapter_instance.operations_to_save = []
    s3_storage_adapter_instance.file_name = ledger_name
    s3_storage_adapter_instance._rollback()

    # Test: save one payment and one creation
    s3_storage_adapter_instance.save([{'type': 'payment'}],
                                     [{'type': 'creation'}], ledger_name)
    assert s3_storage_adapter_instance.get_last_file_sequence() == ledger_name

    # Exactly one ledger file should have been written
    ledger_key = '{}{}'.format(s3_storage_adapter_instance.ledgers_prefix, ledger_name)
    assert len(__get_files_in_key(s3_storage_adapter_instance, ledger_key)) == 1

    # Making sure COMPLETE_INDICATION exists
    completion_key = '{}{}'.format(
        s3_storage_adapter_instance.completion_indication_path, ledger_name)
    assert len(__get_files_in_key(s3_storage_adapter_instance, completion_key)) == 1

    # Test Cleanup
    s3_storage_adapter_instance.operations_to_save = []
    s3_storage_adapter_instance._rollback()
def test_save_creations_empty(s3_storage_adapter_instance: S3StorageAdapter):
    """_save_creations with an empty list must leave operations_to_save empty.

    The original ended with a dead `pass` statement after the assert; it has
    been removed.
    """
    s3_storage_adapter_instance.operations_to_save = []
    test_list = []
    s3_storage_adapter_instance._save_creations(test_list)
    assert s3_storage_adapter_instance.operations_to_save == test_list
def test_save_payments(s3_storage_adapter_instance: S3StorageAdapter):
    """_save_payments must queue the given payments in operations_to_save."""
    payments = ['test']
    s3_storage_adapter_instance.operations_to_save = []
    s3_storage_adapter_instance._save_payments(payments)
    assert s3_storage_adapter_instance.operations_to_save == payments
def test_get_last_file_sequence(s3_storage_adapter_instance: S3StorageAdapter):
    """get_last_file_sequence must return the contents of the last-file marker."""
    expected_sequence = 'test'
    __put_file_on_s3(s3_storage_adapter_instance,
                     s3_storage_adapter_instance.last_file_location,
                     expected_sequence)
    assert s3_storage_adapter_instance.get_last_file_sequence() == expected_sequence
def s3_storage_adapter_instance(test_bucket, test_prefix, aws_access_key_id,
                                aws_secret_access_key, test_region):
    """Build an S3StorageAdapter wired to the test bucket and prefix."""
    adapter = S3StorageAdapter(test_bucket, test_prefix, aws_access_key_id,
                               aws_secret_access_key, test_region)
    return adapter