def reset_test_database():
    """
    Reset the database used by the bok-choy tests.

    Delegates entirely to the database cache automation defined in
    pavelib/database.py, which restores the db from cached dumps.
    """
    # NOTE(review): the pylint suppression suggests this target's
    # parameters are injected by a task decorator — confirm at call site.
    update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
def test_load_data_and_run_migrations(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint differs from
    the stored fingerprint AND s3 has no matching fingerprint file, we
    load data into the database, run migrations, and rebuild the local
    db cache files.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Store a fingerprint that deliberately differs from the computed one.
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as stored:
        stored.write('123456789')

    database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter

    # Both scripts must run: migration calculation, then a cache rebuild.
    expected_scripts = [
        u'{}/scripts/reset-test-db.sh --calculate_migrations',
        u'{}/scripts/reset-test-db.sh --rebuild_cache --use-existing-db',
    ]
    _mock_sh.assert_has_calls(
        [call(script.format(Env.REPO_ROOT)) for script in expected_scripts]
    )
def test_load_data_from_local_cache(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint matches the
    stored fingerprint, data is loaded into the database from the local
    cache files without running migrations and without touching s3.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Store a fingerprint equal to the computed one, so the cache is fresh.
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as stored:
        stored.write(self.expected_fingerprint)

    with patch.object(db_utils, 'get_file_from_s3',
                      wraps=db_utils.get_file_from_s3) as _mock_get_file:
        database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
        # Local cache files are used — nothing is downloaded from s3.
        self.assertFalse(_mock_get_file.called)

    expected_scripts = [
        u'{}/scripts/reset-test-db.sh --calculate_migrations',
        u'{}/scripts/reset-test-db.sh --use-existing-db',
    ]
    _mock_sh.assert_has_calls(
        [call(script.format(Env.REPO_ROOT)) for script in expected_scripts]
    )
def reset_test_database():
    """
    Reset the database used by the bok-choy tests.

    Delegates to the database cache automation defined in
    pavelib/database.py.
    """
    update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
def reset_test_database():
    """
    Reset the database used by the bok-choy tests.

    On Jenkins, use the database cache automation defined in
    pavelib/database.py; anywhere else, reset the test database and
    apply migrations directly via the reset script.
    """
    running_on_jenkins = os.environ.get('USER', None) == 'jenkins'
    if not running_on_jenkins:
        sh("{}/scripts/reset-test-db.sh --migrations".format(Env.REPO_ROOT))
        return
    update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
def reset_test_database():
    """
    Reset the database used by the bok-choy tests.

    If the tests are being run on Jenkins, use the database cache
    automation defined in pavelib/database.py. If not, reset the test
    database and apply migrations.
    """
    # Jenkins workers run under the 'jenkins' user; use the s3-backed
    # cache path there instead of replaying every migration per build.
    if os.environ.get('USER', None) == 'jenkins':
        update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
    else:
        # Local/dev runs: rebuild the test db and apply migrations directly.
        sh("{}/scripts/reset-test-db.sh --migrations".format(Env.REPO_ROOT))
def test_updated_db_cache_pushed_to_s3(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint differs from
    the stored fingerprint AND s3 has no matching fingerprint file, an
    updated fingerprint file is pushed to s3.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Store a fingerprint that deliberately differs from the computed one.
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as stored:
        stored.write('123456789')

    database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter

    # The freshly computed fingerprint file must now exist in the bucket.
    self.assertTrue(self.bucket.get_key(self.fingerprint_filename))
def test_updated_db_cache_pushed_to_s3(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint differs from
    the stored fingerprint AND s3 has no matching fingerprint file, an
    updated fingerprint file is pushed to s3.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Arrange a stored fingerprint that cannot match the computed one.
    stale_fingerprint = '123456789'
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
        fingerprint_file.write(stale_fingerprint)

    database.update_local_bokchoy_db_from_s3()

    # After the update, the bucket should hold the new fingerprint key.
    uploaded_key = self.bucket.get_key(self.fingerprint_filename)
    self.assertTrue(uploaded_key)
def test_load_data_from_s3_fingerprint(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint differs from
    the stored fingerprint BUT s3 does have a matching fingerprint file,
    data is loaded into the database without running migrations.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Bundle the temporary cache files into a tarball and push it to the
    # s3 bucket so a matching fingerprint archive exists remotely.
    zipfile_path = os.path.join(db_utils.CACHE_FOLDER, self.fingerprint_filename)
    with tarfile.open(name=zipfile_path, mode='w:gz') as tar_file:
        for name in database.ALL_DB_FILES:
            tar_file.add(os.path.join(db_utils.CACHE_FOLDER, name), arcname=name)
    key = boto.s3.key.Key(bucket=self.bucket, name=self.fingerprint_filename)
    key.set_contents_from_filename(zipfile_path, replace=False)

    # Store a fingerprint that deliberately differs from the computed one.
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as stored:
        stored.write('123456789')

    with patch('boto.connect_s3', Mock(return_value=Mock())):
        with patch.object(db_utils, 'get_file_from_s3') as _mock_get_file:
            database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
            # The fingerprint archive must be fetched from s3.
            _mock_get_file.assert_called_once_with(
                'test_bucket', self.fingerprint_filename, db_utils.CACHE_FOLDER)

    expected_scripts = [
        u'{}/scripts/reset-test-db.sh --calculate_migrations',
        u'{}/scripts/reset-test-db.sh --use-existing-db',
    ]
    _mock_sh.assert_has_calls(
        [call(script.format(Env.REPO_ROOT)) for script in expected_scripts]
    )
def test_load_data_from_local_cache(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint matches the
    stored fingerprint, data is loaded into the database from the local
    cache files without running migrations and without touching s3.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Store a fingerprint equal to the computed one, so the cache is fresh.
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
        fingerprint_file.write(self.expected_fingerprint)

    s3_getter = patch.object(db_utils, 'get_file_from_s3',
                             wraps=db_utils.get_file_from_s3)
    with s3_getter as _mock_get_file:
        database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
        # Local cache files are used — nothing is downloaded from s3.
        self.assertFalse(_mock_get_file.called)

    script = '{}/scripts/reset-test-db.sh'.format(Env.REPO_ROOT)
    _mock_sh.assert_has_calls([
        call(script + ' --calculate_migrations'),
        call(script + ' --use-existing-db'),
    ])
def test_load_data_and_run_migrations(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint differs from
    the stored fingerprint AND s3 has no matching fingerprint file, we
    load data into the database, run migrations, and rebuild the local
    db cache files.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Arrange a stored fingerprint that cannot match the computed one.
    stale_fingerprint = '123456789'
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
        fingerprint_file.write(stale_fingerprint)

    database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter

    script = '{}/scripts/reset-test-db.sh'.format(Env.REPO_ROOT)
    # Both scripts must run: migration calculation, then a cache rebuild.
    _mock_sh.assert_has_calls([
        call(script + ' --calculate_migrations'),
        call(script + ' --rebuild_cache --use-existing-db'),
    ])
def test_load_data_from_s3_fingerprint(self, _mock_sh):
    """
    Verify that when the computed db cache file fingerprint differs from
    the stored fingerprint BUT s3 does have a matching fingerprint file,
    data is loaded into the database without running migrations.
    """
    self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
    self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
    _write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)

    # Bundle the temporary cache files into a tarball and push it to the
    # moto-backed s3 bucket so a matching fingerprint archive exists.
    zipfile_path = os.path.join(db_utils.CACHE_FOLDER, self.fingerprint_filename)
    with tarfile.open(name=zipfile_path, mode='w:gz') as tar_file:
        for name in database.ALL_DB_FILES:
            tar_file.add(os.path.join(db_utils.CACHE_FOLDER, name), arcname=name)
    key = boto.s3.key.Key(bucket=self.bucket, name=self.fingerprint_filename)
    key.set_contents_from_filename(zipfile_path, replace=False)

    # Arrange a stored fingerprint that cannot match the computed one.
    stale_fingerprint = '123456789'
    with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
        fingerprint_file.write(stale_fingerprint)

    s3_getter = patch.object(db_utils, 'get_file_from_s3',
                             wraps=db_utils.get_file_from_s3)
    with s3_getter as _mock_get_file:
        database.update_local_bokchoy_db_from_s3()  # pylint: disable=no-value-for-parameter
        # The fingerprint archive must be fetched from s3 exactly once.
        _mock_get_file.assert_called_once_with(
            'moto_test_bucket', self.fingerprint_filename, db_utils.CACHE_FOLDER
        )

    script = '{}/scripts/reset-test-db.sh'.format(Env.REPO_ROOT)
    _mock_sh.assert_has_calls([
        call(script + ' --calculate_migrations'),
        call(script + ' --use-existing-db'),
    ])