def test_items_storing_archive(self):
    """Test whether items are stored in an archive"""

    manager = ArchiveManager(self.test_path)

    backend_args = {
        'origin': 'http://example.com/',
        'category': 'mock_item',
        'tag': 'test',
        'subtype': 'mocksubtype',
        'from-date': str_to_datetime('2015-01-01')
    }

    fetched = list(fetch(CommandBackend, backend_args, manager=manager))
    self.assertEqual(len(fetched), 5)

    for pos, item in enumerate(fetched):
        self.assertEqual(item['data']['item'], pos)
        self.assertEqual(item['origin'], 'http://example.com/')
        self.assertEqual(item['uuid'], uuid('http://example.com/', str(pos)))
        self.assertEqual(item['tag'], 'test')

    # All the items should have been written to a single archive file
    archive_files = manager.search('http://example.com/', 'CommandBackend',
                                   'mock_item', str_to_datetime('1970-01-01'))
    self.assertEqual(len(archive_files), 1)

    archive = Archive(archive_files[0])
    self.assertEqual(archive._count_table_rows('archive'), 5)
def test_remove_archive_not_found(self):
    """Test if an exception is raised when the archive is not found"""

    manager = ArchiveManager(os.path.join(self.test_path, ARCHIVE_TEST_DIR))

    # Removing a non-existent archive must raise a manager error
    with self.assertRaisesRegex(ArchiveManagerError,
                                'archive mockarchive does not exist'):
        manager.remove_archive('mockarchive')
def test_search_archived_after(self):
    """Check if a set of archives created after a given date are searched"""

    archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
    manager = ArchiveManager(archive_mng_path)

    # First batch, stored before the search date; it must be ignored
    early_metadata = [
        {
            'origin': 'https://example.com',
            'backend_name': 'git',
            'backend_version': '0.8',
            'category': 'commit',
            'backend_params': {},
        },
        {
            'origin': 'https://example.com',
            'backend_name': 'gerrit',
            'backend_version': '0.1',
            'category': 'changes',
            'backend_params': {}
        },
    ]

    for meta in early_metadata:
        archive = manager.create_archive()
        archive.init_metadata(**meta)

    # Timestamp used as the lower bound of the search
    after_dt = datetime_utcnow()

    # Second batch, stored after 'after_dt'
    late_metadata = [
        {
            'origin': 'https://example.org',
            'backend_name': 'git',
            'backend_version': '0.1',
            'category': 'commit',
            'backend_params': {}
        },
        {
            'origin': 'https://example.com',
            'backend_name': 'git',
            'backend_version': '0.1',
            'category': 'commit',
            'backend_params': {}
        }
    ]

    for meta in late_metadata:
        archive = manager.create_archive()
        archive.init_metadata(**meta)
        meta['filepath'] = archive.archive_path

    # Only the matching archive from the second batch is returned
    archives = manager.search('https://example.com', 'git', 'commit', after_dt)
    self.assertListEqual(archives, [late_metadata[1]['filepath']])
def test_ignore_corrupted_archive(self):
    """Check if a corrupted archive is ignored while fetching from archive"""

    def delete_rows(db, table_name):
        # Empty the given table so the archive becomes corrupted.
        # Fix: close the connection in a 'finally' clause; the original
        # left it open, leaking a file handle on the archive database.
        conn = sqlite3.connect(db)
        try:
            cursor = conn.cursor()
            cursor.execute("DELETE FROM " + table_name)
            cursor.close()
            conn.commit()
        finally:
            conn.close()

    manager = ArchiveManager(self.test_path)

    args = {
        'origin': 'http://example.com/',
        'category': 'mock_item',
        'tag': 'test',
        'subtype': 'mocksubtype',
        'from-date': str_to_datetime('2015-01-01')
    }

    # First, fetch the items twice to check if several archive
    # are used
    items = fetch(CommandBackend, args, manager=manager)
    items = [item for item in items]
    self.assertEqual(len(items), 5)

    items = fetch(CommandBackend, args, manager=manager)
    items = [item for item in items]
    self.assertEqual(len(items), 5)

    # Find archive names to delete the rows of one of them to make it
    # corrupted
    filepaths = manager.search('http://example.com/', 'CommandBackend',
                               'mock_item', str_to_datetime('1970-01-01'))
    self.assertEqual(len(filepaths), 2)

    to_remove = filepaths[0]
    delete_rows(to_remove, 'archive')

    # Fetch items from the archive; the corrupted one must be skipped,
    # so only the items of the healthy archive are returned
    items = fetch_from_archive(CommandBackend, args, manager,
                               'mock_item', str_to_datetime('1970-01-01'))
    items = [item for item in items]
    self.assertEqual(len(items), 5)

    for x in range(5):
        item = items[x]
        expected_uuid = uuid('http://example.com/', str(x))
        self.assertEqual(item['data']['item'], x)
        self.assertEqual(item['data']['archive'], True)
        self.assertEqual(item['origin'], 'http://example.com/')
        self.assertEqual(item['uuid'], expected_uuid)
        self.assertEqual(item['tag'], 'test')
def test_remove_archive(self):
    """Test if an archive is removed by the archive manager"""

    manager = ArchiveManager(os.path.join(self.test_path, ARCHIVE_TEST_DIR))
    archive = manager.create_archive()

    # The file exists right after creation and is gone after removal
    self.assertEqual(os.path.exists(archive.archive_path), True)
    manager.remove_archive(archive.archive_path)
    self.assertEqual(os.path.exists(archive.archive_path), False)
def test_create_archive(self, mock_uuid):
    """Test if a new archive is created"""

    mock_uuid.return_value = MockUUID('AB0123456789')

    archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
    manager = ArchiveManager(archive_mng_path)
    archive = manager.create_archive()

    self.assertIsInstance(archive, Archive)

    # The path is derived from the mocked uuid: a two-char prefix
    # directory plus the remainder as the file name
    expected_path = os.path.join(archive_mng_path, 'AB', '0123456789.sqlite3')
    self.assertEqual(archive.archive_path, expected_path)
    self.assertEqual(os.path.exists(archive.archive_path), True)
def test_archived_after(self):
    """Test if only those items archived after a date are returned"""

    manager = ArchiveManager(self.test_path)

    backend_args = {
        'origin': 'http://example.com/',
        'category': 'mock_item',
        'tag': 'test',
        'subtype': 'mocksubtype',
        'from-date': str_to_datetime('2015-01-01')
    }

    # First batch, archived before the reference date
    first_run = list(fetch(CommandBackend, backend_args, manager=manager))
    self.assertEqual(len(first_run), 5)

    archived_dt = datetime_utcnow()

    # Second batch, archived after the reference date
    second_run = list(fetch(CommandBackend, backend_args, manager=manager))
    self.assertEqual(len(second_run), 5)

    # With the epoch as lower bound, both batches are returned
    all_items = list(fetch_from_archive(CommandBackend, backend_args, manager,
                                        'mock_item',
                                        str_to_datetime('1970-01-01')))
    self.assertEqual(len(all_items), 10)

    # Filtering by 'archived_dt' returns only the second batch
    recent_items = list(fetch_from_archive(CommandBackend, backend_args,
                                           manager, 'mock_item', archived_dt))
    self.assertEqual(len(recent_items), 5)
def test_search(self):
    """Test if a set of archives is found based on the given criteria"""

    archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
    manager = ArchiveManager(archive_mng_path)

    dt = datetime_utcnow()

    metadata = [
        {
            'origin': 'https://example.com',
            'backend_name': 'git',
            'backend_version': '0.8',
            'category': 'commit',
            'backend_params': {},
        },
        {
            'origin': 'https://example.com',
            'backend_name': 'gerrit',
            'backend_version': '0.1',
            'category': 'changes',
            'backend_params': {}
        },
        {
            'origin': 'https://example.org',
            'backend_name': 'git',
            'backend_version': '0.1',
            'category': 'commit',
            'backend_params': {}
        },
        {
            'origin': 'https://example.com',
            'backend_name': 'git',
            'backend_version': '0.1',
            'category': 'commit',
            'backend_params': {}
        }
    ]

    # Create one archive per metadata entry, remembering where
    # each one was stored
    for meta in metadata:
        archive = manager.create_archive()
        archive.init_metadata(**meta)
        meta['filepath'] = archive.archive_path

    # Only the entries matching origin/backend/category are found
    found = manager.search('https://example.com', 'git', 'commit', dt)
    self.assertListEqual(found,
                         [metadata[0]['filepath'], metadata[3]['filepath']])
def test_search_no_match(self):
    """Check if an empty set of archives is returned when none match the criteria"""

    archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
    manager = ArchiveManager(archive_mng_path)

    dt = datetime_utcnow()

    metadata = [
        {
            'origin': 'https://example.com',
            'backend_name': 'git',
            'backend_version': '0.8',
            'category': 'commit',
            'backend_params': {},
        },
        {
            'origin': 'https://example.com',
            'backend_name': 'gerrit',
            'backend_version': '0.1',
            'category': 'changes',
            'backend_params': {}
        },
        {
            'origin': 'https://example.org',
            'backend_name': 'git',
            'backend_version': '0.1',
            'category': 'commit',
            'backend_params': {}
        },
        {
            'origin': 'https://example.com',
            'backend_name': 'git',
            'backend_version': '0.1',
            'category': 'commit',
            'backend_params': {}
        }
    ]

    for meta in metadata:
        archive = manager.create_archive()
        archive.init_metadata(**meta)
        meta['filepath'] = archive.archive_path

    # No archive was created for the 'bugzilla' backend
    found = manager.search('https://example.com', 'bugzilla', 'commit', dt)
    self.assertListEqual(found, [])
def test_create_existing_archive(self, mock_uuid):
    """Test if an exception is raised when the archive to create exists"""

    mock_uuid.return_value = MockUUID('AB0123456789')

    archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
    manager = ArchiveManager(archive_mng_path)

    # The first call succeeds and creates the archive file
    archive = manager.create_archive()
    self.assertIsInstance(archive, Archive)
    self.assertEqual(archive.archive_path,
                     os.path.join(archive_mng_path, 'AB',
                                  '0123456789.sqlite3'))

    # Since the uuid is mocked to a fixed value, creating the
    # archive again must fail
    with self.assertRaisesRegex(ArchiveManagerError,
                                'archive .+ already exists'):
        _ = manager.create_archive()
def test_struct(self):
    """Test whether the structure of an archive manager directory is created"""

    archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)

    # The directory must not exist before instantiating the manager
    self.assertEqual(os.path.isdir(archive_mng_path), False)

    # Instantiating the manager creates the directory
    manager = ArchiveManager(archive_mng_path)
    self.assertEqual(manager.dirpath, archive_mng_path)
    self.assertEqual(os.path.isdir(archive_mng_path), True)

    # Reusing the same directory with another manager keeps it intact
    alt_manager = ArchiveManager(archive_mng_path)
    self.assertEqual(alt_manager.dirpath, archive_mng_path)
    self.assertEqual(os.path.isdir(archive_mng_path), True)
def test_remove_archive_on_error(self):
    """Test whether an archive is removed when an unhandled exception occurs"""

    manager = ArchiveManager(self.test_path)

    backend_args = {
        'origin': 'http://example.com/',
        'category': 'mock_item',
        'tag': 'test',
        'subtype': 'mocksubtype',
        'from-date': str_to_datetime('2015-01-01')
    }

    # Consuming the generator raises, which must discard the archive
    generator = fetch(ErrorCommandBackend, backend_args, manager=manager)
    with self.assertRaises(BackendError):
        _ = list(generator)

    # Nothing should remain stored for this backend
    stored = manager.search('http://example.com/', 'ErrorCommandBackend',
                            'mock_item', str_to_datetime('1970-01-01'))
    self.assertEqual(len(stored), 0)
def test_archive(self):
    """Test whether a set of items is fetched from the archive"""

    manager = ArchiveManager(self.test_path)

    backend_args = {
        'origin': 'http://example.com/',
        'category': 'mock_item',
        'tag': 'test',
        'subtype': 'mocksubtype',
        'from-date': str_to_datetime('2015-01-01')
    }

    # Fetch twice so that two separate archives are created
    for _ in range(2):
        fetched = list(fetch(CommandBackend, backend_args, manager=manager))
        self.assertEqual(len(fetched), 5)

    # Retrieve everything stored across both archives
    archived = list(fetch_from_archive(CommandBackend, backend_args, manager,
                                       'mock_item',
                                       str_to_datetime('1970-01-01')))
    self.assertEqual(len(archived), 10)

    # Items repeat every five positions, once per archive
    for pos, item in enumerate(archived):
        value = pos % 5
        self.assertEqual(item['data']['item'], value)
        self.assertEqual(item['data']['archive'], True)
        self.assertEqual(item['origin'], 'http://example.com/')
        self.assertEqual(item['uuid'], uuid('http://example.com/', str(value)))
        self.assertEqual(item['tag'], 'test')
def test_no_archived_items(self):
    """Test when no archived items are available"""

    manager = ArchiveManager(self.test_path)

    backend_args = {
        'origin': 'http://example.com/',
        'category': 'mock_item',
        'tag': 'test',
        'subtype': 'mocksubtype',
        'from-date': str_to_datetime('2015-01-01')
    }

    fetched = list(fetch(CommandBackend, backend_args, manager=manager))
    self.assertEqual(len(fetched), 5)

    # Nothing was archived under the 'alt_item' category
    archived = list(fetch_from_archive(CommandBackend, backend_args, manager,
                                       'alt_item',
                                       str_to_datetime('1970-01-01')))
    self.assertEqual(len(archived), 0)