def test_initial_sync(self):
    """An initial sync() populates both the revision and node_change
    tables from the mocked repository."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    # `changesets` is looked up lazily inside the lambda, so it may be
    # assigned after the repos mock is created.
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
               ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [Mock(Changeset, repos, 0, '', '', t1,
                       get_changes=lambda: []),
                  Mock(Changeset, repos, 1, 'Import', 'joe', t2,
                       get_changes=lambda: iter(changes))]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    # Use assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(('0', to_utimestamp(t1), '', ''), cursor.fetchone())
    self.assertEqual(('1', to_utimestamp(t2), 'joe', 'Import'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("""
        SELECT rev,path,node_type,change_type,base_path,base_rev
        FROM node_change
        """)
    self.assertEqual(('1', 'trunk', 'D', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(('1', 'trunk/README', 'F', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def test_update_sync(self):
    """A sync() after new commits appends only the new changeset rows."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes = [('trunk/README', Node.FILE, Changeset.EDIT,
                'trunk/README', 1)]
    changesets = [
        None,
        Mock(Changeset, repos, 1, '', '', t2, get_changes=lambda: []),
        Mock(Changeset, repos, 2, 'Update', 'joe', t3,
             get_changes=lambda: iter(changes)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEqual(
            [(to_utimestamp(t3), 'joe', 'Update')],
            db("SELECT time, author, message FROM revision WHERE rev='2'"))
        self.assertEqual(
            [('trunk/README', 'F', 'E', 'trunk/README', '1')],
            db("""SELECT path, node_type, change_type, base_path, base_rev
                  FROM node_change WHERE rev='2'"""))
def sync(self):
    """Pre-cache Perforce change data committed since the last sync,
    then delegate to the default `CachedRepository` implementation."""
    youngest_stored = self.repos.get_youngest_rev_in_cache(self.db)
    if youngest_stored != str(self.repos.youngest_rev):
        # Need to cache all information for changes since the last
        # sync operation.
        if youngest_stored is None:
            youngest_stored = '0'
        # Obtain a list of changes since the last cache sync
        from p4trac.repos import _P4ChangesOutputConsumer
        output = _P4ChangesOutputConsumer(self.repos._repos)
        self.repos._connection.run('changes', '-l', '-s', 'submitted',
                                   '@>%s' % youngest_stored,
                                   output=output)
        if output.errors:
            from p4trac.repos import PerforceError
            raise PerforceError(output.errors)
        changes = output.changes
        changes.reverse()  # oldest first, the order the cache expects
        # Perform the precaching of the file history for files in these
        # changes.
        self.repos._repos.precacheFileHistoryForChanges(changes)
    # Call on to the default implementation now that we've cached
    # enough information to make it run a bit faster.
    # NOTE(review): placement of this call outside the conditional is
    # reconstructed from the surrounding comments — confirm against the
    # original p4trac source.
    CachedRepository.sync(self)
def test_sync_changeset(self):
    """sync_changeset(0) refreshes only revision 0; revision 1 keeps its
    previously cached message."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
             get_changes=lambda: iter(changes1)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync_changeset(0)
    rows = self.env.db_query(
        "SELECT time, author, message FROM revision ORDER BY rev")
    self.assertEqual(2, len(rows))
    self.assertEqual((to_utimestamp(t1), 'joe', '**empty**'), rows[0])
    self.assertEqual((to_utimestamp(t2), 'joe', 'Import'), rows[1])
def test_get_changes(self):
    """Changesets and their changes are served from the cache tables
    without consulting the underlying repository."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    cursor = self.db.cursor()
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (0,%s,'','')", (to_timestamp(t1),))
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (1,%s,'joe','Import')", (to_timestamp(t2),))
    cursor.executemany("INSERT INTO node_change (rev,path,node_type,"
                       "change_type,base_path,base_rev) "
                       "VALUES ('1',%s,%s,%s,%s,%s)",
                       [('trunk', 'D', 'A', None, None),
                        ('trunk/README', 'F', 'A', None, None)])
    cursor.execute("UPDATE system SET value='1' WHERE name='youngest_rev'")
    # get_changeset returns None so any cache miss would be detected.
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: None,
                 get_youngest_rev=lambda: 1,
                 get_oldest_rev=lambda: 0,
                 next_rev=lambda x: None,
                 normalize_rev=lambda rev: rev)
    cache = CachedRepository(self.db, repos, None, self.log)
    self.assertEqual('1', cache.youngest_rev)
    changeset = cache.get_changeset(1)
    self.assertEqual('joe', changeset.author)
    self.assertEqual('Import', changeset.message)
    self.assertEqual(t2, changeset.date)
    changes = changeset.get_changes()
    self.assertEqual(('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                     changes.next())
    self.assertEqual(('trunk/README', Node.FILE, Changeset.ADD, None,
                      None), changes.next())
    self.assertRaises(StopIteration, changes.next)
def test_update_sync(self):
    """sync() after a new commit appends only the new changeset rows.

    Uses assertEqual throughout; assertEquals is a deprecated alias.
    """
    cursor = self.db.cursor()
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (0,41000,'','')")
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (1,42000,'joe','Import')")
    cursor.executemany("INSERT INTO node_change (rev,path,node_type,"
                       "change_type,base_path,base_rev) "
                       "VALUES ('1',%s,%s,%s,%s,%s)",
                       [('trunk', 'D', 'A', None, None),
                        ('trunk/README', 'F', 'A', None, None)])
    changes = [('trunk/README', Node.FILE, Changeset.EDIT,
                'trunk/README', 1)]
    changeset = Mock(Changeset, 2, 'Update', 'joe', 42042,
                     get_changes=lambda: iter(changes))
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: changeset,
                 get_youngest_rev=lambda: 2,
                 next_rev=lambda x: int(x) == 1 and 2 or None)
    cache = CachedRepository(self.db, repos, None, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT time,author,message FROM revision WHERE rev='2'")
    self.assertEqual((42042, 'joe', 'Update'), cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("SELECT path,node_type,change_type,base_path,base_rev "
                   "FROM node_change WHERE rev='2'")
    self.assertEqual(('trunk/README', 'F', 'E', 'trunk/README', '1'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def test_sync_changeset(self):
    """sync_changeset(0) refreshes only revision 0 in the cache.

    Revision 1 keeps its previously cached message ("Import"), proving
    the other rows were untouched. assertEquals (deprecated alias) is
    replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (("0", to_utimestamp(t1), "", ""), []),
        (
            ("1", to_utimestamp(t2), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/README", "F", "A", None, None)],
        ),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes1 = [
        ("trunk", Node.DIRECTORY, Changeset.ADD, None, None),
        ("trunk/README", Node.FILE, Changeset.ADD, None, None),
    ]
    changesets = [
        Mock(Changeset, repos, 0, "**empty**", "joe", t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, "Initial Import", "joe", t2,
             get_changes=lambda: iter(changes1)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync_changeset(0)
    rows = self.env.db_query(
        "SELECT time, author, message FROM revision ORDER BY rev")
    self.assertEqual(2, len(rows))
    self.assertEqual((to_utimestamp(t1), "joe", "**empty**"), rows[0])
    self.assertEqual((to_utimestamp(t2), "joe", "Import"), rows[1])
def test_initial_sync(self):
    """An initial sync() caches every revision and node change.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
               ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [Mock(Changeset, 0, '', '', 41000,
                       get_changes=lambda: []),
                  Mock(Changeset, 1, 'Import', 'joe', 42000,
                       get_changes=lambda: iter(changes))]
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: changesets[int(x)],
                 get_oldest_rev=lambda: 0,
                 get_youngest_rev=lambda: 1,
                 normalize_rev=lambda x: x,
                 next_rev=lambda x: int(x) == 0 and 1 or None)
    cache = CachedRepository(self.db, repos, None, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    self.assertEqual(('0', 41000, '', ''), cursor.fetchone())
    self.assertEqual(('1', 42000, 'joe', 'Import'), cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("SELECT rev,path,node_type,change_type,base_path,"
                   "base_rev FROM node_change")
    self.assertEqual(('1', 'trunk', 'D', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(('1', 'trunk/README', 'F', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def test_initial_sync(self):
    """An initial sync() caches every revision and node change.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
               ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [Mock(Changeset, 0, '', '', t1,
                       get_changes=lambda: []),
                  Mock(Changeset, 1, 'Import', 'joe', t2,
                       get_changes=lambda: iter(changes))]
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: changesets[int(x)],
                 get_oldest_rev=lambda: 0,
                 get_youngest_rev=lambda: 1,
                 normalize_rev=lambda x: x,
                 next_rev=lambda x: int(x) == 0 and 1 or None)
    cache = CachedRepository(self.db, repos, None, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    self.assertEqual(('0', to_timestamp(t1), '', ''), cursor.fetchone())
    self.assertEqual(('1', to_timestamp(t2), 'joe', 'Import'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("SELECT rev,path,node_type,change_type,base_path,"
                   "base_rev FROM node_change")
    self.assertEqual(('1', 'trunk', 'D', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(('1', 'trunk/README', 'F', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def test_get_changes(self):
    """A changeset and its changes are read back from the cache tables
    without hitting the underlying repository."""
    cursor = self.db.cursor()
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (0,41000,'','')")
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (1,42000,'joe','Import')")
    cursor.executemany("INSERT INTO node_change (rev,path,node_type,"
                       "change_type,base_path,base_rev) "
                       "VALUES ('1',%s,%s,%s,%s,%s)",
                       [('trunk', 'D', 'A', None, None),
                        ('trunk/README', 'F', 'A', None, None)])
    # get_changeset returns None so a cache miss would be detected.
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: None,
                 get_youngest_rev=lambda: 1,
                 next_rev=lambda x: None,
                 normalize_rev=lambda rev: rev)
    cache = CachedRepository(self.db, repos, None, self.log)
    self.assertEqual('1', cache.youngest_rev)
    changeset = cache.get_changeset(1)
    self.assertEqual('joe', changeset.author)
    self.assertEqual('Import', changeset.message)
    self.assertEqual(42000, changeset.date)
    changes = changeset.get_changes()
    self.assertEqual(('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                     changes.next())
    self.assertEqual(('trunk/README', Node.FILE, Changeset.ADD, None,
                      None), changes.next())
    self.assertRaises(StopIteration, changes.next)
def test_update_sync(self):
    """sync() after new commits appends only the new changeset rows.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (("0", to_utimestamp(t1), "", ""), []),
        (
            ("1", to_utimestamp(t2), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/README", "F", "A", None, None)],
        ),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes = [("trunk/README", Node.FILE, Changeset.EDIT,
                "trunk/README", 1)]
    changesets = [
        None,
        Mock(Changeset, repos, 1, "", "", t2, get_changes=lambda: []),
        Mock(Changeset, repos, 2, "Update", "joe", t3,
             get_changes=lambda: iter(changes)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEqual(
            [(to_utimestamp(t3), "joe", "Update")],
            db("SELECT time, author, message FROM revision WHERE rev='2'")
        )
        self.assertEqual(
            [("trunk/README", "F", "E", "trunk/README", "1")],
            db(
                """SELECT path, node_type, change_type, base_path, base_rev
                   FROM node_change WHERE rev='2'"""
            ),
        )
def test_sync_changeset(self):
    """sync_changeset(0) refreshes only revision 0; revision 1 keeps its
    cached message.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
             get_changes=lambda: iter(changes1)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync_changeset(0)
    cursor = self.db.cursor()
    cursor.execute("SELECT time,author,message FROM revision ORDER BY rev")
    self.assertEqual((to_utimestamp(t1), 'joe', '**empty**'),
                     cursor.fetchone())
    self.assertEqual((to_utimestamp(t2), 'joe', 'Import'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def test_clean_sync(self):
    """sync(clean=True) rebuilds the whole cache from the repository,
    replacing any stale preset rows.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'), [
            ('trunk', 'D', 'A', None, None),
            ('trunk/README', 'F', 'A', None, None)
        ]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changes2 = [('trunk/README', Node.FILE, Changeset.EDIT,
                 'trunk/README', 1)]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
             get_changes=lambda: iter(changes1)),
        Mock(Changeset, repos, 2, 'Update', 'joe', t3,
             get_changes=lambda: iter(changes2))
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync(clean=True)
    rows = self.env.db_query("""
        SELECT time, author, message FROM revision ORDER BY rev
        """)
    self.assertEqual(3, len(rows))
    self.assertEqual((to_utimestamp(t1), 'joe', '**empty**'), rows[0])
    self.assertEqual((to_utimestamp(t2), 'joe', 'Initial Import'), rows[1])
    self.assertEqual((to_utimestamp(t3), 'joe', 'Update'), rows[2])
    rows = self.env.db_query("""
        SELECT rev, path, node_type, change_type, base_path, base_rev
        FROM node_change ORDER BY rev, path""")
    self.assertEqual(3, len(rows))
    self.assertEqual(('1', 'trunk', 'D', 'A', None, None), rows[0])
    self.assertEqual(('1', 'trunk/README', 'F', 'A', None, None), rows[1])
    self.assertEqual(('2', 'trunk/README', 'F', 'E', 'trunk/README', '1'),
                     rows[2])
def test_update_sync(self):
    """sync() after new commits appends only the new changeset rows.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes = [('trunk/README', Node.FILE, Changeset.EDIT,
                'trunk/README', 1)]
    changesets = [
        None,
        Mock(Changeset, repos, 1, '', '', t2, get_changes=lambda: []),
        Mock(Changeset, repos, 2, 'Update', 'joe', t3,
             get_changes=lambda: iter(changes))
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEqual(
            [(to_utimestamp(t3), 'joe', 'Update')],
            db("SELECT time, author, message FROM revision WHERE rev='2'"))
        self.assertEqual(
            [('trunk/README', 'F', 'E', 'trunk/README', '1')],
            db("""SELECT path, node_type, change_type, base_path, base_rev
                  FROM node_change WHERE rev='2'"""))
def test_initial_sync(self):
    """An initial sync() populates the revision and node_change tables.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
               ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changesets = [
        Mock(Changeset, repos, 0, '', '', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Import', 'joe', t2,
             get_changes=lambda: iter(changes))
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        rows = db("SELECT rev, time, author, message FROM revision")
        self.assertEqual(len(rows), 2)
        self.assertEqual(('0', to_utimestamp(t1), '', ''), rows[0])
        self.assertEqual(('1', to_utimestamp(t2), 'joe', 'Import'),
                         rows[1])
        rows = db("""
            SELECT rev, path, node_type, change_type, base_path, base_rev
            FROM node_change""")
        self.assertEqual(len(rows), 2)
        self.assertEqual(('1', 'trunk', 'D', 'A', None, None), rows[0])
        self.assertEqual(('1', 'trunk/README', 'F', 'A', None, None),
                         rows[1])
def test_initial_sync(self):
    """An initial sync() populates the revision and node_change tables.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=1)
    changes = [
        ("trunk", Node.DIRECTORY, Changeset.ADD, None, None),
        ("trunk/README", Node.FILE, Changeset.ADD, None, None),
    ]
    changesets = [
        Mock(Changeset, repos, 0, "", "", t1, get_changes=lambda: []),
        Mock(Changeset, repos, 1, "Import", "joe", t2,
             get_changes=lambda: iter(changes)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        rows = db("SELECT rev, time, author, message FROM revision")
        self.assertEqual(len(rows), 2)
        self.assertEqual(("0", to_utimestamp(t1), "", ""), rows[0])
        self.assertEqual(("1", to_utimestamp(t2), "joe", "Import"),
                         rows[1])
        rows = db(
            """
            SELECT rev, path, node_type, change_type, base_path, base_rev
            FROM node_change"""
        )
        self.assertEqual(len(rows), 2)
        self.assertEqual(("1", "trunk", "D", "A", None, None), rows[0])
        self.assertEqual(("1", "trunk/README", "F", "A", None, None),
                         rows[1])
def test_initial_sync_with_empty_repos(self):
    """Syncing an empty repository leaves the cache tables empty.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    repos = self.get_repos()
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEqual(
            [], db("SELECT rev, time, author, message FROM revision"))
        self.assertEqual(0, db("SELECT COUNT(*) FROM node_change")[0][0])
def test_initial_sync_with_empty_repos(self):
    """Syncing an empty repository must leave the cache tables empty.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    repos = self.get_repos()
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    with self.env.db_query as db:
        self.assertEqual(
            [],
            db("SELECT rev, time, author, message FROM revision"))
        self.assertEqual(0, db("SELECT COUNT(*) FROM node_change")[0][0])
def test_sync_changeset_if_not_exists(self):
    """sync_changeset() backfills a revision missing from the cache.

    A plain sync() does not fill the hole at r2; an explicit
    sync_changeset(2) does. assertEquals (deprecated alias) is replaced
    by assertEqual throughout.
    """
    t = [
        datetime(2001, 1, 1, 1, 1, 1, 0, utc),  # r0
        datetime(2002, 1, 1, 1, 1, 1, 0, utc),  # r1
        datetime(2003, 1, 1, 1, 1, 1, 0, utc),  # r2
        datetime(2004, 1, 1, 1, 1, 1, 0, utc),  # r3
    ]
    self.preset_cache(
        (("0", to_utimestamp(t[0]), "joe", "**empty**"), []),
        (
            ("1", to_utimestamp(t[1]), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/README", "F", "A", None, None)],
        ),
        # not exists r2
        (("3", to_utimestamp(t[3]), "joe", "Add COPYING"),
         [("trunk/COPYING", "F", "A", None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=3)
    changes = [
        None,  # r0
        [
            ("trunk", Node.DIRECTORY, Changeset.ADD, None, None),  # r1
            ("trunk/README", Node.FILE, Changeset.ADD, None, None),
        ],
        [
            ("branches", Node.DIRECTORY, Changeset.ADD, None, None),  # r2
            ("tags", Node.DIRECTORY, Changeset.ADD, None, None),
        ],
        [("trunk/COPYING", Node.FILE, Changeset.ADD, None, None)],  # r3
    ]
    changesets = [
        Mock(Changeset, repos, 0, "**empty**", "joe", t[0],
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, "Initial Import", "joe", t[1],
             get_changes=lambda: iter(changes[1])),
        Mock(Changeset, repos, 2, "Created directories", "john", t[2],
             get_changes=lambda: iter(changes[2])),
        Mock(Changeset, repos, 3, "Add COPYING", "joe", t[3],
             get_changes=lambda: iter(changes[3])),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
    cache.sync()
    self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
    self.assertEqual(None, cache.sync_changeset(2))
    cset = cache.get_changeset(2)
    self.assertEqual("john", cset.author)
    self.assertEqual("Created directories", cset.message)
    self.assertEqual(t[2], cset.date)
    cset_changes = cset.get_changes()
    self.assertEqual(("branches", Node.DIRECTORY, Changeset.ADD, None,
                      None), cset_changes.next())
    self.assertEqual(("tags", Node.DIRECTORY, Changeset.ADD, None, None),
                     cset_changes.next())
    self.assertRaises(StopIteration, cset_changes.next)
    rows = self.env.db_query(
        "SELECT time,author,message FROM revision ORDER BY rev")
    self.assertEqual(4, len(rows))
    self.assertEqual((to_utimestamp(t[0]), "joe", "**empty**"), rows[0])
    self.assertEqual((to_utimestamp(t[1]), "joe", "Import"), rows[1])
    self.assertEqual((to_utimestamp(t[2]), "john", "Created directories"),
                     rows[2])
    self.assertEqual((to_utimestamp(t[3]), "joe", "Add COPYING"), rows[3])
def test_initial_sync_with_empty_repos(self):
    """Syncing an empty repository leaves the cache tables empty.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    repos = self.get_repos()
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("SELECT COUNT(*) FROM node_change")
    self.assertEqual(0, cursor.fetchone()[0])
def test_clean_sync(self):
    """sync(clean=True) rebuilds the cache, replacing stale preset rows.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                ('trunk/README', Node.FILE, Changeset.ADD, None, None)]
    changes2 = [('trunk/README', Node.FILE, Changeset.EDIT,
                 'trunk/README', 1)]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
             get_changes=lambda: iter(changes1)),
        Mock(Changeset, repos, 2, 'Update', 'joe', t3,
             get_changes=lambda: iter(changes2))
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync(clean=True)
    cursor = self.db.cursor()
    cursor.execute("SELECT time,author,message FROM revision")
    self.assertEqual((to_utimestamp(t1), 'joe', '**empty**'),
                     cursor.fetchone())
    self.assertEqual((to_utimestamp(t2), 'joe', 'Initial Import'),
                     cursor.fetchone())
    self.assertEqual((to_utimestamp(t3), 'joe', 'Update'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("""
        SELECT rev,path,node_type,change_type,base_path,base_rev
        FROM node_change ORDER BY rev
        """)
    self.assertEqual(('1', 'trunk', 'D', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(('1', 'trunk/README', 'F', 'A', None, None),
                     cursor.fetchone())
    self.assertEqual(('2', 'trunk/README', 'F', 'E', 'trunk/README', '1'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def test_initial_sync_with_empty_repos(self):
    """Syncing a repository with only an empty r0 caches just that row.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    changeset = Mock(Changeset, 0, '', '', 42000, get_changes=lambda: [])
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: changeset,
                 get_oldest_rev=lambda: 0,
                 get_youngest_rev=lambda: 0,
                 normalize_rev=lambda x: x,
                 next_rev=lambda x: None)
    cache = CachedRepository(self.db, repos, None, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    self.assertEqual(('0', 42000, '', ''), cursor.fetchone())
    cursor.execute("SELECT COUNT(*) FROM node_change")
    self.assertEqual(0, cursor.fetchone()[0])
def test_initial_sync_with_empty_repos(self):
    """A truly empty repository (no changesets at all) caches nothing.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    def no_changeset(rev):
        # Any changeset lookup on an empty repository must fail.
        raise NoSuchChangeset(rev)
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=no_changeset,
                 get_oldest_rev=lambda: 1,
                 get_youngest_rev=lambda: 0,
                 normalize_rev=no_changeset,
                 next_rev=lambda x: None)
    cache = CachedRepository(self.db, repos, None, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT rev,time,author,message FROM revision")
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("SELECT COUNT(*) FROM node_change")
    self.assertEqual(0, cursor.fetchone()[0])
def test_clean_sync(self):
    """sync(clean=True) rebuilds the cache, replacing stale preset rows.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (("0", to_utimestamp(t1), "", ""), []),
        (
            ("1", to_utimestamp(t2), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/README", "F", "A", None, None)],
        ),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=2)
    changes1 = [
        ("trunk", Node.DIRECTORY, Changeset.ADD, None, None),
        ("trunk/README", Node.FILE, Changeset.ADD, None, None),
    ]
    changes2 = [("trunk/README", Node.FILE, Changeset.EDIT,
                 "trunk/README", 1)]
    changesets = [
        Mock(Changeset, repos, 0, "**empty**", "joe", t1,
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, "Initial Import", "joe", t2,
             get_changes=lambda: iter(changes1)),
        Mock(Changeset, repos, 2, "Update", "joe", t3,
             get_changes=lambda: iter(changes2)),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    cache.sync(clean=True)
    rows = self.env.db_query(
        """
        SELECT time, author, message FROM revision ORDER BY rev
        """
    )
    self.assertEqual(3, len(rows))
    self.assertEqual((to_utimestamp(t1), "joe", "**empty**"), rows[0])
    self.assertEqual((to_utimestamp(t2), "joe", "Initial Import"),
                     rows[1])
    self.assertEqual((to_utimestamp(t3), "joe", "Update"), rows[2])
    rows = self.env.db_query(
        """
        SELECT rev, path, node_type, change_type, base_path, base_rev
        FROM node_change ORDER BY rev, path"""
    )
    self.assertEqual(3, len(rows))
    self.assertEqual(("1", "trunk", "D", "A", None, None), rows[0])
    self.assertEqual(("1", "trunk/README", "F", "A", None, None), rows[1])
    self.assertEqual(("2", "trunk/README", "F", "E", "trunk/README", "1"),
                     rows[2])
def get_repository(self, type, dir, authname):
    """Return a `SubversionRepository`.

    The repository is wrapped in a `CachedRepository`, unless `type` is
    'direct-svnfs'.
    """
    # Detect and report the Subversion bindings version only once.
    if not self._version:
        self._version = self._get_version()
        self.env.systeminfo.append(("Subversion", self._version))
    fs_repos = SubversionRepository(dir, None, self.log,
                                    {"tags": self.tags,
                                     "branches": self.branches})
    if type == "direct-svnfs":
        repos = fs_repos
    else:
        repos = CachedRepository(self.env.get_db_cnx, fs_repos, None,
                                 self.log)
        repos.has_linear_changesets = True
    if authname:
        # weakref avoids a reference cycle between repos and authorizer.
        authz = SubversionAuthorizer(self.env, weakref.proxy(repos),
                                     authname)
        repos.authz = fs_repos.authz = authz
    return repos
def get_repository(self, type, dir, authname):
    """Return a `SubversionRepository`.

    The repository is generally wrapped in a `CachedRepository`, unless
    `direct-svn-fs` is the specified type.
    """
    repos = SubversionRepository(dir, None, self.log)
    crepos = CachedRepository(self.env.get_db_cnx(), repos, None,
                              self.log)
    if authname:
        authz = SubversionAuthorizer(self.env, crepos, authname)
        repos.authz = crepos.authz = authz
    return crepos
def test_get_changes(self):
    """Cached changeset metadata and changes are read back verbatim."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (("0", to_utimestamp(t1), "", ""), []),
        (
            ("1", to_utimestamp(t2), "joe", "Import"),
            [("trunk", "D", "A", None, None),
             ("trunk/RDME", "F", "A", None, None)],
        ),
    )
    repos = self.get_repos()
    cache = CachedRepository(self.env, repos, self.log)
    self.assertEqual("1", cache.youngest_rev)
    changeset = cache.get_changeset(1)
    self.assertEqual("joe", changeset.author)
    self.assertEqual("Import", changeset.message)
    self.assertEqual(t2, changeset.date)
    changes = changeset.get_changes()
    self.assertEqual(("trunk", Node.DIRECTORY, Changeset.ADD, None, None),
                     changes.next())
    self.assertEqual(("trunk/RDME", Node.FILE, Changeset.ADD, None, None),
                     changes.next())
    self.assertRaises(StopIteration, changes.next)
def test_get_changes(self):
    """Cached changeset metadata and its changes are served verbatim
    from the preset cache rows."""
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    self.preset_cache(
        (('0', to_utimestamp(t1), '', ''), []),
        (('1', to_utimestamp(t2), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/RDME', 'F', 'A', None, None)]),
    )
    repos = self.get_repos()
    cache = CachedRepository(self.env, repos, self.log)
    self.assertEqual('1', cache.youngest_rev)
    changeset = cache.get_changeset(1)
    self.assertEqual('joe', changeset.author)
    self.assertEqual('Import', changeset.message)
    self.assertEqual(t2, changeset.date)
    changes = changeset.get_changes()
    self.assertEqual(('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
                     changes.next())
    self.assertEqual(('trunk/RDME', Node.FILE, Changeset.ADD, None, None),
                     changes.next())
    self.assertRaises(StopIteration, changes.next)
def test_update_sync(self):
    """sync() after a new commit appends only the new changeset rows.

    assertEquals (deprecated alias) is replaced by assertEqual.
    """
    t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
    t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
    t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
    cursor = self.db.cursor()
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (0,%s,'','')", (to_timestamp(t1),))
    cursor.execute("INSERT INTO revision (rev,time,author,message) "
                   "VALUES (1,%s,'joe','Import')", (to_timestamp(t2),))
    cursor.executemany("INSERT INTO node_change (rev,path,node_type,"
                       "change_type,base_path,base_rev) "
                       "VALUES ('1',%s,%s,%s,%s,%s)",
                       [('trunk', 'D', 'A', None, None),
                        ('trunk/README', 'F', 'A', None, None)])
    cursor.execute("UPDATE system SET value='1' WHERE name='youngest_rev'")
    changes = [('trunk/README', Node.FILE, Changeset.EDIT,
                'trunk/README', 1)]
    changeset = Mock(Changeset, 2, 'Update', 'joe', t3,
                     get_changes=lambda: iter(changes))
    repos = Mock(Repository, 'test-repos', None, self.log,
                 get_changeset=lambda x: changeset,
                 get_youngest_rev=lambda: 2,
                 get_oldest_rev=lambda: 0,
                 normalize_rev=lambda x: x,
                 next_rev=lambda x: x and int(x) == 1 and 2 or None)
    cache = CachedRepository(self.db, repos, None, self.log)
    cache.sync()
    cursor = self.db.cursor()
    cursor.execute("SELECT time,author,message FROM revision WHERE rev='2'")
    self.assertEqual((to_timestamp(t3), 'joe', 'Update'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
    cursor.execute("SELECT path,node_type,change_type,base_path,base_rev "
                   "FROM node_change WHERE rev='2'")
    self.assertEqual(('trunk/README', 'F', 'E', 'trunk/README', '1'),
                     cursor.fetchone())
    self.assertEqual(None, cursor.fetchone())
def get_changesets(self, start, stop):
    """Yield cached changesets between start and stop.

    Changesets sharing the same timestamp are emitted so that a
    changeset is only yielded once none of its child revisions remain
    pending in the same group; ties are broken by ascending revision.
    """
    grouped = itertools.groupby(
        CachedRepository.get_changesets(self, start, stop),
        key=lambda cset: cset.date)
    for key, csets in grouped:
        csets = list(csets)
        if len(csets) == 1:
            # Common case: a unique timestamp needs no reordering.
            yield csets[0]
            continue
        rev_csets = dict((cset.rev, cset) for cset in csets)
        while rev_csets:
            ready = [rev for rev in rev_csets
                     if not any(r in rev_csets
                                for r in self.repos.child_revs(rev))]
            for rev in sorted(ready):
                yield rev_csets.pop(rev)
def get_changesets(self, start, stop):
    """Yield cached changesets between start and stop.

    Changesets sharing the same timestamp are emitted so that a
    changeset is only yielded once none of its child revisions remain
    pending in the same group; ties are broken by ascending revision.
    """
    for key, csets in itertools.groupby(
            CachedRepository.get_changesets(self, start, stop),
            key=lambda cset: cset.date):
        csets = list(csets)
        if len(csets) == 1:
            # Common case: a unique timestamp needs no reordering.
            yield csets[0]
            continue
        pending = {}
        for cset in csets:
            pending[cset.rev] = cset
        while pending:
            ready = [rev for rev in pending
                     if not any(r in pending
                                for r in self.repos.child_revs(rev))]
            for rev in sorted(ready):
                yield pending.pop(rev)
def get_repository(self, authname=None):
    """Return the version control repository configured for this
    environment.

    The repository is wrapped in a `CachedRepository`.

    @param authname: user name for authorization
    """
    from trac.versioncontrol.cache import CachedRepository
    from trac.versioncontrol.svn_authz import SubversionAuthorizer
    from trac.versioncontrol.svn_fs import SubversionRepository
    repos_dir = self.config.get('trac', 'repository_dir')
    if not repos_dir:
        # Call form instead of the Python-2-only `raise E, msg`
        # statement syntax.
        raise EnvironmentError('Path to repository not configured')
    authz = None
    if authname:
        authz = SubversionAuthorizer(self, authname)
    repos = SubversionRepository(repos_dir, authz, self.log)
    return CachedRepository(self.get_db_cnx(), repos, authz, self.log)
def get_repository(self, repos_type, repos_dir, params):
    """Return a `PerforceRepository`.

    The repository is wrapped in a `CachedRepository`.
    """
    assert repos_type == 'perforce'
    # Lazy %-style logging arguments: the message is only formatted
    # when the debug level is actually enabled.
    self.log.debug("get_repository dir : %s", repos_dir)
    options = dict(self.config.options('perforce'))
    self.log.debug("get_repository options : %s", options)

    # Try to connect to the Perforce server
    from perforce import Connection, ConnectionFailed
    p4 = Connection(port=self.port,
                    api='58')  # Limit to 2005.2 behaviour
    try:
        from trac import __version__ as tracVersion
        p4.connect(prog='Trac', version=tracVersion)
    except ConnectionFailed:
        raise TracError(
            message="Could not connect to Perforce repository.",
            title="Perforce connection error")

    if self.user == '':
        raise TracError(
            message="Missing 'user' value in [perforce] config section.",
            title="Perforce configuration error")
    p4.user = self.user
    p4.password = self.password
    p4.charset = self.charset
    p4.language = self.language
    # Default job prefix is 'job'; only its length matters downstream.
    jobPrefixLength = len(options.get('job_prefix', 'job'))
    p4.client = self.workspace
    params.update(labels=self.labels, branches=self.branches)

    # Calling CachedRepository depending on version
    # (for backward-compatibility with 0.11)
    p4_repos = PerforceRepository(p4, None, self.log, jobPrefixLength,
                                  params)
    repos = CachedRepository(self.env, p4_repos, self.log)
    return repos
def __init__(self, db, repos, authz, log):
    """Initialize by delegating to `CachedRepository.__init__`.

    All arguments are forwarded unchanged to the base class.
    """
    CachedRepository.__init__(self, db, repos, authz, log)
    # Tracks whether this instance has synced yet (0 = not synced).
    self.synced = 0
def get_youngest_rev(self):
    """Return the youngest revision from the cache, or None when the
    cached value is falsy (i.e. the repository is empty)."""
    youngest = CachedRepository.get_youngest_rev(self)
    if youngest:
        return youngest
    return None
def test_sync_changeset_if_not_exists(self):
    """`sync_changeset` backfills a revision missing from the cache
    without disturbing the already-cached revisions."""
    t = [
        datetime(2001, 1, 1, 1, 1, 1, 0, utc),  # r0
        datetime(2002, 1, 1, 1, 1, 1, 0, utc),  # r1
        datetime(2003, 1, 1, 1, 1, 1, 0, utc),  # r2
        datetime(2004, 1, 1, 1, 1, 1, 0, utc),  # r3
    ]
    self.preset_cache(
        (('0', to_utimestamp(t[0]), 'joe', '**empty**'), []),
        (('1', to_utimestamp(t[1]), 'joe', 'Import'),
         [('trunk', 'D', 'A', None, None),
          ('trunk/README', 'F', 'A', None, None)]),
        # r2 is deliberately absent from the cache
        (('3', to_utimestamp(t[3]), 'joe', 'Add COPYING'),
         [('trunk/COPYING', 'F', 'A', None, None)]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=3)
    changes = [
        None,                                                       # r0
        [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),     # r1
         ('trunk/README', Node.FILE, Changeset.ADD, None, None)],
        [('branches', Node.DIRECTORY, Changeset.ADD, None, None),  # r2
         ('tags', Node.DIRECTORY, Changeset.ADD, None, None)],
        [('trunk/COPYING', Node.FILE, Changeset.ADD, None, None)], # r3
    ]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t[0],
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t[1],
             get_changes=lambda: iter(changes[1])),
        Mock(Changeset, repos, 2, 'Created directories', 'john', t[2],
             get_changes=lambda: iter(changes[2])),
        Mock(Changeset, repos, 3, 'Add COPYING', 'joe', t[3],
             get_changes=lambda: iter(changes[3])),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
    cache.sync()
    # A plain sync() does not backfill the gap...
    self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
    # ...but sync_changeset() does.
    self.assertEqual(None, cache.sync_changeset(2))
    cset = cache.get_changeset(2)
    self.assertEqual('john', cset.author)
    self.assertEqual('Created directories', cset.message)
    self.assertEqual(t[2], cset.date)
    cset_changes = cset.get_changes()
    self.assertEqual(('branches', Node.DIRECTORY, Changeset.ADD,
                      None, None),
                     cset_changes.next())
    self.assertEqual(('tags', Node.DIRECTORY, Changeset.ADD, None, None),
                     cset_changes.next())
    self.assertRaises(StopIteration, cset_changes.next)
    # All four revisions must now be present; use assertEqual instead
    # of the deprecated assertEquals alias.
    rows = self.env.db_query(
        "SELECT time,author,message FROM revision ORDER BY rev")
    self.assertEqual(4, len(rows))
    self.assertEqual((to_utimestamp(t[0]), 'joe', '**empty**'), rows[0])
    self.assertEqual((to_utimestamp(t[1]), 'joe', 'Import'), rows[1])
    self.assertEqual((to_utimestamp(t[2]), 'john', 'Created directories'),
                     rows[2])
    self.assertEqual((to_utimestamp(t[3]), 'joe', 'Add COPYING'), rows[3])
def get_repository(self, repos_type, repos_dir, authname):
    """Return a `PerforceRepository` for *repos_dir*, wrapped in a
    `CachedRepository`.

    *repos_dir* is either the plain marker 'p4' or a
    'p4://user:password@host:port/path?key=val' style URL whose
    components override the corresponding [perforce] config options.
    """
    assert repos_type == 'perforce'
    import urllib
    urltype, url = urllib.splittype(repos_dir)
    assert urltype == 'p4' or url == 'p4'

    options = dict(self.config.options('perforce'))
    if urltype is not None:  # identity test, not `!= None`
        # Merge connection parameters parsed from the URL over the
        # config-file options.
        machine, path_query = urllib.splithost(url)
        user_passwd, host_port = urllib.splituser(machine)
        user, password = urllib.splitpasswd(user_passwd)
        self._update_option(options, 'port', host_port)
        self._update_option(options, 'password', password)
        self._update_option(options, 'user', user)

        path, query = urllib.splitquery(path_query)
        if path:  # covers both None and '' (was `path and path != ''`)
            for attr in self._splitattributes(query):
                key, val = urllib.splitvalue(attr)
                self._update_option(options, key, val)
            self._update_option(options, 'path', path)

    # Lazy %-style logging args: only formatted when debug is enabled.
    self.log.debug("get_repository options : %s", options)

    if 'port' not in options:
        raise TracError(
            message="Missing 'port' value in [perforce] config section.",
            title="TracPerforce configuration error",
        )

    # Try to connect to the Perforce server
    from perforce import Connection, ConnectionFailed
    p4 = Connection(
        port=options['port'],
        api='58',  # Limit to 2005.2 behaviour
    )
    try:
        from trac import __version__ as tracVersion
        p4.connect(prog='Trac', version=tracVersion)
    except ConnectionFailed:
        raise TracError(
            message="Could not connect to Perforce repository.",
            title="Perforce connection error",
        )

    if 'user' not in options:
        raise TracError(
            message="Missing 'user' value in [perforce] config section.",
            title="Perforce configuration error",
        )
    p4.user = options['user']
    # dict.get with a default replaces the repetitive
    # `if key in options: ... else: ...` blocks.
    p4.password = options.get('password', '')

    # 'unicode' selects the server charset: '1' -> utf8, '0' -> none
    # (also the default when the option is absent).
    unicode_opt = options.get('unicode', '0')
    if unicode_opt == '1':
        p4.charset = 'utf8'
    elif unicode_opt == '0':
        p4.charset = 'none'
    else:
        raise TracError(
            message="Invalid 'unicode' value in [perforce] config " \
                    "section.",
            title="Perforce configuration error",
        )

    p4.language = options.get('language', '')

    # Default job prefix is 'job', hence a default prefix length of 3;
    # only the length is used downstream.
    jobPrefixLength = len(options.get('job_prefix', 'job'))
    p4.client = ''

    repos = PerforceRepository(p4, None, self.log, jobPrefixLength)
    crepos = CachedRepository(self.env.get_db_cnx(), repos, None,
                              self.log)
    return crepos
def test_repr(self):
    """repr() of a cached repository shows class name, id, reponame
    and scope."""
    cache = CachedRepository(self.env, self.get_repos(), self.log)
    expected = "<CachedRepository 1 'test-repos' '/'>"
    self.assertEqual(expected, repr(cache))
def test_sync_changeset_if_not_exists(self):
    """`sync_changeset` backfills a revision missing from the cache
    without disturbing the already-cached revisions."""
    t = [
        datetime(2001, 1, 1, 1, 1, 1, 0, utc),  # r0
        datetime(2002, 1, 1, 1, 1, 1, 0, utc),  # r1
        datetime(2003, 1, 1, 1, 1, 1, 0, utc),  # r2
        datetime(2004, 1, 1, 1, 1, 1, 0, utc),  # r3
    ]
    self.preset_cache(
        (('0', to_utimestamp(t[0]), 'joe', '**empty**'), []),
        (('1', to_utimestamp(t[1]), 'joe', 'Import'), [
            ('trunk', 'D', 'A', None, None),
            ('trunk/README', 'F', 'A', None, None)
        ]),
        # not exists r2
        (('3', to_utimestamp(t[3]), 'joe', 'Add COPYING'), [
            ('trunk/COPYING', 'F', 'A', None, None)
        ]),
    )
    repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
                           youngest_rev=3)
    changes = [
        None,  # r0
        [
            ('trunk', Node.DIRECTORY, Changeset.ADD, None, None),  # r1
            ('trunk/README', Node.FILE, Changeset.ADD, None, None)
        ],
        [
            ('branches', Node.DIRECTORY, Changeset.ADD, None, None),  # r2
            ('tags', Node.DIRECTORY, Changeset.ADD, None, None)
        ],
        [('trunk/COPYING', Node.FILE, Changeset.ADD, None, None)],  # r3
    ]
    changesets = [
        Mock(Changeset, repos, 0, '**empty**', 'joe', t[0],
             get_changes=lambda: []),
        Mock(Changeset, repos, 1, 'Initial Import', 'joe', t[1],
             get_changes=lambda: iter(changes[1])),
        Mock(Changeset, repos, 2, 'Created directories', 'john', t[2],
             get_changes=lambda: iter(changes[2])),
        Mock(Changeset, repos, 3, 'Add COPYING', 'joe', t[3],
             get_changes=lambda: iter(changes[3])),
    ]
    cache = CachedRepository(self.env, repos, self.log)
    self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
    cache.sync()
    # A plain sync() does not backfill the gap...
    self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
    # ...but sync_changeset() does.
    self.assertEqual(None, cache.sync_changeset(2))
    cset = cache.get_changeset(2)
    self.assertEqual('john', cset.author)
    self.assertEqual('Created directories', cset.message)
    self.assertEqual(t[2], cset.date)
    cset_changes = cset.get_changes()
    self.assertEqual(
        ('branches', Node.DIRECTORY, Changeset.ADD, None, None),
        cset_changes.next())
    self.assertEqual(('tags', Node.DIRECTORY, Changeset.ADD, None, None),
                     cset_changes.next())
    self.assertRaises(StopIteration, cset_changes.next)

    # assertEqual instead of the deprecated assertEquals alias,
    # consistent with the assertions above.
    rows = self.env.db_query(
        "SELECT time,author,message FROM revision ORDER BY rev")
    self.assertEqual(4, len(rows))
    self.assertEqual((to_utimestamp(t[0]), 'joe', '**empty**'), rows[0])
    self.assertEqual((to_utimestamp(t[1]), 'joe', 'Import'), rows[1])
    self.assertEqual((to_utimestamp(t[2]), 'john', 'Created directories'),
                     rows[2])
    self.assertEqual((to_utimestamp(t[3]), 'joe', 'Add COPYING'), rows[3])