def setupClass(cls):
    """Create a fresh repository at cls.repos_path and load svn.dump into it.

    Initializes APR and an SVN pool, creates the repository, and loads the
    dump either via svn_repos_load_fs2 (newer bindings) or via an APR file
    stream with svn_repos_load_fs (older bindings).
    """
    svn_fs._import_svn()
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    dumpstream = None
    cls.repos_path = tempfile.mkdtemp(prefix='svn-tmp')
    # svn_repos_create refuses an existing directory: drop the one
    # mkdtemp just created and let Subversion recreate it.
    shutil.rmtree(cls.repos_path)
    # 'rb': dump files are binary data; also fix: this handle was never
    # closed before (leaked on both success and failure).
    dumpfile = open(os.path.join(os.path.split(__file__)[0], 'svn.dump'),
                    'rb')
    try:
        r = repos.svn_repos_create(cls.repos_path, '', '', None, None, pool)
        if hasattr(repos, 'svn_repos_load_fs2'):
            repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                     repos.svn_repos_load_uuid_default,
                                     '', 0, 0, None, pool)
        else:
            dumpstream = core.svn_stream_from_aprfile(dumpfile, pool)
            repos.svn_repos_load_fs(r, dumpstream, None,
                                    repos.svn_repos_load_uuid_default,
                                    '', None, None, pool)
    finally:
        if dumpstream:
            core.svn_stream_close(dumpstream)
        dumpfile.close()
        core.svn_pool_destroy(pool)
        core.apr_terminate()
def setUp(self): dumpfile = open(os.path.join(os.path.split(__file__)[0], 'svnrepos.dump')) svn_fs._import_svn() core.apr_initialize() pool = core.svn_pool_create(None) dumpstream = None try: if os.path.exists(REPOS_PATH): print 'trouble ahead with db/rep-cache.db... see #8278' r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool) if hasattr(repos, 'svn_repos_load_fs2'): repos.svn_repos_load_fs2(r, dumpfile, StringIO(), repos.svn_repos_load_uuid_default, '', 0, 0, None, pool) else: dumpstream = core.svn_stream_from_aprfile(dumpfile, pool) repos.svn_repos_load_fs(r, dumpstream, None, repos.svn_repos_load_uuid_default, '', None, None, pool) finally: if dumpstream: core.svn_stream_close(dumpstream) core.svn_pool_destroy(pool) core.apr_terminate()
def setUp(self): dumpfile = open( os.path.join(os.path.split(__file__)[0], 'svnrepos.dump')) svn_fs._import_svn() core.apr_initialize() pool = core.svn_pool_create(None) dumpstream = None try: if os.path.exists(REPOS_PATH): print 'trouble ahead with db/rep-cache.db... see #8278' r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool) if hasattr(repos, 'svn_repos_load_fs2'): repos.svn_repos_load_fs2(r, dumpfile, StringIO(), repos.svn_repos_load_uuid_default, '', 0, 0, None, pool) else: dumpstream = core.svn_stream_from_aprfile(dumpfile, pool) repos.svn_repos_load_fs(r, dumpstream, None, repos.svn_repos_load_uuid_default, '', None, None, pool) finally: if dumpstream: core.svn_stream_close(dumpstream) core.svn_pool_destroy(pool) core.apr_terminate()
def create_and_load(repopath, dumpfd):
    """Create a new repository at repopath and load the given dump into it.

    Writes an fsfs.conf tuned for test speed before loading, and always
    closes dumpfd and destroys the pool, even when the load fails.
    """
    pool = core.Pool()
    new_repos = repos.svn_repos_create(repopath, '', '', None, None, pool)
    # Tune the fresh repository's backend before loading the dump.
    conf_text = textwrap.dedent("""\
        # config settings for svn repos to try and speed up the testsuite
        [rep-sharing]
        enable-rep-sharing = false
        [deltification]
        enable-dir-deltification = false
        enable-props-deltification = false
        [compression]
        compression-level=1
        """)
    conf_path = os.path.join(repopath, 'db', 'fsfs.conf')
    with open(conf_path, 'w') as conf_file:
        conf_file.write(conf_text)
    try:
        repos.svn_repos_load_fs2(new_repos, dumpfd, None,
                                 repos.svn_repos_load_uuid_force,
                                 '', False, False, None, pool)
    finally:
        dumpfd.close()
        pool.destroy()
def alloc_known_repo(self, repo_id, suffix=""):
    """Create a temporary repository and fill it with the contents of the
    specified dump. repo_id is the path to the dump, relative to the
    script's location. Returns the same as alloc_empty_repo."""
    dump_path = os.path.join(os.path.dirname(sys.argv[0]), repo_id)
    (handle, repo_path, repo_uri) = self.alloc_empty_repo(suffix=suffix)
    # fix: close the dump file instead of leaking the handle
    with open(dump_path, 'rb') as dump_fp:
        repos.svn_repos_load_fs2(handle, dump_fp, StringIO(),
                                 repos.load_uuid_default, None,
                                 False, False, None)
    return (handle, repo_path, repo_uri)
def setUp(self):
    """Rebuild the repository at REPOS_PATH and load svnrepos.dump into it."""
    dump_path = os.path.join(os.path.split(__file__)[0], "svnrepos.dump")
    # Remove the trac-svnrepos directory, so that we can
    # ensure a fresh start.
    self.tearDown()
    r = repos.svn_repos_create(REPOS_PATH, "", "", None, None)
    # fix: close the dump file instead of leaking the handle
    with open(dump_path) as dumpfile:
        repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                 repos.svn_repos_load_uuid_ignore,
                                 "", 0, 0, None)
def alloc_known_repo(self, repo_id, suffix=""):
    """Create a temporary repository and fill it with the contents of the
    specified dump. repo_id is the path to the dump, relative to the
    script's location. Returns the same as alloc_empty_repo."""
    dump_path = os.path.join(os.path.dirname(sys.argv[0]), repo_id)
    (handle, repo_path, repo_uri) = self.alloc_empty_repo(suffix=suffix)
    # fix: close the dump file instead of leaking the handle
    with open(dump_path, 'rb') as dump_fp:
        repos.svn_repos_load_fs2(handle, dump_fp, StringIO(),
                                 repos.load_uuid_default, None,
                                 False, False, None)
    return (handle, repo_path, repo_uri)
def setUp(self):
    """Rebuild the repository at REPOS_PATH and load svnrepos.dump into it."""
    here = os.path.split(__file__)[0]
    with open(os.path.join(here, 'svnrepos.dump'), 'rb') as dump:
        # Remove the trac-svnrepos directory, so that we can
        # ensure a fresh start.
        self.tearDown()
        repo = repos.svn_repos_create(REPOS_PATH, b'', b'', None, None)
        repos.svn_repos_load_fs2(repo, dump, BytesIO(),
                                 repos.svn_repos_load_uuid_ignore,
                                 b'', 0, 0, None)
def setUp(self):
    """Rebuild the repository at REPOS_PATH and load svnrepos.dump into it."""
    dump_path = os.path.join(os.path.split(__file__)[0], 'svnrepos.dump')
    # Remove the trac-svnrepos directory, so that we can
    # ensure a fresh start.
    self.tearDown()
    r = repos.svn_repos_create(REPOS_PATH, '', '', None, None)
    # fix: close the dump file instead of leaking the handle
    with open(dump_path) as dumpfile:
        repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                 repos.svn_repos_load_uuid_ignore,
                                 '', 0, 0, None)
def setUp(self):
    """Load the mergeinfo-full Subversion repository.  This dumpfile is
    created by dumping the repository generated for command line log
    tests 16.  If it needs to be updated (mergeinfo format changes, for
    example), we can go there to get a new version."""
    dump_path = os.path.join(os.path.split(__file__)[0], 'data',
                             'mergeinfo.dump')
    # Remove any existing repository to ensure a fresh start
    self.tearDown()
    self.repos = repos.svn_repos_create(REPOS_PATH, '', '', None, None)
    # fix: close the dump file instead of leaking the handle
    with open(dump_path) as dumpfile:
        repos.svn_repos_load_fs2(self.repos, dumpfile, StringIO(),
                                 repos.svn_repos_load_uuid_default,
                                 '', 0, 0, None)
    self.fs = repos.fs(self.repos)
    self.rev = fs.youngest_rev(self.fs)
def setUp(self):
    """Load the mergeinfo-full Subversion repository.  This dumpfile is
    created by dumping the repository generated for command line log
    tests 16.  If it needs to be updated (mergeinfo format changes, for
    example), we can go there to get a new version."""
    dump_path = os.path.join(os.path.split(__file__)[0], 'data',
                             'mergeinfo.dump')
    # Remove any existing repository to ensure a fresh start
    self.tearDown()
    self.repos = repos.svn_repos_create(REPOS_PATH, '', '', None, None)
    # fix: close the dump file instead of leaking the handle
    with open(dump_path) as dumpfile:
        repos.svn_repos_load_fs2(self.repos, dumpfile, StringIO(),
                                 repos.svn_repos_load_uuid_default,
                                 '', 0, 0, None)
    self.fs = repos.fs(self.repos)
    self.rev = fs.youngest_rev(self.fs)
def setUp(self):
    """Create a fresh repository at REPOS_PATH and load svnrepos.dump into it.

    Uses svn_repos_load_fs2 when the bindings provide it, otherwise falls
    back to an APR file stream with svn_repos_load_fs.
    """
    # fix: the dump file handle was never closed; it is now closed in the
    # finally block below.
    dumpfile = open(os.path.join(os.path.split(__file__)[0],
                                 'svnrepos.dump'))
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    dumpstream = None
    try:
        r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool)
        if hasattr(repos, 'svn_repos_load_fs2'):
            repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                     repos.svn_repos_load_uuid_default,
                                     '', 0, 0, None, pool)
        else:
            dumpstream = core.svn_stream_from_aprfile(dumpfile, pool)
            repos.svn_repos_load_fs(r, dumpstream, None,
                                    repos.svn_repos_load_uuid_default,
                                    '', None, None, pool)
    finally:
        if dumpstream:
            core.svn_stream_close(dumpstream)
        dumpfile.close()
        core.svn_pool_destroy(pool)
        core.apr_terminate()
def setUp(self):
    """Create a fresh repository at REPOS_PATH and load svnrepos.dump into it.

    Uses svn_repos_load_fs2 when the bindings provide it, otherwise falls
    back to an APR file stream with svn_repos_load_fs.
    """
    # fix: the dump file handle was never closed; it is now closed in the
    # finally block below.
    dumpfile = open(
        os.path.join(os.path.split(__file__)[0], 'svnrepos.dump'))
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    dumpstream = None
    try:
        r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool)
        if hasattr(repos, 'svn_repos_load_fs2'):
            repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                     repos.svn_repos_load_uuid_default,
                                     '', 0, 0, None, pool)
        else:
            dumpstream = core.svn_stream_from_aprfile(dumpfile, pool)
            repos.svn_repos_load_fs(r, dumpstream, None,
                                    repos.svn_repos_load_uuid_default,
                                    '', None, None, pool)
    finally:
        if dumpstream:
            core.svn_stream_close(dumpstream)
        dumpfile.close()
        core.svn_pool_destroy(pool)
        core.apr_terminate()