def testConfigRefOmission(self): r = TestRepo('foo', TestClock()) ref = r['refs/heads/master'] ref.make_full_tree_commit('Initial Commit', { 'cool_file': ('whazzap', 0755), # executable 'subdir': { 'crazy times': GitFile('this is awesome') # explicit entry } }) cref = r['refs/fancy-config/main'] cref.make_full_tree_commit('Config data', {'config.json': '{"hello": "world"}'}) self.assertEqual(r.snap(), { 'refs/heads/master': OD([ ('29c7b88f7eeed928d38c692052bd0a26f7899864', ['Initial Commit']) ]) }) self.assertEqual(r.snap(include_config=True), { 'refs/heads/master': OD([ ('29c7b88f7eeed928d38c692052bd0a26f7899864', ['Initial Commit']) ]), 'refs/fancy-config/main': OD([ ('ba5d4a2b2604ec58de362ec8df17b7797a142be2', ['Config data']) ]) })
def _init_target(self):
  """Creates the bare 'target' repo in a sibling directory of origin."""
  assert self.origin
  target_path = os.path.join(
      os.path.dirname(self.origin.repo_path), "grimoire")
  os.makedirs(target_path)
  self.target_url = 'file://' + target_path
  # TestRepo would normally manage its own directory; point it at the
  # freshly-made path instead and initialize a bare repo there.
  self.target = TestRepo('target', self._clock, '(ignored)')
  self.target._repo_path = target_path
  self.target.run('init', '--bare')
def testEmptyRepo(self):
  """A fresh TestRepo has no refs and an empty snapshot until a commit lands."""
  r = TestRepo('foo', TestClock())
  self.assertEqual(list(r.refglob()), [])
  self.assertIsNotNone(r.repo_path)
  # Fixed: was assertEquals, a deprecated alias; the rest of this block
  # already used assertEqual.
  self.assertEqual(r.short_name, 'foo')
  self.assertEqual(r.snap(), {})

  ref = r['refs/heads/master']
  ref.make_full_tree_commit('Initial Commit')
  # The new ref now shows up via refglob().
  self.assertEqual(list(r.refglob()), [ref])
def testGitRefResolver(self):
  """Resolves one existing and one missing ref against a local dep repo."""
  clock = TestClock()
  dep_repo = TestRepo('dep', clock)
  dep_repo['refs/heads/master'].make_commit(
      'first', {'README': 'hello, world'})
  resolver = deps2submodules.GitRefResolver(TestRepo('local', clock))
  repo_url = dep_repo.repo_path
  resolved = resolver.Resolve(repo_url, 'refs/heads/master')
  missing = resolver.Resolve(repo_url, 'no/such/ref')
  return (resolved, missing)
def testRepoMirrorOf(self):
  """A mirror repo only sees origin commits after an explicit fetch."""
  origin = TestRepo('local', TestClock())
  mirror = TestRepo('mirror', TestClock(), mirror_of=origin.repo_path)
  self.capture_stdio(mirror.reify)

  master = origin['refs/heads/master']
  master.make_full_tree_commit('Initial Commit')
  self.assertEqual(list(origin.refglob()), [master])
  # Nothing has been fetched yet, so the mirror is still empty.
  self.assertEqual(list(mirror.refglob()), [])

  self.capture_stdio(mirror.run, 'fetch')
  # After fetching, the mirror has the ref and an identical snapshot.
  self.assertEqual(list(mirror.refglob()), [mirror['refs/heads/master']])
  self.assertEqual(origin.snap(), mirror.snap())
def testUpdateSubmodules(self):
  """UpdateSubmodules turns DEPS entries into gitlinks plus .gitmodules."""
  deps_file_content = textwrap.dedent("""\
      deps = {
        "fount/a": "https://example.com/xyz/a@deadbeefdeadbeefdeadbeefdeadbeefdeadbeef",
        "fount/b": "https://example.com/xyz/b@cafebabecafebabecafebabecafebabecafebabe",
      }
      """)
  converter = deps2submodules.Deps2Submodules(deps_file_content, None,
                                              'fount/')
  converter.Evaluate()
  repo = TestRepo('repo', TestClock())
  tree_hash = converter.UpdateSubmodules(repo, EMPTY_TREE)
  tree_dump = repo.run('ls-tree', '-r', tree_hash)
  file_dump = repo.run('cat-file', '-p', '%s:.gitmodules' % tree_hash)
  return (file_dump, tree_dump)
def testSpecFor(self): r = TestRepo('foo', TestClock()) ref = r['refs/heads/master'] spec = { 'cool_file': ('whazzap', 0755), # executable 'subdir': { 'crazy times': ('this is awesome', 0644) } } c = ref.make_full_tree_commit('Initial Commit', spec) self.assertEquals(spec, r.spec_for(c)) # can take a raw tree hash too self.assertEquals( r.spec_for(r.run('rev-parse', '%s:subdir' % c.hsh).strip()), { 'crazy times': ('this is awesome', 0644) } )
def testSpec(self): r = TestRepo('foo', TestClock()) ref = r['refs/heads/master'] ref.make_full_tree_commit('Initial Commit', { 'cool_file': ('whazzap', 0755), # executable 'subdir': { 'crazy times': GitFile('this is awesome') # explicit entry } }) self.assertEqual(r.snap(), { 'refs/heads/master': OD([ ('29c7b88f7eeed928d38c692052bd0a26f7899864', ['Initial Commit']) ]) }) self.assertEqual('whazzap', r.run('cat-file', 'blob', 'master:cool_file')) self.assertEqual('this is awesome', r.run('cat-file', 'blob', 'master:subdir/crazy times')) with self.assertRaises(AssertionError): ref.make_full_tree_commit('invalid object', {'not', 'a', 'spec'})
def testMultiCommit(self):
  """Multiple commits on one ref are listed newest-first by snap()."""
  repo = TestRepo('foo', TestClock())
  master = repo['refs/heads/master']
  master.make_full_tree_commit('Initial Commit', {
      'cool_file': 'whazzap',
      'subdir': {'crazy times': 'this is awesome'}
  })
  master.make_full_tree_commit('Second commit')
  self.assertEqual(repo.snap(), {
      'refs/heads/master': OD([
          ('86fa6839ec4bb328e82bde851ad131c01b10162d', ['Second commit']),
          ('b7c705ceddb223c09416b78e87dc8c41e7035a36', ['Initial Commit'])
      ])
  })
  # second commit had the default tree
  self.assertEqual('contents', repo.run('cat-file', 'blob', 'master:file'))
  self.assertEqual(
      'this is awesome',
      repo.run('cat-file', 'blob', 'master~:subdir/crazy times'))
def testItAll(self):
  """Gitlinks splices submodule gitlinks into an existing commit tree."""
  clock = TestClock()
  repo = TestRepo('repo', clock)
  content = {
      'abc': {'file1': 'hello, world'},
      'ghi': {'zyx': {'file2': 'good-bye, whirled'}},
  }
  commit1 = repo['refs/heads/master'].make_commit('first', content)

  def _arbitrary_submod_data(hsh):
    # Only the revision is consumed here; the url field goes unused.
    return SubmodData(revision=hsh, url='unused')

  submods = {
      'abc/pqr/wow': _arbitrary_submod_data(
          'f719efd430d52bcfc8566a43b2eb655688d38871'),
      'ghi/aaa': _arbitrary_submod_data(
          'f719efd430d52bcfc8566a43b2eb655688d38871'),
      'abc/trl': _arbitrary_submod_data(
          '2bdf67abb163a4ffb2d7f3f0880c9fe5068ce782'),
      'abc/def': _arbitrary_submod_data(
          '8510665149157c2bc901848c3e0b746954e9cbd9'),
      'ghi/zyx/deep': _arbitrary_submod_data(
          '54f9d6da5c91d556e6b54340b1327573073030af'),
      'abc/xyz': _arbitrary_submod_data(
          'fe7900bcbd294970da3296db5cf2020b4391a639'),
  }
  gitmodules = GitFile('ignored, so it does not matter what I put here')
  result = gitlinks.Gitlinks(repo, gitmodules.intern(repo), submods,
                             commit1.data.tree).BuildRootTree()
  return repo.run('ls-tree', '-r', result).splitlines()
def testInitialCommit(self):
  """The first commit on a ref appears in refglob() and in snap()."""
  repo = TestRepo('foo', TestClock())
  master = repo['refs/heads/master']
  master.make_full_tree_commit('Initial Commit', {
      'cool_file': 'whazzap',
      'subdir': {'crazy times': 'this is awesome'}
  })
  self.assertEqual(list(repo.refglob()), [master])
  self.assertEqual(repo.snap(include_committer=True), {
      'refs/heads/master': OD([
          ('b7c705ceddb223c09416b78e87dc8c41e7035a36', [
              # 'line too long' pylint: disable=C0301
              'committer Test User <*****@*****.**> 2014-06-13 00:09:06 +0800',
              '',
              'Initial Commit'
          ])
      ])
  })
  self.assertEqual('whazzap',
                   repo.run('cat-file', 'blob', 'master:cool_file'))
  self.assertEqual('this is awesome',
                   repo.run('cat-file', 'blob', 'master:subdir/crazy times'))
def testMergeSpecs(self): r = TestRepo('foo', TestClock()) ref = r['refs/heads/master'] spec = { 'cool_file': ('whazzap', 0755), # executable 'subdir': { 'crazy times': ('this is awesome', 0644) }, 'nested': { 'nested_file': 'one thing', 'nested_carry': 'can\'t touch this', }, 'carry_over': 'this is the same before and after', } ref.make_commit('Initial Commit', spec) c = ref.make_commit('Differential Commit', { 'cool_file': None, 'subdir': 'now its a file', 'nested': { 'nested_file': 'other thing' }, 'other_dir': { 'neat-o': 'it\'s a neat file!' }, }) self.assertEquals(r.spec_for(c), { 'subdir': ('now its a file', 0644), 'other_dir': { 'neat-o': ('it\'s a neat file!', 0644) }, 'nested': { 'nested_file': ('other thing', 0644), 'nested_carry': ('can\'t touch this', 0644) }, 'carry_over': ('this is the same before and after', 0644), })
def RunTest(test_name):
  """Runs one named gsubtreed expect-test and returns its recorded output.

  Builds an origin repo, a local clone of it, and one bare mirror repo per
  enabled path (plus 'extra_mirror'), then hands `run`/`checkpoint`
  callbacks to the test definition registered under `test_name`.
  """
  ret = []  # accumulated expect-test output
  clock = TestClock()
  origin = TestRepo('origin', clock)
  local = TestRepo('local', clock, origin.repo_path)
  base_repo_path = tempfile.mkdtemp(".gsubtreed.remote_repos")

  enabled_paths = ['mirrored_path/subpath', 'mirrored_path', 'exception/path']
  path_map_exceptions = {'exception/path': 'cool_path'}
  cref = TestConfigRef(origin)
  cref.update(enabled_paths=enabled_paths,
              base_url='file://' + base_repo_path,
              path_map_exceptions=path_map_exceptions)

  mirrors = {}
  for path in enabled_paths + ['extra_mirror']:
    # Mirrors live under the name mapped by path_map_exceptions, if any.
    path_in_mirror = path_map_exceptions.get(path, path)
    full_path = os.path.join(base_repo_path, path_in_mirror)
    try:
      os.makedirs(full_path)
    except OSError:
      # Directory may already exist (e.g. parent created by a deeper path).
      pass
    mirrors[path_in_mirror] = TestRepo('mirror(%s)' % path_in_mirror, clock,
                                       'fake')
    # Point the TestRepo at the pre-made directory and init a bare repo.
    mirrors[path_in_mirror]._repo_path = full_path
    mirrors[path_in_mirror].run('init', '--bare')

  class LogFormatter(logging.Formatter):
    # Scrubs the temp-dir path from log lines so output is deterministic.
    def format(self, record):
      s = super(LogFormatter, self).format(record)
      return s.replace(base_repo_path, '[TMPDIR]')

  def checkpoint(message, include_committer=False, include_config=False):
    # Records a labeled snapshot of origin plus every mirror.
    repos = collections.OrderedDict()
    repos['origin'] = origin.snap(include_committer, include_config)
    for _, mirror in sorted(mirrors.items()):
      repos[mirror.short_name] = mirror.snap(include_committer,
                                             include_config)
    ret.append([message, repos])

  def run():
    # Saved so stdio can be restored in the finally block below.
    stdout = sys.stdout
    stderr = sys.stderr

    logout = StringIO()
    root_logger = logging.getLogger()
    shandler = logging.StreamHandler(logout)
    shandler.setFormatter(LogFormatter('%(levelname)s: %(message)s'))
    root_logger.addHandler(shandler)
    shandler.setLevel(logging.INFO)

    # Run pusher threads sequentially and deterministically.
    gsubtreed.Pusher.FAKE_THREADING = True

    success = False
    processed = {}
    try:
      with open(os.devnull, 'w') as dn:
        # TODO(iannucci): Let expect_tests absorb stdio
        sys.stderr = sys.stdout = dn
        local.reify()
        success, processed = gsubtreed.inner_loop(local, cref)
    except Exception:  # pragma: no cover
      ret.append(traceback.format_exc().splitlines())
    finally:
      # Restore globals in reverse order of setup.
      gsubtreed.Pusher.FAKE_THREADING = False
      sys.stdout = stdout
      sys.stderr = stderr
      root_logger.removeHandler(shandler)

    ret.append({'log output': logout.getvalue().splitlines()})
    ret.append({
        'inner_loop success': success,
        'processed': processed,
    })

  gsubtreed_test_definitions.GSUBTREED_TESTS[test_name](
      origin=origin, run=run, checkpoint=checkpoint, mirrors=mirrors,
      config=cref, local_origin_repo=local)
  return expect_tests.Result(ret)
def RunTest(test_name):
  """Runs one named gsubtreed expect-test and returns its recorded output.

  Variant that filters flaky 'Finished in' timing lines out of the captured
  log before recording it.
  """
  ret = []  # accumulated expect-test output
  clock = TestClock()
  origin = TestRepo("origin", clock)
  local = TestRepo("local", clock, origin.repo_path)
  base_repo_path = tempfile.mkdtemp(".gsubtreed.remote_repos")

  enabled_paths = ["mirrored_path/subpath", "mirrored_path", "exception/path"]
  path_map_exceptions = {"exception/path": "cool_path"}
  cref = TestConfigRef(origin)
  cref.update(
      enabled_paths=enabled_paths,
      base_url="file://" + base_repo_path,
      path_map_exceptions=path_map_exceptions
  )

  mirrors = {}
  for path in enabled_paths + ["extra_mirror"]:
    # Mirrors live under the name mapped by path_map_exceptions, if any.
    path_in_mirror = path_map_exceptions.get(path, path)
    full_path = os.path.join(base_repo_path, path_in_mirror)
    try:
      os.makedirs(full_path)
    except OSError:
      # Directory may already exist (e.g. parent created by a deeper path).
      pass
    mirrors[path_in_mirror] = TestRepo("mirror(%s)" % path_in_mirror, clock,
                                      "fake")
    # Point the TestRepo at the pre-made directory and init a bare repo.
    mirrors[path_in_mirror]._repo_path = full_path
    mirrors[path_in_mirror].run("init", "--bare")

  class LogFormatter(logging.Formatter):
    # Scrubs the temp-dir path from log lines so output is deterministic.
    def format(self, record):
      s = super(LogFormatter, self).format(record)
      return s.replace(base_repo_path, "[TMPDIR]")

  def checkpoint(message, include_committer=False, include_config=False):
    # Records a labeled snapshot of origin plus every mirror.
    repos = collections.OrderedDict()
    repos["origin"] = origin.snap(include_committer, include_config)
    for _, mirror in sorted(mirrors.items()):
      repos[mirror.short_name] = mirror.snap(include_committer,
                                             include_config)
    ret.append([message, repos])

  def run():
    # Saved so stdio can be restored in the finally block below.
    stdout = sys.stdout
    stderr = sys.stderr

    logout = StringIO()
    root_logger = logging.getLogger()
    shandler = logging.StreamHandler(logout)
    shandler.setFormatter(LogFormatter("%(levelname)s: %(message)s"))
    root_logger.addHandler(shandler)
    shandler.setLevel(logging.INFO)

    # Run pusher threads sequentially and deterministically.
    gsubtreed.Pusher.FAKE_THREADING = True

    success = False
    processed = {}
    try:
      with open(os.devnull, "w") as dn:
        # TODO(iannucci): Let expect_tests absorb stdio
        sys.stderr = sys.stdout = dn
        local.reify()
        success, processed = gsubtreed.inner_loop(local, cref)
    except Exception:  # pragma: no cover
      ret.append(traceback.format_exc().splitlines())
    finally:
      # Restore globals in reverse order of setup.
      gsubtreed.Pusher.FAKE_THREADING = False
      sys.stdout = stdout
      sys.stderr = stderr
      root_logger.removeHandler(shandler)

    # infra.libs.git2.repo logs this message if the command took longer than
    # 1s to run. This causes test flakes occasionally.
    log_lines = [x for x in logout.getvalue().splitlines()
                 if "Finished in " not in x]
    ret.append({"log output": log_lines})
    ret.append({"inner_loop success": success, "processed": processed})

  gsubtreed_test_definitions.GSUBTREED_TESTS[test_name](
      origin=origin, run=run, checkpoint=checkpoint, mirrors=mirrors,
      config=cref, local_origin_repo=local
  )
  return expect_tests.Result(ret)
def RunTest(test_name):
  """Runs one named gnumbd expect-test and returns its recorded output.

  Builds an origin repo and a local clone, configures the gnumbd config
  ref, then hands `run`/`checkpoint` callbacks to the test definition
  registered under `test_name`.
  """
  ret = []  # accumulated expect-test output
  clock = TestClock()
  origin = TestRepo('origin', clock)
  local = TestRepo('local', clock, origin.repo_path)
  cref = TestConfigRef(origin)
  cref.update(enabled_refglobs=['refs/heads/*'], interval=0)

  def checkpoint(message, include_committer=False, include_config=False):
    # Records a labeled snapshot of the origin repo only.
    ret.append([
        message, {
            'origin': origin.snap(include_committer, include_config)
        }
    ])

  def run(include_log=True):
    # Saved so stdio can be restored in the finally block below.
    stdout = sys.stdout
    stderr = sys.stderr

    if include_log:
      logout = StringIO()
      root_logger = logging.getLogger()
      # Saved so the original level can be restored after the run.
      log_level = root_logger.getEffectiveLevel()
      shandler = logging.StreamHandler(logout)
      shandler.setFormatter(
          logging.Formatter('%(levelname)s: %(message)s'))
      root_logger.addHandler(shandler)
      root_logger.setLevel(logging.INFO)

    success = False
    synthesized_commits = []
    try:
      sys.stderr = sys.stdout = open(os.devnull, 'w')
      local.reify()
      success, synthesized_commits = gnumbd.inner_loop(
          local, cref, clock)
    except Exception:  # pragma: no cover
      import traceback
      ret.append(traceback.format_exc().splitlines())
    finally:
      # Restore stdio and logging state in reverse order of setup.
      sys.stdout = stdout
      sys.stderr = stderr
      if include_log:
        root_logger.removeHandler(shandler)
        root_logger.setLevel(log_level)
        ret.append({'log output': logout.getvalue().splitlines()})

    ret.append({
        'inner_loop success': success,
        'synthesized_commits': [{
            'commit': c.hsh,
            'footers': infra_types.thaw(c.data.footers),
        } for c in synthesized_commits],
    })

  gnumbd_test_definitions.GNUMBD_TESTS[test_name](origin, local, cref, run,
                                                  checkpoint)
  return expect_tests.Result(ret)
def _init_origin(self, name):
  """Creates the origin repo named `name` plus a 'local' clone of it."""
  origin = TestRepo(name, self._clock)
  self.origin = origin
  self.local = TestRepo('local', self._clock, origin.repo_path)
def RunTest(test_name):
  """Runs one named gsubtreed expect-test and returns its recorded output.

  Builds an origin repo, a local clone of it, and one bare mirror repo per
  enabled path (plus 'extra_mirror'), then hands `run`/`checkpoint`
  callbacks to the test definition registered under `test_name`.
  """
  ret = []  # accumulated expect-test output
  clock = TestClock()
  origin = TestRepo('origin', clock)
  local = TestRepo('local', clock, origin.repo_path)
  base_repo_path = tempfile.mkdtemp(".gsubtreed.remote_repos")

  enabled_paths = [
      'mirrored_path/subpath',
      'mirrored_path',
      'exception/path'
  ]
  path_map_exceptions = {'exception/path': 'cool_path'}
  cref = TestConfigRef(origin)
  cref.update(enabled_paths=enabled_paths,
              base_url='file://' + base_repo_path,
              path_map_exceptions=path_map_exceptions)

  mirrors = {}
  for path in enabled_paths + ['extra_mirror']:
    # Mirrors live under the name mapped by path_map_exceptions, if any.
    path_in_mirror = path_map_exceptions.get(path, path)
    full_path = os.path.join(base_repo_path, path_in_mirror)
    try:
      os.makedirs(full_path)
    except OSError:
      # Directory may already exist (e.g. parent created by a deeper path).
      pass
    mirrors[path_in_mirror] = TestRepo('mirror(%s)' % path_in_mirror, clock,
                                       'fake')
    # Point the TestRepo at the pre-made directory and init a bare repo.
    mirrors[path_in_mirror]._repo_path = full_path
    mirrors[path_in_mirror].run('init', '--bare')

  class LogFormatter(logging.Formatter):
    # Scrubs the temp-dir path from log lines so output is deterministic.
    def format(self, record):
      s = super(LogFormatter, self).format(record)
      return s.replace(base_repo_path, '[TMPDIR]')

  def checkpoint(message, include_committer=False, include_config=False):
    # Records a labeled snapshot of origin plus every mirror.
    repos = collections.OrderedDict()
    repos['origin'] = origin.snap(include_committer, include_config)
    for _, mirror in sorted(mirrors.items()):
      repos[mirror.short_name] = mirror.snap(include_committer,
                                             include_config)
    ret.append([message, repos])

  def run():
    # Saved so stdio can be restored in the finally block below.
    stdout = sys.stdout
    stderr = sys.stderr

    logout = StringIO()
    root_logger = logging.getLogger()
    shandler = logging.StreamHandler(logout)
    shandler.setFormatter(LogFormatter('%(levelname)s: %(message)s'))
    root_logger.addHandler(shandler)
    shandler.setLevel(logging.INFO)

    # Run pusher threads sequentially and deterministically.
    gsubtreed.Pusher.FAKE_THREADING = True

    success = False
    processed = {}
    try:
      with open(os.devnull, 'w') as dn:
        # TODO(iannucci): Let expect_tests absorb stdio
        sys.stderr = sys.stdout = dn
        local.reify()
        success, processed = gsubtreed.inner_loop(local, cref)
    except Exception:  # pragma: no cover
      ret.append(traceback.format_exc().splitlines())
    finally:
      # Restore globals in reverse order of setup.
      gsubtreed.Pusher.FAKE_THREADING = False
      sys.stdout = stdout
      sys.stderr = stderr
      root_logger.removeHandler(shandler)

    ret.append({'log output': logout.getvalue().splitlines()})
    ret.append({
        'inner_loop success': success,
        'processed': processed,
    })

  gsubtreed_test_definitions.GSUBTREED_TESTS[test_name](
      origin=origin, run=run, checkpoint=checkpoint, mirrors=mirrors,
      config=cref, local_origin_repo=local)
  return expect_tests.Result(ret)
class Context(object):
  """Test fixture with pieces useful for most tests.

  This gets passed to each test script as an arg.  Typical usage:

    @test
    def test_name(f):        # `f` is the test "fixture"
      f.make_commit(...)     # construct a commit at the origin repo
      f.checkpoint('before') # dump repo contents for expect output
      f.run()                # run the code under test
      f.checkpoint('after')  # dump modified repo content

  In special cases tests may customize the fixture before using it.
  """

  def __init__(self):
    self._clock = TestClock()
    # Accumulated expect-test output; exposed via `actual_results`.
    self.results = []
    # The `origin` and `target` are initialized lazily, so that special
    # tests can customize them.
    self.origin = None
    self.local = None
    self.target = None
    self.target_url = None

  # pylint: disable=W0212

  actual_results = property(lambda self: self.results)

  def _ensure_init(self):
    """Performs lazy initialization."""
    if not self.origin:
      self._init_origin('fount')
    if not self.target:
      self._init_target()

  def _init_origin(self, name):
    # Creates the origin repo and a local clone pointing at it.
    self.origin = TestRepo(name, self._clock)
    self.local = TestRepo('local', self._clock, self.origin.repo_path)

  def _init_target(self):
    # Creates a bare 'target' repo in a sibling dir of the origin repo.
    assert self.origin
    full_path = os.path.join(os.path.dirname(self.origin.repo_path),
                             "grimoire")
    self.target_url = 'file://' + full_path
    os.makedirs(full_path)
    self.target = TestRepo('target', self._clock, '(ignored)')
    # Point the TestRepo at the pre-made directory and init a bare repo.
    self.target._repo_path = full_path
    self.target.run('init', '--bare')

  def make_commit(self, description, spec):
    """Creates a commit on the origin repo's master ref."""
    self._ensure_init()
    return self.origin[MASTER].make_commit(description, spec)

  def record(self, o):
    """Appends one entry to the expect-test output."""
    self.results.append(o)

  def run(self, **kwargs):
    """Runs gsubmodd.reify_submodules with stdio and logging captured."""
    self._ensure_init()
    # Saved so stdio can be restored in the finally block below.
    stdout = sys.stdout
    stderr = sys.stderr

    class LogFilterer(logging.Filter):
      def filter(self, record):
        # infra.libs.git2.repo logs this message if the command took longer
        # than 1s to run. This causes test flakes occasionally.
        if (record.name.startswith('infra.libs.git2.repo.Repo') and
            record.msg.startswith('Finished in ')):  # pragma: no cover
          return False
        # Keep only log lines from the modules under test.
        return record.name.startswith((
            'infra.services.gsubmodd',
            'infra.libs.deps2submodules',
            'infra.libs.git2',
        ))

    logout = StringIO()
    root_logger = logging.getLogger()
    shandler = logging.StreamHandler(logout)
    shandler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
    shandler.addFilter(LogFilterer())
    root_logger.addHandler(shandler)
    shandler.setLevel(logging.INFO)

    # Temp file absorbs stdout/stderr from the code under test.
    fd, filename = tempfile.mkstemp(text=True)
    try:
      with os.fdopen(fd, 'w+') as fh:
        sys.stderr = sys.stdout = fh
        try:
          self.local.reify()
          ret = gsubmodd.reify_submodules(self.local, self.target_url,
                                          **kwargs)
          if not ret:
            self.record('reify_submodules() call failed')
        except Exception:  # pragma: no cover
          self.record(traceback.format_exc().splitlines())
        fh.seek(0)
        # Uncomment temporarily when needed for debugging
        # self.record({'stdio':fh.read().splitlines()})
    except Exception:  # pragma: no cover
      self.record(traceback.format_exc().splitlines())
    finally:
      # Restore stdio/logging and clean up the temp file.
      sys.stdout = stdout
      sys.stderr = stderr
      root_logger.removeHandler(shandler)
      self.record({'log output': logout.getvalue().splitlines()})
      os.remove(filename)

  def checkpoint(self, message, *args):
    """Records a labeled snapshot of the origin and target repos."""
    self._ensure_init()
    self.record([message,
                 _preserve_commit_order(self.origin.snap()),
                 _preserve_commit_order(self.target.snap())] +
                [arg for arg in args])