def test_handle_process_output(self):
    from git.cmd import handle_process_output

    line_count = 5002
    count = [None, 0, 0]

    def counter_stdout(line):
        count[1] += 1

    def counter_stderr(line):
        count[2] += 1

    proc = subprocess.Popen([sys.executable, fixture_path('cat_file.py'), str(fixture_path('issue-301_stderr'))],
                            stdin=None,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=False)

    handle_process_output(proc, counter_stdout, counter_stderr, lambda proc: proc.wait())

    assert count[1] == line_count
    assert count[2] == line_count
def test_index_file_base(self): # read from file index = IndexFile(self.rorepo, fixture_path("index")) assert index.entries assert index.version > 0 # test entry last_val = None entry = index.entries.itervalues().next() for attr in ("path", "ctime", "mtime", "dev", "inode", "mode", "uid", "gid", "size", "binsha", "hexsha", "stage"): val = getattr(entry, attr) # END for each method # test update entries = index.entries assert isinstance(index.update(), IndexFile) assert entries is not index.entries # test stage index_merge = IndexFile(self.rorepo, fixture_path("index_merge")) assert len(index_merge.entries) == 106 assert len(list(e for e in index_merge.entries.itervalues() if e.stage != 0)) # write the data - it must match the original tmpfile = tempfile.mktemp() index_merge.write(tmpfile) fp = open(tmpfile, 'rb') assert fp.read() == fixture("index_merge") fp.close() os.remove(tmpfile)
def test_index_file_base(self): # read from file index = IndexFile(self.rorepo, fixture_path("index")) assert index.entries assert index.version > 0 # test entry entry = next(iter(index.entries.values())) for attr in ("path", "ctime", "mtime", "dev", "inode", "mode", "uid", "gid", "size", "binsha", "hexsha", "stage"): getattr(entry, attr) # END for each method # test update entries = index.entries assert isinstance(index.update(), IndexFile) assert entries is not index.entries # test stage index_merge = IndexFile(self.rorepo, fixture_path("index_merge")) self.assertEqual(len(index_merge.entries), 106) assert len(list(e for e in index_merge.entries.values() if e.stage != 0)) # write the data - it must match the original tmpfile = tempfile.mktemp() index_merge.write(tmpfile) with open(tmpfile, 'rb') as fp: self.assertEqual(fp.read(), fixture("index_merge")) os.remove(tmpfile)
def test_handle_process_output(self):
    from git.cmd import handle_process_output

    line_count = 5002
    count = [None, 0, 0]

    def counter_stdout(line):
        count[1] += 1

    def counter_stderr(line):
        count[2] += 1

    cmdline = [sys.executable, fixture_path('cat_file.py'), str(fixture_path('issue-301_stderr'))]
    proc = subprocess.Popen(cmdline,
                            stdin=None,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=False,
                            creationflags=cmd.PROC_CREATIONFLAGS,
                            )

    handle_process_output(proc, counter_stdout, counter_stderr, finalize_process)

    self.assertEqual(count[1], line_count)
    self.assertEqual(count[2], line_count)
def test_index_file_base(self): # read from file index = IndexFile(self.rorepo, fixture_path("index")) assert index.entries assert index.version > 0 # test entry entry = next(iter(index.entries.values())) for attr in ("path", "ctime", "mtime", "dev", "inode", "mode", "uid", "gid", "size", "binsha", "hexsha", "stage"): getattr(entry, attr) # END for each method # test update entries = index.entries assert isinstance(index.update(), IndexFile) assert entries is not index.entries # test stage index_merge = IndexFile(self.rorepo, fixture_path("index_merge")) self.assertEqual(len(index_merge.entries), 106) assert len([e for e in index_merge.entries.values() if e.stage != 0]) # write the data - it must match the original tmpfile = tempfile.mktemp() index_merge.write(tmpfile) with open(tmpfile, 'rb') as fp: self.assertEqual(fp.read(), fixture("index_merge")) os.remove(tmpfile)
def test_complex_aliases(self):
    file_obj = self._to_memcache(fixture_path('.gitconfig'))
    with GitConfigParser(file_obj, read_only=False) as w_config:
        self.assertEqual(w_config.get('alias', 'rbi'),
                         '"!g() { git rebase -i origin/${1:-master} ; } ; g"')
    self.assertEqual(file_obj.getvalue(),
                     self._to_memcache(fixture_path('.gitconfig')).getvalue())
def test_empty_config_value(self):
    cr = GitConfigParser(fixture_path('git_config_with_empty_value'), read_only=True)

    assert cr.get_value('core', 'filemode'), "Should read keys with values"

    with self.assertRaises(cp.NoOptionError):
        cr.get_value('color', 'ui')
def test_invalid_commit(self):
    cmt = self.rorepo.commit()
    with open(fixture_path('commit_invalid_data'), 'rb') as fd:
        cmt._deserialize(fd)

    self.assertEqual(cmt.author.name, u'E.Azer Ko�o�o�oculu', cmt.author.name)
    self.assertEqual(cmt.author.email, '*****@*****.**', cmt.author.email)
def test_values(self): file_obj = self._to_memcache(fixture_path("git_config_values")) w_config = GitConfigParser(file_obj, read_only=False) w_config.write() # enforce writing orig_value = file_obj.getvalue() # Reading must unescape backslashes backslash = w_config.get('values', 'backslash') assert backslash == r'some\data' # Reading must unescape quotes quote = w_config.get('values', 'quote') assert quote == 'this is a "quoted value"' # Reading must remove surrounding quotes quoted = w_config.get('values', 'quoted') assert quoted == 'all your "quotes" are belong to us' # Writing must escape backslashes and quotes w_config.set('values', 'backslash', backslash) w_config.set('values', 'quote', quote) w_config.write() # enforce writing # Contents shouldn't differ assert file_obj.getvalue() == orig_value
def test_read_write(self):
    # writer must create the exact same file as the one read before
    for filename in ("git_config", "git_config_global"):
        file_obj = self._to_memcache(fixture_path(filename))
        w_config = GitConfigParser(file_obj, read_only=False)
        w_config.read()  # enforce reading
        assert w_config._sections
        w_config.write()  # enforce writing

        # we stripped lines when reading, so the results differ
        assert file_obj.getvalue()
        self.assertEqual(file_obj.getvalue(),
                         self._to_memcache(fixture_path(filename)).getvalue())

        # creating an additional config writer must fail due to exclusive access
        self.failUnlessRaises(IOError, GitConfigParser, file_obj, read_only=False)

        # should still have a lock and be able to make changes
        assert w_config._lock._has_lock()

        # changes should be written right away
        sname = "my_section"
        oname = "mykey"
        val = "myvalue"
        w_config.add_section(sname)
        assert w_config.has_section(sname)
        w_config.set(sname, oname, val)
        assert w_config.has_option(sname, oname)
        assert w_config.get(sname, oname) == val

        sname_new = "new_section"
        oname_new = "new_key"
        ival = 10
        w_config.set_value(sname_new, oname_new, ival)
        assert w_config.get_value(sname_new, oname_new) == ival

        file_obj.seek(0)
        r_config = GitConfigParser(file_obj, read_only=True)
        assert r_config.has_section(sname)
        assert r_config.has_option(sname, oname)
        assert r_config.get(sname, oname) == val
        w_config.release()
def test_base(self): path_repo = fixture_path("git_config") path_global = fixture_path("git_config_global") r_config = GitConfigParser([path_repo, path_global], read_only=True) assert r_config.read_only num_sections = 0 num_options = 0 # test reader methods assert r_config._is_initialized is False for section in r_config.sections(): num_sections += 1 for option in r_config.options(section): num_options += 1 val = r_config.get(section, option) val_typed = r_config.get_value(section, option) assert isinstance(val_typed, ( bool, int, float, ) + string_types) assert val assert "\n" not in option assert "\n" not in val # writing must fail with self.assertRaises(IOError): r_config.set(section, option, None) with self.assertRaises(IOError): r_config.remove_option(section, option) # END for each option with self.assertRaises(IOError): r_config.remove_section(section) # END for each section assert num_sections and num_options assert r_config._is_initialized is True # get value which doesnt exist, with default default = "my default value" assert r_config.get_value("doesnt", "exist", default) == default # it raises if there is no default though with self.assertRaises(cp.NoSectionError): r_config.get_value("doesnt", "exist")
def test_read_write(self): # writer must create the exact same file as the one read before for filename in ("git_config", "git_config_global"): file_obj = self._to_memcache(fixture_path(filename)) w_config = GitConfigParser(file_obj, read_only=False) w_config.read() # enforce reading assert w_config._sections w_config.write() # enforce writing # we stripped lines when reading, so the results differ assert file_obj.getvalue() self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path(filename)).getvalue()) # creating an additional config writer must fail due to exclusive access self.failUnlessRaises(IOError, GitConfigParser, file_obj, read_only=False) # should still have a lock and be able to make changes assert w_config._lock._has_lock() # changes should be written right away sname = "my_section" oname = "mykey" val = "myvalue" w_config.add_section(sname) assert w_config.has_section(sname) w_config.set(sname, oname, val) assert w_config.has_option(sname, oname) assert w_config.get(sname, oname) == val sname_new = "new_section" oname_new = "new_key" ival = 10 w_config.set_value(sname_new, oname_new, ival) assert w_config.get_value(sname_new, oname_new) == ival file_obj.seek(0) r_config = GitConfigParser(file_obj, read_only=True) assert r_config.has_section(sname) assert r_config.has_option(sname, oname) assert r_config.get(sname, oname) == val w_config.release()
def test_rename(self):
    file_obj = self._to_memcache(fixture_path('git_config'))
    cw = GitConfigParser(file_obj, read_only=False, merge_includes=False)

    self.failUnlessRaises(ValueError, cw.rename_section, "doesntexist", "foo")
    self.failUnlessRaises(ValueError, cw.rename_section, "core", "include")

    nn = "bee"
    assert cw.rename_section('core', nn) is cw
    assert not cw.has_section('core')
    assert len(cw.items(nn)) == 4
    cw.release()
def test_rename(self):
    file_obj = self._to_memcache(fixture_path('git_config'))
    with GitConfigParser(file_obj, read_only=False, merge_includes=False) as cw:
        with self.assertRaises(ValueError):
            cw.rename_section("doesntexist", "foo")
        with self.assertRaises(ValueError):
            cw.rename_section("core", "include")

        nn = "bee"
        assert cw.rename_section('core', nn) is cw
        assert not cw.has_section('core')
        assert len(cw.items(nn)) == 4
def test_base(self): path_repo = fixture_path("git_config") path_global = fixture_path("git_config_global") r_config = GitConfigParser([path_repo, path_global], read_only=True) assert r_config.read_only num_sections = 0 num_options = 0 # test reader methods assert r_config._is_initialized == False for section in r_config.sections(): num_sections += 1 if section != 'filter "indent"': for option in r_config.options(section): num_options += 1 val = r_config.get(section, option) val_typed = r_config.get_value(section, option) assert isinstance(val_typed, (bool, long, float, basestring)) assert val assert "\n" not in option assert "\n" not in val # writing must fail self.failUnlessRaises(IOError, r_config.set, section, option, None) self.failUnlessRaises(IOError, r_config.remove_option, section, option) else: val = r_config.get(section, 'required') assert val is None # END for each option self.failUnlessRaises(IOError, r_config.remove_section, section) # END for each section assert num_sections and num_options assert r_config._is_initialized == True # get value which doesnt exist, with default default = "my default value" assert r_config.get_value("doesnt", "exist", default) == default # it raises if there is no default though self.failUnlessRaises(NoSectionError, r_config.get_value, "doesnt", "exist")
def test_base(self): path_repo = fixture_path("git_config") path_global = fixture_path("git_config_global") r_config = GitConfigParser([path_repo, path_global], read_only=True) assert r_config.read_only num_sections = 0 num_options = 0 # test reader methods assert r_config._is_initialized is False for section in r_config.sections(): num_sections += 1 for option in r_config.options(section): num_options += 1 val = r_config.get(section, option) val_typed = r_config.get_value(section, option) assert isinstance(val_typed, (bool, int, float, ) + string_types) assert val assert "\n" not in option assert "\n" not in val # writing must fail with self.assertRaises(IOError): r_config.set(section, option, None) with self.assertRaises(IOError): r_config.remove_option(section, option) # END for each option with self.assertRaises(IOError): r_config.remove_section(section) # END for each section assert num_sections and num_options assert r_config._is_initialized is True # get value which doesnt exist, with default default = "my default value" assert r_config.get_value("doesnt", "exist", default) == default # it raises if there is no default though with self.assertRaises(cp.NoSectionError): r_config.get_value("doesnt", "exist")
def test_handle_process_output(self): from git.cmd import handle_process_output line_count = 5002 count = [None, 0, 0] def counter_stdout(line): count[1] += 1 def counter_stderr(line): count[2] += 1 proc = subprocess.Popen([sys.executable, fixture_path('cat_file.py'), str(fixture_path('issue-301_stderr'))], stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False) handle_process_output(proc, counter_stdout, counter_stderr, lambda proc: proc.wait()) assert count[1] == line_count assert count[2] == line_count
def test_it_accepts_environment_variables(self): filename = fixture_path("ls_tree_empty") with open(filename, 'r') as fh: tree = self.git.mktree(istream=fh) env = { 'GIT_AUTHOR_NAME': 'Author Name', 'GIT_AUTHOR_EMAIL': '*****@*****.**', 'GIT_AUTHOR_DATE': '1400000000+0000', 'GIT_COMMITTER_NAME': 'Committer Name', 'GIT_COMMITTER_EMAIL': '*****@*****.**', 'GIT_COMMITTER_DATE': '1500000000+0000', } commit = self.git.commit_tree(tree, m='message', env=env) assert_equal(commit, '4cfd6b0314682d5a58f80be39850bad1640e9241')
def test_it_accepts_environment_variables(self): filename = fixture_path("ls_tree_empty") with open(filename, 'r') as fh: tree = self.git.mktree(istream=fh) env = { 'GIT_AUTHOR_NAME': 'Author Name', 'GIT_AUTHOR_EMAIL': '*****@*****.**', 'GIT_AUTHOR_DATE': '1400000000+0000', 'GIT_COMMITTER_NAME': 'Committer Name', 'GIT_COMMITTER_EMAIL': '*****@*****.**', 'GIT_COMMITTER_DATE': '1500000000+0000', } commit = self.git.commit_tree(tree, m='message', env=env) self.assertEqual(commit, '4cfd6b0314682d5a58f80be39850bad1640e9241')
def test_add_to_multiple(self):
    file_obj = self._to_memcache(fixture_path('git_config_multiple'))
    with GitConfigParser(file_obj, read_only=False) as cw:
        cw.add_value('section1', 'option1', 'value1c')
        cw.write()

    file_obj.seek(0)
    cr = GitConfigParser(file_obj, read_only=True)
    self.assertEqual(cr.get_value('section1', 'option1'), 'value1c')
    self.assertEqual(cr.get_values('section1', 'option1'),
                     ['value1a', 'value1b', 'value1c'])
    self.assertEqual(cr.items('section1'),
                     [('option1', 'value1c'),
                      ('other_option1', 'other_value1')])
    self.assertEqual(cr.items_all('section1'),
                     [('option1', ['value1a', 'value1b', 'value1c']),
                      ('other_option1', ['other_value1'])])
def test_multi_line_config(self): file_obj = self._to_memcache(fixture_path("git_config_with_comments")) config = GitConfigParser(file_obj, read_only=False) ev = "ruby -e '\n" ev += " system %(git), %(merge-file), %(--marker-size=%L), %(%A), %(%O), %(%B)\n" ev += " b = File.read(%(%A))\n" ev += " b.sub!(/^<+ .*\\nActiveRecord::Schema\\.define.:version => (\\d+). do\\n=+\\nActiveRecord::Schema\\." ev += "define.:version => (\\d+). do\\n>+ .*/) do\n" ev += " %(ActiveRecord::Schema.define(:version => #{[$1, $2].max}) do)\n" ev += " end\n" ev += " File.open(%(%A), %(w)) {|f| f.write(b)}\n" ev += " exit 1 if b.include?(%(<)*%L)'" assert_equal(config.get('merge "railsschema"', 'driver'), ev) assert_equal(config.get('alias', 'lg'), "log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr)%Creset'" " --abbrev-commit --date=relative") assert len(config.sections()) == 23
def test_gpgsig(self):
    cmt = self.rorepo.commit()
    with open(fixture_path('commit_with_gpgsig'), 'rb') as fd:
        cmt._deserialize(fd)

    fixture_sig = """-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.11 (GNU/Linux)

iQIcBAABAgAGBQJRk8zMAAoJEG5mS6x6i9IjsTEP/0v2Wx/i7dqyKban6XMIhVdj
uI0DycfXqnCCZmejidzeao+P+cuK/ZAA/b9fU4MtwkDm2USvnIOrB00W0isxsrED
sdv6uJNa2ybGjxBolLrfQcWutxGXLZ1FGRhEvkPTLMHHvVriKoNFXcS7ewxP9MBf
NH97K2wauqA+J4BDLDHQJgADCOmLrGTAU+G1eAXHIschDqa6PZMH5nInetYZONDh
3SkOOv8VKFIF7gu8X7HC+7+Y8k8U0TW0cjlQ2icinwCc+KFoG6GwXS7u/VqIo1Yp
Tack6sxIdK7NXJhV5gAeAOMJBGhO0fHl8UUr96vGEKwtxyZhWf8cuIPOWLk06jA0
g9DpLqmy/pvyRfiPci+24YdYRBua/vta+yo/Lp85N7Hu/cpIh+q5WSLvUlv09Dmo
TTTG8Hf6s3lEej7W8z2xcNZoB6GwXd8buSDU8cu0I6mEO9sNtAuUOHp2dBvTA6cX
PuQW8jg3zofnx7CyNcd3KF3nh2z8mBcDLgh0Q84srZJCPRuxRcp9ylggvAG7iaNd
XMNvSK8IZtWLkx7k3A3QYt1cN4y1zdSHLR2S+BVCEJea1mvUE+jK5wiB9S4XNtKm
BX/otlTa8pNE3fWYBxURvfHnMY4i3HQT7Bc1QjImAhMnyo2vJk4ORBJIZ1FTNIhJ
JzJMZDRLQLFvnzqZuCjE
=przd
-----END PGP SIGNATURE-----"""
    self.assertEqual(cmt.gpgsig, fixture_sig)

    cmt.gpgsig = "<test\ndummy\nsig>"
    assert cmt.gpgsig != fixture_sig

    cstream = BytesIO()
    cmt._serialize(cstream)
    assert re.search(r"^gpgsig <test\n dummy\n sig>$",
                     cstream.getvalue().decode('ascii'), re.MULTILINE)

    self.assert_gpgsig_deserialization(cstream)

    cstream.seek(0)
    cmt.gpgsig = None
    cmt._deserialize(cstream)
    self.assertEqual(cmt.gpgsig, "<test\ndummy\nsig>")

    cmt.gpgsig = None
    cstream = BytesIO()
    cmt._serialize(cstream)
    assert not re.search(r"^gpgsig ", cstream.getvalue().decode('ascii'), re.MULTILINE)
def test_multiple_values(self):
    file_obj = self._to_memcache(fixture_path('git_config_multiple'))
    with GitConfigParser(file_obj, read_only=False) as cw:
        self.assertEqual(cw.get('section0', 'option0'), 'value0')
        self.assertEqual(cw.get_values('section0', 'option0'), ['value0'])
        self.assertEqual(cw.items('section0'), [('option0', 'value0')])

        # Where there are multiple values, "get" returns the last.
        self.assertEqual(cw.get('section1', 'option1'), 'value1b')
        self.assertEqual(cw.get_values('section1', 'option1'), ['value1a', 'value1b'])
        self.assertEqual(cw.items('section1'),
                         [('option1', 'value1b'),
                          ('other_option1', 'other_value1')])
        self.assertEqual(cw.items_all('section1'),
                         [('option1', ['value1a', 'value1b']),
                          ('other_option1', ['other_value1'])])
        with self.assertRaises(KeyError):
            cw.get_values('section1', 'missing')

        self.assertEqual(cw.get_values('section1', 'missing', 1), [1])
        self.assertEqual(cw.get_values('section1', 'missing', 's'), ['s'])
def test_it_accepts_stdin(self): filename = fixture_path("cat_file_blob") with open(filename, 'r') as fh: assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8", self.git.hash_object(istream=fh, stdin=True))
def test_it_accepts_stdin(self): filename = fixture_path("cat_file_blob") fh = open(filename, 'r') assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8", self.git.hash_object(istream=fh, stdin=True)) fh.close()
def test_invalid_commit(self):
    cmt = self.rorepo.commit()
    cmt._deserialize(open(fixture_path('commit_invalid_data'), 'rb'))

    assert cmt.author.name == u'E.Azer Ko�o�o�oculu', cmt.author.name
    assert cmt.author.email == '*****@*****.**', cmt.author.email
def test_complex_aliases(self):
    file_obj = self._to_memcache(fixture_path('.gitconfig'))
    w_config = GitConfigParser(file_obj, read_only=False)
    self.assertEqual(w_config.get('alias', 'rbi'),
                     '"!g() { git rebase -i origin/${1:-master} ; } ; g"')
    w_config.release()
    self.assertEqual(file_obj.getvalue(),
                     self._to_memcache(fixture_path('.gitconfig')).getvalue())
def test_base(self): rlp_head = fixture_path('reflog_HEAD') rlp_master = fixture_path('reflog_master') tdir = tempfile.mktemp(suffix="test_reflogs") os.mkdir(tdir) rlp_master_ro = RefLog.path(self.rorepo.head) assert os.path.isfile(rlp_master_ro) # simple read reflog = RefLog.from_file(rlp_master_ro) assert reflog._path is not None assert isinstance(reflog, RefLog) assert len(reflog) # iter_entries works with path and with stream assert len(list(RefLog.iter_entries(open(rlp_master, 'rb')))) assert len(list(RefLog.iter_entries(rlp_master))) # raise on invalid revlog # TODO: Try multiple corrupted ones ! pp = 'reflog_invalid_' for suffix in ('oldsha', 'newsha', 'email', 'date', 'sep'): self.failUnlessRaises(ValueError, RefLog.from_file, fixture_path(pp + suffix)) # END for each invalid file # cannot write an uninitialized reflog self.failUnlessRaises(ValueError, RefLog().write) # test serialize and deserialize - results must match exactly binsha = hex_to_bin(('f' * 40).encode('ascii')) msg = "my reflog message" cr = self.rorepo.config_reader() for rlp in (rlp_head, rlp_master): reflog = RefLog.from_file(rlp) tfile = os.path.join(tdir, os.path.basename(rlp)) reflog.to_file(tfile) assert reflog.write() is reflog # parsed result must match ... treflog = RefLog.from_file(tfile) assert treflog == reflog # ... as well as each bytes of the written stream assert open(tfile).read() == open(rlp).read() # append an entry entry = RefLog.append_entry(cr, tfile, IndexObject.NULL_BIN_SHA, binsha, msg) assert entry.oldhexsha == IndexObject.NULL_HEX_SHA assert entry.newhexsha == 'f' * 40 assert entry.message == msg assert RefLog.from_file(tfile)[-1] == entry # index entry # raises on invalid index self.failUnlessRaises(IndexError, RefLog.entry_at, rlp, 10000) # indices can be positive ... assert isinstance(RefLog.entry_at(rlp, 0), RefLogEntry) RefLog.entry_at(rlp, 23) # ... and negative for idx in (-1, -24): RefLog.entry_at(rlp, idx) # END for each index to read # END for each reflog # finally remove our temporary data shutil.rmtree(tdir)
def test_base(self): rlp_head = fixture_path('reflog_HEAD') rlp_master = fixture_path('reflog_master') tdir = tempfile.mktemp(suffix="test_reflogs") os.mkdir(tdir) rlp_master_ro = RefLog.path(self.rorepo.head) assert osp.isfile(rlp_master_ro) # simple read reflog = RefLog.from_file(rlp_master_ro) assert reflog._path is not None assert isinstance(reflog, RefLog) assert len(reflog) # iter_entries works with path and with stream assert len(list(RefLog.iter_entries(open(rlp_master, 'rb')))) assert len(list(RefLog.iter_entries(rlp_master))) # raise on invalid revlog # TODO: Try multiple corrupted ones ! pp = 'reflog_invalid_' for suffix in ('oldsha', 'newsha', 'email', 'date', 'sep'): self.failUnlessRaises(ValueError, RefLog.from_file, fixture_path(pp + suffix)) # END for each invalid file # cannot write an uninitialized reflog self.failUnlessRaises(ValueError, RefLog().write) # test serialize and deserialize - results must match exactly binsha = hex_to_bin(('f' * 40).encode('ascii')) msg = "my reflog message" cr = self.rorepo.config_reader() for rlp in (rlp_head, rlp_master): reflog = RefLog.from_file(rlp) tfile = osp.join(tdir, osp.basename(rlp)) reflog.to_file(tfile) assert reflog.write() is reflog # parsed result must match ... treflog = RefLog.from_file(tfile) assert treflog == reflog # ... as well as each bytes of the written stream assert open(tfile).read() == open(rlp).read() # append an entry entry = RefLog.append_entry(cr, tfile, IndexObject.NULL_BIN_SHA, binsha, msg) assert entry.oldhexsha == IndexObject.NULL_HEX_SHA assert entry.newhexsha == 'f' * 40 assert entry.message == msg assert RefLog.from_file(tfile)[-1] == entry # index entry # raises on invalid index self.failUnlessRaises(IndexError, RefLog.entry_at, rlp, 10000) # indices can be positive ... assert isinstance(RefLog.entry_at(rlp, 0), RefLogEntry) RefLog.entry_at(rlp, 23) # ... and negative for idx in (-1, -24): RefLog.entry_at(rlp, idx) # END for each index to read # END for each reflog # finally remove our temporary data rmtree(tdir)