def log_xml_empty_date(sbox):
  "svn log --xml must not print empty date elements"
  sbox.build()

  # Create the revprop-change hook for this test
  svntest.actions.enable_revprop_changes(svntest.main.current_repo_dir)

  date_re = re.compile('<date')

  # Ensure that we get a date before we delete the property.
  output, errput = svntest.actions.run_and_verify_svn(
    "", None, [], 'log', '--xml', '-r1', sbox.wc_dir)

  matched = 0
  for line in output:
    if date_re.search(line):
      matched = 1
  if not matched:
    raise svntest.Failure("log contains no date element")

  # Delete the svn:date revprop from revision 1.
  svntest.actions.run_and_verify_svn("", None, [],
                                     'pdel', '--revprop', '-r1', 'svn:date',
                                     sbox.wc_dir)

  output, errput = svntest.actions.run_and_verify_svn(
    "", None, [], 'log', '--xml', '-r1', sbox.wc_dir)

  for line in output:
    if date_re.search(line):
      raise svntest.Failure(
        "log contains date element when svn:date is empty")
def test_filesize(sbox):
  "test 'svnlook filesize'"

  sbox.build()
  repo_dir = sbox.repo_dir
  wc_dir = sbox.wc_dir

  tree_output = run_svnlook('tree', '--full-paths', repo_dir)
  for line in tree_output:
    # Drop line endings
    line = line.rstrip()

    # Skip directories
    if line[-1] == '/':
      continue

    # Run 'svnlook cat' and measure the size of the output.
    cat_output = run_svnlook('cat', repo_dir, line)
    cat_size = len("".join(cat_output))

    # Run 'svnlook filesize' and compare the results with the CAT_SIZE.
    filesize_output = run_svnlook('filesize', repo_dir, line)
    if len(filesize_output) != 1:
      raise svntest.Failure("'svnlook filesize' printed something other than "
                            "a single line of output.")
    filesize = int(filesize_output[0].strip())
    if filesize != cat_size:
      raise svntest.Failure("'svnlook filesize' and the counted length of "
                            "'svnlook cat's output differ for the path "
                            "'%s'." % (line))
def test_svnmucc(repo_url, expected_path_changes, *varargs):
  """Run svnmucc with the list of SVNMUCC_ARGS arguments.  Verify that
  its run results in a new commit with 'svn log -rHEAD' changed paths
  that match the list of EXPECTED_PATH_CHANGES."""

  # First, run svnmucc.
  exit_code, outlines, errlines = svntest.main.run_svnmucc('-U', repo_url,
                                                           *varargs)
  if errlines:
    raise svntest.main.SVNCommitFailure(str(errlines))
  if len(outlines) != 1 or not _svnmucc_re.match(outlines[0]):
    raise svntest.main.SVNLineUnequal(str(outlines))

  # Now, run 'svn log -vq -rHEAD'
  changed_paths = []
  exit_code, outlines, errlines = \
    svntest.main.run_svn(None, 'log', '-vqrHEAD', repo_url)
  if errlines:
    raise svntest.Failure("Unable to verify commit with 'svn log': %s"
                          % (str(errlines)))
  for line in outlines:
    match = _log_re.match(line)
    if match:
      changed_paths.append(match.group(1).rstrip('\n\r'))

  expected_path_changes.sort()
  changed_paths.sort()
  if changed_paths != expected_path_changes:
    raise svntest.Failure("Logged path changes differ from expectations\n"
                          "   expected: %s\n"
                          "     actual: %s" % (str(expected_path_changes),
                                               str(changed_paths)))
def update_change_modified_external(sbox):
  "update changes to a modified external module"

  externals_test_setup(sbox)
  wc_dir = sbox.wc_dir

  other_wc_dir = sbox.add_wc_path('other')
  repo_dir = sbox.repo_dir
  repo_url = sbox.repo_url
  other_repo_url = repo_url + ".other"

  # Checkout two working copies.
  svntest.actions.run_and_verify_svn("", None, [],
                                     'checkout',
                                     '--username', svntest.main.wc_author,
                                     '--password', svntest.main.wc_passwd,
                                     repo_url, wc_dir)

  svntest.actions.run_and_verify_svn("", None, [],
                                     'checkout',
                                     '--username', svntest.main.wc_author,
                                     '--password', svntest.main.wc_passwd,
                                     repo_url, other_wc_dir)

  # Make a couple of mods in the "x/y/z/blah/" external.
  alpha_path = os.path.join(other_wc_dir, "A", "D",
                            "x", "y", "z", "blah", "alpha")
  svntest.main.file_append(alpha_path, "Some new text in alpha.\n")
  new_file = os.path.join(other_wc_dir, "A", "D",
                          "x", "y", "z", "blah", "fish.txt")
  svntest.main.file_append(new_file, "This is an unversioned file.\n")

  # Change the "x/y/z/blah" external on A/D to point to a different
  # URL.  There are some local mods under the old checked-out external,
  # so the old dir should be saved under a new name.
  new_externals_desc = \
    "exdir_A " + other_repo_url + "/A" + \
    "\n" + \
    "exdir_A/G " + other_repo_url + "/A/D/G" + \
    "\n" + \
    "exdir_A/H -r 1 " + other_repo_url + "/A/D/H" + \
    "\n" + \
    "x/y/z/blah " + other_repo_url + "/A/B/F" + \
    "\n"

  # Set and commit the property
  change_external(os.path.join(wc_dir, "A/D"), new_externals_desc)

  # Update other working copy, see if we get the right change.
  svntest.actions.run_and_verify_svn("", None, [], 'up', other_wc_dir)

  xyzb_path = os.path.join(other_wc_dir, "x", "y", "z", "blah")

  alpha_path = os.path.join(xyzb_path, "alpha")
  if (os.path.exists(alpha_path)):
    raise svntest.Failure(alpha_path + " unexpectedly still exists.")

  beta_path = os.path.join(xyzb_path, "beta")
  if (os.path.exists(beta_path)):
    raise svntest.Failure(beta_path + " unexpectedly still exists.")
def refresh_read_only_attribute(sbox):
  "refresh the WC file system read-only attribute"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a branch.
  url = svntest.main.current_repo_url + '/A'
  branch_url = svntest.main.current_repo_url + '/A-branch'
  svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 2.\n'],
                                     [], 'cp', '-m', 'svn:needs-lock not set',
                                     url, branch_url)

  # Set the svn:needs-lock property on a file from the "trunk".
  A_path = os.path.join(wc_dir, 'A')
  mu_path = os.path.join(A_path, 'mu')
  svntest.actions.run_and_verify_svn(None, None, [],
                                     'ps', 'svn:needs-lock', '1', mu_path)

  # Commit the propset of svn:needs-lock.
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
  expected_status.tweak(wc_rev=1)
  expected_status.tweak('A/mu', wc_rev=3)
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status,
                                        None, None, None, None, None,
                                        mu_path)

  # The file on which svn:needs-lock was set is now expected to be read-only.
  if os.access(mu_path, os.W_OK):
    raise svntest.Failure("'%s' expected to be read-only after having had "
                          "its svn:needs-lock property set" % mu_path)

  # Switch to the branch with the WC state from before the propset of
  # svn:needs-lock.
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(status=' U'),
    })
  expected_disk = svntest.main.greek_state.copy()
  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
  expected_status.tweak('', wc_rev=1)
  expected_status.tweak('iota', wc_rev=1)
  expected_status.tweak('A', switched='S')
  svntest.actions.run_and_verify_switch(wc_dir, A_path, branch_url,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # The file on which we set svn:needs-lock should now be writable, but
  # is still read-only!
  if not os.access(mu_path, os.W_OK):
    raise svntest.Failure("'%s' expected to be writable after being switched "
                          "to a branch on which its svn:needs-lock property "
                          "is not set" % mu_path)
def verify_changelist_output(output, expected_adds=None,
                             expected_removals=None,
                             expected_skips=None):
  """Compare lines of OUTPUT from 'svn changelist' against EXPECTED_ADDS
  (a dictionary mapping paths to changelist names), EXPECTED_REMOVALS
  (a dictionary mapping paths to ... whatever), and EXPECTED_SKIPS
  (a dictionary mapping paths to ... whatever).

  EXPECTED_SKIPS is ignored if None."""

  num_expected = 0
  if expected_adds:
    num_expected += len(expected_adds)
  if expected_removals:
    num_expected += len(expected_removals)
  if expected_skips:
    num_expected += len(expected_skips)

  if not expected_skips:
    output = [line for line in output if (not _re_cl_skip.match(line))]

  for line in output:
    line = line.rstrip()
    match = _re_cl_rem.match(line)
    if (match and expected_removals
        and match.group(2) in expected_removals):
      continue
    elif match:
      raise svntest.Failure("Unexpected changelist removal line: " + line)
    match = _re_cl_add.match(line)
    if (match and expected_adds
        and expected_adds.get(match.group(2)) == match.group(1)):
      continue
    elif match:
      raise svntest.Failure("Unexpected changelist add line: " + line)
    match = _re_cl_skip.match(line)
    if (match and expected_skips
        and match.group(2) in expected_skips):
      continue
    elif match:
      raise svntest.Failure("Unexpected changelist skip line: " + line)
    raise svntest.Failure("Unexpected line: " + line)

  if len(output) != num_expected:
    raise svntest.Failure("Unexpected number of 'svn changelist' output " +
                          "lines (%d vs %d)" % (len(output), num_expected))
def update_change_pristine_external(sbox):
  "update change to an unmodified external module"

  externals_test_setup(sbox)
  wc_dir = sbox.wc_dir

  other_wc_dir = sbox.add_wc_path('other')
  repo_dir = sbox.repo_dir
  repo_url = sbox.repo_url
  other_repo_url = repo_url + ".other"

  # Checkout two working copies.
  svntest.actions.run_and_verify_svn("", None, [],
                                     'checkout',
                                     '--username', svntest.main.wc_author,
                                     '--password', svntest.main.wc_passwd,
                                     repo_url, wc_dir)

  svntest.actions.run_and_verify_svn("", None, [],
                                     'checkout',
                                     '--username', svntest.main.wc_author,
                                     '--password', svntest.main.wc_passwd,
                                     repo_url, other_wc_dir)

  # Change the "x/y/z/blah" external on A/D to point to a different
  # URL.  Since no changes were made to the old checked-out external,
  # we should get a clean replace.
  new_externals_desc = \
    "exdir_A " + other_repo_url + "/A" + \
    "\n" + \
    "exdir_A/G " + other_repo_url + "/A/D/G" + \
    "\n" + \
    "exdir_A/H -r 1 " + other_repo_url + "/A/D/H" + \
    "\n" + \
    "x/y/z/blah " + other_repo_url + "/A/B/F" + \
    "\n"

  # Set and commit the property
  change_external(os.path.join(wc_dir, "A/D"), new_externals_desc)

  # Update other working copy, see if we get the right change.
  svntest.actions.run_and_verify_svn("", None, [], 'up', other_wc_dir)

  xyzb_path = os.path.join(other_wc_dir, "x", "y", "z", "blah")

  alpha_path = os.path.join(xyzb_path, "alpha")
  if (os.path.exists(alpha_path)):
    raise svntest.Failure(alpha_path + " unexpectedly still exists.")

  beta_path = os.path.join(xyzb_path, "beta")
  if (os.path.exists(beta_path)):
    raise svntest.Failure(beta_path + " unexpectedly still exists.")
def verify_pget_output(output, expected_props):
  """Compare lines of OUTPUT from 'svn propget' against EXPECTED_PROPS
  (a dictionary mapping paths to property values)."""

  _re_pget = re.compile('^(.*) - (.*)$')

  actual_props = {}
  for line in output:
    try:
      path, prop = line.rstrip().split(' - ')
    except:
      raise svntest.Failure("Unexpected output line: " + line)
    actual_props[path] = prop
  if expected_props != actual_props:
    raise svntest.Failure("Got unexpected property results")
def probe_paths_exist(paths):
  """ Probe each one of PATHS to see if it exists, otherwise throw a
      Failure exception. """

  for path in paths:
    if not os.path.exists(path):
      raise svntest.Failure("Probing for " + path + " failed.")
def multiple_url_targets(sbox):
  "multiple url targets"

  # First, make a new revision of iota.
  iota = os.path.join(sbox.wc_dir, 'iota')
  iota_url = sbox.repo_url + '/iota'
  non_existent = sbox.repo_url + '/non-existent'
  svntest.main.file_append(iota, "New contents for iota\n")
  svntest.main.run_svn(None, 'ci', '-m', '', iota)

  expected_output = [
    " 1 jrandom This is the file 'iota'.\n",
    " 2 jrandom New contents for iota\n",
    ]

  expected_err = ".*(W160017|W160013).*\n.*E200009.*"
  expected_err_re = re.compile(expected_err, re.DOTALL)

  exit_code, output, error = svntest.main.run_svn(1, 'blame',
                                                  non_existent, iota_url)

  # Verify error
  if not expected_err_re.match("".join(error)):
    raise svntest.Failure('blame failed: expected error "%s", but received '
                          '"%s"' % (expected_err, "".join(error)))
def probe_paths_missing(paths):
  """ Probe each one of PATHS to see if it does not exist, otherwise throw
      a Failure exception. """

  for path in paths:
    if os.path.exists(path):
      raise svntest.Failure(path + " unexpectedly still exists.")
def backport_otherproject_change(sbox):
  "inoperative revision"

  # r6: a change outside ^/subversion
  sbox.simple_mkdir('elsewhere')
  sbox.simple_commit()

  # r7: Nominate r6 by mistake
  approved_entries = [make_entry([6])]
  sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
  sbox.simple_commit(message='Nominate r6 by mistake')

  # Run it.
  exit_code, output, errput = run_backport(sbox, error_expected=True)

  # Verify no commit occurred.
  svntest.actions.run_and_verify_svnlook(["7\n"], [],
                                         'youngest', sbox.repo_dir)

  # Verify the failure mode.
  expected_stdout = None
  expected_stderr = ".*only svn:mergeinfo changes.*"
  if exit_code == 0:
    # Can't use verify_exit_code() since the exact code used varies.
    raise svntest.Failure("exit_code should be non-zero")
  svntest.verify.verify_outputs(None, output, errput,
                                expected_stdout, expected_stderr)
def export_with_url_unsafe_characters(sbox):
  "export file with URL unsafe characters"

  ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3683 ##

  sbox.build()
  wc_dir = sbox.wc_dir

  # Define the paths
  url_unsafe_path = os.path.join(wc_dir, 'A', 'test- @#$&.txt')
  url_unsafe_path_url = sbox.repo_url + '/A/test- @#$&.txt@'
  export_target = os.path.join(wc_dir, 'test- @#$&.txt')

  # Create the file with special name and commit it.
  svntest.main.file_write(url_unsafe_path, 'This is URL unsafe path file.')
  svntest.main.run_svn(None, 'add', url_unsafe_path + '@')
  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg',
                                     '--quiet', wc_dir)

  # Export the file and verify it.
  svntest.actions.run_and_verify_svn(None, [], 'export',
                                     url_unsafe_path_url,
                                     export_target + '@')

  if not os.path.exists(export_target):
    raise svntest.Failure("export did not fetch file with URL unsafe path")
def checkout_with_externals(sbox):
  "test checkouts with externals"

  externals_test_setup(sbox)
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Create a working copy.
  svntest.actions.run_and_verify_svn(None, None, [],
                                     'checkout', repo_url, wc_dir)

  # Probe the working copy a bit, see if it's as expected.
  expected_existing_paths = [
    os.path.join(wc_dir, "A", "C", "exdir_G"),
    os.path.join(wc_dir, "A", "C", "exdir_G", "pi"),
    os.path.join(wc_dir, "A", "C", "exdir_H"),
    os.path.join(wc_dir, "A", "C", "exdir_H", "omega"),
    os.path.join(wc_dir, "A", "D", "x"),
    os.path.join(wc_dir, "A", "D", "x", "y"),
    os.path.join(wc_dir, "A", "D", "x", "y", "z"),
    os.path.join(wc_dir, "A", "D", "x", "y", "z", "blah"),
    os.path.join(wc_dir, "A", "D", "x", "y", "z", "blah", "E", "alpha"),
    os.path.join(wc_dir, "A", "D", "x", "y", "z", "blah", "E", "beta"),
    ]
  probe_paths_exist(expected_existing_paths)

  # Pick a file at random, make sure it has the expected contents.
  exdir_H_omega_path = os.path.join(wc_dir, "A", "C", "exdir_H", "omega")
  fp = open(exdir_H_omega_path, 'r')
  lines = fp.readlines()
  if not ((len(lines) == 1) and (lines[0] == "This is the file 'omega'.\n")):
    raise svntest.Failure("Unexpected contents for rev 1 of " +
                          exdir_H_omega_path)
def blame_directory(sbox):
  "annotating a directory not allowed"

  # Issue 2154 - blame on directory fails without error message
  import re

  # Setup
  sbox.build()
  wc_dir = sbox.wc_dir
  dir = os.path.join(wc_dir, 'A')

  # Run blame against directory 'A'.  The repository error will
  # probably include a leading slash on the path, but we'll tolerate
  # it either way, since either way it would still be a clean error.
  expected_error = ".*'[/]{0,1}A' is not a file"
  outlines, errlines = svntest.main.run_svn(1, 'blame', dir)

  # Verify expected error message is output
  for line in errlines:
    if re.match(expected_error, line):
      break
  else:
    raise svntest.Failure('Failed to find %s in %s'
                          % (expected_error, str(errlines)))
def new_style_externals(sbox):
  "check the new '-rN URL PATH' syntax"

  external_url_for = externals_test_setup(sbox)
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Checkout a working copy.
  svntest.actions.run_and_verify_svn(None, None, [],
                                     'checkout', repo_url, wc_dir)

  # Set an external property using the new '-rN URL PATH' syntax.
  new_externals_desc = \
    external_url_for["A/C/exdir_G"] + " exdir_G" + \
    "\n" + \
    "-r 1 " + external_url_for["A/C/exdir_H"] + " exdir_H" + \
    "\n" + \
    "-r1 " + external_url_for["A/C/exdir_H"] + " exdir_I" + \
    "\n"

  # Set and commit the property.
  change_external(os.path.join(wc_dir, "A/C"), new_externals_desc)

  # Update other working copy.
  svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir)

  for dir_name in ["exdir_H", "exdir_I"]:
    exdir_X_omega_path = os.path.join(wc_dir, "A", "C", dir_name, "omega")
    fp = open(exdir_X_omega_path, 'r')
    lines = fp.readlines()
    if not ((len(lines) == 1)
            and (lines[0] == "This is the file 'omega'.\n")):
      raise svntest.Failure("Unexpected contents for rev 1 of " +
                            exdir_X_omega_path)
def build(self, name=None, create_wc=True, read_only=False):
  self._set_name(name, read_only)
  if svntest.actions.make_repo_and_wc(self, create_wc, read_only):
    raise svntest.Failure("Could not build repository and sandbox '%s'"
                          % self.name)
  else:
    self._is_built = True
def info_with_changelists(sbox):
  "info --changelist"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Add files to changelists based on the last character in their names.
  changelist_all_files(wc_dir, clname_from_lastchar_cb)

  # Now, test various combinations of changelist specification and depths.
  for clname in [['a'], ['i'], ['a', 'i']]:
    for depth in [None, 'files', 'infinity']:

      # Figure out what we expect to see in our info output.
      expected_paths = []
      if 'a' in clname:
        if depth == 'infinity':
          expected_paths.append('A/B/lambda')
          expected_paths.append('A/B/E/alpha')
          expected_paths.append('A/B/E/beta')
          expected_paths.append('A/D/gamma')
          expected_paths.append('A/D/H/omega')
        if depth == 'files' or depth == 'infinity':
          expected_paths.append('iota')
      if 'i' in clname:
        if depth == 'infinity':
          expected_paths.append('A/D/G/pi')
          expected_paths.append('A/D/H/chi')
          expected_paths.append('A/D/H/psi')
      expected_paths = sorted([os.path.join(wc_dir, x.replace('/', os.sep))
                               for x in expected_paths])

      # Build the command line.
      args = ['info', wc_dir]
      for cl in clname:
        args.append('--changelist')
        args.append(cl)
      if depth:
        args.append('--depth')
        args.append(depth)

      # Run 'svn info ...'
      exit_code, output, errput = svntest.main.run_svn(None, *args)

      # Filter the output for lines that begin with 'Path:', and
      # reduce even those lines to just the actual path.
      def startswith_path(line):
        return line[:6] == 'Path: ' and 1 or 0
      paths = [x[6:].rstrip() for x in filter(startswith_path, output)]
      paths.sort()

      # And, compare!
      if (paths != expected_paths):
        raise svntest.Failure("Expected paths (%s) and actual paths (%s) "
                              "don't gel" % (str(expected_paths), str(paths)))
def parse_and_verify_blame(output, expected_blame, with_merged=0):
  "tokenize and validate the output of blame"

  max_split = 2
  keys = ['revision', 'author', 'text']
  if with_merged:
    keys.append('merged')

  results = []

  # Tokenize and parse each line
  for line_str in output:
    this_line = {}

    if with_merged:
      this_line['merged'] = (line_str[0] == 'G')
      line_str = line_str[2:]

    tokens = line_str.split(None, max_split)

    if tokens[0] == '-':
      this_line['revision'] = None
    else:
      this_line['revision'] = int(tokens[0])

    if tokens[1] == '-':
      this_line['author'] = None
    else:
      this_line['author'] = tokens[1]

    this_line['text'] = tokens[2]

    results.append(this_line)

  # Verify the results
  if len(results) != len(expected_blame):
    raise svntest.Failure("expected and actual results not the same length")

  pairs = list(zip(results, expected_blame))
  for num in range(len(pairs)):
    (item, expected_item) = pairs[num]
    for key in keys:
      if item[key] != expected_item[key]:
        raise svntest.Failure('on line %d, expecting %s "%s", found "%s"'
                              % (num+1, key, str(expected_item[key]),
                                 str(item[key])))
def last_modified_header(sbox):
  "verify 'Last-Modified' header on 'external' GETs"

  sbox.build(create_wc=False, read_only=True)

  headers = {
    'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
  }

  h = svntest.main.create_http_connection(sbox.repo_url)

  # GET /repos/iota
  # Expect to see a Last-Modified header.
  h.request('GET', sbox.repo_url + '/iota', None, headers)
  r = h.getresponse()
  if r.status != httplib.OK:
    raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
  svntest.verify.compare_and_display_lines(None, 'Last-Modified',
                                           svntest.verify.RegexOutput('.+'),
                                           r.getheader('Last-Modified'))
  r.read()

  # HEAD /repos/iota
  # Expect to see a Last-Modified header.
  h.request('HEAD', sbox.repo_url + '/iota', None, headers)
  r = h.getresponse()
  if r.status != httplib.OK:
    raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
  svntest.verify.compare_and_display_lines(None, 'Last-Modified',
                                           svntest.verify.RegexOutput('.+'),
                                           r.getheader('Last-Modified'))
  r.read()

  # GET /repos/!svn/rvr/1/iota
  # There should not be a Last-Modified header (it's costly and not useful,
  # see r1724790)
  h.request('GET', sbox.repo_url + '/!svn/rvr/1/iota', None, headers)
  r = h.getresponse()
  if r.status != httplib.OK:
    raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
  last_modified = r.getheader('Last-Modified')
  if last_modified:
    raise svntest.Failure('Unexpected Last-Modified header: %s'
                          % last_modified)
  r.read()
def test_stderr(re_string, stderr):
  exp_err_re = re.compile(re_string)
  for line in stderr:
    if exp_err_re.search(line):
      return
  for x in stderr:
    logger.debug(x[:-1])
  logger.info("Expected stderr reg-ex: '" + re_string + "'")
  raise svntest.Failure("Checkout failed but not in the expected way")
def compare_dump_files(label_expected, label_actual,
                       expected, actual,
                       ignore_uuid=False,
                       expect_content_length_always=False,
                       ignore_empty_prop_sections=False,
                       ignore_number_of_blank_lines=False):
  """Parse two dump files EXPECTED and ACTUAL, both of which are lists
  of lines as returned by run_and_verify_dump, and check that the same
  revisions, nodes, properties, etc. are present in both dumps.
  """

  parsed_expected = DumpParser(expected,
                               not svntest.main.fs_has_sha1()).parse()
  parsed_actual = DumpParser(actual).parse()

  if ignore_uuid:
    parsed_expected['uuid'] = '<ignored>'
    parsed_actual['uuid'] = '<ignored>'

  for parsed in [parsed_expected, parsed_actual]:
    for rev_name, rev_record in parsed.items():
      #print "Found %s" % (rev_name,)
      if b'nodes' in rev_record:
        #print "Found %s.%s" % (rev_name, 'nodes')
        for path_name, path_record in rev_record['nodes'].items():
          #print "Found %s.%s.%s" % (rev_name, 'nodes', path_name)
          for action_name, action_record in path_record.items():
            #print "Found %s.%s.%s.%s" % (rev_name, 'nodes', path_name, action_name)

            if expect_content_length_always:
              if action_record.get('content_length') == None:
                #print 'Adding: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'content_length=0')
                action_record['content_length'] = '0'
            if ignore_empty_prop_sections:
              if action_record.get('prop_length') == '10':
                #print 'Removing: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'prop_length')
                action_record['prop_length'] = None
                del action_record['props']
                old_content_length = int(action_record['content_length'])
                action_record['content_length'] = str(old_content_length - 10)
            if ignore_number_of_blank_lines:
              action_record['blanks'] = 0

  if parsed_expected != parsed_actual:
    print('DIFF of raw dumpfiles (including expected differences)')
    print('--- ' + (label_expected or 'expected'))
    print('+++ ' + (label_actual or 'actual'))
    print(''.join(ndiff(expected, actual)))
    raise svntest.Failure(
      'DIFF of parsed dumpfiles (ignoring expected differences)\n'
      + '\n'.join(ndiff(
          pprint.pformat(parsed_expected).splitlines(),
          pprint.pformat(parsed_actual).splitlines())))
def test_stderr(re_string, stderr):
  exp_err_re = re.compile(re_string)
  for line in stderr:
    if exp_err_re.search(line):
      return
  if svntest.main.verbose_mode:
    for x in stderr:
      sys.stdout.write(x)
    print("Expected stderr reg-ex: '" + re_string + "'")
  raise svntest.Failure("Checkout failed but not in the expected way")
def build(self, name=None, create_wc=True, read_only=False): """Make a 'Greek Tree' repo (or refer to the central one if READ_ONLY), and check out a WC from it (unless CREATE_WC is false). Change the sandbox's name to NAME. See actions.make_repo_and_wc() for details.""" self._set_name(name, read_only) if svntest.actions.make_repo_and_wc(self, create_wc, read_only): raise svntest.Failure( "Could not build repository and sandbox '%s'" % self.name) else: self._is_built = True
def basic_dump(sbox):
  "dump: standard sbox repos"
  sbox.build(read_only=True, create_wc=False)

  out = \
    svntest.actions.run_and_verify_svnrdump(None, svntest.verify.AnyOutput,
                                            [], 0, '-q', 'dump',
                                            sbox.repo_url)

  if not out[0].startswith('SVN-fs-dump-format-version:'):
    raise svntest.Failure('No valid output')
def fail_add_directory(sbox):
  "'svn mkdir' should clean up after itself on error"
  # This test doesn't use a working copy
  svntest.main.safe_rmtree(sbox.wc_dir)
  os.makedirs(sbox.wc_dir)

  os.chdir(sbox.wc_dir)
  svntest.actions.run_and_verify_svn('Failed mkdir',
                                     None, svntest.verify.AnyOutput,
                                     'mkdir', 'A')
  if os.path.exists('A'):
    raise svntest.Failure('svn mkdir created an unversioned directory')
def limit_history(sbox):
  "history --limit"
  sbox.build(create_wc=False)
  repo_url = sbox.repo_url

  svntest.actions.run_and_verify_svn(None, None, [],
                                     'mv', '-m', 'log msg',
                                     repo_url + "/iota",
                                     repo_url + "/iota2")
  svntest.actions.run_and_verify_svn(None, None, [],
                                     'mv', '-m', 'log msg',
                                     repo_url + "/A/mu",
                                     repo_url + "/iota")
  history = run_svnlook("history", "--limit=1", sbox.repo_dir)
  # Ignore the two lines of header, and verify expected number of items.
  if len(history[2:]) != 1:
    raise svntest.Failure("Output not limited to expected number of items")
def compare_dump_files(message, label, expected, actual):
  """Parse two dump files EXPECTED and ACTUAL, both of which are lists
  of lines as returned by run_and_verify_dump, and check that the same
  revisions, nodes, properties, etc. are present in both dumps.
  """

  parsed_expected = DumpParser(expected).parse()
  parsed_actual = DumpParser(actual).parse()

  if parsed_expected != parsed_actual:
    raise svntest.Failure('\n' + '\n'.join(ndiff(
        pprint.pformat(parsed_expected).splitlines(),
        pprint.pformat(parsed_actual).splitlines())))
def propfind_multiple_props(sbox):
  "verify multi-prop PROPFIND response"

  sbox.build(create_wc=False, read_only=True)
  repo_uripath = '/' + svntest.wc.svn_uri_quote(
    svntest.main.pristine_greek_repos_dir.replace(os.path.sep, '/'))
  h = svntest.main.create_http_connection(sbox.repo_url)

  # PROPFIND /repos/!svn/rvr/1/iota, Depth = 0
  headers = {
    'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
    'Depth': '0',
  }

  req_body = ('<?xml version="1.0" encoding="utf-8" ?>\n'
              '<D:propfind xmlns:D="DAV:">\n'
              '<D:prop xmlns:S="http://subversion.tigris.org/xmlns/dav/">\n'
              '<D:resourcetype/>\n'
              '<S:md5-checksum/>\n'
              '</D:prop>\n'
              '</D:propfind>\n')

  h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1/iota', req_body, headers)

  r = h.getresponse()
  if r.status != httplib.MULTI_STATUS:
    raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))

  expected_response = ('<?xml version="1.0" encoding="utf-8"?>\n'
                       '<D:multistatus xmlns:D="DAV:" '
                       'xmlns:ns1="http://subversion.tigris.org/xmlns/dav/" '
                       'xmlns:ns0="DAV:">\n'
                       '<D:response xmlns:lp1="DAV:" '
                       'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
                       '<D:href>' + repo_uripath + '/!svn/rvr/1/iota</D:href>\n'
                       '<D:propstat>\n'
                       '<D:prop>\n'
                       '<lp1:resourcetype/>\n'
                       '<lp2:md5-checksum>'
                       '2d18c5e57e84c5b8a5e9a6e13fa394dc'
                       '</lp2:md5-checksum>\n'
                       '</D:prop>\n'
                       '<D:status>HTTP/1.1 200 OK</D:status>\n'
                       '</D:propstat>\n'
                       '</D:response>\n'
                       '</D:multistatus>\n')
  actual_response = r.read()
  verify_xml_response(expected_response, actual_response)
def backport_STATUS_mods(sbox):
  "local mods to STATUS"

  # Introduce a local mod.
  sbox.simple_append(STATUS, "\n")

  exit_code, output, errput = run_backport(sbox, error_expected=True)

  expected_stdout = None
  expected_stderr = ".*Local mods.*STATUS.*"
  if exit_code == 0:
    # Can't use verify_exit_code() since the exact code used varies.
    raise svntest.Failure("exit_code should be non-zero")
  svntest.verify.verify_outputs(None, output, errput,
                                expected_stdout, expected_stderr)