def do_plugin_test(self, type, plugin_fields, expected_content, *,
                   fetch_dir=None):
    fetch_dir = fetch_dir or shared.create_dir()
    output = test_plugin_fetch(
        self.plugin_context, type, plugin_fields, fetch_dir)
    assert_contents(fetch_dir, expected_content)
    return output
def test_executable(self):
    exe = yield from rule.make_files_executable(
        self.cache, self.content_tree, ["b/*"])
    new_content_dir = shared.create_dir()
    yield from self.cache.export_tree(exe, new_content_dir)
    shared.assert_contents(new_content_dir, self.content)
    shared.assert_not_executable(os.path.join(new_content_dir, "a"))
    shared.assert_executable(os.path.join(new_content_dir, "b/c"))
def do_excludes_and_files_test(self, excludes, files, expected):
    tree = self.cache.import_tree(
        self.content_dir, excludes=excludes, files=files)
    out_dir = shared.create_dir()
    self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected)
async def do_excludes_and_files_test(self, excludes, picks, expected):
    tree = await self.cache.import_tree(
        self.content_dir, excludes=excludes, picks=picks)
    out_dir = create_dir()
    await self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected)
def do_integration_test(self, args, expected, *, cwd=None,
                        **peru_cmd_kwargs):
    if not cwd:
        cwd = self.test_dir
    run_peru_command(args, cwd, **peru_cmd_kwargs)
    assert_contents(self.test_dir, expected,
                    excludes=[DEFAULT_PERU_FILE_NAME, '.peru'])
def test_import_with_gitignore(self):
    # Make sure our git imports don't get confused by .gitignore files.
    new_content = {"fee/fi": "fo fum", ".gitignore": "fee/"}
    new_tree = self.cache.import_tree(shared.create_dir(new_content))
    export_dir = shared.create_dir()
    self.cache.export_tree(new_tree, export_dir)
    assert_contents(export_dir, new_content)
async def test_import_with_gitignore(self):
    # Make sure our git imports don't get confused by .gitignore files.
    new_content = {'fee/fi': 'fo fum', '.gitignore': 'fee/'}
    new_tree = await self.cache.import_tree(create_dir(new_content))
    export_dir = create_dir()
    await self.cache.export_tree(new_tree, export_dir)
    assert_contents(export_dir, new_content)
def test_import_with_gitignore(self):
    # Make sure our git imports don't get confused by .gitignore files.
    new_content = {'fee/fi': 'fo fum', '.gitignore': 'fee/'}
    new_tree = yield from self.cache.import_tree(create_dir(new_content))
    export_dir = create_dir()
    yield from self.cache.export_tree(new_tree, export_dir)
    assert_contents(export_dir, new_content)
def test_reup_all(self):
    yaml_with_imports = dedent('''\
        imports:
            foo: ./
            bar: ./
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
        ''').format(self.foo_dir, self.foo_master, self.bar_dir)
    test_dir = shared.create_dir({'peru.yaml': yaml_with_imports})
    expected = dedent('''\
        imports:
            foo: ./
            bar: ./
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
            rev: {}
        ''').format(self.foo_dir, self.foo_master, self.bar_dir,
                    self.bar_otherbranch)
    run_peru_command(['reup'], test_dir)
    # This time we finally pull in barfile.
    assert_contents(test_dir,
                    {'peru.yaml': expected, 'a': 'b', 'barfile': 'new'},
                    excludes=['.peru'])
async def test_executable(self):
    exe = await rule.make_files_executable(
        self.cache, self.content_tree, ['b/*'])
    new_content_dir = shared.create_dir()
    await self.cache.export_tree(exe, new_content_dir)
    shared.assert_contents(new_content_dir, self.content)
    shared.assert_not_executable(os.path.join(new_content_dir, 'a'))
    shared.assert_executable(os.path.join(new_content_dir, 'b/c'))
def test_import_with_files(self):
    all_content = {'foo': '', 'bar': '', 'baz/bing': ''}
    test_dir = shared.create_dir(all_content)
    tree = self.cache.import_tree(test_dir, ['foo', 'baz'])
    expected_content = {'foo': '', 'baz/bing': ''}
    out_dir = shared.create_dir()
    self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)
def test_executable(self):
    exe = rule.make_files_executable(self.cache, self.content_tree, ['b/*'])
    new_content_dir = shared.create_dir()
    self.cache.export_tree(exe, new_content_dir)
    shared.assert_contents(new_content_dir, self.content)
    shared.assert_not_executable(os.path.join(new_content_dir, 'a'))
    shared.assert_executable(os.path.join(new_content_dir, 'b/c'))
def do_plugin_test(self, type, plugin_fields, expected_content, *,
                   hide_stderr=False):
    fetch_dir = shared.create_dir()
    output = plugin_fetch(
        self.plugin_context, type, plugin_fields, fetch_dir,
        capture_output=True, stderr_to_stdout=hide_stderr)
    assert_contents(fetch_dir, expected_content)
    return output
def test_import_with_files(self):
    all_content = {'foo': '', 'bar': '', 'baz/bing': ''}
    test_dir = create_dir(all_content)
    tree = yield from self.cache.import_tree(test_dir, picks=['foo', 'baz'])
    expected_content = {'foo': '', 'baz/bing': ''}
    out_dir = create_dir()
    yield from self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)
def test_merge_from_map(self):
    imports = {'foo': ('path1',), 'bar': ('path2',)}
    target_trees = {'foo': self.content_tree, 'bar': self.content_tree}
    merged_tree = merge_imports_tree(self.cache, imports, target_trees)
    merged_dir = create_dir()
    self.cache.export_tree(merged_tree, merged_dir)
    expected_content = {'path1/a': 'a', 'path2/a': 'a'}
    assert_contents(merged_dir, expected_content)
def test_unpack_windows_zip(self):
    '''This zip was packed on Windows, so it doesn't include any file
    permissions. This checks that our executable-flag-restoring code doesn't
    barf when the flag isn't there.'''
    test_dir = shared.create_dir()
    archive = shared.test_resources / 'from_windows.zip'
    curl_plugin.extract_zip(str(archive), test_dir)
    shared.assert_contents(test_dir, {'windows_test/test.txt': 'Notepad!'})
    txt_file = join(test_dir, 'windows_test/test.txt')
    shared.assert_not_executable(txt_file)
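
# A minimal sketch (not peru's actual curl_plugin code) of the kind of
# executable-flag restoration the test above exercises. Zip archives packed
# on Unix carry the file mode in the high 16 bits of ZipInfo.external_attr;
# archives packed on Windows leave those bits zero, so the restore step has
# to tolerate a missing mode. The helper name is hypothetical.
import os
import stat
import zipfile


def sketch_extract_zip(archive_path, dest_dir):
    with zipfile.ZipFile(archive_path) as zf:
        zf.extractall(dest_dir)
        for info in zf.infolist():
            mode = (info.external_attr >> 16) & 0xFFFF
            # Skip entries with no recorded mode (e.g. zips made on Windows).
            if mode and stat.S_ISREG(mode) and (mode & 0o111):
                path = os.path.join(dest_dir, info.filename)
                os.chmod(path, os.stat(path).st_mode | 0o111)
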
async def test_merge_with_deep_prefix(self):
    '''This test was inspired by a bug on Windows where we would give git a
    backslash-separated merge prefix, even though git demands forward slash
    as a path separator.'''
    content = {'file': 'stuff'}
    content_dir = create_dir(content)
    tree = await self.cache.import_tree(content_dir)
    prefixed_tree = await self.cache.merge_trees(None, tree, 'a/b/')
    export_dir = create_dir()
    await self.cache.export_tree(prefixed_tree, export_dir)
    assert_contents(export_dir, {'a/b/file': 'stuff'})
async def test_merge_from_map(self):
    imports = {'foo': ('path1',), 'bar': ('path2',)}
    target_trees = {'foo': self.content_tree, 'bar': self.content_tree}
    merged_tree = await merge_imports_tree(self.cache, imports, target_trees)
    merged_dir = create_dir()
    await self.cache.export_tree(merged_tree, merged_dir)
    expected_content = {'path1/a': 'a', 'path2/a': 'a'}
    assert_contents(merged_dir, expected_content)
def test_merge_with_deep_prefix(self):
    '''This test was inspired by a bug on Windows where we would give git a
    backslash-separated merge prefix, even though git demands forward slash
    as a path separator.'''
    content = {'file': 'stuff'}
    content_dir = create_dir(content)
    tree = yield from self.cache.import_tree(content_dir)
    prefixed_tree = yield from self.cache.merge_trees(None, tree, 'a/b/')
    export_dir = create_dir()
    yield from self.cache.export_tree(prefixed_tree, export_dir)
    assert_contents(export_dir, {'a/b/file': 'stuff'})
async def test_export_force_with_preexisting_files(self):
    # Create a working tree with a conflicting file.
    dirty_content = {'a': 'junk'}
    export_dir = create_dir(dirty_content)
    # Export should fail by default.
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(self.content_tree, export_dir)
    assert_contents(export_dir, dirty_content)
    # But it should succeed with the force flag.
    await self.cache.export_tree(self.content_tree, export_dir, force=True)
    assert_contents(export_dir, self.content)
def test_import_with_files(self):
    all_content = {'foo': '', 'bar': '', 'baz/bing': ''}
    test_dir = create_dir(all_content)
    tree = self.cache.import_tree(test_dir, picks=['foo', 'baz'])
    expected_content = {'foo': '', 'baz/bing': ''}
    out_dir = create_dir()
    self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)
def test_missing_files_in_previous_tree(self):
    '''Export should allow missing files, and it should recreate them.'''
    export_dir = create_dir()
    # Nothing in content_tree exists yet, so this export should be the same
    # as if previous_tree wasn't specified.
    yield from self.cache.export_tree(
        self.content_tree, export_dir, previous_tree=self.content_tree)
    assert_contents(export_dir, self.content)
    # Make sure the same applies with just a single missing file.
    os.remove(os.path.join(export_dir, 'a'))
    yield from self.cache.export_tree(
        self.content_tree, export_dir, previous_tree=self.content_tree)
    assert_contents(export_dir, self.content)
def test_export_force_with_changed_files(self):
    export_dir = create_dir()
    self.cache.export_tree(self.content_tree, export_dir)
    # If we dirty a file, a resync should fail.
    with open(os.path.join(export_dir, 'a'), 'w') as f:
        f.write('dirty')
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        self.cache.export_tree(self.content_tree, export_dir,
                               previous_tree=self.content_tree)
    # But it should succeed with the --force flag.
    self.cache.export_tree(self.content_tree, export_dir, force=True,
                           previous_tree=self.content_tree)
    assert_contents(export_dir, self.content)
def test_merge_trees(self):
    merged_tree = self.cache.merge_trees(
        self.content_tree, self.content_tree, "subdir")
    expected_content = dict(self.content)
    for path, content in self.content.items():
        expected_content[os.path.join("subdir", path)] = content
    export_dir = shared.create_dir()
    self.cache.export_tree(merged_tree, export_dir)
    assert_contents(export_dir, expected_content)
    with self.assertRaises(peru.cache.MergeConflictError):
        # subdir/ is already populated, so this merge should throw.
        self.cache.merge_trees(merged_tree, self.content_tree, "subdir")
def test_single_reup(self):
    expected = dedent('''\
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
        ''').format(self.foo_dir, self.foo_master, self.bar_dir)
    run_peru_command(['reup', 'foo'], self.test_dir)
    assert_contents(self.test_dir, {'peru.yaml': expected},
                    excludes=['.peru'])
def test_missing_files_in_previous_tree(self):
    '''Export should allow missing files, and it should recreate them.'''
    export_dir = create_dir()
    # Nothing in content_tree exists yet, so this export should be the same
    # as if previous_tree wasn't specified.
    self.cache.export_tree(self.content_tree, export_dir,
                           previous_tree=self.content_tree)
    assert_contents(export_dir, self.content)
    # Make sure the same applies with just a single missing file.
    os.remove(os.path.join(export_dir, 'a'))
    self.cache.export_tree(self.content_tree, export_dir,
                           previous_tree=self.content_tree)
    assert_contents(export_dir, self.content)
def test_merge_trees(self):
    merged_tree = yield from self.cache.merge_trees(
        self.content_tree, self.content_tree, 'subdir')
    expected_content = dict(self.content)
    for path, content in self.content.items():
        expected_content[os.path.join('subdir', path)] = content
    export_dir = create_dir()
    yield from self.cache.export_tree(merged_tree, export_dir)
    assert_contents(export_dir, expected_content)
    with self.assertRaises(peru.cache.MergeConflictError):
        # subdir/ is already populated, so this merge should throw.
        yield from self.cache.merge_trees(
            merged_tree, self.content_tree, 'subdir')
def test_merge_from_multimap(self):
    # This represents a single key mapped to a list of values in YAML, for
    # example:
    #     imports:
    #         foo:
    #           - path1
    #           - path2
    imports = {'foo': ('path1', 'path2')}
    target_trees = {'foo': self.content_tree}
    merged_tree = merge_imports_tree(self.cache, imports, target_trees)
    merged_dir = create_dir()
    self.cache.export_tree(merged_tree, merged_dir)
    expected_content = {'path1/a': 'a', 'path2/a': 'a'}
    assert_contents(merged_dir, expected_content)
def test_merge_from_list(self):
    # This represents a list of key-value pairs in YAML, for example:
    #     imports:
    #       - foo: path1
    #       - foo: path2
    imports_list = [{'foo': 'path1'}, {'foo': 'path2'}]
    imports = build_imports(imports_list)
    target_trees = {'foo': self.content_tree}
    merged_tree = merge_imports_tree(self.cache, imports, target_trees)
    merged_dir = create_dir()
    self.cache.export_tree(merged_tree, merged_dir)
    expected_content = {'path1/a': 'a', 'path2/a': 'a'}
    assert_contents(merged_dir, expected_content)
async def test_merge_from_multimap(self):
    # This represents a single key mapped to a list of values in YAML, for
    # example:
    #     imports:
    #         foo:
    #           - path1
    #           - path2
    imports = {'foo': ('path1', 'path2')}
    target_trees = {'foo': self.content_tree}
    merged_tree = await merge_imports_tree(self.cache, imports, target_trees)
    merged_dir = create_dir()
    await self.cache.export_tree(merged_tree, merged_dir)
    expected_content = {'path1/a': 'a', 'path2/a': 'a'}
    assert_contents(merged_dir, expected_content)
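
# A hedged sketch of the normalization that build_imports appears to perform
# in the merge tests above, inferred only from the inputs and expected
# outputs used there: the YAML list form and the map/multimap forms both
# collapse into a mapping from module name to a tuple of target paths. This
# is illustrative, not peru's actual code.
def sketch_build_imports(raw):
    pairs = []
    if isinstance(raw, list):
        for entry in raw:
            pairs.extend(entry.items())
    else:
        pairs.extend(raw.items())
    multimap = {}
    for name, target in pairs:
        targets = (target,) if isinstance(target, str) else tuple(target)
        multimap[name] = multimap.get(name, ()) + targets
    return multimap
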
def test_sync_from_subdir(self):
    peru_yaml = dedent('''\
        # Use a relative module path, to make sure it gets resolved
        # relative to the project root and not the dir where peru was
        # called.
        cp module relative_foo:
            path: {}
        imports:
            relative_foo: subdir
        '''.format(os.path.relpath(self.module_dir, start=self.test_dir)))
    shared.write_files(self.test_dir, {'peru.yaml': peru_yaml})
    subdir = os.path.join(self.test_dir, 'a', 'b')
    peru.compat.makedirs(subdir)
    run_peru_command(['sync'], subdir)
    self.assertTrue(os.path.isdir(os.path.join(self.test_dir, '.peru')),
                    msg=".peru dir didn't end up in the right place")
    assert_contents(os.path.join(self.test_dir, 'subdir'), {'foo': 'bar'})
def test_assert_contents(self):
    content = {'foo': 'a', 'bar/baz': 'b'}
    test_dir = shared.create_dir(content)
    shared.assert_contents(test_dir, content)
    shared.write_files(test_dir, {'bing': 'c'})
    with self.assertRaises(AssertionError):
        shared.assert_contents(test_dir, content)
    shared.assert_contents(test_dir, content, excludes=['bing'])
    try:
        shared.assert_contents(test_dir, content, excludes=['foo'])
    except AssertionError as e:
        assert e.args[0].startswith('EXPECTED FILES WERE EXCLUDED')
async def test_git_attributes(self):
    # Setting the 'text' attribute when files contain Windows-style
    # newlines makes them appear dirty, which leads to errors where the
    # cache thinks its own checked out files are dirty. (I don't honestly
    # understand all the details.) The cache's git calls will read
    # .gitattributes in the sync dir, so we need to set our own attributes
    # in the $GIT_DIR to override. Everything in this test has to be done
    # in binary mode or it will all get muddled up when we actually run it
    # on Windows.
    windows_content = {'file': b'windows newline\r\n'}
    gitattributes_content = {'.gitattributes': b'* text'}
    both_content = windows_content.copy()
    both_content.update(gitattributes_content)
    windows_dir = create_dir(windows_content)
    tree = await self.cache.import_tree(windows_dir)
    out_dir = create_dir(gitattributes_content)
    # This export fails without the fix mentioned above.
    await self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, both_content, binary=True)
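
# A hedged sketch of the $GIT_DIR override described in the comment above:
# git gives $GIT_DIR/info/attributes higher precedence than any
# .gitattributes in the work tree, so a blanket "-text" rule there keeps
# CRLF content from looking dirty. The helper and the git_dir layout are
# assumptions for illustration, not peru's actual cache internals.
import os


def sketch_disable_text_attribute(git_dir):
    info_dir = os.path.join(git_dir, 'info')
    os.makedirs(info_dir, exist_ok=True)
    with open(os.path.join(info_dir, 'attributes'), 'w') as f:
        f.write('* -text\n')
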
def test_sync_from_subdir(self):
    module_dir = shared.create_dir({'foo': 'bar'})
    self.write_yaml(
        '''\
        # Use a relative module path, to make sure it gets resolved
        # relative to the project root and not the dir where peru was
        # called.
        cp module relative_foo:
            path: {}
        imports:
            relative_foo: subdir
        ''',
        os.path.relpath(module_dir, start=self.test_dir))
    subdir = os.path.join(self.test_dir, 'a', 'b')
    peru.compat.makedirs(subdir)
    run_peru_command(['sync'], subdir)
    self.assertTrue(os.path.isdir(os.path.join(self.test_dir, '.peru')),
                    msg=".peru dir didn't end up in the right place")
    assert_contents(os.path.join(self.test_dir, 'subdir'), {'foo': 'bar'})
def test_assert_contents(self):
    content = {"foo": "a", "bar/baz": "b"}
    test_dir = shared.create_dir(content)
    shared.assert_contents(test_dir, content)
    shared.write_files(test_dir, {"bing": "c"})
    with self.assertRaises(AssertionError):
        shared.assert_contents(test_dir, content)
    shared.assert_contents(test_dir, content, excludes=["bing"])
    try:
        shared.assert_contents(test_dir, content, excludes=["foo"])
    except AssertionError as e:
        assert e.args[0].startswith("EXPECTED FILES WERE EXCLUDED")
def test_modify_tree(self):
    base_dir = create_dir({'a': 'foo', 'b/c': 'bar'})
    base_tree = yield from self.cache.import_tree(base_dir)
    entries = yield from self.cache.ls_tree(base_tree, recursive=True)

    cases = []
    # Test regular deletions.
    cases.append(({'a': None}, {'b/c': 'bar'}))
    cases.append(({'a//./': None},  # Paths should get normalized.
                  {'b/c': 'bar'}))
    cases.append(({'b': None}, {'a': 'foo'}))
    cases.append(({'b/c': None}, {'a': 'foo'}))
    cases.append(({'x/y/z': None}, {'a': 'foo', 'b/c': 'bar'}))
    cases.append(({'b/x': None}, {'a': 'foo', 'b/c': 'bar'}))
    # Test the case where we try to delete below a file.
    cases.append(({'a/x': None}, {'a': 'foo', 'b/c': 'bar'}))
    # Test insertions.
    cases.append(({'b': entries['a']}, {'a': 'foo', 'b': 'foo'}))
    cases.append(({'x': entries['a']},
                  {'a': 'foo', 'x': 'foo', 'b/c': 'bar'}))
    cases.append(({'x': entries['b']},
                  {'a': 'foo', 'b/c': 'bar', 'x/c': 'bar'}))
    cases.append(({'d/e/f': entries['a']},
                  {'a': 'foo', 'b/c': 'bar', 'd/e/f': 'foo'}))
    cases.append(({'d/e/f': entries['b']},
                  {'a': 'foo', 'b/c': 'bar', 'd/e/f/c': 'bar'}))

    for modifications, result in cases:
        modified_tree = yield from self.cache.modify_tree(
            base_tree, modifications)
        modified_dir = create_dir()
        yield from self.cache.export_tree(modified_tree, modified_dir)
        error_msg = ('modify_tree failed to give result {} '
                     'for modifications {}'.format(
                         repr(result), repr(modifications)))
        assert_contents(modified_dir, result, message=error_msg)
async def test_import_with_files(self):
    # Include a leading colon, to check that we escape pathspecs correctly
    # with a leading ./
    all_content = {'foo': '', 'bar': '', COLON + 'baz/bing': ''}
    test_dir = create_dir(all_content)
    tree = await self.cache.import_tree(
        test_dir, picks=['foo', COLON + 'baz'])
    expected_content = {'foo': '', COLON + 'baz/bing': ''}
    out_dir = create_dir()
    await self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)

    # Repeat the same test with an exclude, again with a colon.
    tree = await self.cache.import_tree(
        test_dir, excludes=['foo', COLON + 'baz'])
    expected_content = {'bar': ''}
    out_dir = create_dir()
    await self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)
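
# A minimal sketch of the escaping the comment above refers to: in git
# pathspecs a leading ':' introduces "magic" syntax, so prefixing './'
# keeps git from interpreting the colon as pathspec magic. This helper is
# illustrative only, not peru's actual implementation.
def sketch_escape_pathspec(path):
    return './' + path if path.startswith(':') else path
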
async def test_import_with_files(self):
    # Include a leading colon, to check that we escape pathspecs correctly
    # with a leading ./
    all_content = {'foo': '', 'bar': '', COLON + 'baz/bing': ''}
    test_dir = create_dir(all_content)
    tree = await self.cache.import_tree(test_dir,
                                        picks=['foo', COLON + 'baz'])
    expected_content = {'foo': '', COLON + 'baz/bing': ''}
    out_dir = create_dir()
    await self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)

    # Repeat the same test with an exclude, again with a colon.
    tree = await self.cache.import_tree(test_dir,
                                        excludes=['foo', COLON + 'baz'])
    expected_content = {'bar': ''}
    out_dir = create_dir()
    await self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, expected_content)
def test_git_attributes(self):
    # Setting the 'text' attribute when files contain Windows-style
    # newlines makes them appear dirty, which leads to errors where the
    # cache thinks its own checked out files are dirty. (I don't honestly
    # understand all the details.) The cache's git calls will read
    # .gitattributes in the sync dir, so we need to set our own attributes
    # in the $GIT_DIR to override. Everything in this test has to be done
    # in binary mode or it will all get muddled up when we actually run it
    # on Windows.
    windows_content = {'file': b'windows newline\r\n'}
    gitattributes_content = {'.gitattributes': b'* text'}
    both_content = windows_content.copy()
    both_content.update(gitattributes_content)
    windows_dir = create_dir(windows_content)
    tree = yield from self.cache.import_tree(windows_dir)
    out_dir = create_dir(gitattributes_content)
    # This export fails without the fix mentioned above.
    yield from self.cache.export_tree(tree, out_dir)
    assert_contents(out_dir, both_content, binary=True)
async def test_previous_tree(self):
    export_dir = create_dir(self.content)

    # Create some new content.
    new_content = self.content.copy()
    new_content['a'] += ' different'
    new_content['newfile'] = 'newfile stuff'
    new_dir = create_dir(new_content)
    new_tree = await self.cache.import_tree(new_dir)

    # Now use cache.export_tree to move from the original content to the
    # different content.
    await self.cache.export_tree(new_tree, export_dir,
                                 previous_tree=self.content_tree)
    assert_contents(export_dir, new_content)

    # Now do the same thing again, but use a dirty working copy. This
    # should cause an error.
    dirty_content = self.content.copy()
    dirty_content['a'] += ' dirty'
    dirty_dir = create_dir(dirty_content)
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(new_tree, dirty_dir,
                                     previous_tree=self.content_tree)

    # But if the file is simply missing, it should work.
    os.remove(os.path.join(dirty_dir, 'a'))
    await self.cache.export_tree(new_tree, dirty_dir,
                                 previous_tree=self.content_tree)
    assert_contents(dirty_dir, new_content)

    # Make sure we get an error even if the dirty file is unchanged between
    # the previous tree and the new one.
    no_conflict_dirty_content = self.content.copy()
    no_conflict_dirty_content['b/c'] += ' dirty'
    no_conflict_dirty_dir = create_dir(no_conflict_dirty_content)
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(new_tree, no_conflict_dirty_dir,
                                     previous_tree=self.content_tree)
def test_export_with_existing_files(self):
    # Create a dir with an existing file that doesn't conflict.
    more_content = {'untracked': 'stuff'}
    export_dir = create_dir(more_content)
    yield from self.cache.export_tree(self.content_tree, export_dir)
    expected_content = self.content.copy()
    expected_content.update(more_content)
    assert_contents(export_dir, expected_content)

    # But if we try to export twice, the export_dir will now have
    # conflicting files, and export_tree() should throw.
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        yield from self.cache.export_tree(self.content_tree, export_dir)

    # By default, git's checkout safety doesn't protect files that are
    # .gitignore'd. Make sure we still throw the right errors in the
    # presence of a .gitignore file.
    with open(os.path.join(export_dir, '.gitignore'), 'w') as f:
        f.write('*\n')  # .gitignore everything
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        yield from self.cache.export_tree(self.content_tree, export_dir)
def test_reup_sync(self):
    yaml_with_imports = dedent('''\
        imports:
            foo: ./
            bar: ./
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
        ''').format(self.foo_dir, self.foo_master, self.bar_dir)
    test_dir = shared.create_dir({'peru.yaml': yaml_with_imports})
    # First reup without the sync.
    run_peru_command(['reup', 'foo', '--nosync'], test_dir)
    assert_contents(test_dir, {}, excludes=['.peru', 'peru.yaml'])
    # Now do it with the sync.
    run_peru_command(['reup', 'foo', '--quiet'], test_dir)
    assert_contents(test_dir, {'a': 'b'}, excludes=['.peru', 'peru.yaml'])
async def test_export_with_existing_files(self):
    # Create a dir with an existing file that doesn't conflict.
    more_content = {'untracked': 'stuff'}
    export_dir = create_dir(more_content)
    await self.cache.export_tree(self.content_tree, export_dir)
    expected_content = self.content.copy()
    expected_content.update(more_content)
    assert_contents(export_dir, expected_content)

    # But if we try to export twice, the export_dir will now have
    # conflicting files, and export_tree() should throw.
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(self.content_tree, export_dir)

    # By default, git's checkout safety doesn't protect files that are
    # .gitignore'd. Make sure we still throw the right errors in the
    # presence of a .gitignore file.
    with open(os.path.join(export_dir, '.gitignore'), 'w') as f:
        f.write('*\n')  # .gitignore everything
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(self.content_tree, export_dir)
async def test_previous_tree(self):
    export_dir = create_dir(self.content)

    # Create some new content.
    new_content = self.content.copy()
    new_content['a'] += ' different'
    new_content['newfile'] = 'newfile stuff'
    new_dir = create_dir(new_content)
    new_tree = await self.cache.import_tree(new_dir)

    # Now use cache.export_tree to move from the original content to the
    # different content.
    await self.cache.export_tree(
        new_tree, export_dir, previous_tree=self.content_tree)
    assert_contents(export_dir, new_content)

    # Now do the same thing again, but use a dirty working copy. This
    # should cause an error.
    dirty_content = self.content.copy()
    dirty_content['a'] += ' dirty'
    dirty_dir = create_dir(dirty_content)
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(
            new_tree, dirty_dir, previous_tree=self.content_tree)

    # But if the file is simply missing, it should work.
    os.remove(os.path.join(dirty_dir, 'a'))
    await self.cache.export_tree(
        new_tree, dirty_dir, previous_tree=self.content_tree)
    assert_contents(dirty_dir, new_content)

    # Make sure we get an error even if the dirty file is unchanged between
    # the previous tree and the new one.
    no_conflict_dirty_content = self.content.copy()
    no_conflict_dirty_content['b/c'] += ' dirty'
    no_conflict_dirty_dir = create_dir(no_conflict_dirty_content)
    with self.assertRaises(peru.cache.DirtyWorkingCopyError):
        await self.cache.export_tree(
            new_tree, no_conflict_dirty_dir,
            previous_tree=self.content_tree)
def test_single_reup(self):
    yaml_without_imports = dedent('''\
        git module foo:
            url: {}
            rev: master
        git module bar:
            url: {}
            reup: otherbranch
        ''').format(self.foo_dir, self.bar_dir)
    test_dir = shared.create_dir({'peru.yaml': yaml_without_imports})
    expected = dedent('''\
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
        ''').format(self.foo_dir, self.foo_master, self.bar_dir)
    run_peru_command(['reup', 'foo'], test_dir)
    assert_contents(test_dir, {'peru.yaml': expected}, excludes=['.peru'])
def test_reup_all(self):
    yaml_with_imports = dedent('''\
        imports:
            foo: ./
            bar: ./
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
        ''').format(self.foo_dir, self.foo_master, self.bar_dir)
    test_dir = shared.create_dir({'peru.yaml': yaml_with_imports})
    expected = dedent('''\
        imports:
            foo: ./
            bar: ./
        git module foo:
            url: {}
            rev: {}
        git module bar:
            url: {}
            reup: otherbranch
            rev: {}
        ''').format(self.foo_dir, self.foo_master, self.bar_dir,
                    self.bar_otherbranch)
    run_peru_command(['reup'], test_dir)
    # This time we finally pull in barfile.
    assert_contents(test_dir, {
        'peru.yaml': expected,
        'a': 'b',
        'barfile': 'new'
    }, excludes=['.peru'])
async def test_multiple_imports(self):
    new_content = {'fee/fi': 'fo fum'}
    new_tree = await self.cache.import_tree(create_dir(new_content))
    export_dir = create_dir()
    await self.cache.export_tree(new_tree, export_dir)
    assert_contents(export_dir, new_content)
def do_integration_test(self, args, expected, cwd=None, **kwargs):
    if not cwd:
        cwd = self.test_dir
    run_peru_command(args, cwd, **kwargs)
    assert_contents(self.test_dir, expected,
                    excludes=['peru.yaml', '.peru'])
async def test_basic_export(self):
    export_dir = create_dir()
    await self.cache.export_tree(self.content_tree, export_dir)
    assert_contents(export_dir, self.content)