Example #1
    def test_identical_plugin_cache_fields(self):
        # Plugins that use caching also need to avoid running in parallel, if
        # their cache directories are the same. The noop_cache plugin (created
        # for this test) uses the path field (but not the nonce field) in its
        # plugin cache key. Check that these two modules are not fetched in
        # parallel, even though their module fields aren't exactly the same.
        foo = shared.create_dir()
        peru_yaml = dedent('''\
            imports:
                foo1: ./
                foo2: ./

            noop_cache module foo1:
                path: {}
                # nonce is ignored, but it makes foo1 different from foo2 as
                # far as the module cache is concerned
                nonce: '1'

            noop_cache module foo2:
                path: {}
                nonce: '2'
            '''.format(foo, foo))
        test_dir = shared.create_dir({'peru.yaml': peru_yaml})
        shared.run_peru_command(['sync'], test_dir)
        assert_parallel(1)
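The comment in that test hinges on how a plugin cache key is built from only some of a module's fields. Below is a minimal sketch of that idea, not peru's actual implementation: a hypothetical plugin_cache_key helper that hashes only the cache-relevant fields (here path but not nonce), so foo1 and foo2 collapse onto one key and have to share a cache lock.

import hashlib
import json


def plugin_cache_key(fields, cache_fields=('path',)):
    # Hypothetical helper: hash only the cache-relevant fields, so modules
    # that differ in ignored fields (like 'nonce') share one cache key.
    relevant = {name: fields[name] for name in cache_fields if name in fields}
    blob = json.dumps(relevant, sort_keys=True).encode()
    return hashlib.sha1(blob).hexdigest()


foo1 = {'path': '/some/dir', 'nonce': '1'}
foo2 = {'path': '/some/dir', 'nonce': '2'}
assert plugin_cache_key(foo1) == plugin_cache_key(foo2)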
Example #2
    def test_import_module_defined_in_another_module(self):
        # Project B contains project A
        dir_a = shared.create_dir({'afile': 'stuff'})
        dir_b = shared.create_dir()
        # Create the peru.yaml file for B.
        self.write_yaml('''\
            cp module a:
                path: {}
            ''', dir_a, dir=dir_b)
        # Now create the peru.yaml file in the actual test project.
        self.write_yaml('''\
            imports:
                b.a: a_via_b/

            cp module b:
                path: {}
            ''', dir_b)
        self.do_integration_test(['sync'], {'a_via_b/afile': 'stuff'})
        # Test the error message from an invalid module.
        self.write_yaml('''\
            imports:
                b.missing_module: some_path

            cp module b:
                path: {}
            ''', dir_b)
        try:
            self.do_integration_test(['sync'], {})
        except peru.error.PrintableError as e:
            assert 'b.missing_module' in e.message
        else:
            assert False, 'should throw invalid module error'
Example #3
 def test_import_with_gitignore(self):
     # Make sure our git imports don't get confused by .gitignore files.
     new_content = {'fee/fi': 'fo fum', '.gitignore': 'fee/'}
     new_tree = yield from self.cache.import_tree(create_dir(new_content))
     export_dir = create_dir()
     yield from self.cache.export_tree(new_tree, export_dir)
     assert_contents(export_dir, new_content)
Example #4
 def test_import_with_gitignore(self):
     # Make sure our git imports don't get confused by .gitignore files.
     new_content = {"fee/fi": "fo fum", ".gitignore": "fee/"}
     new_tree = self.cache.import_tree(shared.create_dir(new_content))
     export_dir = shared.create_dir()
     self.cache.export_tree(new_tree, export_dir)
     assert_contents(export_dir, new_content)
Example #5
 def test_flags_override_vars(self):
     flag_cache_dir = shared.create_dir()
     env_cache_dir = shared.create_dir()
     shared.run_peru_command(['--cache-dir', flag_cache_dir, 'sync'],
                             self.cwd,
                             env={'PERU_CACHE_DIR': env_cache_dir})
     self.assert_success(self.project_dir, self.state_dir, flag_cache_dir)
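This test pins down the precedence the CLI is expected to follow: an explicit --cache-dir flag wins over the PERU_CACHE_DIR environment variable. As a rough sketch of that rule only (the names resolve_cache_dir and default_cache_dir are placeholders, not peru's code):

import os


def resolve_cache_dir(flag_value, default_cache_dir='.peru/cache'):
    # Flag beats environment variable, which beats the built-in default.
    if flag_value is not None:
        return flag_value
    return os.environ.get('PERU_CACHE_DIR', default_cache_dir)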
Example #6
    def test_override(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml('''\
            cp module foo:
                path: {}

            imports:
                foo: ./
            ''', module_dir)
        override_dir = shared.create_dir({'foo': 'override'})
        # Set the override.
        run_peru_command(['override', 'add', 'foo', override_dir],
                         self.test_dir)
        # Confirm that the override is configured.
        output = run_peru_command(['override'], self.test_dir)
        self.assertEqual(output, 'foo: {}\n'.format(override_dir))
        # Make sure 'override list' gives the same output as 'override'.
        output = run_peru_command(['override', 'list'], self.test_dir)
        self.assertEqual(output, 'foo: {}\n'.format(override_dir))
        # Run the sync and confirm that the override worked.
        self.do_integration_test(['sync'], {'foo': 'override'})
        # Delete the override.
        run_peru_command(['override', 'delete', 'foo'], self.test_dir)
        # Confirm that the override was deleted.
        output = run_peru_command(['override'], self.test_dir)
        self.assertEqual(output, '')
        # Rerun the sync and confirm the original content is back.
        self.do_integration_test(['sync'], {'foo': 'bar'})
Example #7
 def setUp(self):
     self.cache_dir = shared.create_dir()
     self.cache = cache.Cache(self.cache_dir)
     self.content = {'a': 'foo', 'b/c': 'bar'}
     self.content_dir = shared.create_dir(self.content)
     self.content_tree = self.cache.import_tree(self.content_dir)
     self.entries = self.cache.ls_tree(self.content_tree, recursive=True)
Example #8
    def test_override(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml('''\
            cp module foo:
                path: {}

            imports:
                foo: ./
            ''', module_dir)
        override_dir = shared.create_dir({'foo': 'override'})
        # Set the override.
        run_peru_command(['override', 'add', 'foo', override_dir],
                         self.test_dir)
        # Confirm that the override is configured.
        output = run_peru_command(['override'], self.test_dir)
        self.assertEqual(output, 'foo: {}\n'.format(override_dir))
        # Make sure 'override list' gives the same output as 'override'.
        output = run_peru_command(['override', 'list'], self.test_dir)
        self.assertEqual(output, 'foo: {}\n'.format(override_dir))
        # Run the sync with --no-overrides and confirm nothing changes. Also
        # check that there's no overrides-related output.
        output = self.do_integration_test(['sync', '--no-overrides'],
                                          {'foo': 'bar'})
        self.assertNotIn('overrides', output)
        # Now run the sync normally and confirm that the override worked. Also
        # confirm that we mentioned the override in output.
        output = self.do_integration_test(['sync'], {'foo': 'override'})
        self.assertIn('overrides', output)
        # Delete the override.
        run_peru_command(['override', 'delete', 'foo'], self.test_dir)
        # Confirm that the override was deleted.
        output = run_peru_command(['override'], self.test_dir)
        self.assertEqual(output, '')
        # Rerun the sync and confirm the original content is back.
        self.do_integration_test(['sync'], {'foo': 'bar'})
Example #9
 def setUp(self):
     self.cache_dir = shared.create_dir()
     self.cache = yield from cache.Cache(self.cache_dir)
     self.content = {"a": "foo", "b/c": "bar"}
     self.content_dir = shared.create_dir(self.content)
     self.content_tree = yield from self.cache.import_tree(self.content_dir)
     self.entries = yield from self.cache.ls_tree(
         self.content_tree, recursive=True)
Example #10
    def test_touched_file(self):
        # Bumping the mtime on a file makes it appear dirty to `git
        # diff-files`. However, when the index is refreshed with `git
        # update-index`, the dirtiness should go away. This test guarantees
        # that we do that refresh, both with and without a cached index file.
        # Note that because the index file only has an mtime resolution of 1
        # second, we have to artificially inflate the mtime to guarantee that
        # the file will actually appear dirty.
        export_dir = create_dir()
        a_path = os.path.join(export_dir, 'a')
        t = time.time()

        def bump_mtime_one_minute():
            nonlocal t
            t += 60  # Add a whole minute to the mtime we set.
            os.utime(a_path, (t, t))

        # Do the first export.
        yield from self.cache.export_tree(self.content_tree, export_dir)
        # Touch a and rerun the export with no cached index.
        bump_mtime_one_minute()
        yield from self.cache.export_tree(
            self.content_tree, export_dir, previous_tree=self.content_tree)
        # Create a cached index file.
        index_dir = create_dir()
        index_file = os.path.join(index_dir, 'test_index_file')
        yield from self.cache.export_tree(
            self.content_tree, export_dir, previous_tree=self.content_tree,
            previous_index_file=index_file)
        # Finally, touch a again and rerun the export using the cached index.
        bump_mtime_one_minute()
        yield from self.cache.export_tree(
            self.content_tree, export_dir, previous_tree=self.content_tree,
            previous_index_file=index_file)
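The refresh described in that comment corresponds to git's own update-index --refresh, which recomputes cached stat information so an mtime-only change stops looking dirty to git diff-files. A standalone illustration of that step, assuming a plain git checkout rather than peru's internal index handling:

import subprocess


def refresh_index(repo_dir):
    # Recompute cached stat info so files whose mtime changed but whose
    # content did not are no longer reported as dirty by `git diff-files`.
    subprocess.run(['git', 'update-index', '-q', '--refresh'],
                   cwd=repo_dir, check=False)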
Example #11
 def test_keyval(self):
     root = shared.create_dir()
     tmp_dir = shared.create_dir()
     keyval = KeyVal(root, tmp_dir)
     key = "mykey"
     # keyval should be empty
     self.assertFalse(key in keyval)
     self.assertSetEqual(set(keyval), set())
     # set a key
     keyval[key] = "myval"
     self.assertEqual(keyval[key], "myval")
     self.assertTrue(key in keyval)
     self.assertSetEqual(set(keyval), {key})
     # overwrite the value
     keyval[key] = "anotherval"
     self.assertEqual(keyval[key], "anotherval")
     # instantiate a second keyval on the same dir, should have same content
     another_keyval = KeyVal(root, tmp_dir)
     self.assertTrue(key in another_keyval)
     self.assertEqual(another_keyval[key], "anotherval")
     self.assertSetEqual(set(another_keyval), {key})
     # test deletions
     del keyval[key]
     self.assertFalse(key in keyval)
     self.assertFalse(key in another_keyval)
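The test above effectively documents the KeyVal contract: dict-style get, set, delete, contains, and iteration backed by files under root, so a second instance pointed at the same directory sees the same data. A stripped-down sketch with roughly that observable behavior (not peru's implementation; it omits the atomic-write-via-tmp_dir detail):

import os


class MiniKeyVal:
    # One file per key under root; values are the file contents.
    def __init__(self, root):
        self.root = root
        os.makedirs(root, exist_ok=True)

    def _path(self, key):
        return os.path.join(self.root, key)

    def __contains__(self, key):
        return os.path.isfile(self._path(key))

    def __iter__(self):
        return iter(os.listdir(self.root))

    def __getitem__(self, key):
        with open(self._path(key)) as f:
            return f.read()

    def __setitem__(self, key, val):
        with open(self._path(key), 'w') as f:
            f.write(val)

    def __delitem__(self, key):
        os.remove(self._path(key))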
Example #12
    def setUp(self):
        self.cache_dir = create_dir()
        self.cache = Cache(self.cache_dir)

        # These tests use this simple one-file tree as module contents.
        content = {'a': 'a'}
        content_dir = create_dir(content)
        self.content_tree = self.cache.import_tree(content_dir)
Example #13
 def setUp(self):
     self.cache = peru.cache.Cache(shared.create_dir())
     self.content = {
         "a": "foo",
         "b/c": "bar",
         "b/d": "baz",
     }
     self.content_dir = shared.create_dir(self.content)
     self.content_tree = self.cache.import_tree(self.content_dir)
Example #14
 def setUp(self):
     self.cache = yield from peru.cache.Cache(create_dir())
     self.content = {
         'a': 'foo',
         'b/c': 'bar',
         'b/d': 'baz',
     }
     self.content_dir = create_dir(self.content)
     self.content_tree = yield from self.cache.import_tree(self.content_dir)
Example #15
 async def setUp(self):
     self.cache_dir = shared.create_dir()
     self.cache = await cache.Cache(self.cache_dir)
     # Include a leading colon to test that we prepend ./ to pathspecs.
     self.content = {'a': 'foo', 'b/c': 'bar', COLON + 'd': 'baz'}
     self.content_dir = shared.create_dir(self.content)
     self.content_tree = await self.cache.import_tree(self.content_dir)
     self.entries = await self.cache.ls_tree(
         self.content_tree, recursive=True)
Example #16
 async def setUp(self):
     self.cache = await peru.cache.Cache(create_dir())
     self.content = {
         'a': 'foo',
         'b/c': 'bar',
         'b/d': 'baz',
     }
     self.content_dir = create_dir(self.content)
     self.content_tree = await self.cache.import_tree(self.content_dir)
Example #17
 def test_setting_all_flags(self):
     cwd = shared.create_dir()
     sync_dir = shared.create_dir()
     state_dir = shared.create_dir()
     cache_dir = shared.create_dir()
     shared.run_peru_command(
         ['--file', self.peru_file, '--sync-dir', sync_dir,
          '--state-dir', state_dir, '--cache-dir', cache_dir, 'sync'],
         cwd)
     self.assert_success(sync_dir, state_dir, cache_dir)
Example #18
 def setUp(self):
     self.content = {"some": "stuff", "foo/bar": "baz"}
     self.content_dir = shared.create_dir(self.content)
     self.cache_root = shared.create_dir()
     self.plugin_context = plugin.PluginContext(
         cwd='.',
         plugin_cache_root=self.cache_root,
         plugin_paths=(),
         parallelism_semaphore=asyncio.BoundedSemaphore(
             plugin.DEFAULT_PARALLEL_FETCH_LIMIT),
         plugin_cache_locks=defaultdict(asyncio.Lock))
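The two pieces of that context, a bounded semaphore and a defaultdict of per-cache-key locks, are enough to express both constraints the parallelism tests check: an overall fetch limit and mutual exclusion between modules that share a plugin cache directory. A hedged sketch of how a fetch might be gated (gated_fetch and run_fetch are illustrative names, and the limit is a stand-in for plugin.DEFAULT_PARALLEL_FETCH_LIMIT):

import asyncio
from collections import defaultdict

PARALLEL_FETCH_LIMIT = 10  # stand-in value, not necessarily peru's default

parallelism_semaphore = asyncio.BoundedSemaphore(PARALLEL_FETCH_LIMIT)
plugin_cache_locks = defaultdict(asyncio.Lock)


async def gated_fetch(cache_key, run_fetch):
    # Respect the global fetch limit, then serialize any fetches that share a
    # plugin cache key (e.g. the same 'path' field in the noop_cache test).
    async with parallelism_semaphore:
        async with plugin_cache_locks[cache_key]:
            return await run_fetch()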
Example #19
 def test_import_with_files(self):
     all_content = {'foo': '',
                    'bar': '',
                    'baz/bing': ''}
     test_dir = create_dir(all_content)
     tree = self.cache.import_tree(test_dir, picks=['foo', 'baz'])
     expected_content = {'foo': '',
                         'baz/bing': ''}
     out_dir = create_dir()
     self.cache.export_tree(tree, out_dir)
     assert_contents(out_dir, expected_content)
Example #20
 def test_merge_with_deep_prefix(self):
     '''This test was inspired by a bug on Windows where we would give git a
     backslash-separated merge prefix, even though git demands forward slash
     as a path separator.'''
     content = {'file': 'stuff'}
     content_dir = create_dir(content)
     tree = yield from self.cache.import_tree(content_dir)
     prefixed_tree = yield from self.cache.merge_trees(None, tree, 'a/b/')
     export_dir = create_dir()
     yield from self.cache.export_tree(prefixed_tree, export_dir)
     assert_contents(export_dir, {'a/b/file': 'stuff'})
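The Windows bug described in that docstring comes down to normalizing path separators before handing a prefix to git. A small sketch of the kind of normalization involved (not necessarily how peru does it):

import os


def git_prefix(path):
    # Git expects forward slashes in tree prefixes, even on Windows, plus a
    # trailing slash to mark a directory.
    prefix = path.replace(os.sep, '/')
    return prefix if prefix.endswith('/') else prefix + '/'


assert git_prefix(os.path.join('a', 'b')) == 'a/b/'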
Example #21
    def test_recursive_imports(self):
        # Project B contains project A
        dir_a = shared.create_dir({'afile': 'aaa'})
        dir_b = shared.create_dir({'exports/bfile': 'bbb'})
        # Create the peru.yaml file for B.
        self.write_yaml(
            '''\
            imports:
                a: exports/where_b_put_a
            cp module a:
                path: {}
            ''',
            dir_a,
            dir=dir_b)
        # Now create the peru.yaml file in the actual test project.
        self.write_yaml(
            '''\
            imports:
                b: where_c_put_b

            cp module b:
                # recursive is false by default
                path: {}
                export: exports  # omit the peru.yaml file from b
            ''', dir_b)
        self.do_integration_test(['sync'], {'where_c_put_b/bfile': 'bbb'})

        # Repeat the same test with explicit 'recursive' settings.
        self.write_yaml(
            '''\
            imports:
                b: where_c_put_b

            cp module b:
                path: {}
                pick: exports/where_b_put_a
                export: exports  # omit the peru.yaml file from b
                recursive: true
            ''', dir_b)
        self.do_integration_test(['sync'],
                                 {'where_c_put_b/where_b_put_a/afile': 'aaa'})

        self.write_yaml(
            '''\
            imports:
                b: where_c_put_b

            cp module b:
                path: {}
                export: exports  # omit the peru.yaml file from b
                recursive: false
            ''', dir_b)
        self.do_integration_test(['sync'], {'where_c_put_b/bfile': 'bbb'})
Example #22
 def test_create_dir(self):
     empty_dir = shared.create_dir()
     self.assertListEqual([], os.listdir(empty_dir))
     content = {Path("foo"): "a", Path("bar/baz"): "b"}
     content_dir = shared.create_dir(content)
     # Don't use read_dir, because the read_dir test relies on create_dir.
     actual_content = {}
     for p in Path(content_dir).glob("**/*"):
         if p.is_dir():
             continue
         with p.open() as f:
             actual_content[p.relative_to(content_dir)] = f.read()
     self.assertDictEqual(content, actual_content)
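test_create_dir pins down the helper's contract: with no argument it returns a fresh empty temp directory, and with a mapping of relative paths to strings it materializes that tree. A minimal stand-in with the same observable behavior (an assumption; peru's real helper may also handle cleanup and other bookkeeping this sketch skips):

import os
import tempfile


def create_dir(contents=None):
    # Make a new temp dir; optionally populate it from {relative path: text}.
    new_dir = tempfile.mkdtemp()
    for rel_path, text in (contents or {}).items():
        full_path = os.path.join(new_dir, str(rel_path))
        os.makedirs(os.path.dirname(full_path), exist_ok=True)
        with open(full_path, 'w') as f:
            f.write(text)
    return new_dir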
Example #23
 def setUp(self):
     self.foo_dir = shared.create_dir({'a': 'b'})
     self.foo_repo = shared.GitRepo(self.foo_dir)
     self.foo_master = self.foo_repo.run('git', 'rev-parse', 'master')
     self.bar_dir = shared.create_dir()
     self.bar_repo = shared.GitRepo(self.bar_dir)
     self.bar_repo.run('git', 'checkout', '-q', '-b', 'otherbranch')
     with open(os.path.join(self.bar_dir, 'barfile'), 'w') as f:
         f.write('new')
     self.bar_repo.run('git', 'add', '-A')
     self.bar_repo.run('git', 'commit', '-m', 'creating barfile')
     self.bar_otherbranch = self.bar_repo.run(
         'git', 'rev-parse', 'otherbranch')
Example #24
 def test_copy_nested(self):
     # Project B contains project A
     dir_a = shared.create_dir({'afile': 'stuff'})
     dir_b = shared.create_dir()
     # Create the peru.yaml file for B.
     self.write_yaml('''\
         cp module a:
             path: {}
         ''', dir_a, dir=dir_b)
     # Now create the peru.yaml file in the actual test project.
     self.write_yaml('''\
         cp module b:
             path: {}
         ''', dir_b)
     self.do_integration_test(['copy', 'b.a', '.'], {'afile': 'stuff'})
Example #25
    def test_lastimports_timestamp(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        template = '''\
            cp module foo:
                path: {}

            imports:
                foo: {}
            '''
        self.write_yaml(template, module_dir, "subdir1")
        self.do_integration_test(['sync'], {'subdir1/foo': 'bar'})
        lastimports = os.path.join(self.test_dir, '.peru', 'lastimports')

        def get_timestamp():
            return os.stat(lastimports).st_mtime

        original_timestamp = get_timestamp()

        # Running it again should be a no-op. Assert that the lastimports
        # timestamp hasn't changed.
        self.do_integration_test(['sync'], {'subdir1/foo': 'bar'})
        assert get_timestamp() == original_timestamp, \
            "Expected an unchanged timestamp."

        # Modify peru.yaml and sync again. This should change the timestamp.
        self.write_yaml(template, module_dir, "subdir2")
        self.do_integration_test(['sync'], {'subdir2/foo': 'bar'})
        assert get_timestamp() > original_timestamp, \
            "Expected an updated timestamp."
Example #26
    def test_override_after_regular_sync(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml('''\
            cp module foo:
                path: {}

            imports:
                foo: ./
            ''', module_dir)
        # First, do a regular sync.
        self.do_integration_test(['sync'], {'foo': 'bar'})
        # Now, add an override, and confirm that the new sync works.
        override_dir = shared.create_dir({'foo': 'override'})
        run_peru_command(['override', 'add', 'foo', override_dir],
                         self.test_dir)
        self.do_integration_test(['sync'], {'foo': 'override'})
Example #27
    def test_rule_with_files(self):
        content = {name: '' for name in [
            'foo',
            'bar',
            'special',
            'baz/bing',
            'baz/boo/a',
            'baz/boo/b',
        ]}
        self.module_dir = shared.create_dir(content)
        self.write_peru_yaml('''\
            cp module foo:
                path: {}

            rule filter:
                files:
                  - "**/*oo"
                  - special

            imports:
                foo|filter: ./
            ''')
        filtered_content = {name: '' for name in [
            'foo',
            'special',
            'baz/boo/a',
            'baz/boo/b',
        ]}
        self.do_integration_test(['sync'], filtered_content)
Example #28
    def test_relative_override_from_subdir(self):
        self.write_peru_yaml('''\
            empty module foo:

            imports:
                foo: ./
            ''')
        # Create some subdirs inside the project.
        subdir = os.path.join(self.test_dir, 'a', 'b')
        peru.compat.makedirs(subdir)
        # Create an override dir outside the project.
        override_dir = shared.create_dir({'foo': 'override'})
        # Set the override from inside subdir, using the relative path that's
        # valid from that location. Peru is going to store this path in
        # .peru/overrides/ at the root, so this tests that we resolve the
        # stored path properly.
        relative_path = os.path.relpath(override_dir, start=subdir)
        run_peru_command(['override', 'add', 'foo', relative_path],
                         subdir)
        # Confirm that the right path is stored on disk.
        expected_stored_path = os.path.relpath(
            override_dir, start=self.test_dir)
        with open(os.path.join(self.peru_dir, "overrides", "foo")) as f:
            actual_stored_path = f.read()
        self.assertEqual(expected_stored_path, actual_stored_path)
        # Confirm that `peru override` prints output that respects the cwd.
        output = run_peru_command(['override'], subdir)
        self.assertEqual("foo: {}\n".format(relative_path), output)
        # Confirm that syncing works.
        self.do_integration_test(['sync'], {'foo': 'override'}, cwd=subdir)
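The comment in that test spells out the invariant: an override path typed from a subdirectory gets stored relative to the project root, and later use has to re-resolve it from the root. A sketch of those two conversions (the helper names here are illustrative, not peru's):

import os


def store_override_path(user_path, cwd, project_root):
    # Normalize whatever the user typed (relative to their cwd) into a path
    # relative to the project root, which is what gets written to disk.
    absolute = os.path.abspath(os.path.join(cwd, user_path))
    return os.path.relpath(absolute, start=project_root)


def resolve_override_path(stored_path, project_root):
    # Stored paths are always interpreted relative to the project root.
    return os.path.normpath(os.path.join(project_root, stored_path))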
Example #29
    def test_rules_in_override(self):
        def _write_peru_yaml(target):
            self.write_peru_yaml('''\
                imports:
                    TARGET: ./

                cp module foo:
                    path: {}

                rule test_build:
                    build: |
                        printf fee >> fi
                        mkdir -p subdir
                        printf fo >> subdir/fum

                rule test_export:
                    export: subdir
                '''.replace('TARGET', target))

        _write_peru_yaml('foo|test_build')
        override_dir = shared.create_dir()
        run_peru_command(['override', 'add', 'foo', override_dir],
                         self.test_dir)

        # Syncing against a build rule should build in the override.
        self.do_integration_test(['sync'], {'fi': 'fee', 'subdir/fum': 'fo'})

        # Another sync should run the build again.
        self.do_integration_test(
            ['sync'], {'fi': 'feefee', 'subdir/fum': 'fofo'})

        # Make sure export dirs are respected in rules that come after.
        _write_peru_yaml('foo|test_build|test_export|test_build')
        self.do_integration_test(
            ['sync'], {'fum': 'fofofo', 'fi': 'fee', 'subdir/fum': 'fo'})
Example #30
    def test_rule_with_copied_files(self):
        content = {
            'foo': 'foo',
            'bar/baz': 'baz'
        }
        module_dir = shared.create_dir(content)
        self.write_yaml('''\
            cp module foo:
                path: {}
                copy:
                    foo: foo-copy
                    bar:
                      - bar-copy-1
                      - bar-copy-2

            imports:
                foo: ./
            ''', module_dir)
        copied_content = {
            'foo': 'foo',
            'bar/baz': 'baz',
            'foo-copy': 'foo',
            'bar-copy-1/baz': 'baz',
            'bar-copy-2/baz': 'baz'
        }
        self.do_integration_test(['sync'], copied_content)
Example #31
    def test_import_module_defined_in_another_module(self):
        # Project B contains project A
        dir_a = shared.create_dir({'afile': 'stuff'})
        dir_b = shared.create_dir()
        # Create the peru.yaml file for B.
        self.write_yaml('''\
            cp module a:
                path: {}
            ''',
                        dir_a,
                        dir=dir_b)
        # Now create the peru.yaml file in the actual test project.
        self.write_yaml(
            '''\
            imports:
                b.a: a_via_b/

            cp module b:
                path: {}
            ''', dir_b)
        self.do_integration_test(['sync'], {'a_via_b/afile': 'stuff'})
        # Test the error message from an invalid module.
        self.write_yaml(
            '''\
            imports:
                b.missing_module: some_path

            cp module b:
                path: {}
            ''', dir_b)
        try:
            self.do_integration_test(['sync'], {})
        except peru.error.PrintableError as e:
            assert 'b.missing_module' in e.message
        else:
            assert False, 'should throw invalid module error'
Example #32
 def test_copy(self):
     module_dir = shared.create_dir({'foo': 'bar'})
     self.write_yaml(
         '''\
         cp module foo:
             path: {}
         ''', module_dir)
     # Do a simple copy and check the results.
     self.do_integration_test(['copy', 'foo', '.'], {'foo': 'bar'})
     # Running the same copy again should fail, because of conflicts.
     with self.assertRaises(peru.cache.DirtyWorkingCopyError):
         self.do_integration_test(['copy', 'foo', '.'], {'foo': 'bar'})
     # Passing the --force flag should pave over conflicts.
     self.do_integration_test(['copy', '--force', 'foo', '.'],
                              {'foo': 'bar'})
Example #33
 def test_curl_plugin_fetch_archives(self):
     for type in 'zip', 'tar':
         fields = {
             'url': (shared.test_resources / ('with_exe.' + type)).as_uri(),
             'unpack': type,
         }
         fetch_dir = shared.create_dir()
         self.do_plugin_test('curl',
                             fields, {
                                 'not_exe.txt': 'Not executable.\n',
                                 'exe.sh': 'echo Executable.\n',
                             },
                             fetch_dir=fetch_dir)
         shared.assert_not_executable(os.path.join(fetch_dir,
                                                   'not_exe.txt'))
         shared.assert_executable(os.path.join(fetch_dir, 'exe.sh'))
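The interesting half of that assertion is the zip case: tar archives carry POSIX modes natively, while for zip files an extractor has to recover the mode from each entry's external_attr. A hedged sketch of that recovery step using the standard zipfile module (not necessarily the curl plugin's exact code):

import os
import zipfile


def extract_zip_with_modes(archive_path, dest_dir):
    # Extract, then restore any Unix permission bits stored in the zip entry.
    with zipfile.ZipFile(archive_path) as zf:
        for info in zf.infolist():
            extracted = zf.extract(info, dest_dir)
            mode = (info.external_attr >> 16) & 0o777
            if mode:
                os.chmod(extracted, mode)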
Example #34
    async def test_merge_from_multimap(self):
        # This represents a list of key-value pairs in YAML, for example:
        #     imports:
        #         foo:
        #           - path1
        #           - path2
        imports = {'foo': ('path1', 'path2')}
        target_trees = {'foo': self.content_tree}

        merged_tree = await merge_imports_tree(self.cache, imports,
                                               target_trees)

        merged_dir = create_dir()
        await self.cache.export_tree(merged_tree, merged_dir)
        expected_content = {'path1/a': 'a', 'path2/a': 'a'}
        assert_contents(merged_dir, expected_content)
Example #35
    def test_rule_with_dropped_files(self):
        content = {'foo': 'one', 'bar': 'two'}
        module_dir = shared.create_dir(content)
        self.write_yaml(
            '''\
            cp module foobar:
                path: {}

            rule filter:
                drop: foo

            imports:
                foobar|filter: ./
            ''', module_dir)
        filtered_content = {'bar': 'two'}
        self.do_integration_test(['sync'], filtered_content)
Example #36
 async def test_export_force_with_changed_files(self):
     export_dir = create_dir()
     await self.cache.export_tree(self.content_tree, export_dir)
     # If we dirty a file, a resync should fail.
     with open(os.path.join(export_dir, 'a'), 'w') as f:
         f.write('dirty')
     with self.assertRaises(peru.cache.DirtyWorkingCopyError):
         await self.cache.export_tree(self.content_tree,
                                      export_dir,
                                      previous_tree=self.content_tree)
     # But it should succeed with the --force flag.
     await self.cache.export_tree(self.content_tree,
                                  export_dir,
                                  force=True,
                                  previous_tree=self.content_tree)
     assert_contents(export_dir, self.content)
Example #37
 def test_curl_plugin_reup(self):
     curl_content = {'myfile': 'content'}
     test_dir = shared.create_dir(curl_content)
     test_url = (Path(test_dir) / 'myfile').as_uri()
     digest = hashlib.sha1()
     digest.update(b'content')
     real_hash = digest.hexdigest()
     fields = {'url': test_url}
     output = test_plugin_get_reup_fields(
         self.plugin_context, 'curl', fields)
     self.assertDictEqual({'sha1': real_hash}, output)
     # Confirm that we get the same thing with a preexisting hash.
     fields['sha1'] = 'preexisting junk'
     output = test_plugin_get_reup_fields(
         self.plugin_context, 'curl', fields)
     self.assertDictEqual({'sha1': real_hash}, output)
Example #38
    def test_git_plugin_with_submodule(self):
        content_repo = GitRepo(self.content_dir)
        # Git has a small bug: The .gitmodules file is always created with "\n"
        # line endings, even on Windows. With core.autocrlf turned on, that
        # causes a warning when the file is added/committed, because those line
        # endings would get replaced with "\r\n" when the file was checked out.
        # We can just turn autocrlf off for this test to silence the warning.
        content_repo.run('git', 'config', 'core.autocrlf', 'false')
        submodule_dir = shared.create_dir({'another': 'file'})
        submodule_repo = GitRepo(submodule_dir)
        content_repo.run(
            'git', 'submodule', 'add', '-q', submodule_dir, 'subdir/')
        content_repo.run('git', 'commit', '-m', 'submodule commit')
        expected_content = self.content.copy()
        expected_content['subdir/another'] = 'file'
        with open(os.path.join(self.content_dir, '.gitmodules')) as f:
            expected_content['.gitmodules'] = f.read()
        self.do_plugin_test('git', {'url': self.content_dir}, expected_content)
        # Now move the submodule forward. Make sure it gets fetched again.
        shared.write_files(submodule_dir, {'more': 'stuff'})
        submodule_repo.run('git', 'add', '-A')
        submodule_repo.run('git', 'commit', '-m', 'more stuff')
        subprocess.check_output(
            ['git', 'pull', '-q'],
            cwd=os.path.join(self.content_dir, 'subdir'))
        content_repo.run('git', 'commit', '-am', 'submodule update')
        expected_content['subdir/more'] = 'stuff'
        self.do_plugin_test('git', {'url': self.content_dir}, expected_content)

        # Normally when you run `git submodule add ...`, git puts two things in
        # your repo: an entry in .gitmodules, and a commit object at the
        # appropriate path inside your repo. However, it's possible for those
        # two to get out of sync, especially if you use mv/rm on a directory
        # followed by `git add`, instead of the smarter `git mv`/`git rm`. We
        # need to create this condition and check that we then ignore the
        # submodule.
        shutil.rmtree(os.path.join(self.content_dir, 'subdir'))
        content_repo.run('git', 'commit', '-am', 'inconsistent delete')
        del expected_content['subdir/another']
        del expected_content['subdir/more']
        self.do_plugin_test('git', {'url': self.content_dir}, expected_content)

        # Finally, test explicitly disabling submodule fetching. Start by
        # reverting the 'inconsistent delete' commit from above.
        content_repo.run('git', 'revert', '--no-edit', 'HEAD')
        fields = {'url': self.content_dir, 'submodules': 'false'}
        self.do_plugin_test('git', fields, expected_content)
Example #39
    def test_conflicting_imports(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml(
            '''\
            cp module foo:
                path: {0}

            # same as foo
            cp module bar:
                path: {0}

            imports:
                foo: subdir
                bar: subdir
            ''', module_dir)
        with self.assertRaises(peru.cache.MergeConflictError):
            self.do_integration_test(['sync'], {'subdir/foo': 'bar'})
Example #40
 def test_rule_with_move_error(self):
     module_dir = shared.create_dir()
     self.write_yaml(
         '''\
         cp module foo:
             path: {}
             move:
                 doesntexist: also_nonexistent
         imports:
             foo: ./
         ''', module_dir)
     with raises_gathered(peru.rule.NoMatchingFilesError) as cm:
         self.do_integration_test(['sync'], {
             'newa': 'foo',
             'newb/c': 'bar'
         })
     assert 'doesntexist' in cm.exception.message
Example #41
 def test_rule_with_executable(self):
     contents = {'a.txt': '', 'b.txt': '', 'c.foo': ''}
     module_dir = shared.create_dir(contents)
     self.write_yaml(
         '''\
         cp module foo:
             path: {}
             executable: "*.txt"
         imports:
             foo: ./
         ''', module_dir)
     self.do_integration_test(['sync'], contents)
     for f in ('a.txt', 'b.txt'):
         mode = (Path(self.test_dir) / f).stat().st_mode
         assert mode & stat.S_IXUSR
         assert mode & stat.S_IXGRP
         assert mode & stat.S_IXOTH
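The assertions show what the executable field means: every file matching the glob ends up with all three execute bits set, on top of whatever mode it already had. A small sketch of that operation (glob matching here uses fnmatch on paths relative to the export dir; the real rule may match differently):

import fnmatch
import os
import stat


def make_matching_files_executable(root, pattern):
    # Add u+x, g+x, and o+x to every file under root whose relative path
    # matches the glob pattern.
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            full = os.path.join(dirpath, name)
            rel = os.path.relpath(full, root)
            if fnmatch.fnmatch(rel, pattern):
                mode = os.stat(full).st_mode
                os.chmod(full,
                         mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)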
Example #42
 def test_curl_plugin_fetch(self):
     curl_content = {'myfile': 'content'}
     test_dir = shared.create_dir(curl_content)
     test_url = (Path(test_dir) / 'myfile').as_uri()
     fields = {'url': test_url}
     self.do_plugin_test('curl', fields, curl_content)
     # Run the test again with an explicit hash and an explicit filename.
     digest = hashlib.sha1()
     digest.update(b'content')
     real_hash = digest.hexdigest()
     fields['sha1'] = real_hash
     fields['filename'] = 'newname'
     self.do_plugin_test('curl', fields, {'newname': 'content'})
     # Now run it with the wrong hash, and confirm that there's an error.
     fields['sha1'] = 'wrong hash'
     with self.assertRaises(plugin.PluginRuntimeError):
         self.do_plugin_test('curl', fields, {'newname': 'content'})
Example #43
    def test_rule_with_exported_files_that_are_not_picked(self):
        content = {
            name: ''
            for name in ['foo', 'bar', 'baz/bing', 'baz/boo/a', 'baz/boo/b']
        }
        module_dir = shared.create_dir(content)
        self.write_yaml(
            '''\
            cp module foo:
                path: {}
                pick: foo
                export: baz/

            imports:
                foo: ./
            ''', module_dir)
        with raises_gathered(peru.rule.NoMatchingFilesError):
            self.do_integration_test(['sync'], {})
Example #44
    def test_git_plugin_with_relative_submodule(self):
        content_repo = GitRepo(self.content_dir)
        # Same autocrlf workaround as above.
        content_repo.run('git', 'config', 'core.autocrlf', 'false')

        # Similar to above, but this time we use a relative path.
        submodule_dir = shared.create_dir({'another': 'file'})
        GitRepo(submodule_dir)
        submodule_basename = os.path.basename(submodule_dir)
        relative_path = "../" + submodule_basename
        content_repo.run('git', 'submodule', 'add', '-q', relative_path,
                         'subdir/')
        content_repo.run('git', 'commit', '-m', 'submodule commit')
        expected_content = self.content.copy()
        expected_content['subdir/another'] = 'file'
        with open(os.path.join(self.content_dir, '.gitmodules')) as f:
            expected_content['.gitmodules'] = f.read()
        self.do_plugin_test('git', {'url': self.content_dir}, expected_content)
Example #45
 def test_rule_with_move_error(self):
     module_dir = shared.create_dir()
     self.write_yaml(
         '''\
         cp module foo:
             path: {}
             move:
                 doesntexist: also_nonexistent
         imports:
             foo: ./
         ''', module_dir)
     try:
         self.do_integration_test(['sync'], {
             'newa': 'foo',
             'newb/c': 'bar'
         })
     except peru.error.PrintableError as e:
         assert 'doesntexist' in e.message
     else:
         assert False, 'expected a NoMatchingFilesError'
Example #46
    def test_basic_sync(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml(
            '''\
            cp module foo:
                path: {}

            imports:
                foo: subdir
            ''', module_dir)
        self.do_integration_test(['sync'], {'subdir/foo': 'bar'})

        # Running it again should be a no-op.
        self.do_integration_test(['sync'], {'subdir/foo': 'bar'})

        # Running it with a dirty working copy should be an error.
        shared.write_files(self.test_dir, {'subdir/foo': 'dirty'})
        with self.assertRaises(peru.cache.DirtyWorkingCopyError):
            self.do_integration_test(['sync'], {'subdir/foo': 'bar'})
Example #47
    def test_sync_from_subdir(self):
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml(
            '''\
            # Use a relative module path, to make sure it gets resolved
            # relative to the project root and not the dir where peru was
            # called.
            cp module relative_foo:
                path: {}

            imports:
                relative_foo: subdir
            ''', os.path.relpath(module_dir, start=self.test_dir))
        subdir = os.path.join(self.test_dir, 'a', 'b')
        peru.compat.makedirs(subdir)
        run_peru_command(['sync'], subdir)
        self.assertTrue(os.path.isdir(os.path.join(self.test_dir, '.peru')),
                        msg=".peru dir didn't end up in the right place")
        assert_contents(os.path.join(self.test_dir, 'subdir'), {'foo': 'bar'})
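For that test to pass, running from a/b has to locate the project root two levels up before resolving the relative module path and creating .peru there. One common way a tool finds its root, sketched here as an assumption rather than peru's actual logic, is to walk up from the cwd looking for the config file:

import os


def find_project_root(start_dir, config_name='peru.yaml'):
    # Walk upward until a directory containing the config file is found.
    current = os.path.abspath(start_dir)
    while True:
        if os.path.isfile(os.path.join(current, config_name)):
            return current
        parent = os.path.dirname(current)
        if parent == current:
            raise FileNotFoundError(
                '{} not found above {}'.format(config_name, start_dir))
        current = parent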
Example #48
    def test_rule_with_picked_files_that_are_not_exported(self):
        content = {
            name: ''
            for name in ['foo', 'bar', 'baz/bing', 'baz/boo/a', 'baz/boo/b']
        }
        module_dir = shared.create_dir(content)
        self.write_yaml(
            '''\
            cp module foo:
                path: {}
                pick: foo
                export: baz/

            imports:
                foo: ./
            ''', module_dir)
        with self.assertRaises(peru.rule.NoMatchingFilesError) as cm:
            self.do_integration_test(['sync'], {})
        self.assertTrue(
            'none are beneath the export path' in cm.exception.message)
Example #49
    def test_drop_then_pick_is_an_error(self):
        '''We want drop to run before pick, so that deleting a bunch of stuff
        and then trying to pick it turns into an error. The opposite execution
        order would make this silently succeed. See the discussion at
        https://github.com/buildinspace/peru/issues/150#issuecomment-212580912.
        '''
        content = {'foo': 'stuff'}
        module_dir = shared.create_dir(content)
        self.write_yaml(
            '''\
            cp module foobar:
                path: {}
                drop: foo
                pick: foo

            imports:
                foobar: ./
            ''', module_dir)
        with raises_gathered(peru.rule.NoMatchingFilesError):
            run_peru_command(['sync'], self.test_dir)
Example #50
 def test_empty_imports(self):
     module_dir = shared.create_dir({'foo': 'bar'})
     empty_yaml = '''\
         cp module foo:
             path: {}
         '''.format(module_dir)
     nonempty_yaml = '''\
         cp module foo:
             path: {}
         imports:
             foo: ./
         '''.format(module_dir)
     self.write_yaml(empty_yaml)
     self.do_integration_test(['sync'], {})
     # Now test switching back and forth between non-empty and empty.
     self.write_yaml(nonempty_yaml)
     self.do_integration_test(['sync'], {'foo': 'bar'})
     # Back to empty.
     self.write_yaml(empty_yaml)
     self.do_integration_test(['sync'], {})
Example #51
    def test_reup_sync(self):
        yaml_with_imports = dedent('''\
            imports:
                foo: ./
                bar: ./

            git module foo:
                url: {}
                rev: {}

            git module bar:
                url: {}
                reup: otherbranch
            ''').format(self.foo_dir, self.foo_master, self.bar_dir)
        test_dir = shared.create_dir({'peru.yaml': yaml_with_imports})
        # First reup without the sync.
        run_peru_command(['reup', 'foo', '--nosync'], test_dir)
        assert_contents(test_dir, {}, excludes=['.peru', 'peru.yaml'])
        # Now do it with the sync.
        run_peru_command(['reup', 'foo', '--quiet'], test_dir)
        assert_contents(test_dir, {'a': 'b'}, excludes=['.peru', 'peru.yaml'])
Example #52
    def test_no_cache_flag(self):
        foo_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml(
            '''\
            cp module foo:
                path: {}

            imports:
                foo: subdir
            ''', foo_dir)

        # Sync the foo module once.
        self.do_integration_test(['sync'], {'subdir/foo': 'bar'})

        # Change the contents of foo and sync again. Because foo is cached, we
        # shouldn't see any changes.
        shared.write_files(foo_dir, {'foo': 'woo'})
        self.do_integration_test(['sync'], {'subdir/foo': 'bar'})

        # Now sync with --no-cache. This time we should see the changes.
        self.do_integration_test(['sync', '--no-cache'], {'subdir/foo': 'woo'})
Example #53
    def test_module_rules(self):
        module_dir = shared.create_dir({'a/b': '', 'c/d': ''})
        yaml = '''\
            cp module foo:
                path: {}

            rule get_a:
                export: a

            rule get_c:
                export: c

            imports:
                foo|get_a: ./
            '''.format(module_dir)
        self.write_yaml(yaml)
        self.do_integration_test(['sync'], {'b': ''})
        # Run it again with a different import to make sure we clean up.
        yaml_different = yaml.replace('foo|get_a', 'foo|get_c')
        self.write_yaml(yaml_different)
        self.do_integration_test(['sync'], {'d': ''})
Example #54
    async def test_export_with_existing_files(self):
        # Create a dir with an existing file that doesn't conflict.
        more_content = {'untracked': 'stuff'}
        export_dir = create_dir(more_content)
        await self.cache.export_tree(self.content_tree, export_dir)
        expected_content = self.content.copy()
        expected_content.update(more_content)
        assert_contents(export_dir, expected_content)

        # But if we try to export twice, the export_dir will now have
        # conflicting files, and export_tree() should throw.
        with self.assertRaises(peru.cache.DirtyWorkingCopyError):
            await self.cache.export_tree(self.content_tree, export_dir)

        # By default, git's checkout safety doesn't protect files that are
        # .gitignore'd. Make sure we still throw the right errors in the
        # presence of a .gitignore file.
        with open(os.path.join(export_dir, '.gitignore'), 'w') as f:
            f.write('*\n')  # .gitignore everything
        with self.assertRaises(peru.cache.DirtyWorkingCopyError):
            await self.cache.export_tree(self.content_tree, export_dir)
Example #55
 async def test_import_ignores_dotperu(self):
     # We have a security problem similar to git's if we allow '.peru'
     # directories in the trees we write to disk. (See
     # https://github.com/blog/1938-vulnerability-announced-update-your-git-clients.)
     # We need to check that *all* '.peru' dirs are ignored in imported
     # trees, including inside of nested subdirectories. And as in the git
     # issue, we need to do this in a case-insensitive way.
     content = {
         'foo': 'bar',
         '.peru/foo1': 'bar',
         '.PERU/foo2': 'bar',
         '.pErU/foo3': 'bar',
         'dir/foo': 'bar',
         'dir/.peru/foo1': 'bar',
         'dir/.PERU/foo2': 'bar',
         'dir/.peRU/foo3': 'bar',
     }
     tree = await self.cache.import_tree(create_dir(content))
     entries = await self.cache.ls_tree(tree, recursive=True)
     self.assertEqual({'foo', 'dir', 'dir/foo'}, entries.keys(),
                      "Expected all of the .peru dirs to be omitted.")
Example #56
    def test_single_reup(self):
        yaml_without_imports = dedent('''\
            git module foo:
                url: {}
                rev: master

            git module bar:
                url: {}
                reup: otherbranch
            ''').format(self.foo_dir, self.bar_dir)
        test_dir = shared.create_dir({'peru.yaml': yaml_without_imports})
        expected = dedent('''\
            git module foo:
                url: {}
                rev: {}

            git module bar:
                url: {}
                reup: otherbranch
            ''').format(self.foo_dir, self.foo_master, self.bar_dir)
        run_peru_command(['reup', 'foo'], test_dir)
        assert_contents(test_dir, {'peru.yaml': expected}, excludes=['.peru'])
Example #57
    def test_reup_all(self):
        yaml_with_imports = dedent('''\
            imports:
                foo: ./
                bar: ./

            git module foo:
                url: {}
                rev: {}

            git module bar:
                url: {}
                reup: otherbranch
            ''').format(self.foo_dir, self.foo_master, self.bar_dir)
        test_dir = shared.create_dir({'peru.yaml': yaml_with_imports})
        expected = dedent('''\
            imports:
                foo: ./
                bar: ./

            git module foo:
                url: {}
                rev: {}

            git module bar:
                url: {}
                reup: otherbranch
                rev: {}
            ''').format(self.foo_dir, self.foo_master, self.bar_dir,
                        self.bar_otherbranch)
        run_peru_command(['reup'], test_dir)
        # This time we finally pull in barfile.
        assert_contents(test_dir, {
            'peru.yaml': expected,
            'a': 'b',
            'barfile': 'new'
        },
                        excludes=['.peru'])
Example #58
    def test_rule_with_copied_files(self):
        content = {'foo': 'foo', 'bar/baz': 'baz'}
        module_dir = shared.create_dir(content)
        self.write_yaml(
            '''\
            cp module foo:
                path: {}
                copy:
                    foo: foo-copy
                    bar:
                      - bar-copy-1
                      - bar-copy-2

            imports:
                foo: ./
            ''', module_dir)
        copied_content = {
            'foo': 'foo',
            'bar/baz': 'baz',
            'foo-copy': 'foo',
            'bar-copy-1/baz': 'baz',
            'bar-copy-2/baz': 'baz'
        }
        self.do_integration_test(['sync'], copied_content)
Example #59
    def test_rule_with_legacy_files(self):
        content = {
            name: ''
            for name in [
                'foo',
                'bar',
                'special',
                'baz/bing',
                'baz/boo/a',
                'baz/boo/b',
            ]
        }
        module_dir = shared.create_dir(content)
        self.write_yaml(
            '''\
            cp module foo:
                path: {}

            rule filter:
                files:
                  - "**/*oo"
                  - special

            imports:
                foo|filter: ./
            ''', module_dir)
        filtered_content = {
            name: ''
            for name in [
                'foo',
                'special',
                'baz/boo/a',
                'baz/boo/b',
            ]
        }
        self.do_integration_test(['sync'], filtered_content)
Example #60
 async def test_multiple_imports(self):
     new_content = {'fee/fi': 'fo fum'}
     new_tree = await self.cache.import_tree(create_dir(new_content))
     export_dir = create_dir()
     await self.cache.export_tree(new_tree, export_dir)
     assert_contents(export_dir, new_content)