コード例 #1
0
def test_clear_cache(tmpdir, settings, capsys, monkeypatch):
    """Clearing the cache resets the cached catalog to an empty mapping."""
    source_dir = tmpdir.mkdir('source')
    catalog_file = source_dir.join('catalog.yml')
    catalog_file.write('all:\n  foovideos:\n    name: Videos from Foo')

    remote = {
        'id': 'foo',
        'name': 'Content from Foo',
        'url': 'file://{}'.format(catalog_file.strpath),
    }

    call_command('catalog', 'remotes', 'add', remote['id'], remote['name'],
                 remote['url'])
    call_command('catalog', 'cache', 'update')

    call_command('catalog', 'cache', 'clear')

    # After clearing, the cache file must still exist but contain nothing.
    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    cached_catalog = cache_dir.join('catalog.yml')
    assert cached_catalog.check(file=True)

    with cached_catalog.open('r') as f:
        assert yaml.safe_load(f.read()) == {}

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #2
0
def test_clear_cache(tmpdir, settings, capsys):
    """Clearing the cache empties catalog.json and removes downloaded files."""
    catalog_yml = tmpdir.mkdir('source').join('catalog.yml')
    catalog_yml.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_yml.strpath))
    call_command('catalog', 'cache', 'update')

    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    cache_file = cache_dir.join('catalog.json')
    downloaded = cache_dir.join('packages').join('foovideos')
    downloaded.write_binary(b'content')
    # Sanity check: the cache is non-empty before clearing.
    assert yaml.safe_load(cache_file.read_text('utf-8')) != {}

    call_command('catalog', 'cache', 'clear')

    # The cache file survives but holds an empty mapping; downloads are gone.
    assert cache_dir.join('catalog.json').check(file=True)
    assert yaml.safe_load(cache_file.read_text('utf-8')) == {}
    assert downloaded.check(exists=False)

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #3
0
def test_update_cache_with_remote(tmpdir, settings, capsys):
    """Updating the cache picks up a catalog update published by a remote."""
    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_file.strpath))

    # Now let's say the remote published an update to their catalog.
    catalog_file = tmpdir.join('source', 'catalog.yml')
    catalog_file.write(
        'all:\n  foovideos:\n    name: Great videos from Foo')

    call_command('catalog', 'cache', 'update')

    # The cached catalog must reflect the updated remote content.
    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    cached_catalog = cache_dir.join('catalog.json')
    assert cached_catalog.check(file=True)

    with cached_catalog.open('r') as f:
        assert yaml.safe_load(f.read()) == {
            'foovideos': {'name': 'Great videos from Foo'}}

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #4
0
def test_update_cache_with_remote(tmpdir, settings, capsys, monkeypatch):
    """A cache update fetches the latest catalog published by a remote."""
    # Avoid real network access when the catalog is fetched.
    monkeypatch.setattr('ideascube.serveradmin.catalog.urlretrieve',
                        fake_urlretrieve)

    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write('all:\n  foovideos:\n    name: Videos from Foo')

    remote = {
        'id': 'foo',
        'name': 'Content from Foo',
        'url': 'file://{}'.format(catalog_file.strpath),
    }

    call_command('catalog', 'remotes', 'add', remote['id'], remote['name'],
                 remote['url'])

    # Now let's say the remote published an update to their catalog.
    catalog_file = tmpdir.join('source', 'catalog.yml')
    catalog_file.write(
        'all:\n  foovideos:\n    name: Great videos from Foo')

    call_command('catalog', 'cache', 'update')

    # The cached catalog must reflect the updated remote content.
    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    cached_catalog = cache_dir.join('catalog.yml')
    assert cached_catalog.check(file=True)

    with cached_catalog.open('r') as f:
        assert yaml.safe_load(f.read()) == {
            'foovideos': {'name': 'Great videos from Foo'}}

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #5
0
def test_reinstall_not_installed_package(tmpdir, capsys, settings, staticsite_path):
    """Reinstalling a never-installed package warns, then installs it anyway."""
    digest = get_file_sha256(staticsite_path.strpath)

    catalog_file = tmpdir.join('source').join('catalog.yml')
    catalog_file.write_text(
        'all:\n'
        '  the-site:\n'
        '    name: A great web site\n'
        '    version: 2017-06\n'
        '    sha256sum: {sha256sum}\n'
        '    size: 3027988\n'
        '    url: file://{staticsite_path}\n'
        '    type: static-site'.format(sha256sum=digest, staticsite_path=staticsite_path),
        'utf-8')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_file.strpath))
    call_command('catalog', 'cache', 'update')

    # Precondition: the package is not installed yet.
    install_dir = Path(settings.CATALOG_NGINX_INSTALL_DIR)
    assert install_dir.join('the-site').check(exists=False)

    call_command('catalog', 'reinstall', 'the-site')
    out, err = capsys.readouterr()
    # The "not installed" warning goes to stderr; the install proceeds on
    # stdout and the site content ends up deployed.
    assert out.strip() == 'Installing the-site-2017-06'
    assert err.strip() == 'the-site is not installed'
    assert install_dir.join('the-site').join('index.html').read_binary() == (
        b'<html></html>')
コード例 #6
0
    def test_eq(self, tmpdir: local) -> None:
        """
        Two nodes are equal iff their IP addresses are equal.
        """

        # Two key files with identical content but different paths, so the
        # SSH key path can be varied independently of the key material.
        content = str(uuid.uuid4())
        key1_filename = 'foo.key'
        key1_file = tmpdir.join(key1_filename)
        key1_file.write(content)
        key2_filename = 'bar.key'
        key2_file = tmpdir.join(key2_filename)
        key2_file.write(content)

        # Reference attribute values for the node under test, plus one
        # "other" alternative per attribute.
        node_public_ip_address = IPv4Address('172.0.0.1')
        node_private_ip_address = IPv4Address('172.0.0.3')
        other_ip_address = IPv4Address('172.0.0.4')
        node_ssh_key_path = Path(str(key1_file))
        other_ssh_key_path = Path(str(key2_file))
        node_user = '******'
        other_user = '******'
        node_transport = Transport.DOCKER_EXEC
        other_transport = Transport.SSH
        node = Node(
            public_ip_address=node_public_ip_address,
            private_ip_address=node_private_ip_address,
            ssh_key_path=node_ssh_key_path,
            default_user=node_user,
            default_transport=node_transport,
        )
        # Exhaustively vary every constructor argument and verify that only
        # the two IP addresses influence equality.
        for transport in (node_transport, other_transport):
            for public_ip_address in (
                    node_public_ip_address,
                    other_ip_address,
            ):
                for private_ip_address in (
                        node_private_ip_address,
                        other_ip_address,
                ):
                    for ssh_key_path in (
                            node_ssh_key_path,
                            other_ssh_key_path,
                    ):
                        for user in (node_user, other_user):
                            other_node = Node(
                                public_ip_address=public_ip_address,
                                private_ip_address=private_ip_address,
                                ssh_key_path=ssh_key_path,
                                default_user=user,
                                default_transport=transport,
                            )

                            # Equality is expected exactly when both IP
                            # addresses match the reference node's.
                            should_match = bool(
                                (public_ip_address, private_ip_address) == (
                                    node_public_ip_address,
                                    node_private_ip_address,
                                ), )

                            do_match = bool(node == other_node)
                            assert should_match == do_match
コード例 #7
0
def test_move_remotes(tmpdir, settings, monkeypatch):
    """Remotes in the legacy cache location migrate back to storage."""
    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write('all:\n  foovideos:\n    name: Videos from Foo')
    remote = {
        'id': 'foo',
        'name': 'Content from Foo',
        'url': 'file://{}'.format(catalog_file.strpath),
    }

    call_command('catalog', 'remotes', 'add', remote['id'], remote['name'],
                 remote['url'])

    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    storage_dir = Path(settings.CATALOG_STORAGE_ROOT)

    # Simulate the legacy layout: the remotes live under the cache root.
    storage_dir.join('remotes').move(cache_dir.join('remotes'))
    assert cache_dir.join('remotes', 'foo.yml').check(file=True)
    assert storage_dir.join('remotes').check(exists=False)

    # A cache update must migrate the remotes to the storage directory.
    call_command('catalog', 'cache', 'update')

    assert cache_dir.join('remotes').check(exists=False)
    assert storage_dir.join('remotes', 'foo.yml').check(file=True)
コード例 #8
0
def test_split_cache(tmpdir, settings):
    """A legacy single-file cache is split into catalog.json and installed.json."""
    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_file.strpath))
    call_command('catalog', 'cache', 'update')

    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    storage_dir = Path(settings.CATALOG_STORAGE_ROOT)

    # Recreate the old layout: one YAML file holding both the installed and
    # the available packages, and no installed.json.
    legacy_cache = yaml.dump({'installed': {}, 'available': {
        'foovideos': {'name': 'Videos from Foo'}}})
    cache_dir.join('catalog.yml').write(legacy_cache)
    storage_dir.join('installed.json').remove()

    # And check that it migrates properly.
    call_command('catalog', 'cache', 'update')

    assert yaml.safe_load(
        cache_dir.join('catalog.json').read()) == {
        'foovideos': {'name': 'Videos from Foo'},
        }
    assert yaml.safe_load(
        storage_dir.join('installed.json').read()) == {}
コード例 #9
0
def test_update_cache_without_remote(settings, capsys):
    """With no remotes configured, updating yields an empty catalog cache."""
    call_command('catalog', 'cache', 'update')

    cache_file = Path(settings.CATALOG_CACHE_ROOT).join('catalog.json')
    assert cache_file.check(file=True)

    # The cache file exists but deserializes to an empty mapping.
    with cache_file.open('r') as f:
        assert yaml.safe_load(f.read()) == {}

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #10
0
def test_update_cache_without_remote(tmpdir, settings, capsys):
    """With no remotes configured, updating yields an empty catalog cache."""
    call_command('catalog', 'cache', 'update')

    cache_file = Path(settings.CATALOG_CACHE_ROOT).join('catalog.yml')
    assert cache_file.check(file=True)

    # The cache file exists but deserializes to an empty mapping.
    with cache_file.open('r') as f:
        assert yaml.safe_load(f.read()) == {}

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #11
0
    def test_filehandler_format(self, temporary_log_directory: local,
                                patch_logging_formatter: MagicMock,
                                patch_logging_filehandler: MagicMock,
                                test_input_name: str,
                                test_input_filename: str) -> None:
        """Test the file handler is set with the correct logging format."""

        # Determine a log-file path only when a filename was supplied;
        # otherwise no file handler should be created at all.
        if test_input_filename:
            log_file = temporary_log_directory.join(test_input_filename)
            log_file_path = os.path.join(log_file.dirname, log_file.basename)
        else:
            log_file_path = None

        # Run the `create_logger` function.
        _ = create_logger(test_input_name, log_file_path)

        # With a filename, the file handler must receive the shared
        # formatter exactly once; without one, it must never be invoked.
        if test_input_filename:
            file_handler = patch_logging_filehandler.return_value
            file_handler.setFormatter.assert_called_once_with(
                patch_logging_formatter.return_value)
        else:
            assert not patch_logging_filehandler.called
コード例 #12
0
    def test_send_file_to_directory(
        self,
        dcos_node: Node,
        tmpdir: local,
    ) -> None:
        """
        It is possible to send a file to a cluster node to a directory that
        is mounted as tmpfs.
        See ``DockerExecTransport.send_file`` for details.
        """
        # Unique content proves we read back exactly what we sent.
        payload = str(uuid.uuid4())
        file_name = 'example_file.txt'
        source_file = tmpdir.join(file_name)
        source_file.write(payload)

        destination_dir = Path(
            '/etc/{random}'.format(random=uuid.uuid4().hex),
        )
        dcos_node.run(args=['mkdir', '--parent', str(destination_dir)])
        dcos_node.send_file(
            local_path=Path(str(source_file)),
            remote_path=destination_dir,
        )
        # Reading the file back on the node proves the transfer succeeded.
        result = dcos_node.run(args=['cat', str(destination_dir / file_name)])
        assert result.stdout.decode() == payload
コード例 #13
0
ファイル: test_cli.py プロジェクト: Fabs/dcos-e2e
    def test_invalid_yaml(self, oss_artifact: Path, tmpdir: local) -> None:
        """
        An error is shown if invalid YAML is given in the file given to
        ``--extra-config``.
        """
        # A bare '@' is not parseable YAML; any unique file name works here.
        invalid_file = tmpdir.join(uuid.uuid4().hex)
        invalid_file.write('@')
        runner = CliRunner()
        result = runner.invoke(
            dcos_docker,
            [
                'create',
                str(oss_artifact),
                '--extra-config',
                str(invalid_file),
            ],
            catch_exceptions=False,
        )
        # Click signals option-validation failures with exit code 2.
        assert result.exit_code == 2
        # yapf breaks multi-line noqa, see
        # https://github.com/google/yapf/issues/524.
        # yapf: disable
        expected_message = dedent(
            """\
           Usage: dcos-docker create [OPTIONS] ARTIFACT

           Error: Invalid value for "--extra-config": "@" is not valid YAML
            """,# noqa: E501,E261
        )
        # yapf: enable
        assert result.output == expected_message
コード例 #14
0
ファイル: test_cli.py プロジェクト: Fabs/dcos-e2e
    def test_genconf_path_is_file(
        self,
        oss_artifact: Path,
        tmpdir: local,
    ) -> None:
        """
        Genconf path must be a directory.
        """
        # Create a plain file where a directory is required.
        genconf_file = tmpdir.join('testfile')
        genconf_file.write('test')

        runner = CliRunner()
        cli_args = [
            'create',
            str(oss_artifact),
            '--genconf-dir',
            str(genconf_file),
        ]
        result = runner.invoke(dcos_docker, cli_args, catch_exceptions=False)

        # Click signals option-validation failures with exit code 2.
        assert result.exit_code == 2
        expected_error = (
            'Error: Invalid value for "--genconf-dir": '
            '"{path}" is not a directory.'
        ).format(path=str(genconf_file))
        assert expected_error in result.output
コード例 #15
0
    def test_suffix_not_ovpn(self, tmpdir: local) -> None:
        """
        If a configuration file does not have the 'ovpn' suffix, an error is
        shown.
        """
        # A destination file whose suffix is '.txt' rather than '.ovpn'.
        configuration_file = tmpdir.join('example.txt')
        configuration_file.write('example')
        runner = CliRunner()
        result = runner.invoke(
            minidcos,
            [
                'docker',
                'setup-mac-network',
                '--configuration-dst',
                str(configuration_file),
            ],
            catch_exceptions=False,
        )
        # yapf breaks multi-line noqa, see
        # https://github.com/google/yapf/issues/524.
        # yapf: disable
        expected_error = dedent(
            """\
            Usage: minidcos docker setup-mac-network [OPTIONS]

            Error: Invalid value for "--configuration-dst": "{value}" does not have the suffix ".ovpn".
            """,# noqa: E501,E261
        ).format(
            value=str(configuration_file),
        )
        # yapf: enable
        # Click signals option-validation failures with exit code 2.
        assert result.exit_code == 2
        assert result.output == expected_error
コード例 #16
0
def test_pandoc_warnings_are_not_printed_with_ignore_warnings_flag(
    capsys,
    tmpdir: Path,
):
    """When ``--ignore-warnings`` is passed, pandoc warnings are not
       printed to the output.
    """
    outfile = tmpdir.join('outfile.html')

    main([
        'convert',
        # Besides containing errors, this document also has no title,
        # which makes pandoc emit a warning while converting it.
        'tests/test_documents/contains_errors.md',
        str(outfile),
        # Standalone mode raises a warning as well.
        '--pandoc-args="--standalone"',
        # The default template is not installed in CI, so disable
        # templating (it would otherwise throw an error).
        '--pandoc-template=None',
        '--ignore-warnings',
    ])

    # Neither our warning banner nor pandoc's own marker may appear.
    captured = capsys.readouterr()
    assert 'PANDOC WARNING' not in captured.out
    assert '[WARNING]' not in captured.out
コード例 #17
0
    def test_workspace_path_is_file(
        self,
        oss_installer: Path,
        tmpdir: local,
    ) -> None:
        """
        ``--workspace-dir`` must be a directory.
        """
        # Create a plain file where a directory is required.
        workspace_file = tmpdir.join('testfile')
        workspace_file.write('test')

        runner = CliRunner()
        cli_args = [
            'docker',
            'create',
            str(oss_installer),
            '--workspace-dir',
            str(workspace_file),
        ]
        result = runner.invoke(minidcos, cli_args, catch_exceptions=False)

        # Click signals option-validation failures with exit code 2.
        assert result.exit_code == 2
        expected_error = ('Error: Invalid value for "--workspace-dir": '
                          '"{path}" is not a directory.').format(
                              path=str(workspace_file))
        assert expected_error in result.output
コード例 #18
0
 def test_docker_exec_transport(
     self,
     docker_network: Network,
     tmpdir: local,
 ) -> None:
     """
     ``Node`` operations with the Docker exec transport work even if the
     node is on a custom network.
     """
     # Build a minimal cluster (masters only) on the custom network, forcing
     # the Docker exec transport for all node operations.
     with Cluster(
         cluster_backend=Docker(
             network=docker_network,
             transport=Transport.DOCKER_EXEC,
         ),
         agents=0,
         public_agents=0,
     ) as cluster:
         (master, ) = cluster.masters
         # Unique content proves that we read back exactly what we sent.
         content = str(uuid.uuid4())
         local_file = tmpdir.join('example_file.txt')
         local_file.write(content)
         random = uuid.uuid4().hex
         master_destination_dir = '/etc/{random}'.format(random=random)
         master_destination_path = Path(master_destination_dir) / 'file.txt'
         master.send_file(
             local_path=Path(str(local_file)),
             remote_path=master_destination_path,
             transport=Transport.DOCKER_EXEC,
         )
         # Reading the file back over the same transport proves the
         # round-trip worked on the custom network.
         args = ['cat', str(master_destination_path)]
         result = master.run(args=args, transport=Transport.DOCKER_EXEC)
         assert result.stdout.decode() == content
コード例 #19
0
    def test_custom_user(
        self,
        dcos_node: Node,
        tmpdir: local,
    ) -> None:
        """
        It is possible to send a file to a cluster node as a custom user.
        """
        username = str(uuid.uuid4().hex)
        dcos_node.run(args=['useradd', username])
        # Give the new user the default user's authorized keys so that
        # connecting as that user works.
        dcos_node.run(
            args=['cp', '-R', '$HOME/.ssh', '/home/{}/'.format(username)],
            shell=True,
        )

        payload = str(uuid.uuid4())
        source_file = tmpdir.join('example_file.txt')
        source_file.write(payload)
        destination_dir = '/home/{testuser}/{random}'.format(
            testuser=username,
            random=payload,
        )
        destination_path = Path(destination_dir) / 'file.txt'
        dcos_node.send_file(
            local_path=Path(str(source_file)),
            remote_path=destination_path,
            user=username,
        )
        # The received file must be owned by the custom user.
        args = ['stat', '-c', '"%U"', str(destination_path)]
        result = dcos_node.run(args=args, shell=True)
        assert result.stdout.decode().strip() == username

        # Implicitly asserts SSH connection closed by ``send_file``.
        dcos_node.run(args=['userdel', '-r', username])
コード例 #20
0
 def test_copy_files(
     self,
     cluster_backend: ClusterBackend,
     tmpdir: local,
     oss_artifact: Path,
 ) -> None:
     """
     Files can be copied from the host to master nodes and the installer
     node at creation time.
     """
     # Unique content proves that we read back exactly what we sent.
     content = str(uuid.uuid4())
     local_file = tmpdir.join('example_file.txt')
     local_file.write(content)
     source_path = Path(str(local_file))
     master_destination_path = Path('/etc/on_master_nodes.txt')
     files_to_copy_to_masters = {source_path: master_destination_path}
     # We currently do not have a way of testing that this works without
     # using custom CA certificates on an enterprise cluster.
     # We add it to the test to at least exercise the code which uses this,
     # but this is insufficient.
     files_to_copy_to_installer = {
         source_path: Path('/genconf/on_installer.txt'),
     }
     # Masters-only cluster: agents are unnecessary for this check.
     with Cluster(
             cluster_backend=cluster_backend,
             generate_config_path=oss_artifact,
             files_to_copy_to_masters=files_to_copy_to_masters,
             files_to_copy_to_installer=files_to_copy_to_installer,
             agents=0,
             public_agents=0,
     ) as cluster:
         (master, ) = cluster.masters
         # Reading the copied file back verifies the master-side copy.
         args = ['cat', str(master_destination_path)]
         result = master.run_as_root(args=args)
         assert result.stdout.decode() == content
コード例 #21
0
    def test_not_key_value(self, oss_installer: Path, tmpdir: local) -> None:
        """
        An error is shown if YAML is given for ``--extra-config`` which is not
        a key-value mapping.
        """
        # 'example' parses as a plain YAML scalar, not a mapping.
        invalid_file = tmpdir.join(uuid.uuid4().hex)
        invalid_file.write('example')
        runner = CliRunner()
        result = runner.invoke(
            minidcos,
            [
                'docker',
                'create',
                str(oss_installer),
                '--extra-config',
                str(invalid_file),
            ],
            catch_exceptions=False,
        )
        # Click signals option-validation failures with exit code 2.
        assert result.exit_code == 2
        # yapf breaks multi-line noqa, see
        # https://github.com/google/yapf/issues/524.
        # yapf: disable
        expected_message = dedent(
           """\
           Usage: minidcos docker create [OPTIONS] INSTALLER

           Error: Invalid value for "--extra-config": "example" is not a valid DC/OS configuration
            """,# noqa: E501,E261
        )
        # yapf: enable
        assert result.output == expected_message
コード例 #22
0
def test_reinstall_package_and_keep_downloads(
        tmpdir, capsys, settings, staticsite_path):
    """``reinstall --keep-downloads`` keeps the package in the cache."""
    digest = get_file_sha256(staticsite_path.strpath)

    catalog_file = tmpdir.join('source').join('catalog.yml')
    catalog_file.write_text(
        'all:\n'
        '  the-site:\n'
        '    name: A great web site\n'
        '    version: 2017-06\n'
        '    sha256sum: {sha256sum}\n'
        '    size: 3027988\n'
        '    url: file://{staticsite_path}\n'
        '    type: static-site'.format(sha256sum=digest, staticsite_path=staticsite_path),
        'utf-8')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_file.strpath))
    call_command('catalog', 'cache', 'update')

    package_cache = Path(settings.CATALOG_CACHE_ROOT) / 'packages'
    install_dir = Path(settings.CATALOG_NGINX_INSTALL_DIR)
    assert install_dir.join('the-site').check(exists=False)

    call_command('catalog', 'install', 'the-site')
    assert not (package_cache / 'the-site-2017-06').exists()

    # Discard the output produced so far.
    out, err = capsys.readouterr()

    # A plain reinstall must not leave the download behind.
    call_command('catalog', 'reinstall', 'the-site')
    out, err = capsys.readouterr()
    assert out.strip() == (
        'Removing the-site-2017-06\n'
        'Installing the-site-2017-06')
    assert err.strip() == ''
    assert not (package_cache / 'the-site-2017-06').exists()

    # With --keep-downloads the cached package must survive the reinstall.
    call_command('catalog', 'reinstall', '--keep-downloads', 'the-site')
    out, err = capsys.readouterr()
    assert out.strip() == (
        'Removing the-site-2017-06\n'
        'Installing the-site-2017-06')
    assert err.strip() == ''
    assert (package_cache / 'the-site-2017-06').exists()
コード例 #23
0
def test_install_unavailable_package(tmpdir, settings, staticsite_path):
    """Installing a package absent from the catalog raises CommandError."""
    catalog_file = tmpdir.join('source').join('catalog.yml')
    catalog_file.write_text('all: {}', 'utf-8')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_file.strpath))
    call_command('catalog', 'cache', 'update')

    install_dir = Path(settings.CATALOG_NGINX_INSTALL_DIR)
    assert install_dir.join('the-site').check(exists=False)

    with pytest.raises(CommandError) as excinfo:
        call_command('catalog', 'install', 'the-site')

    assert 'No such package: the-site' in excinfo.exconly()
    # Nothing must have been installed by the failed command.
    assert install_dir.join('the-site').check(exists=False)
コード例 #24
0
ファイル: conftest.py プロジェクト: jvrana/poetry-hooks
def create_project(path: LocalPath, project_name, pkg=None, git: bool = True):
    """Yield a freshly created project directory with the CWD set inside it.

    Optionally creates a package sub-directory and initialises a git repo.
    """
    project_dir = path.join(project_name).mkdir()
    if pkg:
        project_dir.join(pkg).mkdir()
    # Run the caller's code from inside the project directory.
    with project_dir.as_cwd():
        if git:
            cmd_output("git", "init")
        yield project_dir
コード例 #25
0
def test_install_package_already_in_extra_cache(
        tmpdir, capsys, settings, staticsite_path, mocker):
    """A package pre-seeded in an extra cache installs without any download."""
    digest = get_file_sha256(staticsite_path.strpath)

    catalog_file = tmpdir.join('source').join('catalog.yml')
    catalog_file.write_text(
        'all:\n'
        '  the-site:\n'
        '    name: A great web site\n'
        '    version: 2017-06\n'
        '    sha256sum: {sha256sum}\n'
        '    size: 3027988\n'
        '    url: file://{staticsite_path}\n'
        '    type: static-site'.format(sha256sum=digest, staticsite_path=staticsite_path),
        'utf-8')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(catalog_file.strpath))
    call_command('catalog', 'cache', 'update')

    install_dir = Path(settings.CATALOG_NGINX_INSTALL_DIR)
    assert install_dir.join('the-site').check(exists=False)

    # Pre-seed an extra cache with the package content.
    extra_cache = tmpdir.mkdir('extra-cache')
    extra_cache.join('the-site-2017-06').write_binary(
        staticsite_path.read_binary())

    # Get urlretrieve to fail unconditionally, so that the test will fail if
    # it is called, which would imply the extra cache was not used.
    def fake_urlretrieve(*args, **kwargs):
        raise URLRetrieveError('failed', 'file://{staticsite_path}'.format(staticsite_path=staticsite_path))

    mocker.patch(
        'ideascube.serveradmin.catalog.urlretrieve',
        side_effect=fake_urlretrieve)

    call_command(
        'catalog', 'install', '--package-cache', extra_cache.strpath,
        'the-site')
    out, err = capsys.readouterr()
    assert out.strip() == 'Installing the-site-2017-06'
    assert err.strip() == ''
    assert install_dir.join('the-site').join('index.html').read_binary() == (
        b'<html></html>')
コード例 #26
0
def test_remove_remote(tmpdir, settings, capsys):
    """Removing a remote deletes its entry from the remotes directory."""
    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    remote = {
        'id': 'foo', 'name': 'Content from Foo',
        'url': 'file://{}'.format(catalog_file.strpath),
        }

    call_command(
        'catalog', 'remotes', 'add', remote['id'], remote['name'],
        remote['url'])
    call_command('catalog', 'remotes', 'remove', remote['id'])

    # The remotes directory still exists, but holds no entries.
    remotes_dir = Path(settings.CATALOG_STORAGE_ROOT).join('remotes')
    assert remotes_dir.check(dir=True)
    assert remotes_dir.listdir() == []
コード例 #27
0
 def test_send_symlink(self, dcos_node: Node, tmpdir: local) -> None:
     """
     If sending the path to a symbolic link, the link's target is sent.
     """
     # Unique content proves that we read back exactly what we sent.
     random = str(uuid.uuid4())
     # Keep the real file and the symlink in separate directories so the
     # link cannot be resolved by accident.
     dir_containing_real_file = tmpdir.mkdir(uuid.uuid4().hex)
     dir_containing_symlink = tmpdir.mkdir(uuid.uuid4().hex)
     local_file = dir_containing_real_file.join('example_file.txt')
     local_file.write(random)
     symlink_file = dir_containing_symlink.join('symlink.txt')
     symlink_file_path = Path(str(symlink_file))
     symlink_file_path.symlink_to(target=Path(str(local_file)))
     master_destination_dir = '/etc/{random}'.format(random=random)
     master_destination_path = Path(master_destination_dir) / 'file.txt'
     # Send the symlink path; the node should receive the target's content.
     dcos_node.send_file(
         local_path=symlink_file_path,
         remote_path=master_destination_path,
     )
     args = ['cat', str(master_destination_path)]
     result = dcos_node.run(args=args)
     assert result.stdout.decode() == random
コード例 #28
0
def test_remove_remote(tmpdir, settings, capsys, monkeypatch):
    """Removing a remote deletes its file and empties the catalog cache."""
    # Avoid real network access during the implicit cache update.
    monkeypatch.setattr('ideascube.serveradmin.catalog.urlretrieve',
                        fake_urlretrieve)

    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write('all:\n  foovideos:\n    name: Videos from Foo')

    remote = {
        'id': 'foo',
        'name': 'Content from Foo',
        'url': 'file://{}'.format(catalog_file.strpath),
    }

    call_command('catalog', 'remotes', 'add', remote['id'], remote['name'],
                 remote['url'])
    call_command('catalog', 'remotes', 'remove', remote['id'])

    # The remotes directory still exists, but holds no entries.
    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    remotes_dir = Path(settings.CATALOG_STORAGE_ROOT).join('remotes')
    assert remotes_dir.check(dir=True)
    assert remotes_dir.listdir() == []

    # The cache must have been refreshed to an empty catalog.
    cached_catalog = cache_dir.join('catalog.yml')
    assert cached_catalog.check(file=True)
    with cached_catalog.open('r') as f:
        assert yaml.safe_load(f.read()) == {}

    # The command is expected to run silently.
    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == ''
コード例 #29
0
def test_add_remote(tmpdir, settings, capsys):
    """Adding a remote persists it as JSON under the storage root."""
    catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    catalog_file.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    expected = {
        'id': 'foo', 'name': 'Content from Foo',
        'url': 'file://{}'.format(catalog_file.strpath),
        }

    call_command(
        'catalog', 'remotes', 'add', expected['id'], expected['name'],
        expected['url'])

    # The remote must be stored as remotes/foo.json with the same fields.
    remotes_dir = Path(settings.CATALOG_STORAGE_ROOT).join('remotes')
    assert remotes_dir.check(dir=True)
    remote_file = remotes_dir.join('foo.json')
    assert remote_file.check(file=True)

    with remote_file.open('r') as f:
        assert json.load(f) == expected
コード例 #30
0
def test_cannot_add_duplicate_remote(tmpdir, settings, monkeypatch, capsys):
    """Re-adding a known remote is a no-op; a conflicting url is an error."""
    monkeypatch.setattr('ideascube.serveradmin.catalog.urlretrieve',
                        fake_urlretrieve)

    source_catalog = tmpdir.mkdir('source').join('catalog.yml')
    source_catalog.write('all:\n  foovideos:\n    name: Videos from Foo')

    remote_id = 'foo'
    remote_name = 'Content from Foo'
    remote_url = 'file://{}'.format(source_catalog.strpath)

    call_command('catalog', 'remotes', 'add', remote_id, remote_name,
                 remote_url)

    remotes_dir = Path(settings.CATALOG_STORAGE_ROOT).join('remotes')
    assert remotes_dir.check(dir=True)

    remote_file = remotes_dir.join('foo.yml')
    assert remote_file.check(file=True)
    old_mtime = remote_file.mtime()

    # Discard any output produced so far
    capsys.readouterr()

    # Adding the same remote with the same url should not fail.
    call_command('catalog', 'remotes', 'add', remote_id, remote_name,
                 remote_url)

    out, _ = capsys.readouterr()
    assert out == 'Not adding already existing remote: "{}"\n'.format(
        remote_id)

    # But should fail with different urls.
    with pytest.raises(CommandError):
        call_command('catalog', 'remotes', 'add', remote_id, remote_name,
                     remote_url + "bad")

    # The failed attempt must not have rewritten the existing definition
    assert remote_file.mtime() == old_mtime
コード例 #31
0
    def test_sudo(
        self,
        dcos_node: Node,
        tmpdir: local,
    ) -> None:
        """
        It is possible to use sudo to send a file to a directory which the
        user does not have access to.
        """
        # Create a throw-away user and copy the default user's SSH
        # credentials so we can connect to the node as that user.
        testuser = str(uuid.uuid4().hex)
        dcos_node.run(args=['useradd', testuser])
        dcos_node.run(
            args=['cp', '-R', '$HOME/.ssh', '/home/{}/'.format(testuser)],
            shell=True,
        )

        # Grant the new user passwordless sudo.
        sudoers_line = '{user} ALL=(ALL) NOPASSWD: ALL'.format(user=testuser)
        dcos_node.run(
            args=['echo "' + sudoers_line + '">> /etc/sudoers'],
            shell=True,
        )

        random = str(uuid.uuid4())
        local_file = tmpdir.join('example_file.txt')
        local_file.write(random)
        # /etc is not writable by an unprivileged user.
        master_destination_dir = '/etc/{testuser}/{random}'.format(
            testuser=testuser,
            random=random,
        )
        master_destination_path = Path(master_destination_dir) / 'file.txt'
        # Without sudo the copy must be refused...
        with pytest.raises(CalledProcessError):
            dcos_node.send_file(
                local_path=Path(str(local_file)),
                remote_path=master_destination_path,
                user=testuser,
            )
        # ...but with sudo it succeeds.
        dcos_node.send_file(
            local_path=Path(str(local_file)),
            remote_path=master_destination_path,
            user=testuser,
            sudo=True,
        )

        # The file was written via sudo, so it must be owned by root.
        args = ['stat', '-c', '"%U"', str(master_destination_path)]
        result = dcos_node.run(args=args, shell=True)
        assert result.stdout.decode().strip() == 'root'

        # Implicitly asserts SSH connection closed by ``send_file``.
        dcos_node.run(args=['userdel', '-r', testuser])
コード例 #32
0
def test_reinstall_unavailable_package(tmpdir, capsys, settings, staticsite_path):
    """Reinstalling a package its remote no longer provides must fail.

    The package is installed from a remote, then removed from the remote
    catalog and the cache refreshed; ``catalog reinstall`` should then
    raise a ``CommandError`` naming the missing package.
    """
    sha256sum = get_file_sha256(staticsite_path.strpath)

    # Fix: the 'source' directory must be created with mkdir() -- join()
    # alone does not create it, so write_text() below would fail with
    # ENOENT.  Every sibling test uses mkdir('source') for this reason.
    remote_catalog_file = tmpdir.mkdir('source').join('catalog.yml')
    remote_catalog_file.write_text(
        'all:\n'
        '  the-site:\n'
        '    name: A great web site\n'
        '    version: 2017-06\n'
        '    sha256sum: {sha256sum}\n'
        '    size: 3027988\n'
        '    url: file://{staticsite_path}\n'
        '    type: static-site'.format(sha256sum=sha256sum, staticsite_path=staticsite_path),
        'utf-8')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(remote_catalog_file.strpath))
    call_command('catalog', 'cache', 'update')

    install_dir = Path(settings.CATALOG_NGINX_INSTALL_DIR)
    assert install_dir.join('the-site').check(exists=False)

    call_command('catalog', 'install', 'the-site')

    # The package was removed from the remote
    remote_catalog_file.write_text('all: {}', 'utf-8')
    call_command('catalog', 'cache', 'update')

    # Reset the captured output before running the command under test
    capsys.readouterr()

    with pytest.raises(CommandError) as excinfo:
        call_command('catalog', 'reinstall', 'the-site')

    assert 'No such package: the-site' in excinfo.exconly()
コード例 #33
0
ファイル: test_aws.py プロジェクト: Fabs/dcos-e2e
    def test_custom_key_pair(self, tmpdir: local):
        """
        It is possible to pass a custom key pair to the AWS backend.
        """
        # Generate a locally-created key pair under a unique EC2 key name.
        key_name = 'e2e-test-{random}'.format(random=uuid.uuid4().hex)
        private_key_path = Path(str(tmpdir.join('private_key')))
        public_key_path = Path(str(tmpdir.join('public_key')))
        _write_key_pair(
            public_key_path=public_key_path,
            private_key_path=private_key_path,
        )
        # Tell the backend to use our key pair instead of its own.
        backend = AWS(aws_key_pair=(key_name, private_key_path))
        region_name = backend.aws_region
        ec2 = boto3.client('ec2', region_name=region_name)
        # Register the public half with EC2 under the chosen key name.
        ec2.import_key_pair(
            KeyName=key_name,
            PublicKeyMaterial=public_key_path.read_bytes(),
        )

        try:
            with Cluster(
                    cluster_backend=backend,
                    agents=0,
                    public_agents=0,
            ) as cluster:
                (master, ) = cluster.masters
                node = Node(
                    public_ip_address=master.public_ip_address,
                    private_ip_address=master.private_ip_address,
                    default_user=master.default_user,
                    ssh_key_path=private_key_path,
                )

                # A successful SSH command proves the cluster was launched
                # with our private key.
                node.run(args=['echo', '1'])
        finally:
            # Always remove the imported key pair from EC2, even if the
            # cluster failed to come up.
            ec2.delete_key_pair(KeyName=key_name)
コード例 #34
0
def test_split_cache(tmpdir, settings, monkeypatch):
    """A legacy single-file cache is split into cache and storage parts."""
    monkeypatch.setattr('ideascube.serveradmin.catalog.urlretrieve',
                        fake_urlretrieve)

    source_catalog = tmpdir.mkdir('source').join('catalog.yml')
    source_catalog.write('all:\n  foovideos:\n    name: Videos from Foo')

    call_command('catalog', 'remotes', 'add', 'foo', 'Content from Foo',
                 'file://{}'.format(source_catalog.strpath))
    call_command('catalog', 'cache', 'update')

    # Now write the catalog cache in the old format
    old_cache = yaml.dump({
        'installed': {},
        'available': {'foovideos': {'name': 'Videos from Foo'}},
    })

    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    storage_dir = Path(settings.CATALOG_STORAGE_ROOT)

    cache_dir.join('catalog.yml').write(old_cache)
    storage_dir.join('installed.yml').remove()

    # And check that it migrates properly
    call_command('catalog', 'cache', 'update')

    # The cache file keeps only the 'available' packages...
    assert yaml.safe_load(cache_dir.join('catalog.yml').read()) == {
        'foovideos': {'name': 'Videos from Foo'},
    }
    # ...while installed packages move to their own storage file
    assert yaml.safe_load(storage_dir.join('installed.yml').read()) == {}
コード例 #35
0
 def test_send_file(
     self,
     dcos_node: Node,
     tmpdir: local,
 ) -> None:
     """
     It is possible to send a file to a cluster node as the default user.
     """
     # Use a random payload so the assertion cannot pass by accident.
     content = str(uuid.uuid4())
     local_file = tmpdir.join('example_file.txt')
     local_file.write(content)
     # NOTE(review): the destination directory does not exist beforehand;
     # presumably ``send_file`` creates it -- confirm against its contract.
     master_destination_path = Path('/etc/new_dir/on_master_node.txt')
     dcos_node.send_file(
         local_path=Path(str(local_file)),
         remote_path=master_destination_path,
     )
     # Read the file back on the node to verify the transfer.
     args = ['cat', str(master_destination_path)]
     result = dcos_node.run(args=args)
     assert result.stdout.decode() == content
コード例 #36
0
ファイル: test_aws.py プロジェクト: adamtheturtle/dcos-e2e
    def test_install_dcos_with_custom_genconf(
        self,
        oss_artifact_url: str,
        tmpdir: local,
    ) -> None:
        """
        It is possible to install DC/OS on an AWS including
        custom files in the ``genconf`` directory.
        """
        cluster_backend = AWS()
        with Cluster(
            cluster_backend=cluster_backend,
            agents=0,
            public_agents=0,
        ) as cluster:
            (master, ) = cluster.masters
            # Write a custom ip-detect script that always reports the
            # master's private IP address.
            ip_detect_file = tmpdir.join('ip-detect')
            ip_detect_contents = dedent(
                """\
                #!/bin/bash
                echo {ip_address}
                """,
            ).format(ip_address=master.private_ip_address)
            ip_detect_file.write(ip_detect_contents)

            cluster.install_dcos_from_url(
                build_artifact=oss_artifact_url,
                dcos_config=cluster.base_config,
                log_output_live=True,
                ip_detect_path=cluster_backend.ip_detect_path,
                # The custom file is copied into ``genconf`` and should
                # take precedence over the backend's default script.
                files_to_copy_to_genconf_dir=[
                    (Path(str(ip_detect_file)), Path('/genconf/ip-detect')),
                ],
            )
            cluster.wait_for_dcos_oss()
            # The script installed on the node must be our custom one,
            # not the backend's default.
            cat_result = master.run(
                args=['cat', '/opt/mesosphere/bin/detect_ip'],
            )
            node_script_contents = cat_result.stdout.decode()
            assert node_script_contents == ip_detect_contents
            backend_script_path = cluster_backend.ip_detect_path
            backend_script_contents = backend_script_path.read_text()
            assert node_script_contents != backend_script_contents
コード例 #37
0
    def test_log_name(self, temporary_log_directory: local,
                      patch_logging_getlogger: MagicMock, test_input_name: str,
                      test_input_filename: str) -> None:
        """Test the function is assigned the correct name."""

        # A falsy filename means the logger gets no log file path
        log_file_path = None
        if test_input_filename:
            log_file = temporary_log_directory.join(test_input_filename)
            log_file_path = os.path.join(log_file.dirname, log_file.basename)

        # Run the `create_logger` function
        _ = create_logger(test_input_name, log_file_path)

        # Assert the correct name is used
        patch_logging_getlogger.assert_called_with(test_input_name)
コード例 #38
0
    def test_install_from_path_with_genconf_files(
        self,
        cluster_backend: ClusterBackend,
        oss_installer: Path,
        tmpdir: local,
    ) -> None:
        """
        It is possible to copy files to the ``genconf`` directory.
        """
        with Cluster(
            cluster_backend=cluster_backend,
            masters=1,
            agents=0,
            public_agents=0,
        ) as cluster:

            (master, ) = cluster.masters
            # Write a custom ip-detect script that always echoes the
            # master's private IP address.
            ip_detect_file = tmpdir.join('ip-detect')
            ip_detect_contents = dedent(
                """\
                #!/bin/bash
                echo {ip_address}
                """,
            ).format(ip_address=master.private_ip_address)
            ip_detect_file.write(ip_detect_contents)

            master.install_dcos_from_path(
                dcos_installer=oss_installer,
                dcos_config=cluster.base_config,
                ip_detect_path=cluster_backend.ip_detect_path,
                # Test that this overwrites the ``ip-detect`` script given
                # by ``ip_detect_path``.
                files_to_copy_to_genconf_dir=[
                    (Path(str(ip_detect_file)), Path('/genconf/ip-detect')),
                ],
                role=Role.MASTER,
            )
            cluster.wait_for_dcos_oss()
            # The script installed on the node must be our custom one.
            cat_result = master.run(
                args=['cat', '/opt/mesosphere/bin/detect_ip'],
            )
            assert cat_result.stdout.decode() == ip_detect_contents
コード例 #39
0
    def test_log_format(self, temporary_log_directory: local,
                        patch_logging_formatter: MagicMock,
                        test_input_name: str,
                        test_input_filename: str) -> None:
        """Test the format of the log."""

        # A falsy filename means the logger gets no log file path
        log_file_path = None
        if test_input_filename:
            log_file = temporary_log_directory.join(test_input_filename)
            log_file_path = os.path.join(log_file.dirname, log_file.basename)

        # Run the `create_logger` function
        _ = create_logger(test_input_name, log_file_path)

        # Assert the correct logging format is applied for the log
        patch_logging_formatter.assert_called_once_with(EXPECTED_LOG_FORMAT)
コード例 #40
0
    def test_log_level(self, temporary_log_directory: local,
                       patch_logging_getlogger: MagicMock,
                       test_input_name: str, test_input_filename: str) -> None:
        """Test the correct logging level is set."""

        # A falsy filename means the logger gets no log file path
        log_file_path = None
        if test_input_filename:
            log_file = temporary_log_directory.join(test_input_filename)
            log_file_path = os.path.join(log_file.dirname, log_file.basename)

        # Run the `create_logger` function
        _ = create_logger(test_input_name, log_file_path)

        # Assert the correct logging level is set for the log
        patch_logging_getlogger.return_value.setLevel.assert_called_once_with(
            logging.DEBUG)
コード例 #41
0
    def test_log_output(self, temporary_log_directory: local,
                        patch_logging_getlogger: MagicMock,
                        test_input_name: str,
                        test_input_filename: str) -> None:
        """Test the function outputs the expected log."""

        # A falsy filename means the logger gets no log file path
        log_file_path = None
        if test_input_filename:
            log_file = temporary_log_directory.join(test_input_filename)
            log_file_path = os.path.join(log_file.dirname, log_file.basename)

        # Run the `create_logger` function
        test_output = create_logger(test_input_name, log_file_path)

        # Assert the output is as expected
        assert test_output == patch_logging_getlogger.return_value
コード例 #42
0
def test_cannot_add_duplicate_remote(tmpdir, settings, capsys):
    """A remote is added at most once; conflicting ids or urls are errors."""
    source_catalog = tmpdir.mkdir('source').join('catalog.yml')
    source_catalog.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    remote_id = 'foo'
    remote_name = 'Content from Foo'
    remote_url = 'file://{}'.format(source_catalog.strpath)

    call_command(
        'catalog', 'remotes', 'add', remote_id, remote_name, remote_url)

    remotes_dir = Path(settings.CATALOG_STORAGE_ROOT).join('remotes')

    assert remotes_dir.check(dir=True)

    remote_file = remotes_dir.join('foo.json')
    assert remote_file.check(file=True)
    old_mtime = remote_file.mtime()

    # Discard any output produced so far
    capsys.readouterr()

    # Adding a remote with the same id and url should be ignored
    call_command(
        'catalog', 'remotes', 'add', remote_id, remote_name, remote_url)

    out, err = capsys.readouterr()
    assert out.strip() == ''
    assert err.strip() == 'This remote already exists, ignoring'

    # Adding a remote with a different id but the same url should fail
    with pytest.raises(CommandError) as excinfo:
        call_command(
            'catalog', 'remotes', 'add', remote_id + '2', remote_name,
            remote_url)

    excinfo.match('A remote with this url already exists')
    assert remotes_dir.join('foo2.json').check(exists=False)

    # Adding a remote with the same id but a different url should fail
    with pytest.raises(CommandError) as excinfo:
        call_command(
            'catalog', 'remotes', 'add', remote_id, remote_name,
            remote_url + "bad")

    excinfo.match('A remote with this id already exists')
    # The failed attempts must not have touched the existing definition
    assert remote_file.mtime() == old_mtime
コード例 #43
0
def test_move_remotes(tmpdir, settings):
    """Remote definitions in the legacy cache location are migrated."""
    source_catalog = tmpdir.mkdir('source').join('catalog.yml')
    source_catalog.write(
        'all:\n  foovideos:\n    name: Videos from Foo')

    call_command(
        'catalog', 'remotes', 'add', 'foo', 'Content from Foo',
        'file://{}'.format(source_catalog.strpath))

    # Now move the remotes to the old location
    cache_dir = Path(settings.CATALOG_CACHE_ROOT)
    storage_dir = Path(settings.CATALOG_STORAGE_ROOT)

    storage_dir.join('remotes').move(cache_dir.join('remotes'))
    old_remotes = cache_dir.join('remotes')
    # Convert the definition back to the legacy YAML format as well
    content = json.loads(old_remotes.join('foo.json').read())
    old_remotes.join('foo.yml').write(yaml.safe_dump(content))
    old_remotes.join('foo.json').remove()
    assert storage_dir.join('remotes').check(exists=False)

    # And check that it migrates properly
    call_command('catalog', 'cache', 'update')

    assert cache_dir.join('remotes').check(exists=False)
    assert storage_dir.join('remotes', 'foo.json').check(file=True)