Example #1
    def copy_to_cache(self, target_dir):
        """Copies the source content to the supplied local directory.

        :param target_dir: Path to the destination directory.
        :type target_dir: str
        :returns: None
        :rtype: None
        :raises DCOSException: if the source cannot be fetched
        """

        try:
            with util.tempdir() as tmp_dir:

                tmp_file = os.path.join(tmp_dir, 'packages.zip')
                # Download the zip file.
                req = http.get(self.url)
                if req.status_code == 200:
                    with open(tmp_file, 'wb') as f:
                        for chunk in req.iter_content(1024):
                            f.write(chunk)
                else:
                    raise Exception(
                        'HTTP GET for {} did not return 200: {}'.format(
                            self.url, req.status_code))

                # Unzip the downloaded file.
                packages_zip = zipfile.ZipFile(tmp_file, 'r')
                packages_zip.extractall(tmp_dir)

                # Move the enclosing directory to the target directory
                enclosing_dirs = [
                    item for item in os.listdir(tmp_dir)
                    if os.path.isdir(os.path.join(tmp_dir, item))
                ]

                # There should only be one directory present after extracting.
                assert len(enclosing_dirs) == 1

                enclosing_dir = os.path.join(tmp_dir, enclosing_dirs[0])

                shutil.copytree(enclosing_dir, target_dir)

                # Set appropriate file permissions on the scripts.
                x_mode = (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
                          | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)

                scripts_dir = os.path.join(target_dir, 'scripts')
                scripts = os.listdir(scripts_dir)

                for script in scripts:
                    script_path = os.path.join(scripts_dir, script)
                    if os.path.isfile(script_path):
                        os.chmod(script_path, x_mode)

                return None

        except Exception:
            logger.exception('Unable to fetch packages from URL: %s', self.url)

            raise DCOSException('Unable to fetch packages from [{}]'.format(
                self.url))
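Every example in this listing enters its temporary workspace through util.tempdir(). The helper's implementation is not shown here, so the following is only a minimal sketch of what such a context manager typically looks like, assuming it wraps tempfile.mkdtemp() and removes the directory when the block exits (the real dcos util.tempdir may differ):

import contextlib
import shutil
import tempfile


@contextlib.contextmanager
def tempdir():
    """Yield a temporary directory path and delete it when the block exits."""
    tmp_dir = tempfile.mkdtemp()
    try:
        yield tmp_dir
    finally:
        # Best-effort cleanup; ignore anything already removed.
        shutil.rmtree(tmp_dir, ignore_errors=True)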
Example #2
def test_get_config(load_path_mock):
    with env(), util.tempdir() as tempdir:
        os.environ.pop('DCOS_CONFIG', None)
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        # no config file of any type
        # this should create the global config
        config.get_config()
        global_toml = os.path.join(tempdir, "dcos.toml")
        load_path_mock.assert_called_once_with(global_toml, False)
        load_path_mock.reset_mock()

        # create old global config toml
        global_toml = create_global_config(tempdir)
        config.get_config()
        load_path_mock.assert_called_once_with(global_toml, False)
        load_path_mock.reset_mock()

        # clusters dir, no clusters
        _create_clusters_dir(tempdir)
        config.get_config()
        load_path_mock.assert_called_once_with(global_toml, False)
        load_path_mock.reset_mock()

        cluster_id = "fake-cluster"
        cluster_path = add_cluster_dir(cluster_id, tempdir)
        cluster_toml = os.path.join(cluster_path, "dcos.toml")
        config.get_config(True)
        load_path_mock.assert_called_with(cluster_toml, True)
Example #3
def test_get_config(load_path_mock):
    with env(), util.tempdir() as tempdir:
        os.environ.pop('DCOS_CONFIG', None)
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        # no config file of any type
        with pytest.raises(DCOSException) as e:
            config.get_config()

        msg = ("No cluster is attached. "
               "Please run `dcos cluster attach <cluster-name>`")
        assert str(e.value) == msg
        load_path_mock.assert_not_called()

        # create old global config toml
        global_toml = create_global_config(tempdir)
        config.get_config()
        load_path_mock.assert_called_once_with(global_toml, False)

        # clusters dir, no clusters
        _create_clusters_dir(tempdir)
        with pytest.raises(DCOSException) as e:
            config.get_config()
        assert str(e.value) == msg

        cluster_id = "fake-cluster"
        cluster_path = add_cluster_dir(cluster_id, tempdir)
        cluster_toml = os.path.join(cluster_path, "dcos.toml")
        config.get_config(True)
        load_path_mock.assert_any_call(cluster_toml, True)
Example #4
def temp_dcos_dir():
    with util.tempdir() as tempdir:
        old_dcos_dir = os.environ.get(constants.DCOS_DIR_ENV)
        os.environ[constants.DCOS_DIR_ENV] = tempdir
        yield tempdir
        if old_dcos_dir is None:
            os.environ.pop(constants.DCOS_DIR_ENV)
        else:
            os.environ[constants.DCOS_DIR_ENV] = old_dcos_dir
Example #5
def test_download_sandbox_to_target():
    with tempdir() as tmp:
        targetdir = '--target-dir=' + tmp + '/sandbox'
        returncode, stdout, stderr = exec_command(
            ['dcos', 'task', 'download', 'download-app', targetdir])

        assert returncode == 0
        assert stderr == b''
        assert os.path.exists(tmp + '/sandbox')
Example #6
def test_get_clusters_with_configured_link(get_linked_clusters):
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir
        cluster_id = "a8b53513-63d4-4059-8b08-fde4fe1f1a83"
        add_cluster_dir(cluster_id, tempdir)
        get_linked_clusters.return_value = [_linked_cluster(cluster_id)]

        clusters = cluster.get_clusters(True)
        assert len(clusters) == 1
        assert type(clusters[0]) == cluster.Cluster
Example #7
def dcos_dir_tmp_copy():
    with util.tempdir() as tempdir:
        old_dcos_dir_env = os.environ.get(constants.DCOS_DIR_ENV)
        old_dcos_dir = config.get_config_dir_path()
        os.environ[constants.DCOS_DIR_ENV] = tempdir
        copy_tree(old_dcos_dir, tempdir)

        yield tempdir

        if old_dcos_dir_env:
            os.environ[constants.DCOS_DIR_ENV] = old_dcos_dir_env
        else:
            os.environ.pop(constants.DCOS_DIR_ENV)
Example #8
    def copy_to_cache(self, target_dir):
        """Copies the source content to the supplied local directory.

        :param target_dir: Path to the destination directory.
        :type target_dir: str
        :returns: None
        :rtype: None
        :raises DCOSException: if the source cannot be fetched
        """

        try:
            with util.tempdir() as tmp_dir:

                tmp_file = os.path.join(tmp_dir, "packages.zip")
                # Download the zip file.
                req = http.get(self.url)
                if req.status_code == 200:
                    with open(tmp_file, "wb") as f:
                        for chunk in req.iter_content(1024):
                            f.write(chunk)
                else:
                    raise Exception("HTTP GET for {} did not return 200: {}".format(self.url, req.status_code))

                # Unzip the downloaded file.
                packages_zip = zipfile.ZipFile(tmp_file, "r")
                packages_zip.extractall(tmp_dir)

                # Move the enclosing directory to the target directory
                enclosing_dirs = [item for item in os.listdir(tmp_dir) if os.path.isdir(os.path.join(tmp_dir, item))]

                # There should only be one directory present after extracting.
                assert len(enclosing_dirs) == 1

                enclosing_dir = os.path.join(tmp_dir, enclosing_dirs[0])

                shutil.copytree(enclosing_dir, target_dir)

                # Set appropriate file permissions on the scripts.
                x_mode = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP

                scripts_dir = os.path.join(target_dir, "scripts")
                scripts = os.listdir(scripts_dir)

                for script in scripts:
                    script_path = os.path.join(scripts_dir, script)
                    if os.path.isfile(script_path):
                        os.chmod(script_path, x_mode)

                return None

        except Exception:
            raise DCOSException("Unable to fetch packages from [{}]".format(self.url))
Example #9
    def copy_to_cache(self, target_dir):
        """Copies the source content to the supplied local directory.

        :param target_dir: Path to the destination directory.
        :type target_dir: str
        :returns: None
        :rtype: None
        :raises DCOSException: if the source cannot be fetched
        """

        try:
            with util.tempdir() as tmp_dir:

                tmp_file = os.path.join(tmp_dir, 'packages.zip')

                # Download the zip file.
                urllib.request.urlretrieve(self.url, tmp_file)

                # Unzip the downloaded file.
                packages_zip = zipfile.ZipFile(tmp_file, 'r')
                packages_zip.extractall(tmp_dir)

                # Move the enclosing directory to the target directory
                enclosing_dirs = [item
                                  for item in os.listdir(tmp_dir)
                                  if os.path.isdir(
                                      os.path.join(tmp_dir, item))]

                # There should only be one directory present after extracting.
                assert len(enclosing_dirs) == 1

                enclosing_dir = os.path.join(tmp_dir, enclosing_dirs[0])

                shutil.copytree(enclosing_dir, target_dir)

                # Set appropriate file permissions on the scripts.
                x_mode = (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
                          stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)

                scripts_dir = os.path.join(target_dir, 'scripts')
                scripts = os.listdir(scripts_dir)

                for script in scripts:
                    script_path = os.path.join(scripts_dir, script)
                    if os.path.isfile(script_path):
                        os.chmod(script_path, x_mode)

                return None

        except Exception:
            raise DCOSException(
                'Unable to fetch packages from [{}]'.format(self.url))
Example #10
def test_uses_deprecated_config():
    with env(), util.tempdir() as tempdir:
        os.environ.pop('DCOS_CONFIG', None)
        os.environ[constants.DCOS_DIR_ENV] = tempdir
        assert config.get_config_dir_path() == tempdir

        # create old global config toml
        global_toml = create_global_config(tempdir)
        assert config.get_global_config_path() == global_toml
        assert config.uses_deprecated_config() is True

        # create clusters subdir
        _create_clusters_dir(tempdir)
        assert config.uses_deprecated_config() is False
Example #11
def test_get_clusters():
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        # no config file of any type
        assert cluster.get_clusters() == []

        # cluster dir exists, no cluster
        clusters_dir = os.path.join(tempdir, constants.DCOS_CLUSTERS_SUBDIR)
        util.ensure_dir_exists(clusters_dir)
        assert cluster.get_clusters() == []

        # one cluster
        cluster_id = "fake_cluster"
        add_cluster_dir(cluster_id, tempdir)
        assert cluster.get_clusters() == [_cluster(cluster_id)]
Example #12
def test_download_single_file():
    with tempdir() as tmp:

        cwd = os.getcwd()
        os.chdir(tmp)

        returncode, stdout, stderr = exec_command(
            ['dcos', 'task', 'download', 'download-app', '/test/test1'])

        assert returncode == 0
        assert stderr == b''
        assert os.path.exists(tmp + '/test1')

        with open(tmp + '/test1', 'r') as f:
            content = f.read(4)
        assert content == 'test'

        os.chdir(cwd)
Example #13
def dcos_tempdir(copy=False):
    """
    Context manager for getting a temporary DCOS_DIR.

    :param copy: whether or not to copy the current one
    :type copy: bool
    """

    with util.tempdir() as tempdir:
        old_dcos_dir_env = os.environ.get(constants.DCOS_DIR_ENV)
        old_dcos_dir = config.get_config_dir_path()
        os.environ[constants.DCOS_DIR_ENV] = tempdir
        if copy:
            copy_tree(old_dcos_dir, tempdir)

        yield tempdir

        if old_dcos_dir_env:
            os.environ[constants.DCOS_DIR_ENV] = old_dcos_dir_env
        else:
            os.environ.pop(constants.DCOS_DIR_ENV)
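The docstring above describes dcos_tempdir as a context manager; the excerpt does not show how it is registered (presumably with @contextlib.contextmanager), so the following usage is a hedged sketch under that assumption. exec_command is the same test helper already used in Examples #5 and #12:

# Assumes dcos_tempdir is wrapped with @contextlib.contextmanager.
with dcos_tempdir(copy=True) as tmp:
    # The CLI under test sees a disposable copy of the current DCOS_DIR,
    # so any cluster state written here never touches the real config.
    returncode, stdout, stderr = exec_command(['dcos', 'cluster', 'list'])
    assert returncode == 0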
Example #14
def test_get_attached_cluster_path():
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        # no clusters dir
        assert config.get_attached_cluster_path() is None

        # clusters dir, no clusters
        _create_clusters_dir(tempdir)
        assert config.get_attached_cluster_path() is None

        # 1 cluster, not attached
        cluster_id = "fake-cluster"
        cluster_path = add_cluster_dir(cluster_id, tempdir)
        assert config.get_attached_cluster_path() == cluster_path
        attached_path = os.path.join(cluster_path,
                                     constants.DCOS_CLUSTER_ATTACHED_FILE)
        assert os.path.exists(attached_path)

        # attached cluster
        assert config.get_attached_cluster_path() == cluster_path
Example #15
def test_move_to_cluster_config(mock_get, mock_config):
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        create_global_config(tempdir)
        mock_config.return_value = "fake-url"

        cluster_id = "fake"
        mock_resp = mock.Mock()
        mock_resp.json.return_value = {"CLUSTER_ID": cluster_id}
        mock_get.return_value = mock_resp

        assert config.get_config_dir_path() == tempdir
        cluster.move_to_cluster_config()

        clusters_path = os.path.join(tempdir, constants.DCOS_CLUSTERS_SUBDIR)
        assert os.path.exists(clusters_path)
        cluster_path = os.path.join(clusters_path, cluster_id)
        assert os.path.exists(os.path.join(cluster_path, "dcos.toml"))
        assert os.path.exists(
            os.path.join(cluster_path, constants.DCOS_CLUSTER_ATTACHED_FILE))
Example #16
def test_set_attached():
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        cluster_path = add_cluster_dir("a", tempdir)
        # no attached_cluster
        assert cluster.set_attached(cluster_path) is None
        assert config.get_attached_cluster_path() == cluster_path

        cluster_path2 = add_cluster_dir("b", tempdir)
        # attach cluster already attached
        assert cluster.set_attached(cluster_path2) is None
        assert config.get_attached_cluster_path() == cluster_path2

        # attach cluster through environment
        os.environ[constants.DCOS_CLUSTER] = "a"
        assert config.get_attached_cluster_path() == cluster_path

        # attach back to old cluster through environment
        os.environ[constants.DCOS_CLUSTER] = "b"
        assert config.get_attached_cluster_path() == cluster_path2
Example #17
def test_get_clusters():
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir

        # no config file of any type
        assert cluster.get_clusters() == []

        # cluster dir exists, no cluster
        clusters_dir = os.path.join(tempdir, constants.DCOS_CLUSTERS_SUBDIR)
        util.ensure_dir_exists(clusters_dir)
        assert cluster.get_clusters() == []

        # a valid cluster
        cluster_id = "a8b53513-63d4-4059-8b08-fde4fe1f1a83"
        add_cluster_dir(cluster_id, tempdir)

        # Make sure clusters dir can contain random files / folders
        # cf. https://jira.mesosphere.com/browse/DCOS_OSS-1782
        util.ensure_file_exists(os.path.join(clusters_dir, '.DS_Store'))
        util.ensure_dir_exists(os.path.join(clusters_dir, 'not_a_cluster'))

        assert cluster.get_clusters() == [_cluster(cluster_id)]
Example #18
def test_setup_cluster_config(mock_get):
    with env(), util.tempdir() as tempdir:
        os.environ[constants.DCOS_DIR_ENV] = tempdir
        with cluster.setup_directory() as setup_temp:

            cluster.set_attached(setup_temp)

            cluster_id = "fake"
            mock_resp = mock.Mock()
            mock_resp.json.return_value = {
                "CLUSTER_ID": cluster_id,
                "cluster": cluster_id
            }
            mock_get.return_value = mock_resp
            path = cluster.setup_cluster_config("fake_url", setup_temp, False)
            expected_path = os.path.join(
                tempdir, constants.DCOS_CLUSTERS_SUBDIR + "/" + cluster_id)
            assert path == expected_path
            assert os.path.exists(path)
            assert os.path.exists(os.path.join(path, "dcos.toml"))

        assert not os.path.exists(setup_temp)
Example #19
def update_sources(config, validate=False):
    """Overwrites the local package cache with the latest source data.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :rtype: None
    """

    errors = []

    # ensure the cache directory is properly configured
    cache_dir = os.path.expanduser(
        util.get_config_vals(['package.cache'], config)[0])

    # ensure the cache directory exists
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    if not os.path.isdir(cache_dir):
        raise DCOSException(
            'Cache directory does not exist! [{}]'.format(cache_dir))

    # obtain an exclusive file lock on $CACHE/.lock
    lock_path = os.path.join(cache_dir, '.lock')

    with _acquire_file_lock(lock_path):

        # list sources
        sources = list_sources(config)

        for source in sources:

            emitter.publish('Updating source [{}]'.format(source))

            # create a temporary staging directory
            with util.tempdir() as tmp_dir:

                stage_dir = os.path.join(tmp_dir, source.hash())

                # copy to the staging directory
                try:
                    source.copy_to_cache(stage_dir)
                except DCOSException as e:
                    logger.exception(
                        'Failed to copy universe source %s to cache %s',
                        source.url,
                        stage_dir)

                    errors.append(str(e))
                    continue

                # check version
                # TODO(jsancio): move this to the validation when it is forced
                Registry(source, stage_dir).check_version(
                    LooseVersion('1.0'),
                    LooseVersion('3.0'))

                # validate content
                if validate:
                    validation_errors = Registry(source, stage_dir).validate()
                    if len(validation_errors) > 0:
                        errors += validation_errors
                        continue  # keep updating the other sources

                # remove the $CACHE/source.hash() directory
                target_dir = os.path.join(cache_dir, source.hash())
                try:
                    if os.path.exists(target_dir):
                        shutil.rmtree(target_dir,
                                      onerror=_rmtree_on_error,
                                      ignore_errors=False)
                except OSError:
                    logger.exception(
                        'Error removing target directory before move: %s',
                        target_dir)

                    err = "Could not remove directory [{}]".format(target_dir)
                    errors.append(err)
                    continue  # keep updating the other sources

                # move the staging directory to $CACHE/source.hash()
                shutil.move(stage_dir, target_dir)

    if errors:
        raise DCOSException(util.list_to_err(errors))
Example #20
def update_sources(config, validate=False):
    """Overwrites the local package cache with the latest source data.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :rtype: None
    """

    errors = []

    # ensure the cache directory is properly configured
    cache_dir = os.path.expanduser(
        util.get_config_vals(['package.cache'], config)[0])

    # ensure the cache directory exists
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    if not os.path.isdir(cache_dir):
        raise DCOSException(
            'Cache directory does not exist! [{}]'.format(cache_dir))

    # obtain an exclusive file lock on $CACHE/.lock
    lock_path = os.path.join(cache_dir, '.lock')

    with _acquire_file_lock(lock_path):

        # list sources
        sources = list_sources(config)

        for source in sources:

            emitter.publish('Updating source [{}]'.format(source))

            # create a temporary staging directory
            with util.tempdir() as tmp_dir:

                stage_dir = os.path.join(tmp_dir, source.hash())

                # copy to the staging directory
                try:
                    source.copy_to_cache(stage_dir)
                except DCOSException as e:
                    logger.exception(
                        'Failed to copy universe source %s to cache %s',
                        source.url,
                        stage_dir)

                    errors.append(str(e))
                    continue

                # check version
                # TODO(jsancio): move this to the validation when it is forced
                Registry(source, stage_dir).check_version(
                    LooseVersion('1.0'),
                    LooseVersion('2.0'))

                # validate content
                if validate:
                    validation_errors = Registry(source, stage_dir).validate()
                    if len(validation_errors) > 0:
                        errors += validation_errors
                        continue  # keep updating the other sources

                # remove the $CACHE/source.hash() directory
                target_dir = os.path.join(cache_dir, source.hash())
                try:
                    if os.path.exists(target_dir):
                        shutil.rmtree(target_dir,
                                      onerror=_rmtree_on_error,
                                      ignore_errors=False)
                except OSError:
                    logger.exception(
                        'Error removing target directory before move: %s',
                        target_dir)

                    err = "Could not remove directory [{}]".format(target_dir)
                    errors.append(err)
                    continue  # keep updating the other sources

                # move the staging directory to $CACHE/source.hash()
                shutil.move(stage_dir, target_dir)

    if errors:
        raise DCOSException(util.list_to_err(errors))
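Both versions of update_sources serialize cache updates through _acquire_file_lock, whose implementation is not part of this listing. As a rough illustration only, here is one way such a helper can be built on POSIX with a blocking fcntl.flock advisory lock; the real helper may well differ (for example, to support Windows or non-blocking acquisition):

import contextlib
import fcntl


@contextlib.contextmanager
def _acquire_file_lock(lock_path):
    """Hold an exclusive advisory lock on lock_path for the duration of the block."""
    with open(lock_path, 'w') as lock_file:
        fcntl.flock(lock_file, fcntl.LOCK_EX)
        try:
            yield
        finally:
            fcntl.flock(lock_file, fcntl.LOCK_UN)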