Example #1
def _get_cosmos_url():
    """
    :returns: cosmos base url
    :rtype: str
    """
    config = util.get_config()
    cosmos_url = config.get("package.cosmos_url")
    if cosmos_url is None:
        cosmos_url = util.get_config_vals(['core.dcos_url'], config)[0]
    return cosmos_url
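Every example on this page reads required settings through util.get_config_vals. The library's real implementation is not shown here; the following is only a minimal sketch, assuming the (keys, config) argument order used by most of the examples and assuming that missing keys are reported via DCOSException.

def get_config_vals(keys, config):
    """Hypothetical sketch: return the value for each key, failing fast
    if any requested key is absent from the Toml config."""
    missing = [k for k in keys if config.get(k) is None]
    if missing:
        raise DCOSException(
            'Missing required config parameter(s): {}'.format(', '.join(missing)))
    return [config.get(k) for k in keys]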
Example #2
    def __init__(self):
        config = util.get_config()
        self._dcos_url = None
        self._mesos_master_url = None

        mesos_master_url = config.get('core.mesos_master_url')
        if mesos_master_url is None:
            self._dcos_url = util.get_config_vals(config, ['core.dcos_url'])[0]
        else:
            self._mesos_master_url = mesos_master_url
Example #3
    def local_cache(self, config):
        """Returns the file system path to this source's local cache.

        :param config: Configuration dictionary
        :type config: dcos.config.Toml
        :returns: Path to this source's local cache on disk
        :rtype: str or None
        """

        cache_dir = os.path.expanduser(
            util.get_config_vals(config, ["package.cache"])[0])
        return os.path.join(cache_dir, self.hash())
Example #4
    def local_cache(self, config):
        """Returns the file system path to this source's local cache.

        :param config: Configuration dictionary
        :type config: dcos.config.Toml
        :returns: Path to this source's local cache on disk
        :rtype: str or None
        """

        cache_dir = os.path.expanduser(
            util.get_config_vals(['package.cache'], config)[0])
        return os.path.join(cache_dir, self.hash())
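Both local_cache variants expand the configured package.cache directory and append the source's hash; they differ only in the argument order passed to util.get_config_vals. A standalone sketch of the same path construction, using hypothetical values in place of the config entry and the hash:

import os

cache_setting = '~/.dcos/cache'   # hypothetical value of package.cache
source_hash = 'a1b2c3'            # hypothetical result of source.hash()

cache_dir = os.path.expanduser(cache_setting)
local_path = os.path.join(cache_dir, source_hash)
# e.g. /home/user/.dcos/cache/a1b2c3 on Linux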
Example #5
    def __init__(self):
        config = util.get_config()
        self._dcos_url = None
        self._mesos_master_url = None

        mesos_master_url = config.get('core.mesos_master_url')
        if mesos_master_url is None:
            self._dcos_url = util.get_config_vals(['core.dcos_url'], config)[0]
        else:
            self._mesos_master_url = mesos_master_url

        self._timeout = config.get('core.timeout')
Example #6
    def __init__(self):
        config = util.get_config()
        self._dcos_url = None
        self._mesos_master_url = None

        mesos_master_url = config.get("core.mesos_master_url")
        if mesos_master_url is None:
            self._dcos_url = util.get_config_vals(["core.dcos_url"], config)[0]
        else:
            self._mesos_master_url = mesos_master_url

        self._timeout = config.get("core.timeout")
Example #7
def delete_zk_node(znode):
    """Delete Zookeeper node

    :param znode: znode to delete
    :type znode: str
    :rtype: None
    """

    dcos_url = util.get_config_vals(['core.dcos_url'])[0]
    znode_url = urllib.parse.urljoin(
        dcos_url, '/exhibitor/exhibitor/v1/explorer/znode/{}'.format(znode))
    requests.delete(znode_url)
Example #8
def delete_zk_node(znode):
    """Delete Zookeeper node

    :param znode: znode to delete
    :type znode: str
    :rtype: None
    """

    dcos_url = util.get_config_vals(['core.dcos_url'])[0]
    znode_url = urllib.parse.urljoin(
        dcos_url,
        '/exhibitor/exhibitor/v1/explorer/znode/{}'.format(znode))
    http.delete(znode_url)
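In both delete_zk_node variants the znode path handed to urljoin is absolute (it starts with '/'), so it replaces any path already present in the configured dcos_url. A small urljoin illustration with a hypothetical cluster URL:

import urllib.parse

dcos_url = 'http://dcos.example.com/some/path'   # hypothetical core.dcos_url
znode = 'universe'                               # hypothetical znode name
znode_url = urllib.parse.urljoin(
    dcos_url, '/exhibitor/exhibitor/v1/explorer/znode/{}'.format(znode))
# znode_url == 'http://dcos.example.com/exhibitor/exhibitor/v1/explorer/znode/universe'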
Example #9
def _get_marathon_url(config):
    """
    :param config: configuration dictionary
    :type config: config.Toml
    :returns: marathon base url
    :rtype: str
    """

    marathon_url = config.get('marathon.url')
    if marathon_url is None:
        dcos_url = util.get_config_vals(['core.dcos_url'], config)[0]
        marathon_url = urllib.parse.urljoin(dcos_url, 'marathon/')

    return marathon_url
Example #10
def _get_marathon_url(config):
    """
    :param config: configuration dictionary
    :type config: config.Toml
    :returns: marathon base url
    :rtype: str
    """

    marathon_url = config.get("marathon.url")
    if marathon_url is None:
        dcos_url = util.get_config_vals(config, ["core.dcos_url"])[0]
        marathon_url = urllib.parse.urljoin(dcos_url, "marathon/")

    return marathon_url
Example #11
def _get_marathon_url(config):
    """
    :param config: configuration dictionary
    :type config: config.Toml
    :returns: marathon base url
    :rtype: str
    """

    marathon_url = config.get('marathon.url')
    if marathon_url is None:
        dcos_url = util.get_config_vals(['core.dcos_url'], config)[0]
        marathon_url = urllib.parse.urljoin(dcos_url, 'service/marathon/')

    return marathon_url
Example #12
def _get_mesos_url(config):
    """
    :param config: configuration
    :type config: Toml
    :returns: url for the Mesos master
    :rtype: str
    """

    mesos_master_url = config.get('core.mesos_master_url')
    if mesos_master_url is None:
        dcos_url = util.get_config_vals(config, ['core.dcos_url'])[0]
        return urllib.parse.urljoin(dcos_url, 'mesos/')
    else:
        return mesos_master_url
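The Marathon and Mesos URL helpers above join a relative path ('marathon/', 'service/marathon/' or 'mesos/') onto core.dcos_url, so the result depends on whether the configured URL ends with a trailing slash. A quick illustration with hypothetical base URLs:

import urllib.parse

urllib.parse.urljoin('http://dcos.example.com/', 'mesos/')
# -> 'http://dcos.example.com/mesos/'
urllib.parse.urljoin('http://dcos.example.com/base', 'mesos/')
# -> 'http://dcos.example.com/mesos/'   (last path segment is dropped)
urllib.parse.urljoin('http://dcos.example.com/base/', 'mesos/')
# -> 'http://dcos.example.com/base/mesos/'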
Example #13
def list_sources(config):
    """List configured package sources.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :returns: The list of sources, in resolution order
    :rtype: [Source]
    """

    source_uris = util.get_config_vals(['package.sources'], config)[0]

    sources = [url_to_source(s) for s in source_uris]

    errs = [source for source in sources if isinstance(source, Error)]
    if errs:
        raise DCOSException('\n'.join(err.error() for err in errs))

    return sources
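list_sources resolves each URI in package.sources to a Source object and aborts with a single DCOSException if any of them is invalid. A hedged sketch of how a caller might consume the result, assuming util.get_config() and the .url / .hash() attributes that the other examples use:

config = util.get_config()
try:
    for source in list_sources(config):
        print('{} -> {}'.format(source.url, source.hash()))
except DCOSException as e:
    print('invalid package source(s): {}'.format(e))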
Example #14
    def __init__(self, url=None):
        self.url = url or urllib.parse.urljoin(
            util.get_config_vals(['core.dcos_url'])[0], '/mesos_dns/')
Example #15
def update_sources(config, validate=False):
    """Overwrites the local package cache with the latest source data.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :rtype: None
    """

    errors = []

    # ensure the cache directory is properly configured
    cache_dir = os.path.expanduser(
        util.get_config_vals(['package.cache'], config)[0])

    # ensure the cache directory exists
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    if not os.path.isdir(cache_dir):
        raise DCOSException(
            'Cache directory does not exist! [{}]'.format(cache_dir))

    # obtain an exclusive file lock on $CACHE/.lock
    lock_path = os.path.join(cache_dir, '.lock')

    with _acquire_file_lock(lock_path):

        # list sources
        sources = list_sources(config)

        for source in sources:

            emitter.publish('Updating source [{}]'.format(source))

            # create a temporary staging directory
            with util.tempdir() as tmp_dir:

                stage_dir = os.path.join(tmp_dir, source.hash())

                # copy to the staging directory
                try:
                    source.copy_to_cache(stage_dir)
                except DCOSException as e:
                    logger.exception(
                        'Failed to copy universe source %s to cache %s',
                        source.url,
                        stage_dir)

                    errors.append(str(e))
                    continue

                # check version
                # TODO(jsancio): move this to the validation when it is forced
                Registry(source, stage_dir).check_version(
                    LooseVersion('1.0'),
                    LooseVersion('3.0'))

                # validate content
                if validate:
                    validation_errors = Registry(source, stage_dir).validate()
                    if len(validation_errors) > 0:
                        errors += validation_errors
                        continue  # keep updating the other sources

                # remove the $CACHE/source.hash() directory
                target_dir = os.path.join(cache_dir, source.hash())
                try:
                    if os.path.exists(target_dir):
                        shutil.rmtree(target_dir,
                                      onerror=_rmtree_on_error,
                                      ignore_errors=False)
                except OSError:
                    logger.exception(
                        'Error removing target directory before move: %s',
                        target_dir)

                    err = "Could not remove directory [{}]".format(target_dir)
                    errors.append(err)
                    continue  # keep updating the other sources

                # move the staging directory to $CACHE/source.hash()
                shutil.move(stage_dir, target_dir)

    if errors:
        raise DCOSException(util.list_to_err(errors))
Example #16
def update_sources(config, validate=False):
    """Overwrites the local package cache with the latest source data.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :rtype: None
    """

    errors = []

    # ensure the cache directory is properly configured
    cache_dir = os.path.expanduser(
        util.get_config_vals(['package.cache'], config)[0])

    # ensure the cache directory exists
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    if not os.path.isdir(cache_dir):
        raise DCOSException(
            'Cache directory does not exist! [{}]'.format(cache_dir))

    # obtain an exclusive file lock on $CACHE/.lock
    lock_path = os.path.join(cache_dir, '.lock')

    with _acquire_file_lock(lock_path):

        # list sources
        sources = list_sources(config)

        for source in sources:

            emitter.publish('Updating source [{}]'.format(source))

            # create a temporary staging directory
            with util.tempdir() as tmp_dir:

                stage_dir = os.path.join(tmp_dir, source.hash())

                # copy to the staging directory
                try:
                    source.copy_to_cache(stage_dir)
                except DCOSException as e:
                    logger.exception(
                        'Failed to copy universe source %s to cache %s',
                        source.url,
                        stage_dir)

                    errors.append(e.message)
                    continue

                # check version
                # TODO(jsancio): move this to the validation when it is forced
                Registry(source, stage_dir).check_version(
                    LooseVersion('1.0'),
                    LooseVersion('2.0'))

                # validate content
                if validate:
                    validation_errors = Registry(source, stage_dir).validate()
                    if len(validation_errors) > 0:
                        errors += validation_errors
                        continue  # keep updating the other sources

                # remove the $CACHE/source.hash() directory
                target_dir = os.path.join(cache_dir, source.hash())
                try:
                    if os.path.exists(target_dir):
                        shutil.rmtree(target_dir,
                                      onerror=_rmtree_on_error,
                                      ignore_errors=False)
                except OSError:
                    logger.exception(
                        'Error removing target directory before move: %s',
                        target_dir)

                    err = "Could not remove directory [{}]".format(target_dir)
                    errors.append(err)
                    continue  # keep updating the other sources

                # move the staging directory to $CACHE/source.hash()
                shutil.move(stage_dir, target_dir)

    if errors:
        raise DCOSException(util.list_to_err(errors))
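Both update_sources variants follow the same stage-then-swap pattern: populate a temporary staging directory, validate it, remove the stale cache entry, and only then move the staged copy into place, all while holding a file lock on the cache. A stripped-down sketch of that pattern, with locking and validation omitted and a hypothetical populate callback standing in for source.copy_to_cache:

import os
import shutil
import tempfile

def refresh_cache_entry(cache_dir, entry_name, populate):
    """Hypothetical sketch: never expose a half-written cache entry."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        stage_dir = os.path.join(tmp_dir, entry_name)
        os.makedirs(stage_dir)
        populate(stage_dir)                    # may raise; old entry untouched
        target_dir = os.path.join(cache_dir, entry_name)
        if os.path.exists(target_dir):
            shutil.rmtree(target_dir)          # drop the stale copy
        shutil.move(stage_dir, target_dir)     # swap in the fresh copy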