Example #1
    @classmethod
    def resource_setup(cls):
        super(BaseDataProcessingTest, cls).resource_setup()

        # Resolve the configured default plugin (if any) and its default
        # version through the data processing client.
        plugin = None
        if cls.default_plugin:
            plugin = cls.client.get_plugin(cls.default_plugin)['plugin']
        cls.default_version = plugin_utils.get_default_version(plugin)

        if cls.default_plugin is not None and cls.default_version is None:
            raise InvalidSaharaTestConfiguration(
                message="No known Sahara plugin version was found")

        # add lists for watched resources
        cls._node_group_templates = []
        cls._cluster_templates = []
        cls._data_sources = []
        cls._job_binary_internals = []
        cls._job_binaries = []
        cls._jobs = []
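
The lists initialized at the end of this example are class-level registries for resources created during the tests, so they can be removed once the run finishes. A minimal sketch of that cleanup pattern, assuming a matching resource_cleanup override exists (delete_resource below is a placeholder, not part of the example above):

    @classmethod
    def resource_cleanup(cls):
        # Hypothetical sketch: drain each watched-resource list and delete the
        # tracked objects before handing off to the parent cleanup.
        # delete_resource is a stand-in for whatever per-type delete call the
        # test base class actually exposes.
        for resource_list in (cls._jobs, cls._job_binaries,
                              cls._job_binary_internals, cls._data_sources,
                              cls._cluster_templates,
                              cls._node_group_templates):
            while resource_list:
                cls.delete_resource(resource_list.pop())
        super(BaseDataProcessingTest, cls).resource_cleanup()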
Example #2
    @classmethod
    def resource_setup(cls):
        super(BaseDataProcessingTest, cls).resource_setup()

        endpoint_type = TEMPEST_CONF.data_processing.endpoint_type
        catalog_type = TEMPEST_CONF.data_processing.catalog_type
        auth_url = TEMPEST_CONF.identity.uri

        credentials = cls.os_admin.credentials

        # The configured identity URI may point at Keystone v2.0, so rewrite
        # it to the v3 endpoint before building the password auth plugin.
        auth = v3.Password(auth_url=auth_url.replace('/v2.0', '/v3'),
                           username=credentials.username,
                           password=credentials.password,
                           project_name=credentials.tenant_name,
                           user_domain_name='default',
                           project_domain_name='default')

        # Wrap the auth plugin in a keystoneauth session and build the Sahara
        # client against the configured catalog and endpoint types.
        ses = session.Session(auth=auth)

        cls.client = sahara_client.Client(
            TEMPEST_CONF.data_processing.saharaclient_version,
            session=ses,
            service_type=catalog_type,
            endpoint_type=endpoint_type)

        if TEMPEST_CONF.service_available.glance:
            # Check if glance v1 is available to determine which client to use.
            if TEMPEST_CONF.image_feature_enabled.api_v1:
                cls.image_client = cls.os_admin.image_client
            elif TEMPEST_CONF.image_feature_enabled.api_v2:
                cls.image_client = cls.os_admin.image_client_v2
            else:
                raise lib_exc.InvalidConfiguration(
                    'Either api_v1 or api_v2 must be True in '
                    '[image-feature-enabled].')
        cls.object_client = cls.os_primary.object_client
        cls.container_client = cls.os_primary.container_client
        cls.networks_client = cls.os_primary.compute_networks_client

        # Choose the floating IP pool: use the configured floating network
        # name (resolved to its ID when Neutron is available), otherwise fall
        # back to the public network ID.
        if TEMPEST_CONF.network.floating_network_name:
            cls.floating_ip_pool = TEMPEST_CONF.network.floating_network_name
            if TEMPEST_CONF.service_available.neutron:
                cls.floating_ip_pool = \
                    cls.get_floating_ip_pool_id_for_neutron()
        else:
            cls.floating_ip_pool = TEMPEST_CONF.network.public_network_id

        test_image_name = TEMPEST_CONF.data_processing.test_image_name

        cls.test_image_id = cls.get_image_id(test_image_name)

        default_plugin = cls.get_plugin()
        plugin_dict = default_plugin.to_dict()
        default_version = plugin_utils.get_default_version(plugin_dict)

        cls.worker_template = (plugin_utils.get_node_group_template(
            'worker1', default_version, cls.floating_ip_pool))

        cls.master_template = (plugin_utils.get_node_group_template(
            'master1', default_version, cls.floating_ip_pool))

        cls.cluster_template = (plugin_utils.get_cluster_template(
            default_version=default_version))

        cls.swift_data_source_with_creds = {
            'url': 'swift://sahara-container/input-source',
            'description': 'Test data source',
            'type': 'swift',
            'credentials': {
                'user': '******',
                'password': '******'
            }
        }

        cls.local_hdfs_data_source = {
            'url': 'input-source',
            'description': 'Test data source',
            'type': 'hdfs',
        }

        cls.external_hdfs_data_source = {
            'url': 'hdfs://test-master-node/usr/hadoop/input-source',
            'description': 'Test data source',
            'type': 'hdfs'
        }
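
For context, dictionaries like the ones prepared above are normally handed to a creation helper later in the test run. A hypothetical usage sketch (create_data_source is an assumed helper name and signature, not shown in these examples):

    def _register_input_source(self):
        # Hypothetical illustration only: copy one of the prepared
        # definitions and pass it to an assumed create_data_source() helper.
        source = dict(self.local_hdfs_data_source)
        return self.create_data_source(name='sahara-test-input', **source)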