Example 1
def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        # TODO(nmakhotkin): Here we need to return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        # TODO(nmakhotkin): Here we need to return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    if not edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE,
                                edp.JOB_TYPE_JAVA):
        config.update({'params': {}})
    return {'job_config': config}
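
Every variant of get_possible_job_config in these examples hinges on
edp.compare_job_type, which answers whether a job type matches any of the
listed types. Below is a minimal sketch of that helper; the constants and the
simplified matching are assumptions for illustration, not the real
sahara.service.edp implementation (which also accepts a strict keyword for
dotted subtypes such as 'MapReduce.Streaming').

# A minimal sketch of the edp helper used throughout these examples; the
# names and the simplified matching are assumptions, not sahara's real code.
JOB_TYPE_JAVA = 'Java'
JOB_TYPE_HIVE = 'Hive'
JOB_TYPE_MAPREDUCE = 'MapReduce'
JOB_TYPE_PIG = 'Pig'
JOB_TYPES_ALL = [JOB_TYPE_JAVA, JOB_TYPE_HIVE, JOB_TYPE_MAPREDUCE,
                 JOB_TYPE_PIG]


def compare_job_type(job_type, *job_types):
    # True when job_type matches any of the given types; a dotted
    # subtype such as 'MapReduce.Streaming' matches its base type.
    return job_type.split('.')[0] in job_types


assert compare_job_type('MapReduce.Streaming', JOB_TYPE_MAPREDUCE)
assert not compare_job_type(JOB_TYPE_PIG, JOB_TYPE_JAVA, JOB_TYPE_MAPREDUCE)
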
Example 2
def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_SHELL):
        return {'job_config': {'configs': [], 'params': {}, 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += get_possible_mapreduce_configs()
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/hive-default.xml')

    config = {'configs': cfg}
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG, edp.JOB_TYPE_HIVE):
        config.update({'params': {}})
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
        config.update({'args': []})
    return {'job_config': config}
Example 3
def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *get_possible_job_types()):
        return None

    if edp.compare_job_type(job_type, 'Java'):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, 'MapReduce', 'Pig'):
        # TODO(nmakhotkin): Here we should return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, 'MapReduce'):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, 'Hive'):
        # TODO(nmakhotkin): Here we should return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    if not edp.compare_job_type(job_type, 'MapReduce', 'Java'):
        config.update({'params': {}})
    return {'job_config': config}
Example 4
def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_SHELL):
        return {'job_config': {'configs': [], 'params': {}, 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE,
                            edp.JOB_TYPE_PIG):
        # TODO(nmakhotkin): Here we need to return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        # TODO(nmakhotkin): Here we need to return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    if not edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE,
                                edp.JOB_TYPE_JAVA):
        config.update({'params': {}})
    return {'job_config': config}
Example 5
def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_SHELL):
        return {'job_config': {'configs': [], 'params': {}, 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE,
                            edp.JOB_TYPE_PIG):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += get_possible_mapreduce_configs()
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/hive-default.xml')

    config = {'configs': cfg}
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG, edp.JOB_TYPE_HIVE):
        config.update({'params': {}})
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
        config.update({'args': []})
    return {'job_config': config}
Example 6
def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_SHELL):
        return {'job_config': {'configs': [], 'params': {}, 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        # TODO(nmakhotkin): Here we need to return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += get_possible_mapreduce_configs()
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        # TODO(nmakhotkin): Here we need to return a config based on
        # the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    config = {'configs': cfg}
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG, edp.JOB_TYPE_HIVE):
        config.update({'params': {}})
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
        config.update({'args': []})
    return {'job_config': config}
Example 7
def get_possible_hive_config_from(file_name):
    '''Return the possible configs, args, params for a Hive job.'''
    config = {
        'configs': xmlutils.load_hadoop_xml_defaults(file_name),
        'params': {}
    }
    return config
Example 8
def vm_awareness_mapred_config():
    c = x.load_hadoop_xml_defaults('topology/resources/mapred-template.xml')
    result = [cfg for cfg in c if cfg['value']]
    LOG.info(
        "Vm awareness will add the following configs in map-red "
        "params: %s", result)
    return result
Example 9
def get_possible_hive_config_from(file_name):
    '''Return the possible configs, args, params for a Hive job.'''
    config = {
        'configs': xmlutils.load_hadoop_xml_defaults(file_name),
        'params': {}
    }
    return config
Example 10
def test_load_xml_defaults(self):
    self.assertEqual(
        [{'name': u'name1', 'value': u'value1', 'description': 'descr1'},
         {'name': u'name2', 'value': u'value2', 'description': 'descr2'},
         {'name': u'name3', 'value': '', 'description': 'descr3'},
         {'name': u'name4', 'value': '', 'description': 'descr4'},
         {'name': u'name5', 'value': u'value5', 'description': ''}],
        x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml'))
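
This test pins down the parser's contract: every <property> element in a
Hadoop-style defaults file becomes a dict with name, value, and description
keys, and a missing or empty child collapses to ''. A self-contained sketch
of a parser with that behaviour follows, assuming a plain filesystem path
(the real xmlutils resolves the path relative to the sahara package).

# A self-contained sketch matching the contract asserted above; assumes a
# plain filesystem path rather than sahara's package-relative lookup.
import xml.dom.minidom


def _text_of(prop, tag_name):
    # Text of the first child element with the given tag, or '' if absent.
    nodes = prop.getElementsByTagName(tag_name)
    if not nodes or not nodes[0].firstChild:
        return ''
    return nodes[0].firstChild.nodeValue.strip()


def load_hadoop_xml_defaults(file_name):
    doc = xml.dom.minidom.parse(file_name)
    return [{'name': _text_of(prop, 'name'),
             'value': _text_of(prop, 'value'),
             'description': _text_of(prop, 'description')}
            for prop in doc.getElementsByTagName('property')]
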
Example 12
def get_swift_configs():
    configs = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
    for conf in configs:
        if conf['name'] == HADOOP_SWIFT_AUTH_URL:
            conf['value'] = su.retrieve_auth_url() + "tokens/"
        if conf['name'] == HADOOP_SWIFT_TENANT:
            conf['value'] = retrieve_tenant()

    result = [cfg for cfg in configs if cfg['value']]
    LOG.info("Swift would be integrated with the following "
             "params: %s", result)
    return result
Example 13
def get_swift_configs():
    configs = x.load_hadoop_xml_defaults("swift/resources/conf-template.xml")
    for conf in configs:
        if conf["name"] == HADOOP_SWIFT_AUTH_URL:
            conf["value"] = su.retrieve_auth_url() + "tokens/"
        if conf["name"] == HADOOP_SWIFT_TENANT:
            conf["value"] = retrieve_tenant()
        if CONF.os_region_name and conf["name"] == HADOOP_SWIFT_REGION:
            conf["value"] = CONF.os_region_name

    result = [cfg for cfg in configs if cfg["value"]]
    LOG.info(_LI("Swift would be integrated with the following "
                 "params: {result}").format(result=result))
    return result
Example 14
def get_swift_configs():
    configs = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
    for conf in configs:
        if conf['name'] == HADOOP_SWIFT_AUTH_URL:
            conf['value'] = su.retrieve_auth_url() + "auth/tokens/"
        if conf['name'] == HADOOP_SWIFT_TENANT:
            conf['value'] = retrieve_tenant()
        if CONF.os_region_name and conf['name'] == HADOOP_SWIFT_REGION:
            conf['value'] = CONF.os_region_name

    result = [cfg for cfg in configs if cfg['value']]
    LOG.info("Swift would be integrated with the following "
             "params: {result}".format(result=result))
    return result
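
The closing list comprehension in each revision above carries the real
semantics: template entries whose value stayed empty are dropped, so only
the keys that were actually filled in (auth URL, tenant, region) reach the
generated cluster configuration. A toy illustration, with hypothetical
property names:

# Toy illustration of the filtering step; the property names and URL below
# are hypothetical, not values from a real template.
configs = [
    {'name': 'fs.swift.service.sahara.auth.url',
     'value': 'http://keystone:5000/v3/auth/tokens/'},
    {'name': 'fs.swift.service.sahara.region', 'value': ''},
]
result = [cfg for cfg in configs if cfg['value']]
assert result == [configs[0]]  # the empty 'region' entry is dropped
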
Example 15
def vm_awareness_core_config():
    c = x.load_hadoop_xml_defaults('topology/resources/core-template.xml')
    result = [cfg for cfg in c if cfg['value']]

    if not CONF.enable_hypervisor_awareness:
        # not leveraging 4-layer approach so override template value
        param = next((prop for prop in result
                      if prop['name'] == 'net.topology.impl'), None)
        if param:
            param['value'] = 'org.apache.hadoop.net.NetworkTopology'

    LOG.info(_LI("Vm awareness will add the following configs in core-site "
                 "params: %s"), result)
    return result
Example 16
def vm_awareness_core_config():
    c = x.load_hadoop_xml_defaults('topology/resources/core-template.xml')
    result = [cfg for cfg in c if cfg['value']]

    if not CONF.enable_hypervisor_awareness:
        # not leveraging 4-layer approach so override template value
        param = next((prop for prop in result
                      if prop['name'] == 'net.topology.impl'), None)
        if param:
            param['value'] = 'org.apache.hadoop.net.NetworkTopology'

    LOG.info("Vm awareness will add the following configs in core-site "
             "params: %s", result)
    return result
Example 17
    def test_create_hadoop_xml(self):
        conf = x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml')
        self.assertEqual("""<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>name1</name>
    <value>some_val1</value>
  </property>
  <property>
    <name>name2</name>
    <value>2</value>
  </property>
</configuration>
""",
                         x.create_hadoop_xml({'name1': 'some_val1',
                                              'name2': 2}, conf),)
Example 18
    def test_create_hadoop_xml(self):
        conf = x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml')
        self.assertEqual(x.create_hadoop_xml({'name1': 'some_val1',
                                              'name2': 2}, conf),
                         """<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>name2</name>
    <value>2</value>
  </property>
  <property>
    <name>name1</name>
    <value>some_val1</value>
  </property>
</configuration>
""")
Example 19
def get_swift_configs():
    configs = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
    for conf in configs:
        if conf['name'] == HADOOP_SWIFT_AUTH_URL:
            conf['value'] = su.retrieve_auth_url() + "auth/tokens/"
        if conf['name'] == HADOOP_SWIFT_TENANT:
            conf['value'] = retrieve_tenant()
        if CONF.os_region_name and conf['name'] == HADOOP_SWIFT_REGION:
            conf['value'] = CONF.os_region_name
        if conf['name'] == HADOOP_SWIFT_DOMAIN_NAME:
            # NOTE(jfreud): Don't be deceived here... Even though there is an
            # attribute provided by context called domain_name, it is used for
            # domain scope, and hadoop-swiftfs always authenticates using
            # project scope. The purpose of the setting below is to override
            # the default value for project domain and user domain, domain id
            # as 'default', which may not always be correct.
            # TODO(jfreud): When hadoop-swiftfs allows it, stop hoping that
            # project_domain_name is always equal to user_domain_name.
            conf['value'] = context.current().project_domain_name

    result = [cfg for cfg in configs if cfg['value']]
    LOG.info("Swift would be integrated with the following "
             "params: {result}".format(result=result))
    return result
Example 21
from oslo.config import cfg

from sahara.openstack.common import log as logging
from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

LOG = logging.getLogger(__name__)

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_4_1/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_4_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_4_1/resources/mapred-default.xml')

YARN_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_4_1/resources/yarn-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_4_1/resources/oozie-default.xml')

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
Example 22
def read_default_swift_configs():
    return x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
Example 23
from oslo.config import cfg

from sahara import conductor as c
from sahara.openstack.common import log as logging
from sahara.plugins.general import utils
from sahara.plugins import provisioning as p
from sahara.topology import topology_helper as topology
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/spark/resources/core-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/spark/resources/hdfs-default.xml")

XML_CONFS = {"HDFS": [CORE_DEFAULT, HDFS_DEFAULT]}

SPARK_CONFS = {
    "Spark": {
        "OPTIONS": [
            {
                "name": "Master port",
                "description": "Start the master on a different port" " (default: 7077)",
                "default": "7077",
                "priority": 2,
            },
            {
Example 24
from sahara.plugins import provisioning as p
from sahara.utils import xmlutils as x


CORE_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/hadoop-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/mapred-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/intel/v2_5_1/resources/oozie-default.xml')


XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
    "HDFS": [HDFS_DEFAULT],
    "MapReduce": [MAPRED_DEFAULT],
    "JobFlow": [OOZIE_DEFAULT]
}

IDH_TARBALL_URL = p.Config('IDH tarball URL', 'general', 'cluster', priority=1,
                           default_value='http://repo2.intelhadoop.com/'
                                         'setup/setup-intelhadoop-'
                                         '2.5.1-en-evaluation.RHEL.tar.gz')

OS_REPO_URL = p.Config('OS repository URL', 'general', 'cluster', priority=1,
Example 25
from sahara.utils import xmlutils as x


OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/oozie-default.xml')

OOZIE_CORE_DEFAULT = [
    {
        'name': 'hadoop.proxyuser.hadoop.hosts',
        'value': "localhost"
    },
    {
        'name': 'hadoop.proxyuser.hadoop.groups',
        'value': 'hadoop'
    }]

OOZIE_HEAPSIZE_DEFAULT = "CATALINA_OPTS -Xmx1024m"


def get_oozie_required_xml_configs():
Example 26
def vm_awareness_mapred_config():
    c = x.load_hadoop_xml_defaults('topology/resources/mapred-template.xml')
    result = [cfg for cfg in c if cfg['value']]
    LOG.info("Vm awareness will add the following configs in map-red "
             "params: %s", result)
    return result
Example 27
from oslo_config import cfg
from oslo_log import log as logging

from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

LOG = logging.getLogger(__name__)

CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/core-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/hdfs-default.xml")

MAPRED_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/mapred-default.xml")

YARN_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/yarn-default.xml")

OOZIE_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/oozie-default.xml")

HIVE_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/hive-default.xml")

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
    "HDFS": [HDFS_DEFAULT],
    "YARN": [YARN_DEFAULT],
Example 28
from sahara.plugins import utils
from sahara.plugins.vanilla import utils as vu
from sahara.plugins.vanilla.v1_2_1 import mysql_helper as m_h
from sahara.plugins.vanilla.v1_2_1 import oozie_helper as o_h
from sahara.swift import swift_helper as swift
from sahara.topology import topology_helper as topology
from sahara.utils import crypto
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/mapred-default.xml')

HIVE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hive-default.xml')

# Append Oozie configs for core-site.xml
CORE_DEFAULT += o_h.OOZIE_CORE_DEFAULT

XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT],
Example 29
from oslo_config import cfg
import six

from sahara.plugins import provisioning as p
from sahara.plugins.sandbox.hadoop2 import config_helper as c_helper
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

# each call returns a list of 'property' entries parsed from the XML file
CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/sandbox/v2_7_1/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/sandbox/v2_7_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/sandbox/v2_7_1/resources/mapred-default.xml')

YARN_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/sandbox/v2_7_1/resources/yarn-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/sandbox/v2_7_1/resources/oozie-default.xml')

HIVE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/sandbox/v2_7_1/resources/hive-default.xml')
Example 30
from sahara.utils import xmlutils as x

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/oozie-default.xml')

OOZIE_CORE_DEFAULT = [{
    'name': 'hadoop.proxyuser.hadoop.hosts',
    'value': "localhost"
}, {
    'name': 'hadoop.proxyuser.hadoop.groups',
    'value': 'hadoop'
}]

OOZIE_HEAPSIZE_DEFAULT = "CATALINA_OPTS -Xmx1024m"


def get_oozie_required_xml_configs():
    """Following configs differ from default configs in oozie-default.xml."""
    return {
Example 31
def get_possible_mapreduce_configs():
    '''return a list of possible configuration values for map reduce jobs.'''
    cfg = xmlutils.load_hadoop_xml_defaults(
        'service/edp/resources/mapred-job-config.xml')
    return cfg
Example 32
from oslo.config import cfg

from sahara import exceptions as ex
from sahara.openstack.common import log as logging
from sahara.plugins import provisioning as p
from sahara.utils import types as types
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

LOG = logging.getLogger(__name__)

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/mapred-default.xml')

YARN_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/yarn-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/oozie-default.xml')

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
Example 33
from sahara import conductor as c
from sahara.i18n import _
from sahara.i18n import _LI
from sahara.openstack.common import log as logging
from sahara.plugins import provisioning as p
from sahara.plugins import utils
from sahara.topology import topology_helper as topology
from sahara.utils import types as types
from sahara.utils import xmlutils as x

conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/spark/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/spark/resources/hdfs-default.xml')

XML_CONFS = {"HDFS": [CORE_DEFAULT, HDFS_DEFAULT]}

SPARK_CONFS = {
    'Spark': {
        "OPTIONS": [{
            'name': 'Master port',
            'description': 'Start the master on a different port'
            ' (default: 7077)',
            'default': '7077',
            'priority': 2,
        }, {
Example 34
from sahara.plugins import provisioning as p
from sahara.utils import xmlutils as x


CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/hadoop-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/hdfs-default.xml")

YARN_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/yarn-default.xml")

OOZIE_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/oozie-default.xml")


XML_CONFS = {"Hadoop": [CORE_DEFAULT], "HDFS": [HDFS_DEFAULT], "YARN": [YARN_DEFAULT], "JobFlow": [OOZIE_DEFAULT]}

IDH_TARBALL_URL = p.Config(
    "IDH tarball URL",
    "general",
    "cluster",
    priority=1,
Example 35
from sahara.plugins import provisioning as p
from sahara.utils import xmlutils as x

CORE_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/hadoop-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/mapred-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/intel/v2_5_1/resources/oozie-default.xml')

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
    "HDFS": [HDFS_DEFAULT],
    "MapReduce": [MAPRED_DEFAULT],
    "JobFlow": [OOZIE_DEFAULT]
}

IDH_TARBALL_URL = p.Config('IDH tarball URL',
                           'general',
                           'cluster',
                           priority=1,
                           default_value='http://repo2.intelhadoop.com/'
                           'setup/setup-intelhadoop-'
                           '2.5.1-en-evaluation.RHEL.tar.gz')
Example 36
from sahara.plugins import provisioning as p
from sahara.plugins import utils
from sahara.plugins.vanilla import utils as vu
from sahara.plugins.vanilla.v1_2_1 import mysql_helper as m_h
from sahara.plugins.vanilla.v1_2_1 import oozie_helper as o_h
from sahara.swift import swift_helper as swift
from sahara.topology import topology_helper as topology
from sahara.utils import crypto
from sahara.utils import types as types
from sahara.utils import xmlutils as x

conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/mapred-default.xml')

HIVE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hive-default.xml')

# Append Oozie configs for core-site.xml
CORE_DEFAULT += o_h.OOZIE_CORE_DEFAULT

XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT],
Example 38
def load_hadoop_xml_defaults(file_name, package, **kwargs):
    return xmlutils.load_hadoop_xml_defaults(file_name, package)
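
This last variant is a thin re-export for plugin code; the extra package
argument presumably lets an out-of-tree plugin load XML resources shipped in
its own Python package rather than in sahara itself. A hypothetical
plugin-side call:

# Hypothetical usage of the wrapper above; the resource path and package
# name are illustrative only.
defaults = load_hadoop_xml_defaults(
    'resources/core-default.xml', 'my_sahara_plugin')
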