Exemplo n.º 1
0
def get_possible_job_config(job_type):
    """Return the default EDP job configuration for *job_type*.

    :param job_type: an EDP job type name (e.g. 'Java', 'MapReduce',
                     'Pig', 'Hive')
    :returns: None for unsupported types, otherwise a dict of the form
              {'job_config': {'configs': ..., 'args': ..., ['params': ...]}}
    """
    if not edp.compare_job_type(job_type, *get_possible_job_types()):
        return None

    # Java jobs take only raw configs/args lists.
    if edp.compare_job_type(job_type, 'Java'):
        return {'job_config': {'configs': [], 'args': []}}

    # Initialize so cfg is defined even if a possible job type matches
    # none of the branches below (previously an UnboundLocalError).
    cfg = []
    if edp.compare_job_type(job_type, 'MapReduce', 'Pig'):
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, 'MapReduce'):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, 'Hive'):
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    # Bug fix: the original call omitted job_type, so it compared the
    # literals 'MapReduce' and 'Java' to each other and always added
    # 'params'. Only non-MapReduce/non-Java jobs should get 'params'.
    if not edp.compare_job_type(job_type, 'MapReduce', 'Java'):
        config.update({'params': {}})
    return {'job_config': config}
Exemplo n.º 2
0
def get_possible_job_config(job_type):
    """Build the default job configuration dict for *job_type*.

    Returns None when the type is not one of get_possible_job_types().
    """
    if job_type not in get_possible_job_types():
        return None

    # Java jobs take only raw configs/args lists.
    if job_type == "Java":
        return {'job_config': {'configs': [], 'args': []}}

    if job_type in ['MapReduce', 'Pig', 'Jar']:
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/resources/mapred-default.xml')
        # Everything but Pig also merges the EDP MapReduce job config.
        if job_type != 'Pig':
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif job_type == 'Hive':
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    if job_type not in ['MapReduce', 'Jar', 'Java']:
        config['params'] = {}
    return {'job_config': config}
Exemplo n.º 3
0
def vm_awareness_mapred_config():
    """Return the VM-awareness mapred configs that have non-empty values."""
    defaults = x.load_hadoop_xml_defaults(
        'topology/resources/mapred-template.xml')
    configs = []
    for entry in defaults:
        if entry['value']:
            configs.append(entry)
    LOG.info(
        "Vm awareness will add following configs in map-red "
        "params: %s", configs)
    return configs
Exemplo n.º 4
0
 def test_load_xml_defaults(self):
     """load_hadoop_xml_defaults should return one dict per property,
     with missing values/descriptions mapped to empty strings.
     """
     self.assertListEqual(
         [{'name': u'name1', 'value': u'value1', 'description': 'descr1'},
          {'name': u'name2', 'value': u'value2', 'description': 'descr2'},
          {'name': u'name3', 'value': '', 'description': 'descr3'},
          {'name': u'name4', 'value': '', 'description': 'descr4'},
          {'name': u'name5', 'value': u'value5', 'description': ''}],
         x.load_hadoop_xml_defaults(
             'tests/unit/resources/test-default.xml'))
Exemplo n.º 5
0
def get_possible_job_config(job_type):
    """Return the default job configuration dict for *job_type*, or None
    for unsupported types.
    """
    if job_type not in get_possible_job_types():
        return None
    if job_type == 'Hive':
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/resources/hive-default.xml')
    elif job_type in ('Jar', 'Pig'):
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/resources/mapred-default.xml')
        if job_type == 'Jar':
            # Jar jobs additionally merge the EDP MapReduce job config.
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    config = {'configs': cfg, "args": {}}
    if job_type != 'Jar':
        config['params'] = {}
    return {'job_config': config}
Exemplo n.º 6
0
def get_possible_job_config(job_type):
    """Return the default job configuration dict for *job_type*.

    Returns None when the type is not one of get_possible_job_types().
    """
    if job_type not in get_possible_job_types():
        return None
    if job_type in ['Jar', 'Pig']:
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/resources/mapred-default.xml')
        if job_type == 'Jar':
            # Jar jobs additionally merge the EDP MapReduce job config.
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif job_type == 'Hive':
        #TODO(nmakhotkin) Savanna should return config based on specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/resources/hive-default.xml')
    # NOTE(review): cfg is unbound here if a possible job type matches no
    # branch above — verify get_possible_job_types() only yields these types.
    config = {'configs': cfg, "args": {}}
    if job_type != 'Jar':
        config.update({'params': {}})
    return {'job_config': config}
Exemplo n.º 7
0
def get_swift_configs():
    """Return the Swift integration configs that have non-empty values,
    with the auth URL and tenant entries filled in.
    """
    configs = x.load_hadoop_xml_defaults("swift/resources/conf-template.xml")
    for entry in configs:
        name = entry["name"]
        if name == HADOOP_SWIFT_AUTH_URL:
            entry["value"] = _retrieve_auth_url()
        if name == HADOOP_SWIFT_TENANT:
            entry["value"] = _retrieve_tenant()

    # Drop template entries whose value is still empty.
    result = [c for c in configs if c["value"]]
    LOG.info("Swift would be integrated with the following " "params: %s", result)
    return result
Exemplo n.º 8
0
def get_swift_configs():
    """Return the Swift integration configs that have non-empty values,
    with the auth URL and tenant entries filled in.
    """
    configs = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
    for conf in configs:
        if conf['name'] == HADOOP_SWIFT_AUTH_URL:
            # NOTE(review): appends 'tokens/' — presumably the Keystone
            # tokens endpoint is expected here; confirm against consumers.
            conf['value'] = su.retrieve_auth_url() + "tokens/"
        if conf['name'] == HADOOP_SWIFT_TENANT:
            conf['value'] = _retrieve_tenant()

    # Drop template entries whose value is still empty.
    result = [cfg for cfg in configs if cfg['value']]
    LOG.info("Swift would be integrated with the following "
             "params: %s", result)
    return result
Exemplo n.º 9
0
def vm_awareness_core_config():
    """Return non-empty VM-awareness configs for core-site, replacing the
    topology implementation when hypervisor awareness is disabled.
    """
    template = x.load_hadoop_xml_defaults(
        'topology/resources/core-template.xml')
    result = [item for item in template if item['value']]

    if not CONF.enable_hypervisor_awareness:
        # not leveraging 4-layer approach so override template value
        for prop in result:
            if prop['name'] == 'net.topology.impl':
                prop['value'] = 'org.apache.hadoop.net.NetworkTopology'
                break

    LOG.info("Vm awareness will add following configs in core-site "
             "params: %s", result)
    return result
Exemplo n.º 10
0
def vm_awareness_core_config():
    """Return non-empty VM-awareness configs for core-site, replacing the
    topology implementation when hypervisor awareness is disabled.
    """
    c = x.load_hadoop_xml_defaults('topology/resources/core-template.xml')
    # Keep only template entries with non-empty values.
    result = [cfg for cfg in c if cfg['value']]

    if not CONF.enable_hypervisor_awareness:
        # not leveraging 4-layer approach so override template value
        param = next(
            (prop for prop in result if prop['name'] == 'net.topology.impl'),
            None)
        if param:
            param['value'] = 'org.apache.hadoop.net.NetworkTopology'

    LOG.info(
        "Vm awareness will add following configs in core-site "
        "params: %s", result)
    return result
Exemplo n.º 11
0
    def test_create_hadoop_xml(self):
        conf = x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml')
        self.assertEquals(x.create_hadoop_xml({'name1': 'some_val1',
                                               'name2': 2}, conf),
                          """<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>name2</name>
    <value>2</value>
  </property>
  <property>
    <name>name1</name>
    <value>some_val1</value>
  </property>
</configuration>
""")
Exemplo n.º 12
0
    def test_create_hadoop_xml(self):
        """create_hadoop_xml should render the given name/value pairs as a
        Hadoop configuration XML document.
        """
        conf = x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml')
        self.assertEqual(
            x.create_hadoop_xml({
                'name1': 'some_val1',
                'name2': 2
            }, conf), """<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>name2</name>
    <value>2</value>
  </property>
  <property>
    <name>name1</name>
    <value>some_val1</value>
  </property>
</configuration>
""")
Exemplo n.º 13
0
 def test_load_xml_defaults(self):
     """load_hadoop_xml_defaults should return one dict per property,
     with missing values/descriptions mapped to empty strings.
     """
     self.assertListEqual([
         {
             'name': u'name1',
             'value': u'value1',
             'description': 'descr1'
         }, {
             'name': u'name2',
             'value': u'value2',
             'description': 'descr2'
         }, {
             'name': u'name3',
             'value': '',
             'description': 'descr3'
         }, {
             'name': u'name4',
             'value': '',
             'description': 'descr4'
         }, {
             'name': u'name5',
             'value': u'value5',
             'description': ''
         }
     ], x.load_hadoop_xml_defaults('tests/unit/resources/test-default.xml'))
Exemplo n.º 14
0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from savanna.openstack.common import log as logging
from savanna.plugins import provisioning as p
from savanna.plugins.vanilla import oozie_helper as o_h
from savanna.swift import swift_helper as swift
from savanna.utils import xmlutils as x

LOG = logging.getLogger(__name__)

# Default vanilla-plugin configuration sets, loaded once at import time.
CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/mapred-default.xml')

# Append Oozie configs for core-site.xml
CORE_DEFAULT += o_h.OOZIE_CORE_DEFAULT

# Maps a service name to the list of XML config sets it is built from.
XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT],
    "MapReduce": [MAPRED_DEFAULT],
    "JobFlow": [o_h.OOZIE_DEFAULT]
}
Exemplo n.º 15
0
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from savanna.utils import xmlutils as x

# Default Oozie configuration set, loaded once at import time.
OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/oozie-default.xml')

# Extra core-site.xml entries enabling proxy-user access for 'hadoop'.
OOZIE_CORE_DEFAULT = [{
    'name': 'hadoop.proxyuser.hadoop.hosts',
    'value': "localhost"
}, {
    'name': 'hadoop.proxyuser.hadoop.groups',
    'value': 'hadoop'
}]


def get_oozie_required_xml_configs():
    """Following configs differ from default configs in oozie-default.xml."""
    return {
        'oozie.service.ActionService.executor.ext.classes':
        'org.apache.oozie.action.email.EmailActionExecutor,'
Exemplo n.º 16
0
# limitations under the License.

from savanna.plugins import provisioning as p
from savanna.utils import xmlutils as x


# Intel-plugin default configuration sets, loaded once at import time.
CORE_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/resources/hadoop-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/resources/mapred-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/intel/resources/oozie-default.xml')


# Maps a service name to the list of XML config sets it is built from.
XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
    "HDFS": [HDFS_DEFAULT],
    "MapReduce": [MAPRED_DEFAULT],
    "JobFlow": [OOZIE_DEFAULT]
}

# Cluster-level config: URL of the IDH evaluation tarball to install.
IDH_TARBALL_URL = p.Config('IDH tarball URL', 'general', 'cluster', priority=1,
                           default_value='http://repo1.intelhadoop.com:3424/'
                                         'setup/setup-intelhadoop-'
                                         '2.5.1-en-evaluation.RHEL.tar.gz')

OS_REPO_URL = p.Config('OS repository URL', 'general', 'cluster', priority=1,
Exemplo n.º 17
0
def vm_awareness_mapred_config():
    """Return the VM-awareness mapred configs that have non-empty values."""
    c = x.load_hadoop_xml_defaults('topology/resources/mapred-template.xml')
    # Keep only template entries with non-empty values.
    result = [cfg for cfg in c if cfg['value']]
    LOG.info("Vm awareness will add following configs in map-red "
             "params: %s", result)
    return result
Exemplo n.º 18
0
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from savanna.openstack.common import log as logging
from savanna.plugins import provisioning as p
from savanna.plugins.vanilla import mysql_helper as m_h
from savanna.plugins.vanilla import oozie_helper as o_h
from savanna.swift import swift_helper as swift
from savanna.utils import xmlutils as x

LOG = logging.getLogger(__name__)

# Default vanilla-plugin configuration sets, loaded once at import time.
CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/mapred-default.xml')

HIVE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/resources/hive-default.xml')

# Append Oozie configs for core-site.xml
CORE_DEFAULT += o_h.OOZIE_CORE_DEFAULT

XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT],
Exemplo n.º 19
0
# See the License for the specific language governing permissions and
# limitations under the License.

from savanna.plugins import provisioning as p
from savanna.utils import xmlutils as x

# Intel IDH 2.5.1 default configuration sets, loaded once at import time.
CORE_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/hadoop-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults_with_type_and_locale(
    'plugins/intel/v2_5_1/resources/mapred-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/intel/v2_5_1/resources/oozie-default.xml')

# Maps a service name to the list of XML config sets it is built from.
XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
    "HDFS": [HDFS_DEFAULT],
    "MapReduce": [MAPRED_DEFAULT],
    "JobFlow": [OOZIE_DEFAULT]
}

# Cluster-level config: URL of the IDH evaluation tarball to install.
IDH_TARBALL_URL = p.Config('IDH tarball URL',
                           'general',
                           'cluster',
                           priority=1,
                           default_value='http://repo1.intelhadoop.com:3424/'
                           'setup/setup-intelhadoop-'
                           '2.5.1-en-evaluation.RHEL.tar.gz')