Example 1
from oslo_config import cfg
from oslo_log import log as logging

from sahara import conductor as c
from sahara.swift import swift_helper as swift
from sahara.topology import topology_helper as topology
from sahara.utils import files as f
from sahara.utils import types as types
from sahara.utils import xmlutils as x

conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/spark/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/spark/resources/hdfs-default.xml')

SWIFT_DEFAULTS = swift.read_default_swift_configs()

XML_CONFS = {"HDFS": [CORE_DEFAULT, HDFS_DEFAULT, SWIFT_DEFAULTS]}

# Single entry today; ":".join keeps the classpath form if more jars are added.
_default_executor_classpath = ":".join(['/usr/lib/hadoop/hadoop-swift.jar'])

SPARK_CONFS = {
    'Spark': {
        "OPTIONS": [
            {
                'name': 'Executor extra classpath',
                'description': 'Value for spark.executor.extraClassPath'
                               ' in spark-defaults.conf'
                               ' (default: %s)' % _default_executor_classpath,
            },
        ],
    },
}
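
For context, load_hadoop_xml_defaults (from sahara.utils.xmlutils) turns each <property> in a Hadoop-style defaults file into a {'name': ..., 'value': ...} dict. Below is a minimal standalone sketch of that parsing using only the standard library; the helper name parse_hadoop_defaults and the sample XML are illustrative, not part of Sahara.

import xml.dom.minidom as minidom


def _text_of(element, tag):
    # Text content of the first <tag> child, or '' when missing/empty.
    nodes = element.getElementsByTagName(tag)
    if nodes and nodes[0].firstChild is not None:
        return nodes[0].firstChild.nodeValue.strip()
    return ''


def parse_hadoop_defaults(xml_text):
    # Hadoop defaults files are a <configuration> block of <property>
    # elements; return them in the same list-of-dicts shape that
    # CORE_DEFAULT and HDFS_DEFAULT hold above.
    doc = minidom.parseString(xml_text)
    return [{'name': _text_of(prop, 'name'),
             'value': _text_of(prop, 'value')}
            for prop in doc.getElementsByTagName('property')]


SAMPLE = """<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>"""

print(parse_hadoop_defaults(SAMPLE))
# [{'name': 'fs.defaultFS', 'value': 'hdfs://localhost:9000'}]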
Example 2
from oslo_config import cfg
from oslo_log import log as logging

from sahara import conductor as c
from sahara.swift import swift_helper as swift
from sahara.utils import files as f
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/spark/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/spark/resources/hdfs-default.xml')

SWIFT_DEFAULTS = swift.read_default_swift_configs()

XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT, SWIFT_DEFAULTS]
}

_default_executor_classpath = ":".join(
    ['/usr/lib/hadoop/hadoop-swift.jar'])

SPARK_CONFS = {
    'Spark': {
        "OPTIONS": [
            {
                'name': 'Executor extra classpath',
                'description': 'Value for spark.executor.extraClassPath'
                               ' in spark-defaults.conf'
                               ' (default: %s)' % _default_executor_classpath,
            },
        ],
    },
}
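
The description above targets spark-defaults.conf, which Spark reads as whitespace-separated "key value" lines. A hypothetical rendering helper (render_spark_defaults is illustrative, not a Sahara function) makes the target format concrete:

def render_spark_defaults(configs):
    # spark-defaults.conf holds one "key value" pair per line.
    return '\n'.join('%s %s' % (name, value)
                     for name, value in sorted(configs.items()))


print(render_spark_defaults(
    {'spark.executor.extraClassPath': _default_executor_classpath}))
# spark.executor.extraClassPath /usr/lib/hadoop/hadoop-swift.jar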
Example 3
from sahara.swift import swift_helper


def read_default_swift_configs(**kwargs):
    # Thin plugin-facing wrapper: accepts (and ignores) plugin kwargs and
    # delegates to the Swift helper that loads the default Swift configs.
    return swift_helper.read_default_swift_configs()
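
A short usage sketch, assuming each returned entry is a {'name': ..., 'value': ...} dict as produced by load_hadoop_xml_defaults in the earlier examples:

for entry in read_default_swift_configs():
    print(entry['name'], '=', entry['value'])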