def __init__(self):
    super(VersionHandler, self).__init__()
    self._version = version
    # Services that every cluster of this plugin version must run.
    self._required_services = [
        yarn.YARNv270(),
        maprfs.MapRFS(),
        mng.Management(),
        oozie.Oozie(),
    ]
    # Full set of services this version handler can provision.
    self._services = [
        hive.HiveV12(),
        pig.PigV015(),
        impala.ImpalaV250(),
        flume.FlumeV16(),
        sqoop2.Sqoop2(),
        mahout.MahoutV012(),
        oozie.OozieV420(),
        hue.HueV390(),
        hbase.HBaseV111(),
        drill.DrillV16(),
        yarn.YARNv270(),
        maprfs.MapRFS(),
        mng.Management(),
        httpfs.HttpFS(),
        swift.Swift(),
        sentry.SentryV16(),
        spark.SparkOnYarnV161(),
    ]
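# Illustrative only, not the plugin's API: the two lists above separate the
# services every cluster must run from the optional ones. Given the services
# actually enabled on a cluster, this sketch reports which required services
# are missing; it assumes each service object exposes `name` and `version`
# attributes.
def missing_required(handler, enabled_services):
    enabled = {(s.name, s.version) for s in enabled_services}
    return [s for s in handler._required_services
            if (s.name, s.version) not in enabled]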
Example #2
def __init__(self):
    super(VersionHandler, self).__init__()
    self._version = version
    self._required_services = [
        yarn.YARNv270(),
        maprfs.MapRFS(),
        mng.Management(),
        oozie.Oozie(),
    ]
    self._services = [
        hive.HiveV013(),
        hive.HiveV10(),
        impala.ImpalaV141(),
        pig.PigV014(),
        flume.Flume(),
        sqoop2.Sqoop2(),
        mahout.MahoutV010(),
        oozie.OozieV410(),
        hue.HueV370(),
        hue.HueV381(),
        hbase.HBaseV0989(),
        hbase.HBaseV09812(),
        drill.DrillV11(),
        yarn.YARNv270(),
        maprfs.MapRFS(),
        mng.Management(),
        httpfs.HttpFS(),
        swift.Swift(),
    ]
Example #3
def __init__(self, cluster, version_handler, added=None, removed=None):
    super(Context, self).__init__(cluster, version_handler, added, removed)
    self._hadoop_version = yarn.YARNv270().version
    self._hadoop_lib = None
    self._hadoop_conf = None
    self._cluster_mode = yarn.YARNv270.cluster_mode
    self._node_aware = True
    self._resource_manager_uri = "maprfs:///"
    self._mapr_version = "5.1.0"
    # MapR ecosystem package repositories for Ubuntu and CentOS images.
    self._ubuntu_ecosystem_repo = (
        "http://package.mapr.com/releases/ecosystem-5.x/ubuntu binary/")
    self._centos_ecosystem_repo = (
        "http://package.mapr.com/releases/ecosystem-5.x/redhat")
# Timeout (in seconds) for running MapR's configure.sh on instances.
_CONFIGURE_SH_TIMEOUT = 600
# The target MapReduce mode is appended to this command at call time.
_SET_MODE_CMD = 'maprcli cluster mapreduce set -mode '

_TOPO_SCRIPT = 'plugins/mapr/resources/topology.sh'
INSTALL_JAVA_SCRIPT = 'plugins/mapr/resources/install_java.sh'
INSTALL_SCALA_SCRIPT = 'plugins/mapr/resources/install_scala.sh'
INSTALL_MYSQL_CLIENT = 'plugins/mapr/resources/install_mysql_client.sh'
ADD_MAPR_REPO_SCRIPT = 'plugins/mapr/resources/add_mapr_repo.sh'
ADD_SECURITY_REPO_SCRIPT = 'plugins/mapr/resources/add_security_repos.sh'
ADD_MAPR_USER = '******'

# Core services that must be installed before the rest, in this order.
SERVICE_INSTALL_PRIORITY = [
    mng.Management(),
    yarn.YARNv251(),
    yarn.YARNv241(),
    yarn.YARNv270(),
    mr.MapReduce(),
    maprfs.MapRFS(),
]
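
# Illustrative only, not the plugin's API: one way to order an arbitrary
# collection of services for installation using the priority list above.
# Thanks to sorted() being stable, services not in the priority list keep
# their relative order after the prioritized ones.
def install_order(services):
    names = [type(s).__name__ for s in SERVICE_INSTALL_PRIORITY]

    def rank(service):
        name = type(service).__name__
        return names.index(name) if name in names else len(names)

    return sorted(services, key=rank)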


@six.add_metaclass(abc.ABCMeta)
class BaseConfigurer(ac.AbstractConfigurer):
    def configure(self, cluster_context, instances=None):
        # Default to every instance in the cluster when none are given,
        # so the same path serves initial provisioning and scaling.
        instances = instances or cluster_context.get_instances()
        self._configure_ssh_connection(cluster_context, instances)
        self._install_mapr_repo(cluster_context, instances)
        # Bare (non-prebuilt) images first need the prerequisites handled
        # by the install scripts declared above.
        if not cluster_context.is_prebuilt:
            self._prepare_bare_image(cluster_context, instances)
        self.configure_general_environment(cluster_context, instances)
        self._install_services(cluster_context, instances)
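
# Illustrative usage only; a concrete subclass of BaseConfigurer is assumed
# to exist elsewhere in the plugin:
#
#   configurer.configure(cluster_context)          # initial provisioning
#   configurer.configure(cluster_context, added)   # scale-out: new nodes only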