def __init__(self):
    """Register this plugin version and its service catalogue.

    ``_required_services`` lists the services every cluster of this
    version must run; ``_services`` is the full set a user may select.
    Each entry is a fresh service instance.
    """
    super(VersionHandler, self).__init__()
    self._version = version
    # Mandatory core services for any cluster of this version.
    core = (
        mapreduce.MapReduce,
        maprfs.MapRFS,
        management.Management,
        oozie.Oozie,
    )
    self._required_services = [svc() for svc in core]
    # Everything this version can deploy (specific service versions).
    available = (
        mapreduce.MapReduce,
        maprfs.MapRFS,
        management.Management,
        oozie.OozieV401,
        hive.HiveV013,
        hbase.HBaseV094,
        hbase.HBaseV0987,
        httpfs.HttpFS,
        mahout.MahoutV09,
        pig.PigV013,
        pig.PigV014,
        swift.Swift,
        flume.Flume,
        drill.DrillV07,
        drill.DrillV08,
        drill.DrillV09,
        drill.DrillV12,
        sqoop2.Sqoop2,
        hue.HueV360,
    )
    self._services = [svc() for svc in available]
def __init__(self):
    """Register this plugin version and its service catalogue.

    ``_required_services`` lists the services every cluster of this
    version must run; ``_services`` is the full set a user may select.
    Each entry is a fresh service instance.
    """
    super(VersionHandler, self).__init__()
    self._version = version
    # Mandatory core services for any cluster of this version.
    core = (
        mapreduce.MapReduce,
        maprfs.MapRFS,
        mng.Management,
        oozie.Oozie,
    )
    self._required_services = [svc() for svc in core]
    # Everything this version can deploy (specific service versions).
    available = (
        mapreduce.MapReduce,
        maprfs.MapRFS,
        mng.Management,
        oozie.Oozie,
        hive.HiveV012,
        hive.HiveV013,
        flume.Flume,
        hbase.HBaseV098,
        hue.Hue,
        httpfs.HttpFS,
        mahout.Mahout,
        pig.Pig,
        sqoop2.Sqoop2,
        impala.ImpalaV141,
        swift.Swift,
        drill.Drill,
    )
    self._services = [svc() for svc in available]
def __init__(self, cluster, version_handler, added=None, removed=None):
    """Build the cluster context for this (classic MapReduce) version.

    :param cluster: cluster object this context describes
    :param version_handler: owning version handler
    :param added: instances being added, if any
    :param removed: instances being removed, if any
    """
    super(Context, self).__init__(cluster, version_handler, added, removed)
    # Hadoop version is taken from the MapReduce service definition.
    self._hadoop_version = mr.MapReduce().version
    # Lazily resolved paths — filled in on first access elsewhere.
    self._hadoop_lib = self._hadoop_conf = None
    self._resource_manager_uri = 'maprfs:///'
    self._cluster_mode = mr.MapReduce.cluster_mode
    # Topology/node awareness is disabled for this version.
    self._node_aware = False
def __init__(self, cluster, version_handler, added=None, removed=None):
    """Build the cluster context for MapR 3.1.1.

    :param cluster: cluster object this context describes
    :param version_handler: owning version handler
    :param added: instances being added, if any
    :param removed: instances being removed, if any
    """
    super(Context, self).__init__(cluster, version_handler, added, removed)
    # Hadoop version is taken from the MapReduce service definition.
    self._hadoop_version = mr.MapReduce().version
    # Lazily resolved paths — filled in on first access elsewhere.
    self._hadoop_lib = self._hadoop_conf = None
    self._resource_manager_uri = 'maprfs:///'
    self._cluster_mode = None
    # Topology/node awareness is enabled for this version.
    self._node_aware = True
    self._mapr_version = '3.1.1'
    # Distro-specific MapR ecosystem package repositories.
    self._ubuntu_ecosystem_repo = (
        'http://package.mapr.com/releases/ecosystem/ubuntu binary/')
    self._centos_ecosystem_repo = (
        'http://package.mapr.com/releases/ecosystem/redhat')
def __init__(self):
    """Register this plugin version and its service catalogue.

    ``_required_services`` lists the services every cluster of this
    version must run; ``_services`` is the full set a user may select.
    Each entry is a fresh service instance.
    """
    super(VersionHandler, self).__init__()
    self._version = version
    # Mandatory core services for any cluster of this version.
    core = (
        mapreduce.MapReduce,
        maprfs.MapRFS,
        management.Management,
        oozie.Oozie,
    )
    self._required_services = [svc() for svc in core]
    # Everything this version can deploy (specific service versions).
    available = (
        mapreduce.MapReduce,
        maprfs.MapRFS,
        management.Management,
        oozie.Oozie,
        hive.HiveV012,
        hive.HiveV013,
        hbase.HBaseV094,
        hbase.HBaseV098,
        httpfs.HttpFS,
        mahout.Mahout,
        pig.Pig,
        swift.Swift,
        flume.Flume,
    )
    self._services = [svc() for svc in available]
_SET_MODE_CMD = 'maprcli cluster mapreduce set -mode ' _TOPO_SCRIPT = 'plugins/mapr/resources/topology.sh' INSTALL_JAVA_SCRIPT = 'plugins/mapr/resources/install_java.sh' INSTALL_SCALA_SCRIPT = 'plugins/mapr/resources/install_scala.sh' INSTALL_MYSQL_CLIENT = 'plugins/mapr/resources/install_mysql_client.sh' ADD_MAPR_REPO_SCRIPT = 'plugins/mapr/resources/add_mapr_repo.sh' ADD_SECURITY_REPO_SCRIPT = 'plugins/mapr/resources/add_security_repos.sh' ADD_MAPR_USER = '******' SERVICE_INSTALL_PRIORITY = [ mng.Management(), yarn.YARNv251(), yarn.YARNv241(), yarn.YARNv270(), mr.MapReduce(), maprfs.MapRFS(), ] @six.add_metaclass(abc.ABCMeta) class BaseConfigurer(ac.AbstractConfigurer): def configure(self, cluster_context, instances=None): instances = instances or cluster_context.get_instances() self._configure_ssh_connection(cluster_context, instances) self._install_mapr_repo(cluster_context, instances) if not cluster_context.is_prebuilt: self._prepare_bare_image(cluster_context, instances) self.configure_general_environment(cluster_context, instances) self._install_services(cluster_context, instances) if cluster_context.is_node_aware: