def __init__(self):
    """Set up the Management service: processes and validation, no UI links."""
    super(Management, self).__init__()
    self._ui_name = "Management"
    self._node_processes = [ZOOKEEPER, WEB_SERVER, METRICS]
    self._ui_info = None
    self._validation_rules = [
        vu.at_least(1, ZOOKEEPER),
        vu.at_least(1, WEB_SERVER),
        vu.odd_count_of(ZOOKEEPER),
    ]
def __init__(self):
    """Configure YARN 2.5.1: pin the version and declare process-count rules."""
    super(YARNv251, self).__init__()
    self._version = '2.5.1'
    self._validation_rules = [
        vu.at_least(1, RESOURCE_MANAGER),
        vu.at_least(1, NODE_MANAGER),
        vu.exactly(1, HISTORY_SERVER),
    ]
def __init__(self):
    """Configure HBase: processes, cluster defaults, validation and UI link."""
    super(HBase, self).__init__()
    self._name = "hbase"
    self._ui_name = "HBase"
    self._node_processes = [HBASE_MASTER, HBASE_REGION_SERVER, HBASE_THRIFT]
    self._cluster_defaults = ["hbase-default.json"]
    self._validation_rules = [
        vu.at_least(1, HBASE_MASTER),
        vu.at_least(1, HBASE_REGION_SERVER),
    ]
    self._ui_info = [("HBase Master", HBASE_MASTER, "http://%s:60010")]
def __init__(self):
    """Configure Hive: metastore and HiveServer2 processes with their rules."""
    super(Hive, self).__init__()
    self._name = 'hive'
    self._ui_name = 'Hive'
    self._node_processes = [HIVE_METASTORE, HIVE_SERVER_2]
    self._validation_rules = [
        vu.at_least(1, HIVE_METASTORE),
        vu.at_least(1, HIVE_SERVER_2),
    ]
def __init__(self):
    """Set up the Management service; this variant exposes no UI endpoints."""
    super(Management, self).__init__()
    self._ui_name = 'Management'
    self._node_processes = [ZOOKEEPER, WEB_SERVER, METRICS]
    self._ui_info = None
    self._validation_rules = [
        vu.at_least(1, ZOOKEEPER),
        vu.at_least(1, WEB_SERVER),
        vu.odd_count_of(ZOOKEEPER),
    ]
def __init__(self):
    """Set up the Management service, exposing the MCS web console link."""
    super(Management, self).__init__()
    self._ui_name = 'Management'
    self._node_processes = [ZOOKEEPER, WEB_SERVER, METRICS]
    self._ui_info = [
        ('MapR Control System (MCS)', WEB_SERVER, 'https://%s:8443'),
    ]
    self._validation_rules = [
        vu.at_least(1, ZOOKEEPER),
        vu.at_least(1, WEB_SERVER),
        vu.odd_count_of(ZOOKEEPER),
    ]
def __init__(self):
    """Configure classic MapReduce (hadoop 0.20.2) with tracker UI links."""
    super(MapReduce, self).__init__()
    self._ui_name = 'MapReduce'
    self._name = 'hadoop'
    self._version = '0.20.2'
    self._node_processes = [JOB_TRACKER, TASK_TRACKER]
    self._ui_info = [
        ('JobTracker', JOB_TRACKER, 'http://%s:50030'),
        ('TaskTracker', TASK_TRACKER, 'http://%s:50060'),
    ]
    self._validation_rules = [
        vu.at_least(1, JOB_TRACKER),
        vu.at_least(1, TASK_TRACKER),
    ]
def __init__(self):
    """Configure HBase: processes, cluster defaults and count validation."""
    super(HBase, self).__init__()
    self._name = 'hbase'
    self._ui_name = 'HBase'
    self._node_processes = [
        HBASE_MASTER,
        HBASE_REGION_SERVER,
        HBASE_THRIFT,
    ]
    self._cluster_defaults = ['hbase-default.json']
    self._validation_rules = [
        vu.at_least(1, HBASE_MASTER),
        vu.at_least(1, HBASE_REGION_SERVER),
    ]
def __init__(self):
    """Configure classic MapReduce with SERVICE_UI links and cluster defaults."""
    super(MapReduce, self).__init__()
    self._ui_name = 'MapReduce'
    self._name = 'hadoop'
    self._version = '0.20.2'
    self._node_processes = [JOB_TRACKER, TASK_TRACKER]
    self._ui_info = [
        ('JobTracker', JOB_TRACKER, {s.SERVICE_UI: 'http://%s:50030'}),
        ('TaskTracker', TASK_TRACKER, {s.SERVICE_UI: 'http://%s:50060'}),
    ]
    self._validation_rules = [
        vu.at_least(1, JOB_TRACKER),
        vu.at_least(1, TASK_TRACKER),
    ]
    self._cluster_defaults = ['mapreduce-cluster.json']
def __init__(self):
    """Configure Kafka Connect 2.0.1; validation requires a Kafka broker."""
    super(KafkaConnect, self).__init__()
    self._version = '2.0.1'
    self._name = 'kafka-connect'
    self._ui_name = 'Kafka Connect'
    self._node_processes = [KAFKA_CONNECT_HDFS, KAFKA_CONNECT_JDBC]
    self._validation_rules = [vu.at_least(1, KAFKA)]
def __init__(self):
    """Configure Flume 1.5.0 with its single node process."""
    super(Flume, self).__init__()
    self._name = 'flume'
    self._ui_name = 'Flume'
    self._version = '1.5.0'
    self._node_processes = [FLUME]
    self._validation_rules = [vu.at_least(1, FLUME)]
def __init__(self):
    """Configure the Kafka REST proxy 2.0.1; requires a Kafka broker."""
    super(KafkaRest, self).__init__()
    self._version = '2.0.1'
    self._name = 'kafka-eco'
    self._ui_name = 'Kafka Rest'
    self._node_processes = [KAFKA_REST]
    self._validation_rules = [vu.at_least(1, KAFKA)]
def __init__(self):
    """Configure Pig 0.13 with its single node process."""
    super(Pig, self).__init__()
    self._name = 'pig'
    self._ui_name = 'Pig'
    self._version = '0.13'
    self._node_processes = [PIG]
    self._validation_rules = [vu.at_least(1, PIG)]
def __init__(self):
    """Configure Drill (unversioned variant) and publish its web UI link."""
    super(Drill, self).__init__()
    self._name = 'drill'
    self._ui_name = 'Drill'
    self._node_processes = [DRILL]
    self._ui_info = [('Drill', DRILL, 'http://%s:8047')]
    self._validation_rules = [vu.at_least(1, DRILL)]
def __init__(self):
    """Configure Drill; UI link uses the SERVICE_UI mapping form."""
    super(Drill, self).__init__()
    self._name = 'drill'
    self._ui_name = 'Drill'
    self._node_processes = [DRILL]
    self._ui_info = [('Drill', DRILL, {s.SERVICE_UI: 'http://%s:8047'})]
    self._validation_rules = [vu.at_least(1, DRILL)]
def __init__(self):
    """Configure Mahout 0.9 with its single node process."""
    super(Mahout, self).__init__()
    self._name = 'mahout'
    self._ui_name = 'Mahout'
    self._version = '0.9'
    self._node_processes = [MAHOUT]
    self._validation_rules = [vu.at_least(1, MAHOUT)]
def __init__(self):
    """Configure HttpFS 1.0 with its node process and cluster defaults."""
    super(HttpFS, self).__init__()
    self._name = 'httpfs'
    self._ui_name = 'HttpFS'
    self._version = '1.0'
    self._node_processes = [HTTP_FS]
    self._cluster_defaults = ['httpfs-default.json']
    self._validation_rules = [vu.at_least(1, HTTP_FS)]
def __init__(self):
    """Configure HBase; the master UI link uses the SERVICE_UI mapping form."""
    super(HBase, self).__init__()
    self._name = 'hbase'
    self._ui_name = 'HBase'
    self._node_processes = [
        HBASE_MASTER,
        HBASE_REGION_SERVER,
        HBASE_THRIFT,
    ]
    self._cluster_defaults = ['hbase-default.json']
    self._validation_rules = [
        vu.at_least(1, HBASE_MASTER),
        vu.at_least(1, HBASE_REGION_SERVER),
    ]
    self._ui_info = [
        ("HBase Master", HBASE_MASTER, {s.SERVICE_UI: "http://%s:60010"}),
    ]
def __init__(self):
    """Configure Drill 0.7 and publish its web UI link."""
    super(Drill, self).__init__()
    self._name = 'drill'
    self._ui_name = 'Drill'
    self._version = '0.7'
    self._node_processes = [DRILL]
    self._ui_info = [('Drill', DRILL, 'http://%s:8047')]
    self._validation_rules = [vu.at_least(1, DRILL)]
def __init__(self):
    """Configure classic MapReduce; tracker UIs use the SERVICE_UI mapping."""
    super(MapReduce, self).__init__()
    self._ui_name = 'MapReduce'
    self._name = 'hadoop'
    self._version = '0.20.2'
    self._node_processes = [JOB_TRACKER, TASK_TRACKER]
    self._ui_info = [
        ('JobTracker', JOB_TRACKER, {s.SERVICE_UI: 'http://%s:50030'}),
        ('TaskTracker', TASK_TRACKER, {s.SERVICE_UI: 'http://%s:50060'}),
    ]
    self._validation_rules = [
        vu.at_least(1, JOB_TRACKER),
        vu.at_least(1, TASK_TRACKER),
    ]
    self._cluster_defaults = ['mapreduce-cluster.json']
def __init__(self):
    """Configure Spark-on-YARN 1.5.2 processes and their count rules."""
    super(SparkOnYarn, self).__init__()
    self._version = '1.5.2'
    self._node_processes = [
        SPARK_HISTORY_SERVER,
        SPARK_SLAVE,
    ]
    self._validation_rules = [
        vu.exactly(1, SPARK_HISTORY_SERVER),
        vu.at_least(1, SPARK_SLAVE),
    ]
def __init__(self):
    """Configure Impala 1.4.1: Hive 0.13 dependency plus placement rules."""
    super(ImpalaV141, self).__init__()
    # _version must be set before _dependencies (self.version reads it).
    self._version = "1.4.1"
    self._dependencies = [
        ("mapr-hive", hive.HiveV013().version),
        ("mapr-impala", self.version),
    ]
    self._validation_rules = [
        vu.depends_on(hive.HiveV013(), self),
        vu.on_same_node(IMPALA_CATALOG, hive.HIVE_SERVER_2),
        vu.exactly(1, IMPALA_STATE_STORE),
        vu.exactly(1, IMPALA_CATALOG),
        vu.at_least(1, IMPALA_SERVER),
    ]
def __init__(self):
    """Configure MapRFS: CLDB/fileserver/NFS processes and placement rules."""
    super(MapRFS, self).__init__()
    self._ui_name = 'MapRFS'
    self._node_processes = [CLDB, FILE_SERVER, NFS]
    self._ui_info = [
        ('Container Location Database (CLDB)', CLDB, 'http://%s:7221'),
    ]
    self._validation_rules = [
        vu.at_least(1, CLDB),
        vu.each_node_has(FILE_SERVER),
        vu.on_same_node(CLDB, FILE_SERVER),
    ]
def __init__(self):
    """Configure Impala 1.4.1 (no co-location rule) with Hive 0.13 dependency."""
    super(ImpalaV141, self).__init__()
    # _version must be set before _dependencies (self.version reads it).
    self._version = '1.4.1'
    self._dependencies = [
        ('mapr-hive', hive.HiveV013().version),
        ('mapr-impala', self.version),
    ]
    self._validation_rules = [
        vu.depends_on(hive.HiveV013(), self),
        vu.exactly(1, IMPALA_STATE_STORE),
        vu.exactly(1, IMPALA_CATALOG),
        vu.at_least(1, IMPALA_SERVER),
    ]
def __init__(self):
    """Configure Impala 2.5.0: Hive 1.2 / HBase 1.1.1 deps, CentOS-only."""
    super(ImpalaV250, self).__init__()
    # _version must be set before _dependencies (self.version reads it).
    self._version = '2.5.0'
    self._dependencies = [
        ('mapr-hive', hive.HiveV12().version),
        ('mapr-impala', self.version),
        ('mapr-hbase', hbase.HBaseV111().version),
    ]
    self._validation_rules = [
        vu.depends_on(hive.HiveV12(), self),
        vu.exactly(1, IMPALA_STATE_STORE),
        vu.exactly(1, IMPALA_CATALOG),
        vu.at_least(1, IMPALA_SERVER),
        vu.required_os('centos', self)
    ]
def __init__(self):
    """Configure MapRFS; this variant also requires attached volumes."""
    super(MapRFS, self).__init__()
    self._ui_name = 'MapRFS'
    self._node_processes = [CLDB, FILE_SERVER, NFS]
    self._ui_info = [
        ('Container Location Database (CLDB)', CLDB, 'http://%s:7221'),
    ]
    self._validation_rules = [
        vu.at_least(1, CLDB),
        vu.each_node_has(FILE_SERVER),
        vu.on_same_node(CLDB, FILE_SERVER),
        vu.has_volumes(),
    ]
def __init__(self):
    """Configure Impala 2.2.0: Hive 1.2 dependency, CentOS-only."""
    super(ImpalaV220, self).__init__()
    # _version must be set before _dependencies (self.version reads it).
    self._version = '2.2.0'
    self._dependencies = [
        ('mapr-hive', hive.HiveV12().version),
        ('mapr-impala', self.version),
    ]
    self._validation_rules = [
        vu.depends_on(hive.HiveV12(), self),
        vu.exactly(1, IMPALA_STATE_STORE),
        vu.exactly(1, IMPALA_CATALOG),
        vu.at_least(1, IMPALA_SERVER),
        vu.required_os('centos', self)
    ]
def __init__(self):
    """Configure Impala 1.2.3: Hive 0.12 dependency plus placement rules."""
    super(ImpalaV123, self).__init__()
    # _version must be set before _dependencies (self.version reads it).
    self._version = '1.2.3'
    self._dependencies = [
        ('mapr-hive', hive.HiveV012().version),
        ('mapr-impala', self.version),
    ]
    self._validation_rules = [
        vu.depends_on(hive.HiveV012(), self),
        vu.on_same_node(IMPALA_CATALOG, hive.HIVE_SERVER_2),
        vu.exactly(1, IMPALA_STATE_STORE),
        vu.exactly(1, IMPALA_CATALOG),
        vu.at_least(1, IMPALA_SERVER),
    ]
def __init__(self):
    """Configure Spark-on-YARN 1.5.2 with history-server UI and dependency."""
    super(SparkOnYarn, self).__init__()
    self._name = 'spark'
    self._ui_name = 'Spark'
    # _version must be set before _dependencies (self.version reads it).
    self._version = '1.5.2'
    self._dependencies = [('mapr-spark', self.version)]
    self._node_processes = [
        SPARK_HISTORY_SERVER,
        SPARK_SLAVE,
    ]
    self._validation_rules = [
        vu.exactly(1, SPARK_HISTORY_SERVER),
        vu.at_least(1, SPARK_SLAVE),
    ]
    self._ui_info = [
        ('Spark History Server', SPARK_HISTORY_SERVER,
         {s.SERVICE_UI: 'http://%%s:%s' % SPARK_HS_UI_PORT}),
    ]
def __init__(self):
    """Configure standalone Spark 1.2.1: master/slave/history processes."""
    super(Spark, self).__init__()
    self._name = 'spark'
    self._ui_name = 'Spark'
    # _version must be set before _dependencies (self.version reads it).
    self._version = '1.2.1'
    self._node_processes = [
        SPARK_HISTORY_SERVER,
        SPARK_MASTER,
        SPARK_SLAVE,
    ]
    self._dependencies = [('mapr-spark', self.version)]
    self._ui_info = [('SPARK', SPARK_MASTER, 'http://%s:8080')]
    self._validation_rules = [
        vu.exactly(1, SPARK_MASTER),
        vu.exactly(1, SPARK_HISTORY_SERVER),
        vu.at_least(1, SPARK_SLAVE),
    ]
    self._node_defaults = ['spark-default.json']
def __init__(self):
    """Configure standalone Spark 1.5.2 with plain-string UI links."""
    super(Spark, self).__init__()
    self._name = 'spark'
    self._ui_name = 'Spark'
    # _version must be set before _dependencies (self.version reads it).
    self._version = '1.5.2'
    self._node_processes = [
        SPARK_HISTORY_SERVER,
        SPARK_MASTER,
        SPARK_SLAVE,
    ]
    self._dependencies = [('mapr-spark', self.version)]
    self._ui_info = [
        ('Spark Master', SPARK_MASTER,
         'http://%%s:%s' % SPARK_MASTER_UI_PORT),
        ('Spark History Server', SPARK_HISTORY_SERVER,
         'http://%%s:%s' % SPARK_HS_UI_PORT),
    ]
    self._validation_rules = [
        vu.exactly(1, SPARK_MASTER),
        vu.exactly(1, SPARK_HISTORY_SERVER),
        vu.at_least(1, SPARK_SLAVE),
    ]
    self._node_defaults = ['spark-default.json']
def __init__(self):
    """Configure standalone Spark 1.5.2; UI links use the SERVICE_UI mapping."""
    super(Spark, self).__init__()
    self._name = 'spark'
    self._ui_name = 'Spark'
    # _version must be set before _dependencies (self.version reads it).
    self._version = '1.5.2'
    self._node_processes = [
        SPARK_HISTORY_SERVER,
        SPARK_MASTER,
        SPARK_SLAVE,
    ]
    self._dependencies = [('mapr-spark', self.version)]
    self._ui_info = [
        ('Spark Master', SPARK_MASTER,
         {s.SERVICE_UI: 'http://%%s:%s' % SPARK_MASTER_UI_PORT}),
        ('Spark History Server', SPARK_HISTORY_SERVER,
         {s.SERVICE_UI: 'http://%%s:%s' % SPARK_HS_UI_PORT}),
    ]
    self._validation_rules = [
        vu.exactly(1, SPARK_MASTER),
        vu.exactly(1, SPARK_HISTORY_SERVER),
        vu.at_least(1, SPARK_SLAVE),
    ]
    self._node_defaults = ['spark-default.json']