def startFlume(host, server):
    """Start the flume service on *host* via an ansible ad-hoc command.

    server is only used for the log message; the flume install root comes
    from the project-level ``env`` module.
    """
    flume_home = env.FLUME_HOME
    log.warn('开始启动 {0} 节点 {1} 服务'.format(host, server))
    # NOTE(review): trailing space before the closing quote is preserved
    # from the original command string.
    cmd = 'ansible client -l {0} -a "{1}/nginx-flume.sh start "'.format(
        host, flume_home)
    exeCmd.run(cmd)
def startES(host, server):
    """Start the Elasticsearch service on *host* via an ansible ad-hoc command.

    The install root is read from the ELASTICSEARCH_HOME environment
    variable; *server* only appears in the log line.
    """
    es_home = os.getenv('ELASTICSEARCH_HOME')
    log.warn('开始启动 {0} 节点 {1} 服务'.format(host, server))
    cmd = 'ansible client -l {0} -a "{1}/start.sh"'.format(host, es_home)
    exeCmd.run(cmd)
def startSupervisor(host, server):
    """Start the Storm supervisor on *host* via an ansible ad-hoc command.

    The Storm install root is taken from the STORM_HOME environment
    variable; *server* only appears in the log line.
    """
    storm_home = os.getenv('STORM_HOME')
    log.warn('开始启动 {0} 节点 {1} 服务'.format(host, server))
    cmd = 'ansible client -l {0} -a "{1}/start-Supervisor.sh"'.format(
        host, storm_home)
    exeCmd.run(cmd)
def startKafka(host, server):
    """Start Kafka on *host* via an ansible ad-hoc command.

    The start script name is looked up from configuration key
    ``kafka.start.script``; *server* only appears in the log line.
    """
    kafka_home = env.KAFKA_HOME
    log.warn('开始启动 {host} 节点 {server} 服务'.format(host=host, server=server))
    script = conf.get('kafka.start.script')
    cmd = 'ansible client -l {0} -a "{1}/{2}"'.format(host, kafka_home, script)
    exeCmd.run(cmd)
def startAzkaban(host, server):
    """Start an Azkaban server on *host* via an ansible ad-hoc command.

    If *server* is a substring of "AzkabanWebServer" the web server is
    started, otherwise the executor server.
    """
    AZKABAN_HOME = env.AZKABAN_HOME
    log.warn('开始启动 {host} 节点 {server} 服务'.format(host=host, server=server))
    # Bug fix: the original built `ssh <host> -a <path>`. ssh stops option
    # parsing at the host, so "-a <path>" was sent as the REMOTE command and
    # could never run the script. Every other start* helper in this file uses
    # `ansible client -l <host> -a "<cmd>"`; do the same here (with the
    # closing quote the original also lacked).
    _shell = 'ansible client -l {host} -a "{AZKABAN_HOME}'.format(
        host=host, AZKABAN_HOME=AZKABAN_HOME)
    # Preserves the original substring test ("AzkabanWebServer".find(server) >= 0).
    if server in "AzkabanWebServer":
        _shell = _shell + '/azkaban-web-server/start-web.sh"'
    else:
        _shell = _shell + '/azkaban-exec-server/start-exec.sh"'
    exeCmd.run(_shell)
def startHive(host, server):
    """Start a Hive service on *host* via an ansible ad-hoc command.

    Starts the metastore when *server* is a substring of the metastore
    class name, otherwise HiveServer2; script names come from config.
    """
    hive_home = env.HIVE_HOME
    log.warn('开始启动 ' + host + ' 节点 ' + server + ' 服务')
    prefix = 'ansible client -l {host} -a "{HIVE_HOME}/'.format(
        host=host, HIVE_HOME=hive_home)
    # Equivalent to "org...HiveMetaStore".find(server) >= 0 in the original.
    if server in "org.apache.hadoop.hive.metastore.HiveMetaStore":
        script_key = 'hive.metastore.start.script'
    else:
        script_key = 'hive.server2.start.script'
    exeCmd.run(prefix + '{0}"'.format(conf.get(script_key)))
def startHbase(host, serverName):
    """Start an HBase daemon on *host* via an ansible ad-hoc command.

    Starts the master when *serverName* is a substring of 'hmaster',
    otherwise a regionserver.
    """
    hbase_home = env.HBASE_HOME
    base = 'ansible client -l {host} -a "{HBASE_HOME}/bin/hbase-daemons.sh start '.format(
        host=host, HBASE_HOME=hbase_home)
    # Equivalent to 'hmaster'.find(serverName) >= 0 in the original.
    if serverName in 'hmaster':
        log.warn('开始启动 {host} 节点 {serverName} 服务'.format(
            host=host, serverName=serverName))
        daemon = 'master"'
    else:
        # NOTE(review): this branch's log message intentionally mirrors the
        # original, which omits '节点' here.
        log.warn('开始启动 {host} {serverName} 服务'.format(
            host=host, serverName=serverName))
        daemon = 'regionserver"'
    exeCmd.run(base + daemon)
def start_hadoop(host, serverName):
    """Start a Hadoop daemon on *host* via an ansible ad-hoc command.

    HDFS daemons (namenode/secondarynamenode/datanode) go through
    hadoop-daemon.sh, YARN daemons (resourcemanager/nodemanager) through
    yarn-daemon.sh; anything else starts the MapReduce history server.
    """
    HADOOP_HOME = env.HADOOP_HOME
    _shell = 'ansible client -l {host} -a "{HADOOP_HOME}/sbin/'.format(
        host=host, HADOOP_HOME=HADOOP_HOME)
    # The log message was identical in all three original branches; hoisted.
    log.warn('开始启动 {host} 节点 {serverName} 服务'.format(
        host=host, serverName=serverName))
    # Bug fix: the original used 'secondarynamenodenamenodedatanode'.find(serverName) >= 0,
    # which matches ANY substring of that concatenation (e.g. 'nodename',
    # 'enameno', or '' matching everything), so bogus names were silently
    # routed to hadoop-daemon.sh. Exact membership in the known daemon
    # names is what was intended.
    if serverName in ('secondarynamenode', 'namenode', 'datanode'):
        _shell = _shell + 'hadoop-daemon.sh start {serverName}"'.format(
            serverName=serverName)
    elif serverName in ('resourcemanager', 'nodemanager'):
        _shell = _shell + 'yarn-daemon.sh start {serverName}"'.format(
            serverName=serverName)
    else:
        _shell = _shell + 'mr-jobhistory-daemon.sh start historyserver"'
    exeCmd.run(_shell)
def startZk(host, server):
    """Start ZooKeeper on *host* via an ansible ad-hoc command.

    *server* only appears in the log line; the install root comes from
    the project-level ``env`` module.
    """
    zk_home = env.ZOOKEEPER_HOME
    log.warn('开始启动 {host} 节点 {server} 服务'.format(host=host, server=server))
    cmd = 'ansible client -l {0} -a "{1}/bin/zkServer.sh start"'.format(
        host, zk_home)
    exeCmd.run(cmd)
def startBootstrap(host, server):
    """Start Tomcat on *host* via an ansible ad-hoc command.

    The full start-script path is read from configuration key
    ``tomcat.start.script.path``; *server* only appears in the log line.
    """
    log.warn('开始启动 {host} 节点 {server} 服务'.format(host=host, server=server))
    script_path = conf.get('tomcat.start.script.path')
    cmd = 'ansible client -l {0} -a "{1}"'.format(host, script_path)
    exeCmd.run(cmd)