def getHdfsDirSizeForSSH(self, hdfsDir):
    sysConf = ConfModel.getSystemConf()
    nameNodeAccount = sysConf.get('hadoop').get('hadoop_namenode_account')
    nameNodeHost = sysConf.get('hadoop').get('hadoop_namenode_host')
    command = "hdfs dfs -du -s " + hdfsDir
    commandRs = Process.sshCommand(nameNodeAccount, nameNodeHost, command)
    rs = {}
    if commandRs.get('code') == 0:
        strToList = commandRs.get('stdoutPut').split('\n')
        # Skip empty lines and shell noise; keep the last real data line
        line = ""
        for curLine in strToList:
            if len(curLine) == 0:
                continue
            elif "bash" in curLine:
                continue
            else:
                line = curLine
        filterList = line.split()
        rs['dir'] = filterList[2]
        rs['dataSize'] = int(filterList[0])
        rs['hdfsSize'] = int(filterList[1])
    else:
        print(commandRs.get('erroutPut'))
    return rs
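# A minimal usage sketch ('HdfsModel' is a hypothetical name for the enclosing
# class, which this excerpt does not show). On recent Hadoop releases
# `hdfs dfs -du -s` prints three columns -- raw size, size consumed with
# replication, and the path -- which is why the parser reads indexes 0, 1, 2:
#
#     model = HdfsModel()
#     sizeInfo = model.getHdfsDirSizeForSSH('/user/hive/warehouse')
#     print(sizeInfo)  # e.g. {'dir': '/user/hive/warehouse', 'dataSize': ..., 'hdfsSize': ...}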
def __init__(self):
    systemConf = ConfModel.getSystemConf()
    connConf = {
        'host': systemConf['spark_server']['host'],
        'port': systemConf['spark_server']['port'],
        'user': systemConf['spark_server']['user'],
        'password': systemConf['spark_server']['password']
    }
    HiveInterface.__init__(self, connConf)
def __init__(self):
    systemConf = ConfModel.getSystemConf()
    connConf = {
        'host': systemConf['mysql_bi_db']['host'],
        'port': int(systemConf['mysql_bi_db']['port']),
        'user': systemConf['mysql_bi_db']['user'],
        'passwd': systemConf['mysql_bi_db']['password'],
        'db': 'dw_service',
        'charset': 'utf8'
    }
    MysqlInterface.__init__(self, connConf)
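# Note: the keys above ('host', 'port', 'user', 'passwd', 'db', 'charset')
# match MySQLdb.connect's keyword arguments, so MysqlInterface most likely
# forwards the dict straight to the driver -- a hedged sketch, not the
# project's actual code:
#
#     import MySQLdb
#     conn = MySQLdb.connect(**connConf)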
def DefautlSendMail(subject, content):
    systemConf = ConfModel.getSystemConf()
    mailBaseInfo = {
        'smtpServer': systemConf['bi_mail']['smtp_server'],
        'username': systemConf['bi_mail']['username'],
        'password': systemConf['bi_mail']['password'],
        'sender': systemConf['bi_mail']['sender'],
        # 'receiver' is stored as a comma-separated string in the config
        'receiver': systemConf['bi_mail']['receiver'].split(','),
        'subject': subject,
        'content': content
    }
    Mail.SendMail(mailBaseInfo)
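# Usage sketch: every connection/credential detail comes from system config,
# so callers pass only the subject and body (illustrative values):
#
#     DefautlSendMail('dw_service ETL report', 'All jobs finished successfully.')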
def _getSparkInterface(self):
    u'''Reuse a single connection instance instead of creating one per call.'''
    if self.sparkInterface is not None:
        return self.sparkInterface
    systemConf = ConfModel.getSystemConf()
    connConf = {
        'host': systemConf['spark_server']['host'],
        'port': systemConf['spark_server']['port'],
        'user': systemConf['spark_server']['user'],
        'password': systemConf['spark_server']['password']
    }
    self.sparkInterface = HiveServer2(connConf)
    return self.sparkInterface
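# Usage sketch demonstrating the cached connection ('ReportModel' is a
# hypothetical name for the enclosing class; it assumes __init__ sets
# self.sparkInterface to None):
#
#     model = ReportModel()
#     assert model._getSparkInterface() is model._getSparkInterface()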
def __init__(self):
    self.systemConf = ConfModel.getSystemConf()
    self.systemCorePath = ConfModel.getCoreSystemPath()