class ReloadProject():
    """
    reload project
    """
    logger = MyTools.getLogger(__name__ + ".ReloadProject")
    DEFAULT = {
        "file_list": [],
        "version_file": "%s/naja.version" % MyTools.get_abs_path(__file__)
    }

    def __init__(self, **config):
        self.conf = MyTools.load_config(self.DEFAULT, config)
        self._create_update()

    def _create_update(self):
        fileList = self.conf['file_list']
        self.update = []
        if not isinstance(fileList, list):
            return
        for i in fileList:
            self.update.append(
                UpdateCode(local_conf=i, local_version=self.conf['version_file']))

    def _reload(self):
        pass
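# A minimal usage sketch (the config file paths below are hypothetical):
# each entry in file_list gets its own UpdateCode watcher tied to the
# shared version file.
reloader = ReloadProject(
    file_list=["/etc/naja/agent.properties", "/etc/naja/plugins.properties"],
    version_file="/opt/naja/naja.version")
for updater in reloader.update:
    print type(updater)  # each entry is an UpdateCode instance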
class ProcessScheduler(Process):
    """
    Process scheduler
    """
    logger = MyTools.getLogger(__name__ + ".ProcessScheduler")
    DEFAULT = {
        "maxSize": 8,
        "queue": None,
    }

    def __init__(self, **config):
        Process.__init__(self)
        self.conf = MyTools.load_config(self.DEFAULT, config)
        self.queue = self.conf['queue']
        self.size = 0
        self.threadList = []

    def _get_queue(self, queue, block=True, timeout=10):
        f = None
        try:
            f = queue.get(block=block, timeout=timeout)
        except Empty:
            self.logger.warning("get queue timeout %d" % timeout)
        except Exception, e:
            self.logger.error(e, exc_info=1)
        return f
class SendHttp(SendInfoInterface):
    DEFAULT_CONFIG = {
        'header': {},
        'method': "POST",
        'data': {},
        'url': None,
    }
    logger = MyTools.getLogger(__name__ + ".SendHttp")

    def send_info(self, **config):
        conf = MyTools.load_config(self.DEFAULT_CONFIG, config)
        assert conf['url'], "url not null"
        res = None
        req = urllib2.Request(url=conf['url'])
        if conf['data']:
            # data is sent as-is; callers are expected to pre-encode
            # (e.g. json.dumps) rather than urllib.urlencode
            req.add_data(conf['data'])
        req.get_method = lambda: conf['method']
        for headerKey in conf['header']:
            req.add_header(headerKey, conf['header'][headerKey])
        try:
            res = urllib2.urlopen(req)
        except urllib2.HTTPError, e:
            self.logger.error("send request %s failed. Code: %d Msg: %s"
                              % (conf['url'], e.getcode(), e.message))
        except urllib2.URLError, e:
            # e.reason may be a socket.error, so stringify before concatenating
            self.logger.error("url error. " + str(e.reason))
        # return was missing; callers such as send_complete_job and send_ser
        # read the response object (None on failure)
        return res
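# Usage sketch (the URL is hypothetical): POST a JSON document and read the
# HTTP status off the returned response object, which is None on failure.
http = SendHttp()
res = http.send_info(url="http://127.0.0.1:8920/naja/host",
                     header={'Content-type': 'application/json'},
                     data=json.dumps({"hostid": "abc", "cpu": {}}))
if res is not None:
    print res.getcode()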
class ProcessFork(Process):
    """
    Scheduler fork: runs each daemon object in its own thread and
    optionally restarts threads that die.
    """
    logger = MyTools.getLogger(__name__ + ".ProcessFork")
    DEFAULT = {"daemon_list": [], "check_alive": True, "check_interval": 30}

    def __init__(self, **config):
        Process.__init__(self)
        self.conf = MyTools.load_config(self.DEFAULT, config)
        self.dList = self.conf['daemon_list']
        self.dList = self.dList if isinstance(self.dList, list) else []
        self.thread = {}

    def _createThread(self, i):
        p = threading.Thread(target=i.run)
        p.setDaemon(True)
        return p

    def runFunc(self):
        for i in self.dList:
            try:
                p = self._createThread(i.obj)
                self.thread[p] = i
                p.start()
            except:
                self.logger.exception("running DRun failed")

    def checkFunc(self):
        dThread = []
        for i in self.thread:
            if not i.is_alive():
                dThread.append(i)
        return dThread

    def run(self):
        if not self.dList:
            return
        self.runFunc()
        if self.conf['check_alive']:
            self._checkAlive()
        else:
            for i in self.thread:
                i.join()

    def _checkAlive(self):
        name = ""
        while 1:
            try:
                dt = self.checkFunc()
                for i in dt:
                    sf = self.thread[i]
                    name = sf.name
                    p = self._createThread(sf.obj)
                    self.thread[p] = sf
                    del self.thread[i]
                    p.start()
            except:
                self.logger.exception("check %s DRun failed." % name)
            time.sleep(self.conf['check_interval'])
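# Usage sketch: ProcessFork reads i.name / i.obj off each entry, the same
# SchedulerFunc contract RunPlugin uses below. The plugin instances and
# server address here are hypothetical.
jobs = [SchedulerFunc("deploy", DynamicDeploy(remote_server="http://127.0.0.1:8920",
                                              local_stored="/opt/naja/packages")),
        SchedulerFunc("collect", CollectSysMsg())]
pf = ProcessFork(daemon_list=jobs, check_interval=10)
pf.daemon = True
pf.start()  # each obj.run runs in its own thread; dead threads are restarted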
def get_package(self, job):
    remote_file = "%s/naja/source/%s" % (self.conf['remote_server'], job.packageName)
    local_file = "%s/%s" % (self.conf['local_stored'], job.packageName)
    # fetch the package and unpack it locally; hand back the script path
    if self.send_http.get_file(remote_file, local_file) and MyTools.untar(
            local_file, self.conf['local_stored']):
        return "%s/%s" % (self.conf['local_stored'], job.script)
    return None
class DeployCode(TRun):
    logger = MyTools.getLogger(__name__ + ".DeployCode")
    PROJECT_NAME = "naja"
    DEFAULT_CONFIG = {
        "local_package": MyTools.get_abs_path(__file__),
        "remote_file": "naja/source/codes/code_list.json",
        "remote_server": None,
    }

    @staticmethod
    def main(remoteConfig):
        config = remoteConfig.get_config(DeployCode.PROJECT_NAME)
        return DeployCode(**config)

    def __init__(self, **config):
        self.conf = MyTools.load_config(self.DEFAULT_CONFIG, config)
        self.send_http = SendHttp()

    def _get_code_list(self):
        """
        code_path: http://127.0.0.1:8920/naja/source/codes/code_list.json
        code_list.json: ["test.py"]
        """
        code_path = "%s/%s" % (self.conf['remote_server'], self.conf['remote_file'])
        code_local_path = "%s/%s" % (MyTools.get_abs_path(__file__), "code_local.json")
        try:
            code_str = self.send_http.get_chunk(code_path)
            code_list = json.loads(code_str)
        except Exception, e:
            self.logger.warning("get code list failed. %s" % e.message)
            code_list = []
        if os.path.exists(code_local_path):
            code_local_list = self.send_http.get_local_info(code_local_path)
        else:
            code_local_list = []
        code1 = set(code_list)
        code2 = set(code_local_list)
        ready_code_list = list(code1 - code2)
        # was self.sh (never defined); write_local_info json-encodes
        # internally, so pass the list itself, not a pre-dumped string
        self.send_http.write_local_info(code_local_path, list(code1 | code2))
        return ready_code_list
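# Sketch of the bookkeeping above: the remote list minus the locally recorded
# list yields only files not yet deployed, and the union is written back so a
# file is fetched at most once.
remote = set(["test.py", "collect.py"])
seen = set(["test.py"])
print list(remote - seen)   # ['collect.py'] -> deploy these
print list(remote | seen)   # recorded so neither is fetched again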
class MyPlugin(object):
    logger = MyTools.getLogger(__name__ + ".MyPlugin")

    def run(self):
        pass

    @staticmethod
    def main(remoteConfig):
        return None
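# Hedged sketch of a concrete plugin. The plugins in this file share one
# contract: a static main(remoteConfig) that pulls a config section and
# returns an instance (or None to opt out), plus a run() body. The config
# key "config.file.echo" is hypothetical.
class EchoPlugin(MyPlugin):
    @staticmethod
    def main(remoteConfig):
        conf = remoteConfig.get_config("config.file.echo")
        return EchoPlugin() if conf else None

    def run(self):
        self.logger.info("EchoPlugin tick")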
def send_complete_job(self, job_id, res):
    fetch_time = self.jobs[job_id][0]
    cjob = CompleteJob(job_id, self.host_id, fetch_time,
                       int(time.time() * 1000), res)
    header = {'Content-type': 'application/json'}
    data = json.dumps(MyTools.namedtuple_dict(cjob))
    url = "%s/%s/complete" % (self.conf['remote_server'], self.conf['remote_url'])
    # use a distinct name so the res argument is not shadowed
    resp = self.send_http.send_info(url=url, header=header, data=data)
    return create_body(resp)
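# Hedged sketch of the wire format. CompleteJob's field names are not shown
# in this file, so the namedtuple below is an assumption used only to
# illustrate the json.dumps(MyTools.namedtuple_dict(...)) step.
from collections import namedtuple
import json, time
CompleteJobDemo = namedtuple("CompleteJobDemo",
                             ["jobId", "hostId", "fetchTime", "completeTime", "result"])
demo = CompleteJobDemo("42", "host-1", 1500000000000, int(time.time() * 1000), "ok")
print json.dumps(demo._asdict())  # the shape the /complete endpoint receives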
class DynamicDeploy(DRun):
    logger = MyTools.getLogger(__name__ + ".DynamicDeploy")
    PROJECT_NAME = "naja"
    DEFAULT_CONFIG = {
        "local_stored": None,
        "remote_server": None,
        "remote_url": "naja/deploy"
    }

    @staticmethod
    def main(remoteConfig):
        config = remoteConfig.get_config(DynamicDeploy.PROJECT_NAME)
        return DynamicDeploy(**config)

    def __init__(self, **config):
        self.conf = MyTools.load_config(self.DEFAULT_CONFIG, config)
        self.host_id = MyTools.get_host_id()
        self.send_http = SendHttp()
        self.jobs = {}
        self.run_queue = Queue.Queue()
        self.proc_queue = Queue.Queue(4)
        self.result_queue = Queue.Queue()
        self.run_thread = RunDeployJob(self, self.run_queue, self.proc_queue)
        self.result_thread = FetchJobResult(self.proc_queue, self.result_queue)

    def run(self):
        # was self.cf, which does not exist
        self.logger.info("run DynamicDeploy from deploy.py, config: %s" % self.conf)
        self.run_thread.setDaemon(True)
        self.result_thread.setDaemon(True)
        self.run_thread.start()
        self.result_thread.start()
        while 1:
            try:
                self.fetch_host_ready_job()
            except:
                self.logger.exception("fetch host ready job failed.")
            self.fetch_job_res()

    def fetch_job_res(self):
        while 1:
            try:
                # non-blocking drain; Queue ignores timeout when block=False
                (job_id, res) = self.result_queue.get(block=False)
                self.send_complete_job(job_id, res)
                del self.jobs[job_id]
            except Queue.Empty:
                break
            except Exception, e:
                self.logger.error("fetch job result failed. %s" % e.message)
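# Usage sketch: DynamicDeploy is normally constructed through its main() hook
# by RunPlugin, but standalone it only needs the endpoints (values below are
# hypothetical) and a call to its blocking run() loop.
dd = DynamicDeploy(local_stored="/opt/naja/packages",
                   remote_server="http://127.0.0.1:8920")
dd.run()  # blocks: fetches ready jobs, runs them, reports completions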
class SendKafka(SendInfoInterface):
    """
    send kafka message
    """
    logger = MyTools.getLogger(__name__ + ".SendKafka")

    def __init__(self, server):
        self.servers = server.split(",")
        assert self.servers, "server not null"
        self.producer = KafkaProducer(bootstrap_servers=self.servers)

    def send_info(self, topic, msg=None):
        # avoid a shared mutable default argument; None means nothing to send
        msg = msg or {}
        for key in msg:
            self.producer.send(topic, key=key, value=msg[key])
        self.producer.flush()
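# Usage sketch (broker address and topic are hypothetical). kafka-python
# expects bytes for key/value unless serializers are configured, so the
# payload here is pre-encoded before handing it to send_info.
sender = SendKafka("127.0.0.1:9092")
sender.send_info("naja-host", {b"host-1": json.dumps({"cpu": 12.5}).encode()})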
class SendInfoInterface(object):
    """
    Base interface for senders; on the first send the hostid is set NULL.
    """
    logger = MyTools.getLogger(__name__ + ".SendInfoInterface")

    def get_local_chunk(self, rfile):
        if not os.path.isfile(rfile):
            return ""
        try:
            with open(rfile, "r") as f:
                buf = f.read()
        except:
            buf = ""
        return buf

    def get_local_info(self, rfile):
        buf = self.get_local_chunk(rfile)
        try:
            return json.loads(buf)
        except:
            self.logger.warning("json loads failed. %s" % str(buf))
            return {}

    def write_local_chunk(self, wfile, info):
        try:
            with open(wfile, "w") as fh:
                fh.write(info)
            return True
        except:
            return False

    def write_local_info(self, wfile, info):
        try:
            jsonStr = json.dumps(info)
            return self.write_local_chunk(wfile, jsonStr)
        except:
            self.logger.warning("write json to file failed. %s %s"
                                % (str(wfile), str(info)))
            return False

    def send_info(self, jsonDict):
        pass
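# Round-trip sketch for the local JSON helpers (the path is hypothetical):
# write_local_info serializes once, get_local_info parses it back, and both
# degrade to a harmless default on I/O or parse errors.
iface = SendInfoInterface()
iface.write_local_info("/tmp/naja_state.json", {"deployed": ["test.py"]})
print iface.get_local_info("/tmp/naja_state.json")  # {u'deployed': [u'test.py']}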
def __init__(self, **config):
    self.conf = MyTools.load_config(self.DEFAULT_CONFIG, config)
    self._dynamic_class()
class RunPlugin(object):
    """
    Run plugin
    """
    DEFAULT_CONFIG = {
        "processNumber": 8,
        "configFile": None,
        "configPrefix": "config.file",
        "remoteServer": None
    }
    logger = MyTools.getLogger(__name__ + ".RunPlugin")

    def __init__(self, **config):
        self.conf = MyTools.load_config(self.DEFAULT_CONFIG, config)
        self.processNumber = self.conf['processNumber']
        assert self.conf['configFile'], "configFile parameter must be set"
        assert self.conf['remoteServer'], "remoteServer parameter must be set"
        cf = self.conf['configFile']
        rs = self.conf['remoteServer']
        self.remoteConfig = RemoteConfig(local_conf=cf,
                                         remote_server=rs,
                                         project_name="naja")
        self.showPlugin = DynamicImport()
        self.alreadyF = {}
        self.alreadyD = {}
        self.queue = Queue(self.processNumber)
        self.procScheduler = None
        self.procForks = []

    def _get_T(self):
        tf = {}
        sp = self.showPlugin.showPlugin
        spt = sp.get_t()
        for i in spt:
            # only instantiate plugins we have not seen before
            if i not in self.alreadyF:
                self.alreadyF[i] = spt[i]
                cf = self.remoteConfig.get_config(
                    "%s.%s" % (self.conf['configPrefix'], i))
                if not cf:
                    continue
                f = spt[i].main(self.remoteConfig.copy(cf))
                if f:
                    tf[i] = f
        return tf

    def _get_D(self):
        tf = {}
        sp = self.showPlugin.showPlugin
        spd = sp.get_d()
        for i in spd:
            if i not in self.alreadyD:
                self.alreadyD[i] = spd[i]
                cf = self.remoteConfig.get_config(
                    "%s.%s" % (self.conf['configPrefix'], i))
                if not cf:
                    continue
                f = spd[i].main(self.remoteConfig.copy(cf))
                if f:
                    tf[i] = f
        return tf

    def _run_T(self, tf):
        self._create_scheduler()
        for i in tf:
            sff = SchedulerFunc(i, tf[i])
            self.procScheduler._put_queue(sff, self.queue, block=False)
            self.logger.info(str(tf[i].run))

    def _run_D(self, df):
        drunList = []
        if not df:
            return
        for i in df:
            drunList.append(SchedulerFunc(i, df[i]))
        pf = ProcessFork(daemon_list=drunList)
        pf.daemon = True
        pf.start()
        self.procForks.append(pf)

    def _run_dynamic(self):
        sp = self.showPlugin
        dynamicFunc = SchedulerFunc("DynamicImport", sp)
        self.queue.put(dynamicFunc)
        dynamicRun = ProcessScheduler.create_func(dynamicFunc)
        dynamicThread = threading.Thread(target=dynamicRun)
        dynamicThread.setDaemon(True)
        dynamicThread.start()
        return dynamicThread

    def _load_TD(self):
        self.remoteConfig.update_config()
        tf = self._get_T()
        self._run_T(tf)
        df = self._get_D()
        self._run_D(df)

    def _create_scheduler(self):
        if not self.procScheduler:
            self.procScheduler = ProcessScheduler(queue=self.queue,
                                                  maxSize=self.processNumber)
            self.procScheduler.daemon = True

    def run(self):
        self._create_scheduler()
        if self.procScheduler:
            self.procScheduler.start()
        self._run_dynamic()
        while 1:
            try:
                self._load_TD()
                time.sleep(5)
            except KeyboardInterrupt:
                break
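# Entry-point sketch: RunPlugin is the agent's outer loop. The paths and
# server below are hypothetical; run() schedules T-plugins on the process
# scheduler, D-plugins under ProcessFork, and re-reads remote config every
# 5 seconds until interrupted.
rp = RunPlugin(configFile="/etc/naja/s.properties",
               remoteServer="http://127.0.0.1:8920",
               processNumber=4)
rp.run()  # blocks until KeyboardInterrupt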
class CollectYarnAppMsg():
    """
    Collect yarn application messages.

    Parameters:
        keyword = { "containerRole": "cmdlineKeyword" }
        func = { "containerRole": Function(containerRole, cmdlineKeyword)
                 -> List(dict(id: T), dict(id: List[R])) }
    """
    DEFAULT = {
        "keyword": {},
        "func": {},
    }
    KEYWORD = {
        "drive": "org.apache.spark.deploy.yarn.ApplicationMaster",
        "executor": "org.apache.spark.executor.CoarseGrainedExecutorBackend"
    }
    logger = MyTools.getLogger(__name__ + ".CollectYarnAppMsg")

    def __init__(self, **config):
        self.conf = MyTools.load_config(self.DEFAULT, config)
        self.ps = SysPs()
        self.conf["keyword"].update(self.KEYWORD)
        self.keyword = self.conf["keyword"]
        self.func = self.conf['func']
        self.host_id = MyTools.get_uuid(True)

    def get_app(self):
        """
        Return HostJob(hostId="", hostApp=List[SparkApp],
                       hostContainer=List[Container]) rendered as:
        {
            "hostId": "",
            "hostApp": [
                {
                    "hostId": "xx",
                    "appId": "xx",
                    "userClass": "xx",
                    "appName": "xx",
                    "containers": [Container._asdict, ...],
                    "timestamp": xxxxx
                },
                ...
            ],
            "hostContainer": [
                {
                    "hostId": "xx",
                    "appId": "xx",
                    "containerId": "xx",
                    "timestamp": xxxxx
                },
                ...
            ]
        }
        """
        res = self._get_app_all()
        return MyTools.namedtuple_dict(HostJob(self.host_id, res[0], res[1]))

    def _get_app_all(self):
        apps = []
        containers = []
        res = self._get_app()
        apps.extend(res[0])
        containers.extend(res[1])
        for k, v in self.keyword.items():
            if k not in self.KEYWORD.keys():
                res = self._get_self_app(k, v)
                apps.extend(res[0])
                containers.extend(res[1])
        return [apps, containers]

    def _get_self_app(self, app_role, cmd):
        func = self.func[app_role]
        res = []
        if callable(func):
            try:
                res = func(app_role, cmd)
            except:
                self.logger.warning("self get_app failed. %s : %s"
                                    % (app_role, cmd))
        if isinstance(res, list) and res and len(res) == 2:
            if isinstance(res[0], dict) and isinstance(res[1], dict):
                return self._set_app_container(res[0], res[1])
        return [[], []]

    def _get_app(self):
        dl = self._get_drive()
        el = self._get_executor()
        return self._set_app_container(dl, el)

    def _set_app_container(self, drive_list, executor_list):
        # attach each app's executors to its driver entry; the leftovers are
        # containers whose driver runs on another host
        for appid, app in drive_list.items():
            if appid in executor_list:
                app.containers.extend(executor_list[appid])
                del executor_list[appid]
        el_values = []
        for i in executor_list.values():
            el_values.extend(i)
        return [drive_list.values(), el_values]

    def _get_drive(self):
        app_list = {}
        proc = self.ps.get_process(cmd=self.keyword['drive'])
        for pid, cmdline in proc.items():
            # skip the /bin/bash wrapper YARN spawns around the JVM
            if cmdline[0] == "/bin/bash" and len(cmdline) == 3:
                continue
            appid = self._get_appid(cmdline)
            appmsg = self._get_drive_msg(cmdline)
            if appid and len(appid) == 2:
                container = Container(self.host_id, appid[0], appid[1],
                                      MyTools.now_time())
                if appmsg and len(appmsg) == 2:
                    app_list[appid[0]] = SparkApp(self.host_id, appid[0],
                                                  appmsg[0], appmsg[1],
                                                  [container],
                                                  MyTools.now_time())
        return app_list

    def _get_executor(self):
        app_list = {}
        proc = self.ps.get_process(cmd=self.keyword['executor'])
        for pid, cmdline in proc.items():
            if cmdline[0] == "/bin/bash" and len(cmdline) == 3:
                continue
            appid = self._get_appid(cmdline)
            if appid and len(appid) == 2:
                container = Container(self.host_id, appid[0], appid[1],
                                      MyTools.now_time())
                if appid[0] in app_list:
                    app_list[appid[0]].append(container)
                else:
                    app_list[appid[0]] = [container]
        return app_list

    def _get_drive_msg(self, cmdline):
        # pull the --class and --properties-file arguments off the
        # ApplicationMaster command line
        param = []
        for i in range(len(cmdline)):
            if cmdline[i] == "--class":
                param.append(cmdline[i + 1])
            elif cmdline[i] == "--properties-file":
                param.append(cmdline[i + 1])
        try:
            p = Properties(param[1])
            app_name = p.get_value("spark.app.name")
        except:
            app_name = None
        if len(param) == 2:
            return [param[0], app_name]
        else:
            return []

    def _get_appid(self, cmdline):
        # the container tmpdir looks like .../application_xxx/container_xxx/tmp,
        # so the two path components before "tmp" are [appId, containerId]
        cmd_keyword = "-Djava.io.tmpdir"
        for i in cmdline:
            if cmd_keyword in i:
                v = i.split("/")
                return v[-3:-1]
        return []
class CollectSysMsg(TRun):
    PROJECT_NAME = "naja"
    DEFAULT_CONFIG = {
        'send_type': None,
        'send_func': None,
    }
    MYSQL_CONFIG = {
        'mysql_host': None,
        'mysql_user': '******',
        'mysql_db': 'naja',
        'mysql_password': None,
        'mysql_port': 3306
    }
    SER_CONFIG = {
        'ser_host': None,
        'ser_url': None
    }
    REMOTE_CONFIG = {
        'local_conf': "%s/s.properties" % (MyTools.get_abs_path(__file__)),
        'local_version': None,
        'remote_server': "http://172.17.124.208:9200/naja/source"
    }
    logger = MyTools.getLogger(__name__ + ".CollectSysMsg")

    @staticmethod
    def main(remoteConfig):
        conf = remoteConfig.get_config(CollectSysMsg.PROJECT_NAME)
        return CollectSysMsg(**conf)

    def __init__(self, **config):
        # load config
        self.conf = copy.copy(self.DEFAULT_CONFIG)
        self._load_conf(self.conf, config)
        self._load_mysql_conf(config)
        self._load_remote_conf(config)
        self._load_ser_conf(config)
        self.abs_path = MyTools.get_abs_path(__file__)
        self.rc = RemoteConfig(project_name=self.PROJECT_NAME, **self.conf)
        self.mysql = None
        self.sendHttp = None
        self.sys_ps = SysPs()
        self.host_id = MyTools.get_uuid(True)
        self._load_old_info()
        self.info = {
            "hostid": self.host_id,
            "hostname": MyTools.get_hostname(),
            "cpu": {},
            "mem": {},
            "disk": {},
            "proc": {},
            "role": {},
            "net": {},
            "user": MyTools.get_user()
        }

    def _load_remote_conf(self, config):
        self._load_conf(self.REMOTE_CONFIG, config)

    def _load_mysql_conf(self, config):
        self._load_conf(self.MYSQL_CONFIG, config)

    def _load_ser_conf(self, config):
        self._load_conf(self.SER_CONFIG, config)

    def _load_conf(self, CONFIG, config):
        for k in CONFIG:
            if k in config:
                self.conf[k] = config[k]
            else:
                self.conf[k] = CONFIG[k]

    def _load_old_info(self):
        old_file = self.abs_path + "/.collectSysMsg.old.json"
        try:
            if os.path.exists(old_file):
                info_str = MyTools.head(old_file)
                self.old_info = json.loads(info_str[0])
            else:
                self.old_info = {}
        except:
            self.old_info = {}

    def _write_old_info(self):
        old_file = self.abs_path + "/.collectSysMsg.old.json"
        MyTools.write_file(old_file, json.dumps(self.old_info))

    def _now_time(self):
        return int(time.time() * 1000)

    def get_sleep(self):
        return 1

    def get_schedule(self):
        return 5

    def role_message(self):
        rc = self.rc
        role = {}
        ps = self.sys_ps
        roles = rc.get_config('role')
        r = self._roles(roles)
        for k, v in r.items():
            if ps.get_process(cmd=v):
                role[k] = {'status': 1}
                self.info['role'][k] = role[k]
        for i in self.info['role'].keys():
            if i not in role.keys():
                self.info['role'][i]['status'] = 0

    def _roles(self, r, k=None, s=None):
        # flatten nested role config into dotted keys; s used to be a mutable
        # default argument, which leaked accumulated roles between calls
        if s is None:
            s = {}
        for i in r.keys():
            p = i if not k else k + "." + i
            if not isinstance(r[i], dict):
                s[p] = r[i]
            else:
                self._roles(r[i], p, s)
        return s

    def _mem(self):
        r_mem = {}
        mem = psutil.virtual_memory()
        r_mem['total'] = mem.total
        r_mem['used'] = mem.used
        r_mem['free'] = mem.free
        r_mem['buffer'] = mem.buffers if hasattr(mem, 'buffers') else 0
        r_mem['cached'] = mem.cached if hasattr(mem, 'cached') else 0
        r_mem['shared'] = mem.shared if hasattr(mem, 'shared') else 0
        return r_mem

    def _cpu(self):
        r_cpu = {}
        cpu = psutil.cpu_times_percent()
        r_cpu['idle'] = cpu.idle
        r_cpu['user'] = cpu.user
        r_cpu['system'] = cpu.system
        return r_cpu

    def _disk(self):
        r_disk = {}
        disk_io = psutil.disk_io_counters(perdisk=True)
        for i in psutil.disk_partitions():
            r_disk[i.mountpoint] = {"device": i.device, "fstype": i.fstype}
        for k, v in r_disk.items():
            device = os.path.basename(v['device'])
            u = psutil.disk_usage(k)
            v['total'] = u.total
            v['used'] = u.used
            v['percent'] = u.percent
            if device in disk_io:
                v['read'] = disk_io[device].read_bytes
                v['read_time'] = disk_io[device].read_time
                v['write'] = disk_io[device].write_bytes
                v['write_time'] = disk_io[device].write_time
            else:
                v['read'] = 0
                v['write'] = 0
                v['read_time'] = 1
                v['write_time'] = 1
        return r_disk

    def _disk_rate(self):
        od = self.old_info.get('disk', {})
        d = self.info['disk']
        rd = {}
        for k, v in d.items():
            o_r = od[k]['read'] if k in od else v['read']
            o_w = od[k]['write'] if k in od else v['write']
            # parentheses added: the original divided only o_r/o_w by the
            # schedule instead of the whole delta
            rrate = (v['read'] - o_r) / float(self.get_schedule())
            wrate = (v['write'] - o_w) / float(self.get_schedule())
            rd[k] = {'read': rrate, 'write': wrate}
        return rd

    def _net(self):
        i_net = {}  # ip is key
        r_net = {}  # ifName is key
        for i in MyTools.get_netcard():
            i_net[i[1]] = r_net[i[0]] = {"ip": i[1], "recv": 0, "sent": 0,
                                         "link": 0, "total_link": 0}
        try:
            links = psutil.net_connections()
            flow = psutil.net_io_counters(pernic=True)
        except:
            links = []
            flow = {}
        for i in links:
            if i.laddr.ip in i_net:
                i_net[i.laddr.ip]['link'] += 1
        for i in i_net:
            i_net[i]['total_link'] = len(links)
        for i in r_net:
            r_net[i]['recv'] = flow[i].bytes_recv if flow else 0
            r_net[i]['sent'] = flow[i].bytes_sent if flow else 0
        return r_net

    def _net_rate(self):
        o_net = self.old_info.get('net', {})
        net = self.info['net']
        nr = {}
        for i in net:
            o_sent = o_net.get(i, {"sent": net[i]['sent']})['sent']
            o_recv = o_net.get(i, {"recv": net[i]['recv']})['recv']
            rsent = (net[i]['sent'] - o_sent) / float(self.get_schedule())
            rrecv = (net[i]['recv'] - o_recv) / float(self.get_schedule())
            nr[i] = {'sent': rsent, 'recv': rrecv}
        return nr

    def _proc(self):
        r_proc = {}
        r_proc['total'] = len(psutil.pids())
        return r_proc

    def sys_message(self):
        self.info['cpu'] = self._cpu()
        self.info['mem'] = self._mem()
        self.info['net'] = self._net()
        self.info['proc'] = self._proc()  # proc is not yet written to a table
        self.info['disk'] = self._disk()

    def run(self):
        rc = self.rc
        rc.update_config()
        conf = rc.get_config(self.PROJECT_NAME, {})
        for i in self.REMOTE_CONFIG:
            if i in conf and conf[i] != self.conf[i]:
                self.rc = RemoteConfig(project_name=self.PROJECT_NAME, **conf)
                break
        for i in self.MYSQL_CONFIG:
            if i in conf and conf[i] != self.conf[i] and self.mysql:
                self.mysql.close()
                self.mysql = None
                break
        self._load_conf(self.conf, conf)
        self.sys_message()
        self.role_message()
        self.send()

    def send(self):
        try:
            t = self.conf['send_type']
            if t == "ser":
                self.send_ser()
            elif t == "mysql":
                self.send_mysql()
            else:
                jinfo = json.dumps(self.info)
                if self.conf['send_func']:
                    self.conf['send_func'](jinfo)
                else:
                    self._send_func(jinfo)
            # deep copy so old_info does not alias nested dicts (notably
            # 'role', which is mutated in place on the next cycle)
            self.old_info = copy.deepcopy(self.info)
            self._write_old_info()
        except:
            self.logger.exception("send failed.")

    def _send_func(self, jinfo):
        print jinfo

    def send_ser(self):
        url = self.conf['ser_url']
        host = self.conf['ser_host']
        if not url:
            assert host, "ser_host parameter must be specified"
        if not self.sendHttp:
            self.sendHttp = SendHttp()
        url = url if url else "http://%s/naja/host" % host
        header = {'Content-type': 'application/json'}
        data = json.dumps(self._create_host())
        try:
            res = self.sendHttp.send_info(url=url, header=header, data=data)
        except:
            self.logger.exception("send http server failed.")

    def send_mysql(self):
        h = self.conf['mysql_host']
        p = self.conf['mysql_password']
        assert h, "mysql_host parameter must be specified"
        assert p, "mysql_password parameter must be specified"
        if not self.mysql:
            self.mysql = MysqlDB(host=h, db=self.conf['mysql_db'],
                                 user=self.conf['mysql_user'],
                                 password=p, port=self.conf['mysql_port'])
        mysql = self.mysql
        sqls = []
        sqls.append(self._host_sql())
        sqls.append(self._ip_sql())
        sqls.append(self._role_sql())
        sqls.append(self._cpu_sql())
        sqls.append(self._mem_sql())
        sqls.append(self._disk_sql())
        sqls.append(self._net_sql())
        mysql.multiple_write(";".join(sqls))

    def _role_sql(self):
        hid = self.host_id
        o_role = self.old_info.get('role', {})
        role = self.info['role']
        timestamp = self._now_time()
        nr = [i for i in role if i not in o_role]
        ur = [i for i in role if i in o_role]
        if nr:
            role_sql = 'insert into roles (host_id,host_role,timestamp) values '
            role_sql += ",".join(['("%s","%s",%d)' % (hid, r, timestamp)
                                  for r in nr])
        else:
            role_sql = ''
        if o_role:
            urole_sql = 'update roles set timestamp=%d where host_id="%s" and host_role="%s"'
            urole_sql = ';'.join([urole_sql % (timestamp, hid, i)
                                  for i in ur if role[i]['status'] == 1])
        else:
            urole_sql = ''
        return role_sql + ";" + urole_sql

    def _ip_sql(self):
        hid = self.host_id
        o_net = self.old_info.get("net", {})
        net = self.info['net']
        timestamp = self._now_time()
        if not net:
            return ''
        nc = [(i, net[i]['ip']) for i in net if i not in o_net]
        unc = [(i, net[i]['ip']) for i in net
               if i in o_net and net[i]['ip'] != o_net[i]['ip']]
        if nc:
            ip_sql = 'insert into ips values '
            ip_sql += ",".join(['("%s","%s","%s",%d)' % (hid, n[0], n[1], timestamp)
                                for n in nc])
        else:
            ip_sql = ''
        if unc:
            uip_sql = 'update ips set timestamp=%d,host_ip="%s" where host_ifname="%s" and host_id="%s"'
            uip_sql = ';'.join([uip_sql % (timestamp, i[1], i[0], hid)
                                for i in unc])
        else:
            uip_sql = ''
        return ip_sql + ";" + uip_sql

    def _host_sql(self):
        hid = self.host_id
        o_info = self.old_info
        info = self.info
        timestamp = self._now_time()
        if o_info:
            host_sql = 'update hosts set timestamp=%d%s where host_id="%s"'
            if o_info['hostname'] != info['hostname']:
                host_sql = host_sql % (timestamp,
                                       ',host_name="%s"' % info['hostname'], hid)
            else:
                host_sql = host_sql % (timestamp, '', hid)
        else:
            host_sql = 'insert into hosts values ("%s","%s",%d)' % (
                hid, info['hostname'], timestamp)
        return host_sql

    def _cpu_sql(self):
        hid = self.host_id
        timestamp = self._now_time()
        cpu = self.info['cpu']
        cpu_sql = 'insert into cpu values ("%s",%0.2f,%0.2f,%0.2f,%d)'
        return cpu_sql % (hid, cpu['user'], cpu['system'], cpu['idle'], timestamp)

    def _mem_sql(self):
        hid = self.host_id
        timestamp = self._now_time()
        mem = self.info['mem']
        mem_sql = 'insert into mem values ("%s",%d,%d,%d,%d,%d,%d,%d)'
        return mem_sql % (hid, mem['total'], mem['used'], mem['free'],
                          mem['shared'], mem['buffer'], mem['cached'], timestamp)

    def _disk_sql(self):
        hid = self.host_id
        timestamp = self._now_time()
        d = self.info['disk']
        disk_sql = 'insert into disk values '
        value = '("%s","%s","%s","%s",%d,%d,%0.2f,%0.2f,%d)'
        values = []
        disk_rate = self._disk_rate()
        for k, v in d.items():
            rrate = disk_rate[k]['read']
            wrate = disk_rate[k]['write']
            values.append(value % (hid, k, v['device'], v['fstype'],
                                   v['total'], v['used'], rrate, wrate,
                                   timestamp))
        disk_sql += ','.join(values)
        return disk_sql

    def _net_sql(self):
        hid = self.host_id
        timestamp = self._now_time()
        n = self.info['net']
        net_sql = 'insert into net_io values '
        value = '("%s","%s",%0.2f,%0.2f,%d,%d,%d)'
        values = []
        net_rate = self._net_rate()
        for k, v in n.items():
            rrate = net_rate[k]['recv']
            srate = net_rate[k]['sent']
            values.append(value % (hid, k, srate, rrate, v['link'],
                                   v['total_link'], timestamp))
        net_sql += ','.join(values)
        return net_sql

    def _create_ip(self):
        i = self.info['net']
        hid = self.info['hostid']
        ips = []
        net_io = []
        now_time = self._now_time()
        rni = self._net_rate()
        for k in i:
            ip = {'id': hid, 'ifName': k, 'ip': i[k]['ip'],
                  'timestamp': now_time}
            ips.append(ip)
            ni = {"id": hid, "ifName": k, "sent": rni[k]['sent'],
                  "recv": rni[k]['recv'], "link": i[k]['link'],
                  "totalLink": i[k]['total_link']}
            ni['timestamp'] = now_time
            net_io.append(ni)
        return (ips, net_io)

    def _create_role(self):
        r = self.info['role']
        hid = self.info['hostid']
        roles = []
        nt = self._now_time()
        for i in r:
            if r[i]['status'] == 1:
                role = {'id': hid, 'role': i, 'table': None, 'timestamp': nt}
                roles.append(role)
        return roles

    def _create_cpu(self):
        c = self.info['cpu']
        hid = self.info['hostid']
        cpu = {'id': hid, 'user': c['user'], 'sys': c['system'],
               'idle': c['idle'], 'timestamp': self._now_time()}
        return cpu

    def _create_memory(self):
        m = self.info['mem']
        hid = self.info['hostid']
        mem = {'id': hid, 'total': m['total'],
               'used': m['used'], 'free': m['free'],
               'shared': m['shared'], 'buffer': m['buffer'],
               'cached': m['cached'], 'timestamp': self._now_time()}
        return mem

    def _create_disk(self):
        d = self.info['disk']
        hid = self.info['hostid']
        nt = self._now_time()
        disks = []
        rd = self._disk_rate()
        for i in d:
            disk = {'id': hid, 'mount': i, 'device': d[i]['device'],
                    'fsType': d[i]['fstype'],
                    'total': d[i]['total'], 'used': d[i]['used'],
                    'ioRead': rd[i]['read'], 'ioWrite': rd[i]['write'],
                    'timestamp': nt}
            disks.append(disk)
        return disks

    def _create_host(self):
        hid = self.info['hostid']
        user = self.info['user']
        hname = self.info['hostname']
        (ip, net) = self._create_ip()
        mem = self._create_memory()
        cpu = self._create_cpu()
        disk = self._create_disk()
        role = self._create_role()
        proc = self.info['proc']
        return {'hostId': hid, 'hostName': hname, 'user': user, 'ip': ip,
                'mem': mem, 'cpu': cpu, 'net': net, 'disk': disk,
                'proc': proc, 'role': role}
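# Usage sketch: run one collection cycle and print the JSON instead of
# shipping it, by supplying send_func. remote_server is the REMOTE_CONFIG
# default; this assumes RemoteConfig.update_config tolerates an unreachable
# server (e.g. by logging and keeping the local config).
def to_stdout(jinfo):
    print jinfo

c = CollectSysMsg(send_func=to_stdout,
                  remote_server="http://172.17.124.208:9200/naja/source")
c.run()  # collects cpu/mem/disk/net/role, then calls to_stdout with the JSON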