def parse(self, data):
    """Parse /proc/net/dev-style output into per-interface counters.

    Only interfaces matching one of the configured include globs are
    reported.  Returns a dict keyed by interface name, plus 'timestamp'
    and 'hostname' entries.
    """
    splitter = re.compile("[ :]+")
    result = {}
    for raw in data.split('\n'):
        if ":" not in raw:
            continue
        fields = splitter.split(raw)
        interface = fields[1]
        # keep only interfaces matching at least one include pattern
        if not any(fnmatch(interface, pat) for pat in self.__include_devices):
            continue
        counters = {}
        for direction, dir_offset in self.directions.items():
            per_dir = {}
            for name, field_offset in self.fields.items():
                per_dir[name] = int(fields[dir_offset + field_offset])
            counters[direction] = per_dir
        result[interface] = counters
    result['timestamp'] = int(time.time())
    result['hostname'] = get_hostname()
    return result
def parse(self, data):
    """Parse `mysqladmin processlist` table output into process records.

    Rows of interest sit between the 3-line header and the closing
    border / trailing blank line.  Optional columns (host, db, state,
    info) are only emitted when non-empty.
    """
    processes = []
    for row in data.split('\n')[3:-2]:
        cells = [cell.strip() for cell in row.split('|')]
        entry = {"id": cells[1], "user": cells[2]}
        if cells[3]:
            entry["host"] = cells[3]
        if cells[4]:
            entry["db"] = cells[4]
        entry["command"] = cells[5]
        entry["time"] = int(cells[6])
        if cells[7]:
            entry["state"] = cells[7]
        if cells[8]:
            entry["info"] = cells[8]
        processes.append(entry)
    return {'processes': processes,
            'timestamp': int(time.time()),
            'hostname': get_hostname()}
def parse(self, data):
    """Parse `repquota`-style output into per-filesystem user quotas.

    A "User quota on ..." header opens a new filesystem section; the
    following data rows are appended to that section's quota list.
    Users in the configured exclude list are skipped.
    """
    result = {"timestamp": int(time.time()),
              "hostname": get_hostname(),
              "filesystems": []}
    for raw in data.split("\n"):
        stripped = raw.strip()
        tokens = stripped.split()
        if stripped.startswith("User quota on"):
            # header line introduces a new filesystem section
            filesystem = {"device": tokens[4][1:-1],
                          "filesystem": tokens[3],
                          "quota": []}
            result["filesystems"].append(filesystem)
            continue
        # skip column headers, separators and blank lines
        if (stripped.startswith("User ID") or stripped.startswith("-")
                or stripped.startswith("Blocks") or not stripped):
            continue
        username = tokens[0]
        if username in self.__exclude_users:
            continue
        filesystem["quota"].append({"username": username,
                                    "used": int(tokens[1]),
                                    "soft": int(tokens[2]),
                                    "hard": int(tokens[3])})
    return result
def parse(self, data):
    """Parse user quota report output into per-filesystem entries.

    Each "User quota on ..." header starts a section; subsequent data
    rows become quota entries for it.  Excluded users are dropped.
    """
    result = {}
    result["timestamp"] = int(time.time())
    result["hostname"] = get_hostname()
    result["filesystems"] = []
    for raw_line in data.split("\n"):
        text = raw_line.strip()
        parts = text.split()
        if text.startswith("User quota on"):
            # new section: device name is wrapped in brackets/parens
            section = {
                "device": parts[4][1:-1],
                "filesystem": parts[3],
                "quota": [],
            }
            result["filesystems"].append(section)
        elif (text.startswith("User ID") or text.startswith("-")
              or text.startswith("Blocks") or len(text) == 0):
            # header / separator / blank noise
            pass
        elif parts[0] in self.__exclude_users:
            pass
        else:
            section["quota"].append({
                "username": parts[0],
                "used": int(parts[1]),
                "soft": int(parts[2]),
                "hard": int(parts[3]),
            })
    return result
def parse(self, data):
    """Turn `mysqladmin processlist` output into a list of process dicts."""
    rows = data.split('\n')[3:-2]  # drop table borders and header
    result = []
    for row in rows:
        cols = [col.strip() for col in row.split('|')]
        proc = {}
        proc["id"] = cols[1]
        proc["user"] = cols[2]
        # optional columns are only recorded when non-empty
        for idx, key in ((3, "host"), (4, "db")):
            if len(cols[idx]) > 0:
                proc[key] = cols[idx]
        proc["command"] = cols[5]
        proc["time"] = int(cols[6])
        for idx, key in ((7, "state"), (8, "info")):
            if len(cols[idx]) > 0:
                proc[key] = cols[idx]
        result.append(proc)
    output = {}
    output['processes'] = result
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Map column 1 -> column 2 for each data row of a pipe-delimited table.

    The first three lines (border + header) and the last two (border +
    trailing blank) are ignored.
    """
    output = {}
    for row in data.split('\n')[3:-2]:
        cells = row.split('|')
        output[cells[1].strip()] = cells[2].strip()
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Match the configured regex against the input and render the captured
    groups through the transform template, producing a JSON document."""
    log.debug("match %s" % self.__pattern)
    groups = re.match(self.__pattern, data, re.M | re.I).groups()
    output = json.loads(self.__transform.format(*groups))
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Apply the configured multi-line, case-insensitive regex to *data*,
    format the groups through the transform template and parse as JSON."""
    log.debug("match %s" % self.__pattern)
    matched = re.match(self.__pattern, data, re.M | re.I)
    rendered = self.__transform.format(*matched.groups())
    output = json.loads(rendered)
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Build a key/value mapping from the data rows of a pipe-delimited
    table (three header lines and two footer lines are skipped)."""
    rows = data.split('\n')[3:-2]
    output = {}
    for row in rows:
        cells = [c.strip() for c in row.split('|')]
        output[cells[1]] = cells[2]
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Split *data* on the configured delimiter and render the stripped
    pieces through the transform template, yielding a JSON document.

    Returns the parsed JSON dict augmented with 'timestamp' and
    'hostname'.  Fix: the local previously named `list` shadowed the
    builtin; renamed to `items`.
    """
    log.debug("delimiter %s" % self.__delimiter)
    items = [item.strip() for item in re.split(self.__delimiter, data)]
    result = self.__transform.format(*items)
    log.debug("result %s" % result)
    output = json.loads(result)
    log.debug("output %s" % output)
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Split the input on the configured delimiter, feed the stripped
    fields into the transform template and parse the result as JSON.

    Fix: renamed the local `list` so it no longer shadows the builtin.
    """
    log.debug("delimiter %s" % self.__delimiter)
    fields = [item.strip() for item in re.split(self.__delimiter, data)]
    result = self.__transform.format(*fields)
    log.debug("result %s" % result)
    output = json.loads(result)
    log.debug("output %s" % output)
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def get_data(self, **kwargs):
    """Collect per-domain stats from the local hypervisor via a
    read-only libvirt connection, keyed by domain ID."""
    conn = libvirt.openReadOnly(None)
    nova_network = load_nova_network()
    data = {}
    for dom_id in conn.listDomainsID():
        data[dom_id] = process(conn.lookupByID(dom_id), nova_network)
    data['timestamp'] = int(time.time())
    data['hostname'] = get_hostname()
    return data
def get_data(self, **kwargs):
    """Query the local libvirt hypervisor (read-only) and return a dict
    of per-domain-ID stats plus timestamp/hostname."""
    connection = libvirt.openReadOnly(None)
    network_map = load_nova_network()
    result = {}
    for domain_id in connection.listDomainsID():
        domain = connection.lookupByID(domain_id)
        result[domain_id] = process(domain, network_map)
    result['timestamp'] = int(time.time())
    result['hostname'] = get_hostname()
    return result
def parse(self, data):
    """Parse `ifstat` output, keeping the KB/s in/out of the last sample.

    Fixes: the accumulator was initialised as a list but written with
    string keys, which raised TypeError on the first data line; it is
    now a dict.  Blank/short lines (e.g. the trailing newline) are
    skipped instead of raising IndexError.
    """
    stats = {}
    # first two lines are the ifstat header
    for line in data.split('\n')[2:]:
        fields = line.split()
        if len(fields) < 2:
            continue
        stats["kb_in"] = float(fields[0])
        stats["kb_out"] = float(fields[1])
    output = {}
    output['ifstat'] = stats
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Parse a whitespace table of <name available used ratio> rows
    (header on line 1) into a dict keyed by name.

    Fix: blank lines (e.g. the trailing newline) are skipped instead of
    raising IndexError.
    """
    result = {}
    for line in data.split('\n')[1:]:
        fields = line.split()
        if len(fields) < 4:
            # blank or malformed row - nothing to parse
            continue
        name = fields[0]
        result[name] = {
            "available": value(fields[1]),
            "used": value(fields[2]),
            "ratio": fields[3],
        }
    result['timestamp'] = int(time.time())
    result['hostname'] = get_hostname()
    return result
def parse(self, data):
    """Parse rows of <name available used ratio> (first line is a
    header) into a per-name dict.

    Fix: skips blank/short lines such as the trailing newline, which
    previously raised IndexError.
    """
    parsed = {}
    for row in data.split('\n')[1:]:
        cols = row.split()
        if len(cols) < 4:
            continue
        parsed[cols[0]] = {}
        parsed[cols[0]]["available"] = value(cols[1])
        parsed[cols[0]]["used"] = value(cols[2])
        parsed[cols[0]]["ratio"] = cols[3]
    parsed['timestamp'] = int(time.time())
    parsed['hostname'] = get_hostname()
    return parsed
def parse(self, data):
    """Parse `ifstat` output into the last sample's kb_in/kb_out.

    Fixes: `data` was re-bound to a list but then assigned string keys
    (TypeError on the first row) - it is now a dict.  Short/blank rows
    are skipped rather than raising IndexError.
    """
    samples = {}
    for row in data.split('\n')[2:]:  # skip the two header lines
        cols = row.split()
        if len(cols) < 2:
            continue
        samples["kb_in"] = float(cols[0])
        samples["kb_out"] = float(cols[1])
    output = {}
    output['ifstat'] = samples
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Parse /proc/fs/xfs/stat-style output into named counter groups.

    Each input line starts with a category tag followed by integer
    counters; the tables below map short tags to output group names and
    list each group's counter names positionally.  The final line is
    expected to be the "debug" line and is handled separately.
    """
    result = {}
    # map the short tag used in the stat file to the output group name
    full_names = {"extent_alloc": "allocs",
                  "abt": "alloc_btree",
                  "blk_map": "block_map",
                  "bmbt": "bmap_btree",
                  "dir": "dir_ops",
                  "trans": "transactions",
                  "ig": "inode_ops",
                  "push_ail": "log_tail",
                  "qm": "qmstat"}
    # positional counter names for each group; values[j+1] maps to
    # variables[group][j]
    variables = {"allocs": ["alloc_extent", "alloc_block", "free_extent", "free_block"],
                 "alloc_btree": ["lookup", "compare", "insrec", "delrec"],
                 "block_map": ["read_ops", "write_ops", "unmap", "add_exlist", "del_exlist", "look_exlist", "cmp_exlist"],
                 "bmap_btree": ["lookup", "compare", "insrec", "delrec"],
                 "dir_ops": ["lookup", "create", "remove", "getdents"],
                 "transactions": ["sync", "async", "empty"],
                 "inode_ops": ["ig_attempts", "ig_found", "ig_frecycle", "ig_missed", "ig_dup", "ig_reclaims", "ig_attrchg"],
                 "log": ["writes", "blocks", "noiclogs", "force", "force_sleep"],
                 "log_tail": ["try_logspace", "sleep_logspace", "push_ail.pushes", "push_ail.success", "push_ail.pushbuf", "push_ail.pinned", "push_ail.locked", "push_ail.flushing", "push_ail.restarts", "push_ail.flush"],
                 "xstrat": ["quick", "split"],
                 "rw": ["xs_write_calls", "xs_read_calls"],
                 "attr": ["xs_attr_get", "xs_attr_set", "xs_attr_remove", "xs_attr_list"],
                 "icluster": ["xs_iflush_count", "xs_icluster_flushcnt", "xs_icluster_flushinode"],
                 "vnodes": ["vn_active", "vn_alloc", "vn_get", "vn_hold", "vn_rele", "vn_reclaim", "vn_remove"],
                 "abtb2": ["lookup", "compare", "insrec", "delrec", "newroot", "killroot", "increment", "decrement", "lshift", "rshift", "split", "join", "alloc", "free", "moves"],
                 "abtc2": ["lookup", "compare", "insrec", "delrec", "newroot", "killroot", "increment", "decrement", "lshift", "rshift", "split", "join", "alloc", "free", "moves"],
                 "bmbt2": ["lookup", "compare", "insrec", "delrec", "newroot", "killroot", "increment", "decrement", "lshift", "rshift", "split", "join", "alloc", "free", "moves"],
                 "ibt2": ["lookup", "compare", "insrec", "delrec", "newroot", "killroot", "increment", "decrement", "lshift", "rshift", "split", "join", "alloc", "free", "moves"],
                 "qmstat": ["dqreclaims", "dqreclaim_misses", "dquot_dups", "dqcachemisses", "dqcachehits", "dqwants"],
                 "buf": ["xb_get", "xb_create", "xb_get_locked", "xb_get_locked_waited", "xb_busy_locked", "xb_miss_locked", "xb_page_retries", "xb_page_found", "xb_get_read"],
                 "xpc": ["xs_xstrat_bytes", "xs_write_bytes", "xs_read_bytes"]}
    result["timestamp"] = int(time.time())
    result["hostname"] = get_hostname()
    result["xfs"] = {}
    lines = data.split("\n")
    # all lines except the last are counter groups
    for i in range(0, len(lines) - 1):
        values = lines[i].split(' ')
        cat_name = values[0]
        if cat_name in full_names:
            cat_name = full_names[cat_name]
        if cat_name in variables:
            result["xfs"][cat_name] = {}
            # counters are positional: variables[cat][j] <- values[j+1]
            for j in range(0, len(variables[cat_name])):
                result["xfs"][cat_name][variables[cat_name][j]] = values[j + 1]
        # unknown categories are silently ignored
    # last line carries the debug flag as its second field
    result["xfs"]['debug'] = lines[-1].split(' ')[1]
    return result
def parse(self, data):
    """Parse a semicolon-delimited PBS accounting log line into a dict.

    Fields: tokens[0] timestamp, tokens[1] record/state code,
    tokens[2] job id, tokens[3] space-separated key=value attributes.
    Dotted attribute keys (e.g. resources_used.mem) become nested dicts
    via list_to_dict.  NOTE(review): unlike the sibling parser, this
    variant has no try/except around attribute parsing, so a malformed
    attribute raises out of parse() - confirm that is intended.
    """
    tokens = [x.lower().strip() for x in data.split(";")]
    data = {}
    data["timestamp"] = int(
        time.mktime(time.strptime(tokens[0], "%m/%d/%Y %H:%M:%S")))
    data["hostname"] = get_hostname()
    # single-letter record types mapped to readable state names
    job_states = {
        "r": "run",
        "s": "started",
        "q": "queued",
        "e": "exited",
        "d": "deleted"
    }
    for state in job_states:
        if state == tokens[1]:
            data["state"] = job_states[state]
            break
    data["jobid"] = tokens[2]
    for attr in tokens[3].split():
        kv = attr.split("=")
        # value is numeric iff every character is a digit
        if reduce(lambda x, y: x and y, [str.isdigit(_) for _ in kv[1]]):
            kv[1] = int(kv[1])
        elif kv[0] == "exec_host":
            # exec_host looks like host1/0+host1/1-3+host2/0 -> expand
            # to {host: [slot numbers]}
            hosts = {}
            for slot in kv[1].split("+"):
                slot = slot.split("/")
                if slot[0] not in hosts:
                    hosts[slot[0]] = []
                if slot[1].isdigit():
                    hosts[slot[0]].append(int(slot[1]))
                elif '-' in slot[1]:
                    start_end = slot[1].split('-')
                    hosts[slot[0]].extend(
                        range(int(start_end[0]), int(start_end[1]) + 1))
            kv[1] = hosts
        elif kv[0] == "owner":
            # strip the @server suffix from user@server
            kv[1] = kv[1].split("@")[0]
        if "." in kv[0]:
            # dotted keys become a path for nested insertion
            kv[0] = kv[0].split(".")
        if isinstance(kv[0], str):
            data[kv[0]] = kv[1]
        elif isinstance(kv[0], list):
            list_to_dict(data, kv[0], kv[1])
    return data
def parse(self, data):
    """Parse `zpool iostat`-style output (data rows start on line 4)
    into per-pool alloc/free/kb_read/kb_write stats.

    Fix: blank or short lines (e.g. the trailing newline) are skipped
    instead of raising IndexError.
    """
    result = {}
    for line in data.split('\n')[3:]:
        fields = line.split()
        log.debug("iostat %s" % fields)
        if len(fields) < 7:
            continue
        name = fields[0]
        result[name] = {
            "alloc": value(fields[1]),
            "free": value(fields[2]),
            "kb_read": value(fields[5]),
            "kb_write": value(fields[6]),
        }
    result['timestamp'] = int(time.time())
    result['hostname'] = get_hostname()
    return result
def get_data(self, **kwargs):
    """Fetch filesystem capacity and (optionally) per-virtual-volume
    quota data from an HDS/HNAS management interface over HTTPS.

    Downloads the CSV filesystem screen, then for each configured
    "show volume" filesystem switches the management session to that
    filesystem and downloads its quota CSV.  Usage figures are divided
    by 1048576 - presumably converting to a larger unit (TODO confirm
    source unit).
    """
    url = 'https://' + self.__host + '/mgr/app/template/simple%2CDownloadFileSystemsScreen.vm'
    url_change_fs = 'https://' + self.__host + '/mgr/app/action/storage.SelectFileSystemAction/eventsubmit_doprocessselectfilesystem/ignored'
    url_quota = 'https://' + self.__host + '/mgr/app/template/simple%2CDownloadQuotasScreen.vm'
    data = {}
    data['timestamp'] = int(time.time())
    data['hostname'] = get_hostname()
    # authenticated session cookies/headers for subsequent requests
    headers = get_session_headers(self.__host, self.__username, self.__password)
    request = urllib2.Request(url, None, headers)
    response = urllib2.urlopen(request)
    raw_data = response.read()
    response.close()
    log.debug(raw_data)
    data['filesystems'] = {}
    # first CSV line is the column header
    for line in raw_data.split('\n')[1:]:
        if len(line.strip()) == 0:
            continue
        items = line.split(',')
        fs_name = items[0]
        data['filesystems'][fs_name] = {'capacity': items[1],
                                        'live-fs-used': items[2],
                                        'snapshot-used': items[3],
                                        'free': items[4]}
        if self.__show_volumes and fs_name in self.__show_volumes:
            # select this filesystem in the management session so the
            # quota download refers to it
            form_data = [('selectFS_evsId', self.__show_volumes[fs_name][1]),
                         ('selectFS_currentNameSpace', ''),
                         ('selectFS_devId', self.__show_volumes[fs_name][0])]
            log.debug('change fs %s %s %s' % (self.__show_volumes[fs_name],
                                              headers,
                                              urllib.urlencode(form_data)))
            req = urllib2.Request(url_change_fs,
                                  urllib.urlencode(form_data), headers)
            response = urllib2.urlopen(req)
            request = urllib2.Request(url_quota, None, headers)
            response = urllib2.urlopen(request)
            quota_data = response.read()
            response.close()
            log.debug(quota_data)
            # only parse when there is at least one data row
            if len(quota_data.split('\n')) > 1:
                data['filesystems'][fs_name]['virtual_volumes'] = []
                for quota_line in quota_data.split('\n')[1:]:
                    if len(quota_line.strip()) == 0:
                        continue
                    quota_items = quota_line.split(',')
                    # hard limit may be empty -> keep '' rather than int
                    file_count_hard_limit = ''
                    if len(quota_items[15]) > 0:
                        file_count_hard_limit = int(quota_items[15])
                    data['filesystems'][fs_name]['virtual_volumes'].append(
                        {'volume-name': quota_items[1],
                         'path': quota_items[2],
                         'contacts': quota_items[3],
                         'user-group-account': quota_items[4],
                         'quota-type': quota_items[5],
                         'created-by': quota_items[6],
                         'usage': float(quota_items[7]) / 1048576,
                         'usage-limit': float(quota_items[8]) / 1048576,
                         'usage-hard-limit': quota_items[9],
                         'usage-reset': int(quota_items[10]),
                         'usage-warning': int(quota_items[11]),
                         'usage-critical': int(quota_items[12]),
                         'file-count': int(quota_items[13]),
                         'file-count-limit': int(quota_items[14]),
                         'file-count-hard-limit': file_count_hard_limit,
                         'file-count-reset': int(quota_items[16]),
                         'file-count-warning': int(quota_items[17]),
                         'file-count-critical': int(quota_items[18])})
    return data
def parse(self, data):
    """Parse a semicolon-delimited PBS accounting log line into a dict.

    Same layout as the sibling accounting parser; this variant wraps
    attribute parsing in try/except so a malformed attribute is logged
    and skipped instead of aborting the whole record.
    """
    tokens = [x.lower().strip() for x in data.split(";")]
    data = {}
    data["timestamp"] = int(time.mktime(time.strptime(tokens[0],
                                                      "%m/%d/%Y %H:%M:%S")))
    data["hostname"] = get_hostname()
    # single-letter record types mapped to readable state names
    job_states = {"r": "run", "s": "started", "q": "queued",
                  "e": "exited", "d": "deleted"}
    for state in job_states:
        if state == tokens[1]:
            data["state"] = job_states[state]
            break
    data["jobid"] = tokens[2]
    for attr in tokens[3].split():
        kv = attr.split("=")
        try:
            # numeric iff every character of the value is a digit
            if reduce(lambda x, y: x and y, [str.isdigit(_) for _ in kv[1]]):
                kv[1] = int(kv[1])
            elif kv[0] == "exec_host":
                # host1/0+host1/1-3+host2/0 -> {host: [slot numbers]}
                hosts = {}
                for slot in kv[1].split("+"):
                    slot = slot.split("/")
                    if slot[0] not in hosts:
                        hosts[slot[0]] = []
                    if slot[1].isdigit():
                        hosts[slot[0]].append(int(slot[1]))
                    elif '-' in slot[1]:
                        start_end = slot[1].split('-')
                        hosts[slot[0]].extend(range(int(start_end[0]),
                                                    int(start_end[1]) + 1))
                kv[1] = hosts
            elif kv[0] == "owner":
                # strip the @server suffix
                kv[1] = kv[1].split("@")[0]
            if "." in kv[0]:
                # dotted keys become a path for nested insertion
                kv[0] = kv[0].split(".")
            if isinstance(kv[0], str):
                data[kv[0]] = kv[1]
            elif isinstance(kv[0], list):
                list_to_dict(data, kv[0], kv[1])
        except Exception as ERROR:
            # best-effort: log and continue with the next attribute
            log.debug(ERROR)
            pass
    return data
def parse(self, data):
    """Parse `zpool iostat`-style output into per-pool statistics.

    Data rows begin on line 4.  Fix: rows with fewer than 7 fields
    (including the blank trailing line) are skipped instead of raising
    IndexError.
    """
    pools = {}
    for row in data.split('\n')[3:]:
        cols = row.split()
        log.debug("iostat %s" % cols)
        if len(cols) < 7:
            continue
        pools[cols[0]] = {}
        pools[cols[0]]["alloc"] = value(cols[1])
        pools[cols[0]]["free"] = value(cols[2])
        pools[cols[0]]["kb_read"] = value(cols[5])
        pools[cols[0]]["kb_write"] = value(cols[6])
    pools['timestamp'] = int(time.time())
    pools['hostname'] = get_hostname()
    return pools
def parse(self, line):
    """Parse a semicolon-delimited PBS accounting log line into a dict.

    Uses the class-level JOB_STATES table to translate the single-letter
    record type.  Attribute parsing is best-effort: failures are logged
    (with the offending attribute) and skipped.
    """
    data = {}
    tokens = [x.lower().strip() for x in line.split(";")]
    data["timestamp"] = int(
        time.mktime(time.strptime(tokens[0], "%m/%d/%Y %H:%M:%S")))
    data["hostname"] = get_hostname()
    for state in self.JOB_STATES:
        if state == tokens[1]:
            data["state"] = self.JOB_STATES[state]
            break
    data["jobid"] = tokens[2]
    for attr in tokens[3].split():
        kv = attr.split("=")
        try:
            # numeric iff every character of the value is a digit
            if reduce(lambda x, y: x and y, [str.isdigit(_) for _ in kv[1]]):
                kv[1] = int(kv[1])
            elif kv[0] == "exec_host":
                # host1/0+host1/1-3+host2/0 -> {host: [slot numbers]}
                hosts = {}
                for slot in kv[1].split("+"):
                    slot = slot.split("/")
                    if slot[0] not in hosts:
                        hosts[slot[0]] = []
                    if slot[1].isdigit():
                        hosts[slot[0]].append(int(slot[1]))
                    elif '-' in slot[1]:
                        start_end = slot[1].split('-')
                        hosts[slot[0]].extend(
                            range(int(start_end[0]), int(start_end[1]) + 1))
                kv[1] = hosts
            elif kv[0] == "owner":
                # strip the @server suffix
                kv[1] = kv[1].split("@")[0]
            if "." in kv[0]:
                # dotted keys become a path for nested insertion
                kv[0] = kv[0].split(".")
            if isinstance(kv[0], str):
                data[kv[0]] = kv[1]
            elif isinstance(kv[0], list):
                list_to_dict(data, kv[0], kv[1])
        except Exception as e:
            log.debug("error: %s, attrb = %s", e, attr)
    return data
def get_data(self, **kwargs):
    """Read ZFS kstat counters from /proc/spl/kstat/zfs.

    Each kstat file has two header lines; remaining lines are
    <name type value> triples keyed into the result as ints.

    Fixes: files are opened with `with` so handles are always closed,
    and locals no longer shadow the `dir`/`file` builtins.
    """
    base = "/proc/spl/kstat/zfs/"
    names = ["arcstats", "dmu_tx", "zfetchstats", "zil"]
    data = {}
    for name in names:
        data[name] = {}
        log.debug("reading file %s" % (base + name))
        with open(base + name, "r") as handle:
            for lineno, line in enumerate(handle, 1):
                if lineno > 2:  # skip the two-line kstat header
                    fields = line.split()
                    data[name][fields[0]] = int(fields[2])
    data['timestamp'] = int(time.time())
    data['hostname'] = get_hostname()
    return data
def get_data(self, **kwargs):
    """Collect ZFS kstat counters from /proc/spl/kstat/zfs.

    Fixes: opens each kstat file in a `with` block (the previous code
    leaked file handles) and avoids shadowing the builtins `dir` and
    `file`.
    """
    base_dir = "/proc/spl/kstat/zfs/"
    stat_files = ["arcstats", "dmu_tx", "zfetchstats", "zil"]
    data = {}
    for stat_name in stat_files:
        data[stat_name] = {}
        log.debug("reading file %s" % (base_dir + stat_name))
        with open(base_dir + stat_name, "r") as fh:
            linenum = 0
            for line in fh:
                linenum += 1
                # first two lines of a kstat file are header material
                if linenum > 2:
                    fields = line.split()
                    data[stat_name][fields[0]] = int(fields[2])
    data['timestamp'] = int(time.time())
    data['hostname'] = get_hostname()
    return data
def parse(self, line):
    """Parse a semicolon-delimited PBS accounting log line into a dict.

    Duplicate of the JOB_STATES-based accounting parser: translates the
    record type via self.JOB_STATES and parses key=value attributes
    best-effort, logging and skipping failures.
    """
    data = {}
    tokens = [x.lower().strip() for x in line.split(";")]
    data["timestamp"] = int(time.mktime(time.strptime(tokens[0],
                                                      "%m/%d/%Y %H:%M:%S")))
    data["hostname"] = get_hostname()
    for state in self.JOB_STATES:
        if state == tokens[1]:
            data["state"] = self.JOB_STATES[state]
            break
    data["jobid"] = tokens[2]
    for attr in tokens[3].split():
        kv = attr.split("=")
        try:
            # numeric iff every character of the value is a digit
            if reduce(lambda x, y: x and y, [str.isdigit(_) for _ in kv[1]]):
                kv[1] = int(kv[1])
            elif kv[0] == "exec_host":
                # expand host1/0+host1/1-3 style slot lists per host
                hosts = {}
                for slot in kv[1].split("+"):
                    slot = slot.split("/")
                    if slot[0] not in hosts:
                        hosts[slot[0]] = []
                    if slot[1].isdigit():
                        hosts[slot[0]].append(int(slot[1]))
                    elif '-' in slot[1]:
                        start_end = slot[1].split('-')
                        hosts[slot[0]].extend(range(int(start_end[0]),
                                                    int(start_end[1]) + 1))
                kv[1] = hosts
            elif kv[0] == "owner":
                # strip the @server suffix
                kv[1] = kv[1].split("@")[0]
            if "." in kv[0]:
                # dotted keys become a path for nested insertion
                kv[0] = kv[0].split(".")
            if isinstance(kv[0], str):
                data[kv[0]] = kv[1]
            elif isinstance(kv[0], list):
                list_to_dict(data, kv[0], kv[1])
        except Exception as e:
            log.debug("error: %s, attrb = %s", e, attr)
    return data
def parse(self, data):
    """Parse a semicolon-delimited PBS log line into an event record.

    tokens[3] carries the event type; 'svr' and 'job' events get extra
    fields extracted from the remaining tokens.
    """
    tokens = [tok.lower().strip() for tok in data.split(";")]
    record = {}
    record["timestamp"] = int(
        time.mktime(time.strptime(tokens[0], "%m/%d/%Y %H:%M:%S")))
    record["hostname"] = get_hostname()
    event_type = tokens[3]
    record['event_type'] = event_type
    if event_type == "svr":
        record['svr_type'] = tokens[4]
        record['event_description'] = tokens[5]
    elif event_type == "job":
        record['jobid'] = tokens[4]
        record['event_description'] = tokens[5]
        # some mom-finalize records carry the real job id in the
        # description instead of the jobid field
        if record['jobid'] == "tmomfinalizejob3":
            record['jobid'] = tokens[5].split(" ")[1]
    return record
def parse(self, data):
    """Parse a JSON payload, optionally filtering it with the configured
    jsongrep pattern, and stamp the result with timestamp/hostname."""
    output = {}
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    try:
        parsed = json.loads(data)
    except ValueError:
        log.exception('Could not load JSON object from input data.')
        raise InputDataError()
    if self.__pattern and len(self.__pattern) > 0:
        # grep matching sub-objects into output[self.__list_name]
        self.jsongrep(parsed, map(re.compile, self.__pattern.split(" ")),
                      output)
        # collapse a single-element result list to the element itself
        if self.__list_name in output and len(output[self.__list_name]) == 1:
            output[self.__list_name] = output[self.__list_name][0]
    elif isinstance(parsed, list):
        output[self.__list_name] = parsed
    else:
        output.update(parsed)
    return output
def parse(self, data):
    """Turn one semicolon-delimited PBS log line into an event dict."""
    fields = [part.lower().strip() for part in data.split(";")]
    event = {
        "timestamp": int(time.mktime(
            time.strptime(fields[0], "%m/%d/%Y %H:%M:%S"))),
        "hostname": get_hostname(),
    }
    kind = fields[3]
    event['event_type'] = kind
    if kind == "svr":
        event['svr_type'] = fields[4]
        event['event_description'] = fields[5]
    elif kind == "job":
        event['jobid'] = fields[4]
        event['event_description'] = fields[5]
        if event['jobid'] == "tmomfinalizejob3":
            # real job id is the second word of the description here
            event['jobid'] = fields[5].split(" ")[1]
    return event
def parse(self, data):
    """Parse the cpu* lines of /proc/stat into per-cpu counter dicts."""
    result = {}
    for raw in data.split('\n'):
        if not raw.startswith('cpu'):
            continue
        fields = raw.split()
        counters = {
            'user': int(fields[1]),
            'nice': int(fields[2]),
            'system': int(fields[3]),
            'idle': int(fields[4]),
        }
        # extended columns are only present on some kernels
        if len(fields) >= 8:
            counters['wait'] = int(fields[5])
            counters['interrupt'] = int(fields[6])
            counters['softirq'] = int(fields[7])
        if len(fields) >= 9:
            counters['steal'] = int(fields[8])
        result[fields[0]] = counters
    result['timestamp'] = int(time.time())
    result['hostname'] = get_hostname()
    return result
def parse(self, data):
    """Parse /proc/stat cpu lines, keyed by the cpu label in column 0."""
    base_fields = ('user', 'nice', 'system', 'idle')
    extra_fields = ('wait', 'interrupt', 'softirq')
    stats = {}
    for raw in data.split('\n'):
        if not raw.startswith('cpu'):
            continue
        cols = raw.split()
        entry = {}
        for pos, name in enumerate(base_fields, 1):
            entry[name] = int(cols[pos])
        # extended columns only exist on some kernels
        if len(cols) >= 8:
            for pos, name in enumerate(extra_fields, 5):
                entry[name] = int(cols[pos])
        if len(cols) >= 9:
            entry['steal'] = int(cols[8])
        stats[cols[0]] = entry
    stats['timestamp'] = int(time.time())
    stats['hostname'] = get_hostname()
    return stats
def parse(self, data):
    """Parse `df`-style output into a list of filesystem dicts.

    Devices in the configured exclude list are dropped.  Fix: blank or
    short lines (including the trailing newline) are skipped instead of
    raising IndexError.
    """
    result = []
    for raw in data.split('\n'):
        if raw.startswith("Filesystem "):
            continue
        fields = raw.strip().split()
        if len(fields) < 6:
            # blank or malformed row
            continue
        if fields[0] in self.__exclude_devices:
            continue
        result.append({
            "device": fields[0],
            "size": int(fields[1]),
            "used": int(fields[2]),
            "available": int(fields[3]),
            # fields[4] is the use% column, intentionally skipped
            "mountpoint": fields[5],
        })
    output = {}
    output['df'] = result
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Decode a JSON payload and either grep it with the configured
    pattern or merge/attach it wholesale; always stamps the result."""
    output = {'timestamp': int(time.time()), 'hostname': get_hostname()}
    try:
        document = json.loads(data)
    except ValueError:
        log.exception('Could not load JSON object from input data.')
        raise InputDataError()
    if self.__pattern and len(self.__pattern) > 0:
        patterns = map(re.compile, self.__pattern.split(" "))
        self.jsongrep(document, patterns, output)
        # a single grep match is unwrapped from its list
        if self.__list_name in output and len(
                output[self.__list_name]) == 1:
            output[self.__list_name] = output[self.__list_name][0]
    else:
        if isinstance(document, list):
            output[self.__list_name] = document
        else:
            output.update(document)
    return output
def parse(self, data):
    """Parse `df`-style output into filesystem records.

    Fix: skips blank/short rows (e.g. the trailing newline), which
    previously raised IndexError; excluded devices are dropped.
    """
    filesystems = []
    for row in data.split('\n'):
        if row.startswith("Filesystem "):
            continue
        cols = row.strip().split()
        if len(cols) < 6:
            continue
        device = cols[0]
        if device in self.__exclude_devices:
            continue
        filesystem = {}
        filesystem["device"] = device
        filesystem["size"] = int(cols[1])
        filesystem["used"] = int(cols[2])
        filesystem["available"] = int(cols[3])
        filesystem["mountpoint"] = cols[5]  # cols[4] (use%) is skipped
        filesystems.append(filesystem)
    output = {}
    output['df'] = filesystems
    output['timestamp'] = int(time.time())
    output['hostname'] = get_hostname()
    return output
def parse(self, data):
    """Parse /proc/meminfo into total/free/buffered/cached/slab figures
    (kB) and derive 'used'.

    Fix: blank lines (e.g. the trailing newline) are skipped instead of
    raising IndexError on line[0].
    """
    mem = {}
    for raw in data.split('\n'):
        fields = raw.split()
        if not fields:
            continue
        key = fields[0]
        if key.startswith('MemTotal'):
            mem['total'] = int(fields[1])
        if key.startswith('MemFree'):
            mem['free'] = int(fields[1])
        if key.startswith('Buffers'):
            mem['buffered'] = int(fields[1])
        if key.startswith('Cached'):
            mem['cached'] = int(fields[1])
        if key.startswith('Slab'):
            mem['slab_total'] = int(fields[1])
        if key.startswith('SReclaimable'):
            mem['slab_reclaimable'] = int(fields[1])
        if key.startswith('SUnreclaim'):
            mem['slab_unreclaimable'] = int(fields[1])
    # used = total minus free and kernel-reclaimable memory
    mem['used'] = mem['total'] - (mem['free'] + mem['buffered'] +
                                  mem['cached'] + mem['slab_total'])
    mem['timestamp'] = int(time.time())
    mem['hostname'] = get_hostname()
    return mem
def parse(self, data):
    """Parse /proc/net/dev counters for the configured interfaces only."""
    splitter = re.compile("[ :]+")
    stats = {}
    for raw in data.split('\n'):
        if ":" not in raw:
            continue
        parts = splitter.split(raw)
        iface = parts[1]
        keep = False
        for pattern in self.__include_devices:
            if fnmatch(iface, pattern):
                keep = True
                break
        if not keep:
            continue
        stats[iface] = {}
        for direction, base in self.directions.items():
            per_direction = {}
            for field, offset in self.fields.items():
                per_direction[field] = int(parts[base + offset])
            stats[iface][direction] = per_direction
    stats['timestamp'] = int(time.time())
    stats['hostname'] = get_hostname()
    return stats
def parse(self, data):
    """Parse /proc/self/mountstats-style NFS statistics per mount.

    Accumulates lines per device section in `new` and re-parses the
    section through DeviceData on every line, so result[key] ends up
    holding the stats of the complete section.

    NOTE(review): `new` is only bound once a 'device' (or nfs/nfs4)
    line has been seen; input starting with a continuation line would
    raise NameError - confirm callers always pass well-formed output.
    """
    result = {}
    result["timestamp"] = int(time.time())
    result["hostname"] = get_hostname()
    key = ''
    for line in data.split("\n"):
        words = line.split()
        if len(words) == 0:
            continue
        if words[0] == 'device':
            # only keep devices whose fstype is in the configured set
            if not words[7] in self.__fstype:
                continue
            key = words[4]
            new = [ line.strip() ]
        elif 'nfs' in words or 'nfs4' in words:
            key = words[3]
            new = [ line.strip() ]
        else:
            # continuation line of the current section
            new += [ line.strip() ]
        stats = DeviceData()
        stats.parse_stats(new)
        result[key] = stats.raw()
    return result
def parse(self, data):
    """Parse mountstats-style NFS statistics into per-mount raw stats.

    Lines are grouped into sections starting at a 'device' (or nfs/nfs4)
    line; each section is re-fed through DeviceData as it grows, so the
    final value for a key reflects the whole section.

    NOTE(review): `new` is unbound until the first section header - a
    leading continuation line would raise NameError; confirm the input
    always starts with a device line.
    """
    result = {}
    result["timestamp"] = int(time.time())
    result["hostname"] = get_hostname()
    key = ''
    for line in data.split("\n"):
        words = line.split()
        if len(words) == 0:
            continue
        if words[0] == 'device':
            # filter by configured filesystem types
            if not words[7] in self.__fstype:
                continue
            key = words[4]
            new = [line.strip()]
        elif 'nfs' in words or 'nfs4' in words:
            key = words[3]
            new = [line.strip()]
        else:
            new += [line.strip()]
        stats = DeviceData()
        stats.parse_stats(new)
        result[key] = stats.raw()
    return result
def parse(self, data):
    """Parse /proc/meminfo (values in kB) and compute a 'used' figure.

    Fix: empty lines - notably the trailing newline - previously caused
    an IndexError on line[0]; they are now skipped.
    """
    info = {}
    for entry in data.split('\n'):
        cols = entry.split()
        if len(cols) == 0:
            continue
        label = cols[0]
        if label.startswith('MemTotal'):
            info['total'] = int(cols[1])
        if label.startswith('MemFree'):
            info['free'] = int(cols[1])
        if label.startswith('Buffers'):
            info['buffered'] = int(cols[1])
        if label.startswith('Cached'):
            info['cached'] = int(cols[1])
        if label.startswith('Slab'):
            info['slab_total'] = int(cols[1])
        if label.startswith('SReclaimable'):
            info['slab_reclaimable'] = int(cols[1])
        if label.startswith('SUnreclaim'):
            info['slab_unreclaimable'] = int(cols[1])
    # everything not free/buffered/cached/slab counts as used
    info['used'] = info['total'] - (info['free'] + info['buffered'] +
                                    info['cached'] + info['slab_total'])
    info['timestamp'] = int(time.time())
    info['hostname'] = get_hostname()
    return info
def parse(self, data):
    """Parse a semicolon-delimited PBS server log line into an event dict.

    Handles 'req', 'svr' and 'job' event types, extracting job ids from
    nanny messages, exit_status resource stats, and job-queued
    attributes.  NOTE(review): indentation reconstructed from collapsed
    source; the exit_status / job-queued handling is placed inside the
    non-NULL jobid branch - confirm against the original file.
    """
    tokens = [x.lower().strip() for x in data.split(";")]
    data = {}
    data["timestamp"] = int(time.mktime(time.strptime(tokens[0],
                                                      "%m/%d/%Y %H:%M:%S")))
    data["hostname"] = get_hostname()
    event_type = tokens[3]
    data['event_type'] = event_type
    if event_type == "req":
        if len(tokens[4]) > 0:
            data['req_type'] = tokens[4]
        data['event_description'] = tokens[5]
    elif event_type == "svr":
        data['svr_type'] = tokens[4]
        data['event_description'] = tokens[5]
        event_description = tokens[5]
        if "job nanny" in event_description:
            # example
            # 08/06/2014 01:03:59;0001;PBS_Server;Svr;PBS_Server;LOG_ERROR::job nanny, exiting job '786299.tizard1' still exists, sending a SIGKILL
            data["jobid"] = event_description.split("'")[1]
    elif event_type == "job":
        jobid = tokens[4]
        if jobid == 'NULL':
            data['event_description'] = tokens[5]
        else:
            if not jobid.split('.')[0].split('[')[0].isdigit():
                # tokens[4] is actually an action type; the real job id
                # (if any) follows in tokens[5]
                data['action_type'] = tokens[4]
                jobid = tokens[5]
                if not jobid.split('.')[0].split('[')[0].isdigit():
                    data['event_description'] = tokens[5]
                else:
                    data['jobid'] = jobid
                    data['event_description'] = tokens[6]
            else:
                data['jobid'] = jobid
                data['event_description'] = tokens[5]
            event_description = data['event_description']
            if event_description.startswith('exit_status'):
                # example (split)
                # 08/06/2014 00:29:47;0010;PBS_Server;Job;796694.tizard1;Exit_status=0 resources_used.cput=18:25:07
                # resources_used.mem=2569184kb resources_used.vmem=14061332kb resources_used.walltime=02:59:47
                data["stats"] = {}
                stats = event_description.split()
                for stat in stats:
                    stat = stat.split("=")
                    if "." in stat[0]:
                        # dotted keys become a path for nested insertion
                        stat[0] = stat[0].split(".")
                    # numeric iff every character is a digit
                    if reduce(lambda x, y: x and y,
                              [str.isdigit(_) for _ in stat[1]]):
                        stat[1] = int(stat[1])
                    if isinstance(stat[0], str):
                        data["stats"][stat[0]] = stat[1]
                    elif isinstance(stat[0], list):
                        list_to_dict(data["stats"], stat[0], stat[1])
            elif 'job queued' in event_description:
                # example (split)
                # 08/06/2014 00:44:15;0008;PBS_Server;Job;796871.tizard1;Job Queued at request of foo@tizard1,
                # owner = foo@tizard1, job name = something.sh, queue = gtx
                data["attributes"] = {}
                for kv in event_description.split(",")[1:]:
                    kv = kv.split("=")
                    kv = [_.strip() for _ in kv]
                    data["attributes"][kv[0].replace(" ", "_")] = kv[1]
                if "owner" in data["attributes"]:
                    # strip the @server suffix from the owner
                    data["attributes"]["owner"] = data["attributes"]["owner"].split("@")[0]
    return data
def parse(self, data):
    """Wrap the raw payload (whitespace-stripped) with timestamp and
    hostname metadata."""
    return {
        'timestamp': int(time.time()),
        'hostname': get_hostname(),
        'content': data.strip(),
    }
def get_data(self, **kwargs):
    """Fetch filesystem capacity and optional per-virtual-volume quota
    data from an HDS/HNAS management UI over HTTPS.

    Downloads the CSV filesystem screen, then for each configured
    "show volume" filesystem switches the session's selected filesystem
    and downloads its quota CSV.  Usage values are divided by 1048576 -
    presumably a unit conversion; confirm the source unit.
    """
    url = 'https://' + self.__host + '/mgr/app/template/simple%2CDownloadFileSystemsScreen.vm'
    url_change_fs = 'https://' + self.__host + '/mgr/app/action/storage.SelectFileSystemAction/eventsubmit_doprocessselectfilesystem/ignored'
    url_quota = 'https://' + self.__host + '/mgr/app/template/simple%2CDownloadQuotasScreen.vm'
    data = {}
    data['timestamp'] = int(time.time())
    data['hostname'] = get_hostname()
    # authenticated session headers reused for all requests below
    headers = get_session_headers(self.__host, self.__username,
                                  self.__password)
    request = urllib2.Request(url, None, headers)
    response = urllib2.urlopen(request)
    raw_data = response.read()
    response.close()
    log.debug(raw_data)
    data['filesystems'] = {}
    # first CSV line is the column header
    for line in raw_data.split('\n')[1:]:
        if len(line.strip()) == 0:
            continue
        items = line.split(',')
        fs_name = items[0]
        data['filesystems'][fs_name] = {
            'capacity': items[1],
            'live-fs-used': items[2],
            'snapshot-used': items[3],
            'free': items[4]
        }
        if self.__show_volumes and fs_name in self.__show_volumes:
            # switch the management session to this filesystem so the
            # subsequent quota download refers to it
            form_data = [
                ('selectFS_evsId', self.__show_volumes[fs_name][1]),
                ('selectFS_currentNameSpace', ''),
                ('selectFS_devId', self.__show_volumes[fs_name][0])
            ]
            log.debug('change fs %s %s %s' %
                      (self.__show_volumes[fs_name], headers,
                       urllib.urlencode(form_data)))
            req = urllib2.Request(url_change_fs,
                                  urllib.urlencode(form_data), headers)
            response = urllib2.urlopen(req)
            request = urllib2.Request(url_quota, None, headers)
            response = urllib2.urlopen(request)
            quota_data = response.read()
            response.close()
            log.debug(quota_data)
            # only parse when at least one data row is present
            if len(quota_data.split('\n')) > 1:
                data['filesystems'][fs_name]['virtual_volumes'] = []
                for quota_line in quota_data.split('\n')[1:]:
                    if len(quota_line.strip()) == 0:
                        continue
                    quota_items = quota_line.split(',')
                    # the hard limit may be empty -> keep '' not int
                    file_count_hard_limit = ''
                    if len(quota_items[15]) > 0:
                        file_count_hard_limit = int(quota_items[15])
                    data['filesystems'][fs_name]['virtual_volumes'].append(
                        {
                            'volume-name': quota_items[1],
                            'path': quota_items[2],
                            'contacts': quota_items[3],
                            'user-group-account': quota_items[4],
                            'quota-type': quota_items[5],
                            'created-by': quota_items[6],
                            'usage': float(quota_items[7]) / 1048576,
                            'usage-limit': float(quota_items[8]) / 1048576,
                            'usage-hard-limit': quota_items[9],
                            'usage-reset': int(quota_items[10]),
                            'usage-warning': int(quota_items[11]),
                            'usage-critical': int(quota_items[12]),
                            'file-count': int(quota_items[13]),
                            'file-count-limit': int(quota_items[14]),
                            'file-count-hard-limit': file_count_hard_limit,
                            'file-count-reset': int(quota_items[16]),
                            'file-count-warning': int(quota_items[17]),
                            'file-count-critical': int(quota_items[18])
                        })
    return data
def parse(self, data):
    """Parse one ';'-separated PBS/Torque server log line into a dict.

    Every field is lower-cased and stripped before parsing, so all literal
    comparisons below use lower case.  The result always carries
    'timestamp', 'hostname' and 'event_type'; depending on the event type
    it may also carry 'req_type', 'svr_type', 'jobid', 'action_type',
    'event_description', 'stats' (exit_status records) and 'attributes'
    (job-queued records).
    """
    tokens = [x.lower().strip() for x in data.split(";")]
    data = {}
    data["timestamp"] = int(
        time.mktime(time.strptime(tokens[0], "%m/%d/%Y %H:%M:%S")))
    data["hostname"] = get_hostname()
    event_type = tokens[3]
    data['event_type'] = event_type
    if event_type == "req":
        if len(tokens[4]) > 0:
            data['req_type'] = tokens[4]
        data['event_description'] = tokens[5]
    elif event_type == "svr":
        data['svr_type'] = tokens[4]
        data['event_description'] = tokens[5]
        event_description = tokens[5]
        if "job nanny" in event_description:
            # example
            # 08/06/2014 01:03:59;0001;PBS_Server;Svr;PBS_Server;LOG_ERROR::job nanny, exiting job '786299.tizard1' still exists, sending a SIGKILL
            data["jobid"] = event_description.split("'")[1]
    elif event_type == "job":
        jobid = tokens[4]
        # BUGFIX: tokens are lower-cased above, so the literal must be
        # 'null' -- the previous comparison against 'NULL' never matched.
        if jobid == 'null':
            data['event_description'] = tokens[5]
        else:
            if not jobid.split('.')[0].split('[')[0].isdigit():
                # Field 4 was an action name rather than a job id; the job
                # id (if any) is in the following field.
                data['action_type'] = tokens[4]
                jobid = tokens[5]
                if not jobid.split('.')[0].split('[')[0].isdigit():
                    data['event_description'] = tokens[5]
                else:
                    data['jobid'] = jobid
                    data['event_description'] = tokens[6]
            else:
                data['jobid'] = jobid
                data['event_description'] = tokens[5]
            event_description = data['event_description']
            if event_description.startswith('exit_status'):
                # example (split)
                # 08/06/2014 00:29:47;0010;PBS_Server;Job;796694.tizard1;Exit_status=0 resources_used.cput=18:25:07
                # resources_used.mem=2569184kb resources_used.vmem=14061332kb resources_used.walltime=02:59:47
                data["stats"] = {}
                for stat in event_description.split():
                    stat = stat.split("=")
                    if len(stat) != 2:
                        # Malformed token without '='; skip instead of
                        # raising IndexError below.
                        continue
                    if "." in stat[0]:
                        # Dotted keys (resources_used.mem) become nested
                        # dicts via list_to_dict further down.
                        stat[0] = stat[0].split(".")
                    # BUGFIX: str.isdigit() on the whole value replaces the
                    # old reduce() over individual characters, which raised
                    # TypeError on an empty value (empty sequence, no
                    # initializer) and needed functools.reduce on Python 3.
                    if stat[1].isdigit():
                        stat[1] = int(stat[1])
                    if isinstance(stat[0], str):
                        data["stats"][stat[0]] = stat[1]
                    elif isinstance(stat[0], list):
                        list_to_dict(data["stats"], stat[0], stat[1])
            elif 'job queued' in event_description:
                # example (split)
                # 08/06/2014 00:44:15;0008;PBS_Server;Job;796871.tizard1;Job Queued at request of foo@tizard1,
                # owner = foo@tizard1, job name = something.sh, queue = gtx
                data["attributes"] = {}
                for kv in event_description.split(",")[1:]:
                    kv = [_.strip() for _ in kv.split("=")]
                    data["attributes"][kv[0].replace(" ", "_")] = kv[1]
                if "owner" in data["attributes"]:
                    # Keep only the user part of user@host.
                    data["attributes"]["owner"] = data["attributes"][
                        "owner"].split("@")[0]
    return data
def parse(self, data):
    """Parse the raw contents of an XFS stats file into a nested dict.

    Each input row starts with a section label followed by space-separated
    counter values; labels with a known short alias are first expanded to
    their long name, then every value is stored (as a string) under the
    corresponding field name for that section.  The final row supplies the
    'debug' flag.  The result also carries 'timestamp' and 'hostname'.
    """
    # Short section labels that appear in the stat file under an
    # abbreviated name; everything else is used verbatim.
    section_aliases = {
        "extent_alloc": "allocs",
        "abt": "alloc_btree",
        "blk_map": "block_map",
        "bmbt": "bmap_btree",
        "dir": "dir_ops",
        "trans": "transactions",
        "ig": "inode_ops",
        "push_ail": "log_tail",
        "qm": "qmstat"
    }
    # Ordered field names for every known section; position in the list
    # matches the column position (offset by one for the label) in the row.
    field_names = {
        "allocs": ["alloc_extent", "alloc_block", "free_extent",
                   "free_block"],
        "alloc_btree": ["lookup", "compare", "insrec", "delrec"],
        "block_map": ["read_ops", "write_ops", "unmap", "add_exlist",
                      "del_exlist", "look_exlist", "cmp_exlist"],
        "bmap_btree": ["lookup", "compare", "insrec", "delrec"],
        "dir_ops": ["lookup", "create", "remove", "getdents"],
        "transactions": ["sync", "async", "empty"],
        "inode_ops": ["ig_attempts", "ig_found", "ig_frecycle", "ig_missed",
                      "ig_dup", "ig_reclaims", "ig_attrchg"],
        "log": ["writes", "blocks", "noiclogs", "force", "force_sleep"],
        "log_tail": ["try_logspace", "sleep_logspace", "push_ail.pushes",
                     "push_ail.success", "push_ail.pushbuf",
                     "push_ail.pinned", "push_ail.locked",
                     "push_ail.flushing", "push_ail.restarts",
                     "push_ail.flush"],
        "xstrat": ["quick", "split"],
        "rw": ["xs_write_calls", "xs_read_calls"],
        "attr": ["xs_attr_get", "xs_attr_set", "xs_attr_remove",
                 "xs_attr_list"],
        "icluster": ["xs_iflush_count", "xs_icluster_flushcnt",
                     "xs_icluster_flushinode"],
        "vnodes": ["vn_active", "vn_alloc", "vn_get", "vn_hold", "vn_rele",
                   "vn_reclaim", "vn_remove"],
        "abtb2": ["lookup", "compare", "insrec", "delrec", "newroot",
                  "killroot", "increment", "decrement", "lshift", "rshift",
                  "split", "join", "alloc", "free", "moves"],
        "abtc2": ["lookup", "compare", "insrec", "delrec", "newroot",
                  "killroot", "increment", "decrement", "lshift", "rshift",
                  "split", "join", "alloc", "free", "moves"],
        "bmbt2": ["lookup", "compare", "insrec", "delrec", "newroot",
                  "killroot", "increment", "decrement", "lshift", "rshift",
                  "split", "join", "alloc", "free", "moves"],
        "ibt2": ["lookup", "compare", "insrec", "delrec", "newroot",
                 "killroot", "increment", "decrement", "lshift", "rshift",
                 "split", "join", "alloc", "free", "moves"],
        "qmstat": ["dqreclaims", "dqreclaim_misses", "dquot_dups",
                   "dqcachemisses", "dqcachehits", "dqwants"],
        "buf": ["xb_get", "xb_create", "xb_get_locked",
                "xb_get_locked_waited", "xb_busy_locked", "xb_miss_locked",
                "xb_page_retries", "xb_page_found", "xb_get_read"],
        "xpc": ["xs_xstrat_bytes", "xs_write_bytes", "xs_read_bytes"]
    }
    parsed = {}
    parsed["timestamp"] = int(time.time())
    parsed["hostname"] = get_hostname()
    parsed["xfs"] = {}
    rows = data.split("\n")
    # The last row is the debug flag, handled separately after the loop.
    for row in rows[:-1]:
        columns = row.split(' ')
        section = section_aliases.get(columns[0], columns[0])
        if section not in field_names:
            # Unknown section label: skipped silently.
            continue
        parsed["xfs"][section] = {}
        for position, field in enumerate(field_names[section]):
            parsed["xfs"][section][field] = columns[position + 1]
    parsed["xfs"]['debug'] = rows[-1].split(' ')[1]
    return parsed