def caseResults(self):
    dataDB = Connector(monitorHistData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["case"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()[0]
    dataDB.connClose()
    # data order: [#processingcase, #pendingcase, #failedcase, #case]
    v1 = list(data)[::-1]
    return json.dumps(v1, indent=4, ensure_ascii=False)

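# Hedged usage sketch: assuming caseResults is a method on the monitorHistData
# class (it reads monitorHistData.connString), the [::-1] reversal above yields
# a JSON array ordered [#case, #failedcase, #pendingcase, #processingcase].
# Values illustrative:
#
#     print(monitorHistData().caseResults())   # e.g. "[120, 3, 7, 5]"
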
def wkrCaseSummary(wkrname):
    summaryData = {
        'RejectCase': -1,
        'ReadyCase': -1,
        'PendingCase': -1,
        'FailedCase': -1,
        'ValidationFailedCase': -1,
        'CancelledCase': -1,
        'ProcessingCase': -1,
        'CompletedCase': -1
    }
    dataDB = Connector(monitorHistData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["wkrpage_case_summary"].format(wkrname)
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    summaryData["RejectCase"] = data[0][2]
    summaryData["ReadyCase"] = data[0][9]
    summaryData["PendingCase"] = data[0][4]
    summaryData["FailedCase"] = data[0][3]
    summaryData["ValidationFailedCase"] = data[0][7]
    summaryData["CancelledCase"] = data[0][8]
    summaryData["ProcessingCase"] = data[0][5]
    summaryData["CompletedCase"] = data[0][6]
    return json.dumps(summaryData, indent=4, ensure_ascii=False)

def casePageTAT(index, isCase=1):
    """
    index: caseid or pkgid
    isCase: whether a caseid is provided; 1 = yes, 0 = no (pkgid)
    """
    TATres = {
        "tasklist": [],
        "timelist": [],
        "tatdata": {},
        "faildata": {}  # extensible for other metrics
    }
    dataDB = Connector(monitorHistData.connString)
    if isCase:
        dataDB.sqlQuery = TREND_QUERY["casepage_tat"]["case"].format(index)
    else:
        dataDB.sqlQuery = TREND_QUERY["casepage_tat"]["pkg"].format(index)
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    tmp_tasklist = []
    tmp_timelist = []
    tmp_caseflag = {}
    for row in data:
        tmp_tasklist.append(row[3])
        if row[1] not in tmp_caseflag:
            tmp_timelist.append(row[2])
            tmp_caseflag[row[1]] = 1
    TATres["tasklist"] = list(set(tmp_tasklist))
    # optimization should aim to differentiate case and package
    for item in TATres["tasklist"]:
        TATres["tatdata"][item] = [0.0 for i in range(len(tmp_timelist))]
        TATres["faildata"][item] = [0 for i in range(len(tmp_timelist))]
    # drawback: cannot deal with duplicated case-start times --
    # need a way to filter out cases created at the same time
    for row in data:
        TATres["tatdata"][row[3]][tmp_timelist.index(row[2])] = float(row[5])
        TATres["faildata"][row[3]][tmp_timelist.index(row[2])] = int(row[7])
    TATres["timelist"] = [
        '{}'.format(i.strftime('%Y-%m-%d %H:%M')) for i in tmp_timelist
    ]
    return json.dumps(TATres, indent=4, ensure_ascii=False)

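# Hedged usage sketch (ids invented): per the docstring above, index is either
# a caseid (isCase=1, the default) or a pkgid (isCase=0); either way the JSON
# carries per-task TAT and failure series aligned to "timelist".
#
#     case_tat = casePageTAT("CASE-1001")
#     pkg_tat = casePageTAT("PKG-42", isCase=0)
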
def isInDB(employee):
    '''Check whether the employee is already in the database.
    Returns 0 if not present, > 0 if present, and None if the input
    is neither an email address nor an NT account.
    '''
    tablename = DecodeConfig("db.config").getConfig("tableConfig", "employeeTable")
    connString = DecodeConfig('db.config').getConfig()
    a = Connector(connString)
    tail = "'"
    if "@" in employee:
        header = "select count(*) from " + tablename + " where [Email] = '"
        a.sqlQuery = header + employee + tail
    elif "\\" in employee:
        header = "select count(*) from " + tablename + " where [NTAccount] = '"
        a.sqlQuery = header + employee + tail
    else:
        return None
    a.connOpen()
    res = a.execQuery().fetchone()[0]
    a.connClose()
    return res

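# Hedged usage sketch (accounts invented): the lookup column is inferred from
# the input -- an '@' means [Email], a backslash means [NTAccount], anything
# else is rejected with None.
#
#     isInDB("jane.doe@example.com")   # count matched on [Email]
#     isInDB("CORP\\jdoe")             # count matched on [NTAccount]
#     isInDB("jdoe")                   # neither marker -> None
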
def casePageKPI(cid):
    KPIres = {
        "Csize": '',
        "TskMaxVols": '',
        "FailedTskNum": '',
        "TblNum": ''
    }
    dataDB = Connector(monitorHistData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["case_page_kpi"].format(cid)
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    KPIres["Csize"] = data[0][0] + "Gb"
    KPIres["TskMaxVols"] = data[0][1]
    KPIres["FailedTskNum"] = data[0][2]
    KPIres["TblNum"] = data[0][3]
    return json.dumps(KPIres, indent=4, ensure_ascii=False)

def wkrCaseList(wkrname):
    caselistdata = {
        "wkrname": [],
        "cid": [],
        "pid": [],
        "pname": [],
        "owner": [],
        "cstatus": [],
        "casecreatedate": [],
        "casesize": [],
        "timeconsumed": [],
        "#Records": [],
        "#flowin": [],
        "#flowout": []
    }
    dataDB = Connector(monitorHistData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["wkrpage_case_list"].format(wkrname)
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    for row in data:
        caselistdata["wkrname"].append(row[0])
        caselistdata["cid"].append(row[1])
        caselistdata["pid"].append(row[2])
        caselistdata["pname"].append(row[3])
        caselistdata["owner"].append(row[4])
        caselistdata["cstatus"].append(row[5])
        caselistdata["casecreatedate"].append(row[6])
        caselistdata["casesize"].append(row[7] + row[8])
        caselistdata["#Records"].append(row[9])
        caselistdata["timeconsumed"].append(row[10])
        caselistdata["#flowin"].append(row[11])
        caselistdata["#flowout"].append(row[12])
    return caselistdata

def getEmployee(*employees):
    '''Crawl data from the internal people-finder pages and sync it into the
    employee table, using class Connector to talk to the database.'''
    tablename = DecodeConfig("db.config").getConfig("tableConfig", "employeeTable")
    connString = DecodeConfig('db.config').getConfig()
    resultSet = {}
    for email in employees[0]:
        if len(email) == 0:
            continue
        # a container for any field-value to update; valueSet can be extended
        valueSet = {}
        # generate sys add-keys
        current_time = time.strftime("%Y-%m-%d %H:%M:%S")
        User_ID = str(uuid.uuid4()).replace('-', '')
        SysUpdateKey = str(uuid.uuid4()).replace('-', '')
        valueSet["Current Time:"] = current_time
        valueSet["Sys Update Key:"] = SysUpdateKey
        # page 1: find the email
        content1 = getContent(email, "url_peoplefind1", 0)
        if content1 == -1:
            if isInDB(email) > 0:  # exists in db but not on the web
                if "@" in email:
                    valueSet["Primary Email1:"] = email
                else:
                    valueSet["NT User Domain ID:"] = email
                valueSet["Status:"] = 'Inactive'
                a = TableOperator(tablename)
                a._initTable()
                sqlstring = a.updateQuery(valueSet)
                resultSet[email] = 0
            elif isInDB(email) == 0:  # neither in db nor on the web
                resultSet[email] = 2
                continue
        else:
            if "@" in email:
                valueSet["Primary Email1:"] = email
                valueSet["Primary Email2:"] = email
            else:
                email = get_email(email, content1)
                valueSet["Primary Email1:"] = email
                valueSet["Primary Email2:"] = email
            # page 2: crawl the profile fields
            content2 = getContent(email, "url_peoplefind2", 1)
            soup = BeautifulSoup(content2)
            soup_body = soup.body
            soup_label = soup_body.select('td[class="mpLabel"]')
            soup_val = soup_body.select('td[class="mpVal"]')
            for i in range(len(soup_label)):
                valueSet[str(soup_label[i].string)] = str(soup_val[i].string)
            # page 3: crawl the manager chain
            content3 = getContent(email, "url_managerlist", 1)
            content3 = BeautifulSoup(content3)
            managers = content3.body
            managers = managers.select('div[class="managers"]')
            re_img = r'<img src="(.*?)"/>'
            managers_pictures = re.findall(re_img, str(managers), re.S | re.M)
            # derive level and manager pictures/emails
            valueSet["Employee Level:"] = int(len(managers_pictures) + 1)
            valueSet["Direct Manger Pic URL:"] = str(managers_pictures[-1])
            # flatten the list into a ';'-separated string
            managers_pictures = str(managers_pictures)
            managers_pictures = managers_pictures.replace("'", "")
            managers_pictures = managers_pictures.replace(",", ";")
            managers_pictures = managers_pictures.replace("[", "")
            managers_pictures = managers_pictures.replace("]", "")
            managers_pictures = managers_pictures.replace(" ", "")
            valueSet["Manager Pic URLs:"] = managers_pictures
            # catch the manager list
            re_email = r'<a href=.*?uid%3D(.*?)%2Cou.*?title.*?>'
            managers_emails = re.findall(re_email, str(managers), re.S | re.M)
            managers_emails = [m.replace('%40', '@') for m in managers_emails]
            # take the direct manager from the list before flattening it,
            # otherwise [-1] would pick the last character of the string
            valueSet["Direct Manger Email:"] = str(managers_emails[-1])
            Managers_Emails = str(managers_emails)
            Managers_Emails = Managers_Emails.replace("'", "")
            Managers_Emails = Managers_Emails.replace(",", ";")
            Managers_Emails = Managers_Emails.replace(" ", "")
            Managers_Emails = Managers_Emails.replace("[", "")
            Managers_Emails = Managers_Emails.replace("]", "")
            valueSet["Manager Emails:"] = Managers_Emails
            a = TableOperator(tablename)
            a._initTable()
            if isInDB(email) > 0:  # in db and on the web
                sqlstring = a.updateQuery(valueSet)
                resultSet[email] = 1
            elif isInDB(email) == 0:  # on the web but not in db
                valueSet["User ID:"] = User_ID
                sqlstring = a.insertQuery(valueSet)
                resultSet[email] = 3
        # communicate with the database
        b = Connector(connString)
        b.sqlQuery = sqlstring
        b.connOpen()
        b.execQuery()
        b.connClose()
    return resultSet

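# Hedged sketch (inputs invented) of the per-input result codes getEmployee
# returns, as assigned in the branches above:
#
#     results = getEmployee(["jane.doe@example.com", "CORP\\jdoe"])
#     # 0 -> in db but gone from the web (marked Inactive)
#     # 1 -> in db and on the web (record updated)
#     # 2 -> in neither (skipped)
#     # 3 -> on the web only (record inserted)
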
def memory(self):
    charts = {"memorydata": {}, "memorytrend": {"xdata": [], "ydata": {}}}
    memorydata = {}
    dataDB = Connector(monitorData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["memory"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    for i in data:
        key = i[0].replace("\\\\", "")
        memorydata[key] = round(i[1], 2)
    charts["memorydata"] = memorydata
    dataDB.sqlQuery = TREND_QUERY["memory"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    lineRes = {}
    attrRes = {}
    for row in data:
        if row[0] not in lineRes:
            lineRes[row[0]] = []
            attrRes[row[0]] = []
        lineRes[row[0]].append(round(_isNone(row[2]), 2))
        attrRes[row[0]].append(row[1][0:19])  # second-level resolution only
    attr = ["{}".format(i) for i in attrRes["BICWKR3"]]
    charts["memorytrend"]["xdata"] = attr
    charts["memorytrend"]["ydata"] = lineRes
    return json.dumps(charts, indent=4, ensure_ascii=False)

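# Hedged sketch (hosts and values illustrative) of the JSON memory() returns:
# a current snapshot plus per-host trend series sharing one x-axis:
#
#     {
#         "memorydata": {"BICWKR3": 71.25, "BICWKR4": 64.8},
#         "memorytrend": {
#             "xdata": ["2017-05-02 09:00:00", "2017-05-02 09:05:00"],
#             "ydata": {"BICWKR3": [71.25, 70.9], "BICWKR4": [64.8, 65.1]}
#         }
#     }
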
def cpu(self):
    charts = {"cpudata": {}, "cputrend": {"xdata": [], "ydata": {}}}
    cpudata = {}
    dataDB = Connector(monitorData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["cpu"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    for i in data:
        key = i[0].replace("\\\\", "")
        cpudata[key] = round(i[1], 2)
    charts["cpudata"] = cpudata
    dataDB.sqlQuery = TREND_QUERY["cpu"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    lineRes = {}
    attrRes = {}
    try:
        for row in data:
            if row[0] not in lineRes:
                lineRes[row[0]] = []
                attrRes[row[0]] = []
            lineRes[row[0]].append(round(_isNone(row[2]), 2))
            attrRes[row[0]].append(row[1][0:19])  # second-level resolution only
    except Exception as e:
        print(e)
        return '505'
    # assemble the chart payload, mirroring memory()
    charts["cputrend"]["xdata"] = ["{}".format(i) for i in attrRes["BICWKR3"]]
    charts["cputrend"]["ydata"] = lineRes
    return json.dumps(charts, indent=4, ensure_ascii=False)

def casePageSankey(index, **kwargs):
    dataSet = {
        "cid": [],
        "pid": [],
        "tid": [],
        "dtid": [],
        "source": [],
        "cname": [],
        "dest": []
    }
    f_data = {"nodes": [], "links": [], "pid": []}
    dataDB = Connector(monitorHistData.connString)
    if kwargs["isCase"]:
        dataDB.sqlQuery = DIRECT_QUERY["case_page_sankey"]["case"].format(index)
    else:
        dataDB.sqlQuery = DIRECT_QUERY["case_page_sankey"]["pkg"].format(index, index)
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    for row in data:
        dataSet["cid"].append(row[0])
        dataSet["pid"].append(row[1])
        dataSet["tid"].append(row[2])
        dataSet["dtid"].append(row[3])
        dataSet["source"].append(row[4])
        dataSet["cname"].append(row[5])
        dataSet["dest"].append(row[6])
    f_data["nodes"].append({"name": dataSet["cname"][0]})
    casename = dataSet["cname"][0]
    pkgid = dataSet["pid"][0]
    # process nodes: names that occur on both sides get _in/_out suffixes
    s_nodes = list(set(dataSet["source"]))
    d_nodes = list(set(dataSet["dest"]))
    conNodes = s_nodes + d_nodes
    conNodes.append(casename)
    dNodes = _dKey(conNodes)
    # generate link values: edge weight = number of rows touching the node
    s_values = {}
    d_values = {}
    s_flag = {}
    d_flag = {}
    for node in s_nodes:
        if node is not None:
            s_values[node] = dataSet["source"].count(node)
            s_flag[node] = 0
    for node in d_nodes:
        if node is not None:
            d_values[node] = dataSet["dest"].count(node)
            d_flag[node] = 0
    for i in range(len(dataSet["source"])):
        if dataSet["source"][i] is not None and s_flag[dataSet["source"][i]] == 0:
            name = dataSet["source"][i]
            if name in dNodes:
                name += "_in"
            f_data["nodes"].append({"name": name})
            f_data["links"].append({
                "source": name,
                "target": casename,
                "value": s_values[dataSet["source"][i]]
            })
            s_flag[dataSet["source"][i]] = 1
        if dataSet["dest"][i] is not None and d_flag[dataSet["dest"][i]] == 0:
            name = dataSet["dest"][i]
            if name in dNodes:
                name += "_out"
            f_data["nodes"].append({"name": name})
            f_data["links"].append({
                "source": casename,
                "target": name,
                "value": d_values[dataSet["dest"][i]]
            })
            d_flag[dataSet["dest"][i]] = 1
    # needs more decoration for extracting data from pid
    f_data["pid"] = pkgid
    response = json.dumps(f_data, indent=4, ensure_ascii=False)
    if "requestType" in kwargs:
        if kwargs["requestType"] == "web":
            return casename
        elif kwargs["requestType"] == "ajax":
            return response
    return None

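# Hedged sketch (names and values invented) of the payload casePageSankey
# builds, in the nodes/links form a D3-style sankey chart consumes; nodes that
# appear on both sides of the case get _in/_out suffixes via _dKey:
#
#     {
#         "nodes": [{"name": "MyCase"}, {"name": "STG_TABLE_in"}, {"name": "RPT_TABLE"}],
#         "links": [
#             {"source": "STG_TABLE_in", "target": "MyCase", "value": 12},
#             {"source": "MyCase", "target": "RPT_TABLE", "value": 9}
#         ],
#         "pid": "PKG-42"
#     }
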
def diskSpace(self):
    charts = {}
    diskdata = {}
    dataDB = Connector(monitorData.connString)
    dataDB.sqlQuery = DIRECT_QUERY["disk"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    for arr in data:
        dKey = arr[0].replace('\\\\', '').upper()
        if dKey not in diskdata:
            diskdata[dKey] = {}
        diskdata[dKey][arr[1].replace(':', '')] = round(arr[2], 2)
    # process trend data, nested as {host: {drive: [values]}}
    v_data = {}
    attrRes = {}
    dataDB.sqlQuery = TREND_QUERY["disk"]
    dataDB.connOpen()
    dataRes = dataDB.execQuery()
    data = dataRes.fetchall()
    dataDB.connClose()
    for row in data:
        host = row[0]
        drive = row[2].replace(':', '')
        if host not in v_data:
            v_data[host] = {}
            attrRes[host] = {}
        if drive not in v_data[host]:
            v_data[host][drive] = []
            attrRes[host][drive] = []
        v_data[host][drive].append(round(_isNone(row[3]), 2))
        attrRes[host][drive].append(row[1])
    charts["xAxisData"] = ['{}'.format(i) for i in attrRes["BICWKR3"]["C"]]
    charts["seriesData"] = v_data
    charts["diskdata"] = diskdata
    return json.dumps(charts, indent=4, ensure_ascii=False)

a Python version.
Test environment: SQL Server 2014, sqlite3, Python 2.7.13
"""
import os
import sqlite3

import pyodbc

from dbAccess import Connector
from readConfig import DecodeConfig

__author__ = "Lei Ji"

sourceConn = DecodeConfig(filename='db.config')
sourceConn.getConfig(option='Test')
sourceDB = Connector(sourceConn.connString)
destConn = sqlite3.connect('./test.db')

## key: field type in sqlite3; value list: matching field types in SQL Server or other RDBMSs
TYPE_MAPPING = {
    "INTEGER": [
        'int', 'integer', 'tinyint', 'smallint', 'mediumint', 'bigint',
        'unsigned big int', 'int2', 'int8'
    ],
    "TEXT": [
        'character', 'varchar', 'varying character', 'nchar',
        'native character', 'nvarchar', 'text', 'clob'
    ],
    "NONE": ['blob', 'no datatype specified'],
    "REAL": ['real', 'double', 'double precision', 'float'],