def bind (self, heads, expandColumnsTag = None):
    """Bind each header cell to a fixture method or field.

    A header name ending in '()' is bound via bindMethod(); any other name is
    bound via bindField().  Results are stored in self.columnBindings.

    NOTE(review): the outer ``try`` below has no matching ``except`` in the
    visible source, and neither ``i`` nor ``heads`` advances inside the loop —
    this block appears truncated; confirm against the full file.
    """
    self.columnBindings = [None] * heads.size()
    i = 0
    while heads:
        name = heads.text()
        # strip ' ','\n'
        name = name.replace(' ', '')
        name = name.replace('\n','')
        sufix = "()"  # a trailing '()' marks a method binding
        try:
            if name[-len(sufix):] == sufix:
                self.columnBindings[i] = \
                    self.bindMethod(name[:-len(sufix)])
                if expandColumnsTag is not None:
                    # added in 2009/08/21: the first row shows the test
                    # result, the second row the test data
                    setattr(self, 'rowspan', 2)
                    style = heads.getStyle()
                    if style is None:
                        style = ''
                    try:
                        heads.addToBody(self.getExpandheads(style))
                    except AttributeError, e:
                        Log.info('please implement getExpandheads()!')
                        Log.exception(e)
            else:
                # name is field
                self.columnBindings[i] = self.bindField(name)
def Run(self):
    """Parse CLI options, prepare the output DB directory, fetch the grass
    data (API first, SVG scraping on rate limit) and insert it into the DB."""
    arg_parser = argparse.ArgumentParser(
        description='GitHub Repository Uploader.',
    )
    # arg_parser.add_argument('path_dir_pj')  # positional arg intentionally disabled
    arg_parser.add_argument('-n', '--username', action='append')
    arg_parser.add_argument('-d', '--path_dir_db')
    arg_parser.add_argument('-id', '--path_dir_input_db')
    arg_parser.add_argument('-od', '--path_dir_output_db')
    arg_parser.add_argument('-u', '--url', '--upload_url', action='append')
    arg_parser.add_argument('-y', '--yaml')
    self.__args = arg_parser.parse_args()
    print(self.__args)

    # Validate the launch arguments.
    usernames = self.__GetUsernames()
    Log().debug(f'対象ユーザ: {usernames}')
    path_out = self.__GetDirOutputDb()
    Log().debug(f'出力パス: {path_out}')
    path_out.mkdir(parents=True, exist_ok=True)
    self.__GetYaml()
    self.__GetUrl()

    # Build the grass DB: fetch the grass data.
    data = None
    try:
        data = FromApi()
    except RateLimitError:
        data = FromSvg()
    finally:
        if data is None:
            raise NotGetError()
    Insert(data)
def proceHeadInfo(self, info):
    """Cache the raw response header info and extract the session cookie.

    info: response-header mapping; must support ``'Set-Cookie' in info`` and
    item access.  Best-effort: failures are logged, never raised, so header
    processing cannot break the request flow.
    """
    try:
        if info and 'Set-Cookie' in info:
            self.headerinfo = info
            self.cookie = info['Set-Cookie']
    except Exception:
        # Fix: was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate.
        Log.debug('proceHeadInfo error')
def bind(self, heads, expandColumnsTag=None):
    """Bind each header cell to a fixture method or field.

    A header name ending in '()' is bound via bindMethod(); any other name is
    bound via bindField().  Results are stored in self.columnBindings.

    NOTE(review): the outer ``try`` below has no matching ``except`` in the
    visible source, and neither ``i`` nor ``heads`` advances inside the loop —
    this block appears truncated; confirm against the full file.
    """
    self.columnBindings = [None] * heads.size()
    i = 0
    while heads:
        name = heads.text()
        # strip ' ','\n'
        name = name.replace(' ', '')
        name = name.replace('\n', '')
        sufix = "()"  # a trailing '()' marks a method binding
        try:
            if name[-len(sufix):] == sufix:
                self.columnBindings[i] = \
                    self.bindMethod(name[:-len(sufix)])
                if expandColumnsTag is not None:
                    # added in 2009/08/21: the first row shows the test
                    # result, the second row the test data
                    setattr(
                        self, 'rowspan', 2
                    )
                    style = heads.getStyle()
                    if style is None:
                        style = ''
                    try:
                        heads.addToBody(self.getExpandheads(style))
                    except AttributeError, e:
                        Log.info('please implement getExpandheads()!')
                        Log.exception(e)
            else:
                # name is field
                self.columnBindings[i] = self.bindField(name)
def update_website(self):
    """Refresh the whole site: update every category returned by
    get_categories(), logging start and completion."""
    Log.init_log()
    logging.debug("网站内容更新开始")
    for current_category in self.get_categories():
        self.update_category(current_category)
    logging.debug("网站内容更新完成")
def strToJson(data):
    """Parse a JSON string into a Python object.

    Returns the decoded object, or {} when `data` is not valid JSON.
    Fixes: the original never returned its result, so callers always got None;
    also narrowed the BaseException catch.
    """
    try:
        datajson = json.loads(data)
    except Exception as e:
        # json.loads raises ValueError (JSONDecodeError); TypeError for non-strings.
        Log.debug("strToJson", e)
        print(e)
        datajson = {}
    return datajson
def __init__(self, argv, multipleInstanceFlag = False):
    """Prepare a test run: convert the input Excel file to HTML, open the
    input/output report files and record run metadata in fixture.summary.

    argv: [script, input-file, [output-file]] — when no output file is given,
    a timestamped name under 'reports\\' is generated and appended to argv.
    Exits the process with -1 when no input file is supplied.
    """
    self.expandColumnsTag = 'colspan=21'
    if len(argv) < 2:
        sys.stderr.write("usage: python input-file output-file\n")
        sys.exit(-1)
    if len(argv) == 2:
        # Default output name: Y-m-d-H-M-Sreport.html under reports\
        self.outreportname = 'reports\\' + time.strftime('%Y-%m-%d-%H-%M-%S') + 'report.html'
        argv.append(self.outreportname)
    elif(len(argv) > 2):
        self.outreportname = argv[2]
    infilename = self.convertExcelToHtml(argv[1], multipleInstanceFlag)
    Log.info('html file: ' + infilename)
    infile = open(infilename,'r')
    # Modification time of the converted input, recorded in the summary.
    modtime = time.ctime(os.fstat(infile.fileno())[stat.ST_MTIME])
    try:
        self.outfile = open(argv[2],'w')
    except IOError:
        # The reports directory may not exist yet; create it and retry once.
        os.mkdir('reports\\')
        self.outfile = open(argv[2],'w')
    self.fixture.summary["input file"] = os.path.abspath(argv[1])
    self.fixture.summary["input update"] = modtime
    self.fixture.summary["output file"] = os.path.abspath(argv[2])
    self.input = infile.read()
    infile.close()
    # NOTE(review): self.outfile is intentionally left open; it is consumed
    # later via self.output.
    self.output = self.outfile
class JsonHelper():
    """Helper for reading/writing JSON files and lazily creating nested containers."""

    def __init__(self):
        self.TAG = "JsonHelper"
        self.log = Log("budgie-browser-profile-launcher")

    def setDictIfNone(self, data, key):
        """Ensure data[key] is present (creating an empty dict when absent or
        None) and return it; returns None when data/key is None."""
        if data is not None and key is not None:
            if data.get(key, None) is None:
                data[key] = {}
            return data[key]

    def setListIfNone(self, data, key):
        """Ensure data[key] is present (creating an empty list when absent or
        None) and return it; returns None when data/key is None."""
        if data is not None and key is not None:
            if data.get(key, None) is None:
                data[key] = []
            return data[key]

    def readData(self, filePath):
        """Load and return the JSON content of filePath; None when the file is
        missing, unreadable or malformed."""
        if os.path.exists(filePath):
            try:
                with open(filePath) as json_file:
                    return json.load(json_file)
            except (IOError, ValueError):
                # Fix: also catch malformed JSON (ValueError/JSONDecodeError),
                # which the original IOError-only handler let escape.
                self.log.e(self.TAG, Error.ERROR_1010)

    def writeData(self, filePath, data):
        """Serialize data as pretty-printed JSON into an EXISTING file path;
        silently does nothing when the path does not exist."""
        if os.path.exists(filePath):
            try:
                with open(filePath, 'w') as outfile:
                    json.dump(data, outfile, indent=4)
            except Exception:
                # Fix: bare `except:` also trapped SystemExit/KeyboardInterrupt.
                self.log.e(self.TAG, Error.ERROR_1011)
def doTables(self, tables, expandColumnsTag=None):
    """Run every fixture table: resolve the fixture class named in each
    table's heading cell, instantiate it and delegate to its doTable().

    tables: fit-style linked list of parsed tables.
    expandColumnsTag: optional 'attr=value' string applied to the heading tag.

    Fixes: `heading` was unbound (UnboundLocalError) when tables.at() raised;
    fixture construction via string-built `exec` replaced with importlib.
    NOTE(review): nothing visible advances `tables` in this loop — presumably
    the fixture's doTable consumes it; confirm upstream.
    """
    import importlib

    self.summary["run date"] = time.ctime(time.time())
    self.summary["run elapsed time"] = RunTime()
    while tables:
        heading = None
        try:
            heading = tables.at(0, 0, 0)
            if expandColumnsTag is not None:
                tempTuple = expandColumnsTag.split('=')
                heading.modifyTagValue(tempTuple[0], tempTuple[1])
        except Exception as e:
            Log.exception(e)
        if heading:
            try:
                path = heading.text()
                # The last dotted component doubles as the class name
                # inside the module ('pkg.Mod' -> pkg.Mod.Mod()).
                clas = path.split('.')[-1]
                # 'module$attr' selects the attribute explicitly.
                parts = path.split('$')
                if len(parts) == 1:
                    fixture = getattr(importlib.import_module(path), clas)()
                else:
                    fixture = getattr(importlib.import_module(parts[0]), parts[1])()
                fixture.counts = self.counts
                fixture.summary = self.summary
                fixture.doTable(tables, expandColumnsTag)
            except Exception as e:
                self.exception(heading, e)
def getCookie(self, info):
    """Return the 'Set-Cookie' header value from a response-info mapping.

    Returns '' when info is falsy, lacks the header, or lookup fails.
    Fixes: the original computed the cookie but never returned it.
    """
    cookie = ''
    try:
        if info and 'Set-Cookie' in info:
            cookie = info['Set-Cookie']
    except Exception as e:
        Log.error("get cookie value error", e)
    return cookie
def hot_search(self, ids):
    """Crawl every configured source for the given category ids, then locate,
    fetch and persist the hottest article."""
    Log.init_log()
    for category_id in ids:
        for spider in self.get_crawler_category(category_id):
            logging.debug('name:%s, url:%s' % (spider.author, spider.url))
            if spider.url is None or spider.url == '':
                # Crawler has no URL configured — skip it.
                continue
            if spider.charset != 'utf-8':
                # Only utf-8 crawlers are handled.
                continue
            try:
                self.parse(spider)
            except Exception:
                logging.debug(traceback.format_exc())
            self.crawlers[spider.id] = spider
    self.sort_word_list()
    hot_url = self.find_hot_url()
    logging.debug('最热文章url为:')
    logging.debug(hot_url)
    crawler_id = self.get_crawler_id(hot_url)
    real_url = self.get_real_url(hot_url)
    chosen = self.crawlers[int(crawler_id)]
    self.search_core.crawler = chosen
    blog = self.get_blog(self.get_title(hot_url), real_url)
    self.save(blog)
    logging.debug('最热文章更新成功')
def setLoginInfo(self, client):
    """Copy cached login credentials (cookie/token) from self.initBeforeTest
    onto the given client, when such cached info exists."""
    init_info = getattr(self, 'initBeforeTest', None)
    if init_info:
        if "cookie" in init_info:
            client.cookie = init_info['cookie']
        if "token" in init_info:
            client.token = init_info['token']
        Log.debug("initBeforeTest: ", self.initBeforeTest)
def runTest(self):
    """Drive the Excel test suite repeatedly.

    When GlobalSetting.RUNTIME > 0 the suite reruns until that many seconds
    have elapsed; otherwise it runs GlobalSetting.ITERATION extra times
    (ITERATION falls back to 0 when it is not an int).  Each pass converts the
    workbook, runs all sheets and opens the generated report.
    """
    Log.debug('start: ExcelTestDriver.runTest')
    fixture = None
    starttime = datetime.datetime.now()
    runTime = 0
    iterationCount = GlobalSetting.ITERATION
    if type(iterationCount) != types.IntType:
        # Non-numeric configuration: treat as a single run.
        iterationCount = 0
    count = 0
    while True:
        count += 1
        fixture = ExcelColumnFixture()
        excelAPP = util.excel.ExcelAppRD()
        excelAPP.openExcel(self.infilename)
        print 'now start to test......'
        # Subsequent passes get numbered report names: report2, report3, ...
        reportFileName = 'report'
        if count > 1:
            reportFileName = 'report' + str(count)
        fixture.dosheets(excelAPP, reportFileName)
        endtime = datetime.datetime.now()
        runTime = (endtime - starttime).seconds
        print 'run time(seconds) is: ' + str(runTime)
        #Log.debug('open report ' + str(fixture.reportNameList))
        fixture.openFileReport()
        # Time-boxed mode wins over iteration counting when RUNTIME > 0.
        if GlobalSetting.RUNTIME > 0:
            if runTime > GlobalSetting.RUNTIME:
                break
        elif iterationCount < 1:
            break
        iterationCount -= 1
    Log.debug('end: ExcelTestDriver.runTest')
def con2mysql(self):
    """Open a MySQL connection (utf8 charset) and create a cursor, storing
    them on self.conn / self.cur.

    On failure the MySQLdb error is printed and logged; nothing is raised.
    Fixes: Python-3-incompatible `except MySQLdb.Error,e` and `print`
    statement syntax (the file already contains Python-3 code).
    """
    try:
        self.conn = MySQLdb.connect(self.host, self.user, self.passwd,
                                    self.database, self.port, charset='utf8')
        self.cur = self.conn.cursor()
    except MySQLdb.Error as e:
        print("Mysql connect failed! Error %d: %s" % (e.args[0], e.args[1]))
        Log.error("Mysql Error %d: %s" % (e.args[0], e.args[1]))
def doTables(self, tables, expandColumnsTag=None):
    """Run every fixture table: resolve the fixture class named in each
    table's heading cell, instantiate it and delegate to its doTable().

    tables: fit-style linked list of parsed tables.
    expandColumnsTag: optional 'attr=value' string applied to the heading tag.

    Fixes: `heading` was unbound (UnboundLocalError) when tables.at() raised;
    fixture construction via string-built `exec` replaced with importlib.
    NOTE(review): nothing visible advances `tables` in this loop — presumably
    the fixture's doTable consumes it; confirm upstream.
    """
    import importlib

    self.summary["run date"] = time.ctime(time.time())
    self.summary["run elapsed time"] = RunTime()
    while tables:
        heading = None
        try:
            heading = tables.at(0, 0, 0)
            if expandColumnsTag is not None:
                tempTuple = expandColumnsTag.split('=')
                heading.modifyTagValue(tempTuple[0], tempTuple[1])
        except Exception as e:
            Log.exception(e)
        if heading:
            try:
                path = heading.text()
                # The last dotted component doubles as the class name
                # inside the module ('pkg.Mod' -> pkg.Mod.Mod()).
                _CLASSNAME = path.split('.')[-1]
                # 'module$attr' selects the attribute explicitly.
                parts = path.split('$')
                if len(parts) == 1:
                    fixture = getattr(importlib.import_module(path), _CLASSNAME)()
                else:
                    fixture = getattr(importlib.import_module(parts[0]), parts[1])()
                fixture.counts = self.counts
                fixture.summary = self.summary
                fixture.doTable(tables, expandColumnsTag)
            except Exception as e:
                self.exception(heading, e)
def permuteMatrix(matrix_file, file_format, destination_path, permutation_number):
    """Run RSAT's permute-matrix `permutation_number` times on `matrix_file`,
    writing each permuted copy under `destination_path` with an index suffix.

    Returns the list of generated file paths, or None as soon as one
    invocation fails (any partially generated files are left on disk).

    SECURITY NOTE(review): the shell command is assembled by string
    concatenation from file paths — only -i is quoted, so paths containing
    quotes/spaces or untrusted input can break or inject into the shell;
    prefer a list-based subprocess call.  Also, the `commands` module is
    Python-2 only (removed in Py3; see subprocess.getstatusoutput).
    """
    destination_files = []
    for index in range(permutation_number):
        # compose the destination file path
        destination_file = os.path.join(
            destination_path,
            os.path.basename(matrix_file) + "_" + str(index))
        # Compose the permute-matrix command
        cmd = os.path.join(RSATUtils.RSAT_PATH, "perl-scripts/permute-matrix")
        cmd += " -i '" + matrix_file + "'"
        cmd += " -in_format " + file_format
        cmd += " -o " + destination_file
        cmd += " -out_format " + file_format
        # Execute the command; getstatusoutput returns (status, output).
        cmd_result = commands.getstatusoutput(cmd)
        if cmd_result[0] != 0:
            # Non-zero exit status: log and abort, signalling failure with None.
            Log.log("RSATUtils.permuteMatrix : status returned is :" + str(cmd_result[0]) + " for command '" + cmd + "'")
            Log.log(" permute-matrix output is = \n" + str(cmd_result[1]))
            return None
        destination_files.append(destination_file)
    return destination_files
class HttpApiFixture(ExcelColumnFixture):
    """Column fixture that drives HTTP-API tests from an Excel sheet.

    Reads each row, builds the request URL as ``interface + function``,
    fires the request and compares the response with the expected column.
    """
    _CLASSNAME = 'apifixture.HttpApiFixture'
    note = ''
    comments = ['note', 'Note', 'comment', 'Comment']
    interface = ''  # base http url, like http://www.xxx.com:8080/path
    function = ''  # path appended to `interface`
    argCounts = 0
    # initSetupFixture = []  # [fixture name, fixture params] to run before the test
    preResultInfo = {}  # response values saved from the previous request
    client = HttpClientUtil()  # HTTP client used to send requests
    previousResp = None  # response of the previous request
    link = ''
    userdefinefixtureresult = None  # results of user-defined fixtures run during the test
    reqargs = {}  # http request parameters
    initInfoBeforeTest = {}
    # NOTE(review): the dict/list attributes above are class-level and thus
    # shared across all instances — confirm this is intentional.

    def runTest(self, cell, a):
        """Execute one test cell: get the actual result, optionally save
        fields from it, compare against the expected value and mark the cell
        right/wrong/output.

        NOTE(review): the ``try`` around ``a.get()`` has no matching
        ``except`` in the visible source — this block appears truncated.
        """
        Log.debug('start runTest: ' + self._CLASSNAME)
        try:
            if not self.expected:
                self.expected = a.parse(cell.text)
        except BaseException, e:
            Log.debug("testcaseid " + str(self.testCaseId))
            Log.debug(e)
            self.expected = ''
        try:
            actualresult = a.get()  # invoke the method bound for this column
            try:
                # needSavePreResults lists response fields to keep for later
                # rows, comma-separated; regex or full field names allowed.
                if hasattr(self, 'needSavePreResults') and self.needSavePreResults:
                    self.preResultInfo = {}  # clear values saved last time
                    self.savePreResultInfo(actualresult)
                    Log.debug('preResultInfo', self.preResultInfo)
            except BaseException, e:
                Log.error('invoke savePreResultInfo error', e)
            if self.expected and actualresult:
                bresult, message = ResultCheck.checkResult(actualresult, self.expected)
            else:
                # No expected value: treat a non-error response as output-only.
                if actualresult and actualresult.find('error') < 0:
                    bresult = 1
                    message = "expect result column is null, only output!\n"
                else:
                    bresult = 0
                    message = "expect result column is null, maybe error!\n the url:%s \n" % self.url
            if bresult > 0:
                self.right(cell, message)
            elif bresult == 0:
                self.wrong(cell, message)
            else:
                self.output(cell, message)
            # Append the report link to the cell; fall back to just the link
            # when cell.text cannot be concatenated.
            try:
                cell.text = cell.text + self.link
            except:
                cell.text = self.link
def getCookie(self, respInfo):
    """Return the 'Set-Cookie' header value from a response-info mapping.

    Returns '' when respInfo is falsy, lacks the header, or lookup fails.
    Fixes: the original computed the cookie but never returned it.
    """
    cookie = ''
    try:
        if respInfo and 'Set-Cookie' in respInfo:
            cookie = respInfo['Set-Cookie']
    except Exception as e:
        Log.error("get cookie value error", e)
    return cookie
def strToDict(data):
    """Parse a Python-literal string (e.g. "{'a': 1}") into an object.

    Returns {} for falsy input or on parse failure.
    Fixes: the original never returned `result`, so callers always got None.
    SECURITY NOTE(review): eval() executes arbitrary expressions; if `data`
    can come from untrusted input, switch to ast.literal_eval.
    """
    result = {}
    try:
        if data:
            result = eval(data)
    except Exception as e:
        Log.error("strToDict exception, ", e)
        Log.error("data is, ", data)
    return result
def close(self):
    """Close the DB cursor and connection, logging (not raising) any failure."""
    try:
        self.cur.close()
        self.conn.close()
        print("close db success!")
    except Exception:
        # Fix: bare `except:` also trapped SystemExit/KeyboardInterrupt.
        Log.error("close fail!")
        print('close db fail!')
def query(self, sql):
    """Execute a SELECT and return all rows, or None on failure.

    Rows are fetched only when cursor.execute() reports a truthy row count,
    mirroring the original contract (None also means 'no rows').
    """
    results = None
    try:
        if self.cur.execute(sql):
            results = self.cur.fetchall()
    except Exception:
        # Fix: narrowed the bare `except:`.
        Log.error("query fail!")
    return results
def getneedSavePreResultkeyRe(self, value, key):
    """Build a regex snippet matching a quoted JSON pair: \\"key\\":\\"value\\".

    Returns '' when the pattern could not be assembled (non-string inputs).
    """
    restr = ''
    try:
        restr = '\\"' + key + '\\"' + ':' + '\\"' + value + '\\"'
    except Exception:
        # Fix: narrowed the bare `except: pass`; still best-effort.
        pass
    Log.debug('match re: ', restr)
    return restr
def openReport(self):
    """Open every generated report in a new browser window, logging failures.

    Fixes: Python-3-incompatible `except BaseException, e` / `print`
    statement syntax; narrowed BaseException to Exception.
    """
    Log.debug(self.reportNameList)
    for reportName in self.reportNameList:
        try:
            print(reportName)
            webbrowser.open_new(reportName)
        except Exception as e:
            Log.exception(e)
def makeReportTail(self, counts, outputReport=None):
    """Append the summary line ('Test Result: ...') as an <H2> to the report.

    counts: object exposing toString().
    outputReport: writable file-like; defaults to self.outputReport.
    """
    if outputReport is None:  # Fix: identity check instead of `== None`.
        outputReport = self.outputReport
    try:
        testResult = 'Test Result: ' + counts.toString()
        outputReport.write('<H2>' + testResult + '</H2>\n')
    except Exception as e:
        Log.debug('counts')
        Log.debug(e)
def setUrl(self):
    """Resolve self.url: keep an explicit per-case value, otherwise compose it
    from the sheet-level `interface` + `function` header values."""
    has_explicit_url = hasattr(self, 'url') and self.url
    if not has_explicit_url:
        if self.interface and self.function:
            self.url = self.interface + self.function
        else:
            Log.debug("self.interface or self.function is none")
    Log.debug("self.url", self.url)
    return self.url
def getValueFromRespByPattern(self, pattern, resp):
    """Search resp for pattern (case-insensitive) and return the text after
    the last ':' of the first match, or '' when nothing matches.

    Fixes: the original computed `result` but never returned it.
    """
    result = ''
    temp = re.findall(pattern, resp, re.IGNORECASE)
    if temp:
        # Use the first match only.
        try:
            result = temp[0].split(":")[-1]
        except Exception as e:
            Log.error('getValueFromRespByPattern: ', e)
    return result
def __init__(self, path):
    """Parse the XML file at `path` into self.doc; terminate the process when
    parsing fails."""
    self.path = path
    print(path)
    try:
        self.doc = xml.dom.minidom.parse(path)
    except Exception:
        # Fix: narrowed the bare `except:`.
        Log.exception('can not create doc document object')
        print('system exit')
        # NOTE(review): os._exit(0) reports success and skips cleanup;
        # consider sys.exit(1) — kept as-is to preserve existing behavior.
        os._exit(0)
def fileUpload(self):
    """Prepare the request arguments for a file upload.

    When a 'filepath' request arg exists, switch the request method to
    'upload', re-encode the path from utf-8 to gb2312 (Python-2 byte-string
    API) and set the referer expected by the upload endpoint.

    Returns the gb2312-encoded file path, or the original self.reqargs when
    no 'filepath' arg is present.
    """
    reqargs = self.reqargs
    if 'filepath' in self.reqargs:
        self.requestMethod = 'upload'
        filepath = self.reqargs['filepath']
        Log.debug("filepath:", filepath.decode('utf-8').encode('gb2312'))
        # NOTE(review): the return value changes type here (dict -> encoded
        # byte string); callers presumably expect this — confirm.
        reqargs = filepath.decode('utf-8').encode('gb2312')
        # self.reqargs = filepath.decode('utf-8').encode('gb2312')
        self.client.referer = "http://pre.moojnn.com/datasource.html"
    return reqargs
def execute(self, sql):
    """Execute a write/DDL SQL statement.

    Returns True on success, False on failure.
    Fixes: the original never returned `result`, so callers always got None.
    """
    result = False
    try:
        self.cur.execute(sql)
        Log.debug("execute sql statement!", sql)
        print("execute sql success")
        result = True
    except Exception as e:
        print("Execute Fail!")
        Log.error("execute sql fail!", e)
    return result
def removeChildsOfNode(self, node):
    """Remove every ELEMENT child of `node` (text/comment nodes are kept).

    Fixes: the original removed children while iterating the live child
    NodeList, which skips the sibling following each removed element; we now
    iterate a snapshot.
    """
    try:
        if node is not None:
            for child in list(node._get_childNodes()):
                if child.nodeType == xml.dom.Node.ELEMENT_NODE:
                    node.removeChild(child)
                    Log.debug('have successfully removed element')
    except Exception as e:
        Log.exception(e)
def getStyle(self):
    """Extract the style attribute substring (e.g. "style='...'") from self.tag.

    Returns '' when no style attribute is present.
    Fixes: the original computed `result` but never returned it.
    """
    result = ''
    try:
        place1 = self.tag.find('style')
        if place1 > -1:
            # Skip past "style='" (7 chars) and find the closing quote.
            place2 = self.tag.find("'", place1 + 7)
            if place2 > -1:
                result = self.tag[place1:place2 + 1]
    except Exception as e:
        Log.exception(e)
    return result
def getParamlist(self):
    """Return the fixture's bindable column names, excluding control columns
    (anything containing 'test', 'url' or 'savepre', case-insensitively).

    Fixes: the trailing 'end ...' Log.debug sat after `return` and was
    unreachable dead code; it now runs before returning.
    """
    Log.debug('start getParamlist: ' + self.clas)
    paramList = []
    for param in self._typeDict:
        lowered = param.lower()
        if lowered.find('test') > -1 or lowered.find('url') > -1 \
                or lowered.find('savepre') > -1:
            continue
        paramList.append(param)
    Log.debug('end getParamlist: ' + self.clas)
    return paramList
def getDynamicParamVlaue(self, paramvalue, fromWhDict):
    """Resolve a dynamic parameter: a '%key%' placeholder is replaced with the
    value looked up in fromWhDict; plain values pass through unchanged.

    Fixes: the original never returned `result`, so callers always got None.
    """
    pattern = '%\\w+%'  # dynamic-parameter marker, e.g. '%token%'
    result = paramvalue
    try:
        valueList = re.findall(pattern, paramvalue, re.IGNORECASE)
        if len(valueList) > 0:
            keyvalue = valueList[0][1:-1]  # strip the surrounding '%'
            result = self.getValueFromRespByDict(keyvalue, fromWhDict)
            print(keyvalue, result)
    except Exception as e:
        Log.error('getDynamicParamVlaue error:', e)
    return result
def convertExcelToHtml(self, filename, multipleInstanceFlag=False):
    """Convert the given Excel workbook to HTML, honouring the configured
    sheet whitelist (GlobalSetting.SHEETS); returns the HTML file name."""
    if multipleInstanceFlag:
        converter = ConvertExcelOfMulInstanceToHtml()
    else:
        converter = ConvertExcelToHtml()
    Log.debug('GlobalSetting.SHEETS:' + str(GlobalSetting.SHEETS))
    sheet_names = GlobalSetting.SHEETS if len(GlobalSetting.SHEETS) > 0 else None
    return converter.convertExcelToHtml(filename, sheet_names)
def getFixture(self, fixturePath):
    """Instantiate and return the fixture class named by fixturePath.

    A bare name ('Foo') is resolved inside the 'fixture' package; a dotted
    path is used as-is.  The class is expected to share its module's last
    path component's name.  Returns None when loading fails.

    Fixes: replaced string-built `exec` with importlib/getattr, and the
    original never returned the fixture it built.
    """
    import importlib

    fixture = None
    temp = fixturePath.split('.')
    _CLASSNAME = temp[-1]
    if len(temp) == 1:
        fixturePath = 'fixture.' + _CLASSNAME
    try:
        module = importlib.import_module(fixturePath)
        fixture = getattr(module, _CLASSNAME)()
    except Exception as e:
        Log.error('getFixture error:', e)
    return fixture
def doCells(self, rowpos, startcolumnPos, ncols, nrows=None):
    """Process one spreadsheet row: run doCell over each column up to the
    method column, concatenating the rendered cells.

    Returns (rendered_row_string, next_row_position).
    """
    Log.debug('start: ExcelColumnFixture.doCells')
    rendered_cells = []
    last_col = self.posDict['methodPos']
    for col in range(startcolumnPos, last_col + 1):
        raw_text = self.excelAPP.getCellStrValue(rowpos, col)
        cell = self.getCellStrValue(raw_text)
        self.doCell(cell, col)
        if cell.text == '' or cell.text == ' ':
            cell.text = ' '  # keep an explicit space so the cell is not empty
        rendered_cells.append(str(cell))
    Log.debug('end: ExcelColumnFixture.doCells')
    return ''.join(rendered_cells), rowpos + 1
def runSetupFixture(self):
    """Run the configured setup fixture ([module path, params]) before the
    test and store its run() result on self.initBeforeTest.

    Fixes: the original exec'd `import <ClassName>` (the bare class name)
    instead of importing the fixture's module path, and built code via exec
    from data; now uses importlib/getattr.
    """
    import importlib

    if self.setupFixture:
        fixturePath = self.setupFixture[0]
        fixtureParams = self.setupFixture[1]
        _CLASSNAME = fixturePath.split('.')[-1]
        try:
            module = importlib.import_module(fixturePath)
            fixture = getattr(module, _CLASSNAME)()
            self.initBeforeTest = fixture.run(fixtureParams)
        except Exception as e:
            Log.error(e)
            print(e)
def fliterParamlist(self):
    """Return the request-parameter column names, filtering out control
    columns: names containing '_' (not a request arg), or containing 'url',
    'savepre' or 'fixture' (case-insensitive).

    Fixes: the trailing 'end ...' Log.debug sat after `return` and was
    unreachable dead code; it now runs before returning.
    """
    Log.debug('start fliterParamlist: ' + self._CLASSNAME)
    paramList = []
    for param in self._typeDict:
        lowered = param.lower()
        # A '_' inside the name marks a column that is not a request parameter.
        if param.find('_') > 0 or lowered.find('url') > -1 \
                or lowered.find('savepre') > -1 \
                or lowered.find('fixture') > -1:
            continue
        paramList.append(param)
    Log.debug('end fliterParamlist: ' + self._CLASSNAME)
    return paramList
def getFixture(self):
    """Instantiate the fixture named by self.fixturename (a dotted module
    path whose last component is also the class name), call its run() and
    return the result.

    Returns {} when no fixturename is configured or when loading fails.
    Fixes: the original never returned `result`; string-built `exec`
    replaced with importlib/getattr.
    """
    import importlib

    result = {}
    if hasattr(self, 'fixturename'):
        fixturepath = self.fixturename.strip()
        fixturename = fixturepath.split('.')[-1]
        try:
            module = importlib.import_module(fixturepath)
            execfixture = getattr(module, fixturename)()
            result = execfixture.run()
        except Exception as e:
            Log.error(e)
    return result
def saveRespDataToFile(self, respData):
    """Persist a response body under LOGFIELPATH/<sheet>/<testCaseId>json.txt.

    Creates the log directories on demand; failures are logged, not raised.
    Fixes: the file handle leaked when write() raised; now closed via `with`.
    """
    fileName = str(self.testCaseId + 'json.txt')
    path = LOGFIELPATH + self.curShName
    if not os.path.exists(LOGFIELPATH):
        os.mkdir(LOGFIELPATH)
    try:
        if not os.path.exists(path):
            os.mkdir(path)
        with open(path + os.sep + fileName, 'w') as fileObject:
            fileObject.write(respData)
    except Exception as e:
        Log.error("create jsontxt fail!", e)
def doCell(self, cell, column):
    """Process one cell against its column binding: ignore it, set a field
    value, or check a method result.

    Fixes: shouldIgnore() was evaluated twice per cell; it is now called once.
    """
    Log.debug('start: ExcelColumnFixture.doCell')
    a = self.columnBindings[column]
    text = str(cell.text)
    ignore_state = a.shouldIgnore(text)
    if ignore_state == 1:
        pass
    elif ignore_state == 2:
        self.ignore(cell)
    elif a.field:
        a.set(a.parse(text))
    elif a.method:
        self.check(cell, a)
    Log.debug('end: ExcelColumnFixture.doCell')