def IsShowEntityModeStartPoint(logMessage):
    """Detect the SHOW_ENTITY start marker in a single log line.

    When show-entity mode is not yet active and the line matches the
    "SHOW_ENTITY - Updating" pattern, switch the mode on, remember the
    card id in the global indexOfCardInfo, and return
    [entityText, cardId].  In every other case return an empty list.
    """
    global indexOfCardInfo
    if not GetIsShowEntityMode():
        searchedLogMessage = re.search(
            "SHOW_ENTITY - Updating Entity=(.+?) CardID=(.+?)\n", logMessage)
        if searchedLogMessage is not None:
            SetIsShowEntityMode(True)
            foundedResult = [searchedLogMessage.group(1),
                             searchedLogMessage.group(2)]
            LogManager.PrintLog(
                "ShowEntityObserver", "IsShowEntityModeStartPoint",
                "entity: " + foundedResult[0] + " card: " + foundedResult[1],
                DefineManager.LOG_LEVEL_INFO)
            # Remember which card the upcoming detail lines belong to.
            indexOfCardInfo["CARD_ID"] = foundedResult[1]
            return foundedResult
    else:
        LogManager.PrintLog("ShowEntityObserver", "IsShowEntityModeStartPoint",
                            "Show entity mode already true",
                            DefineManager.LOG_LEVEL_WARN)
    return []
def ExportDataArrayAsCSV(self, fileName="", saveKeyList=None):
    """Write self.dataArray (a list of dicts) to a CSV file.

    Only the values whose key is a substring of some entry in
    saveKeyList are written; row order follows dict iteration order.
    Failures are logged, never raised.
    """
    # The original used a mutable default ([]) which is shared across
    # calls; None plus normalization is the safe equivalent.
    if saveKeyList is None:
        saveKeyList = []
    LogManager.PrintLogMessage(
        "ExportDataManager", "ExportDataArrayAsCSV",
        "export data array size: " + str(len(self.dataArray)),
        DefineManager.LOG_LEVEL_INFO)
    try:
        # "with" guarantees the handle is closed even when a write fails
        # (the original leaked it on error).
        with open(fileName, "w", encoding="utf-8", newline="") as fileManager:
            csvWriter = csv.writer(fileManager)
            for rowData in self.dataArray:
                csvRow = [rowData[key] for key in rowData
                          if any(key in keyItem for keyItem in saveKeyList)]
                csvWriter.writerow(csvRow)
        LogManager.PrintLogMessage("ExportDataManager", "ExportDataArrayAsCSV",
                                   "exported done!",
                                   DefineManager.LOG_LEVEL_INFO)
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows KeyboardInterrupt / SystemExit.
        LogManager.PrintLogMessage("ExportDataManager", "ExportDataArrayAsCSV",
                                   "export failed",
                                   DefineManager.LOG_LEVEL_ERROR)
    return
def ParseFieldStatus(fieldData):
    """Build and log printable 8-slot field rows for both players.

    fieldData maps a field number to card records; record[0][3] is the
    zone position (0 means off-field), record[0][4] the card name, and
    record[0][PLAYER_NUMBER_SAVED_POINT] the owning player.
    Returns [fieldStatusPlayer1, fieldStatusPlayer2].
    """
    fieldStatusPlayer1 = [""] * 8
    fieldStatusPlayer2 = [""] * 8
    fieldPrintFormat = ("{0:>10} |{1:>10} |{2:>10} |{3:>10} |"
                        "{4:>10} |{5:>10} |{6:>10} |{7:>10} |")
    # BUG FIX: dict.iteritems() is Python 2 only and raises
    # AttributeError on Python 3 (this file elsewhere uses
    # open(..., encoding=...), a Python 3 signature) — use .items().
    for indexOfFieldNumber, indexOfCard in fieldData.items():
        if indexOfCard[0][3] != 0:
            playerNumber = int(
                indexOfCard[0][DefineManager.PLAYER_NUMBER_SAVED_POINT])
            zonePosition = int(indexOfCard[0][3])
            if playerNumber == 1:
                fieldStatusPlayer1[zonePosition] = indexOfCard[0][4]
            elif playerNumber == 2:
                fieldStatusPlayer2[zonePosition] = indexOfCard[0][4]
            else:
                LogManager.PrintLog("FieldObserver", "ParseFieldStatus",
                                    "unknown player",
                                    DefineManager.LOG_LEVEL_WARN)
    fieldOfPlayer1 = fieldPrintFormat.format(*fieldStatusPlayer1)
    fieldOfPlayer2 = fieldPrintFormat.format(*fieldStatusPlayer2)
    LogManager.PrintLog("FieldObserver", "ParseFieldStatus",
                        "player1: " + fieldOfPlayer1,
                        DefineManager.LOG_LEVEL_INFO)
    LogManager.PrintLog("FieldObserver", "ParseFieldStatus",
                        "player2: " + fieldOfPlayer2,
                        DefineManager.LOG_LEVEL_INFO)
    return [fieldStatusPlayer1, fieldStatusPlayer2]
def CrawlWorstYearPrice(self):
    """Crawl the 52-week lowest price from the investment-opinion table.

    Returns the price text on success, None on any failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        sideTab = webDriver.find_element_by_class_name(
            DefineManager.STOCK_SIDE_TAB_CLASS_NAME)
        investmentOpinionSection = sideTab.find_element_by_class_name(
            DefineManager.STOCK_INVESTMENT_OPINION_CLASS_NAME)
        # The high and low yearly prices share one table row; the column
        # index selects the "worst" (lowest) value.
        investmentOpinionRow = investmentOpinionSection.find_elements_by_tag_name(
            DefineManager.TAG_TR)[
            DefineManager.BEST_PRICE_OF_THE_YEAR_ROW_POINT]
        investmentOpinionCols = investmentOpinionRow.find_elements_by_tag_name(
            DefineManager.TAG_EM)
        worstPriceOfTheYear = investmentOpinionCols[
            DefineManager.WORST_PRICE_OF_THE_YEAR_COL_POINT].text
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlWorstYearPrice",
            "crawl worst price of the year successfully: " + worstPriceOfTheYear,
            DefineManager.LOG_LEVEL_INFO)
        return worstPriceOfTheYear
    except Exception:
        # Narrowed from a bare except: Selenium lookup failures still
        # degrade to None without killing the crawl loop.
        LogManager.PrintLogMessage("CrawlBasicInfo", "CrawlWorstYearPrice",
                                   "crawl worst price of the year failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def CheckHideEntity(logMessage):
    """Parse a HIDE_ENTITY log line.

    On a match, returns [entityDetail, tag, value] where entityDetail is
    either the raw entity text or, when the detail pattern also matches,
    a list [name, id, zone, zonePos, cardId, player].  Returns None when
    the line is not a recognized HIDE_ENTITY event.
    """
    foundedEntityGroup = re.search(
        " HIDE_ENTITY - Entity=(.+?) tag=(.+?) value=(.+?)\n", logMessage)
    if foundedEntityGroup is not None:
        hideEntityData = [foundedEntityGroup.group(1),
                          foundedEntityGroup.group(2),
                          foundedEntityGroup.group(3)]
        # Break the entity blob down into its individual attributes.
        hideEntityDetail = re.search(
            "name=(.+?) id=(.+?) zone=(.+?) zonePos=(.+?) cardId=(.+?) player=(.+?)]",
            hideEntityData[0])
        if hideEntityDetail is not None:
            hideEntityData[0] = [hideEntityDetail.group(1),
                                 hideEntityDetail.group(2),
                                 hideEntityDetail.group(3),
                                 hideEntityDetail.group(4),
                                 hideEntityDetail.group(5),
                                 hideEntityDetail.group(6)]
            LogManager.PrintLog("HideEntityObserver", "CheckHideEntity",
                                "entity: " + " ".join(hideEntityData[0]) +
                                " tag: " + hideEntityData[1] +
                                " value: " + hideEntityData[2],
                                DefineManager.LOG_LEVEL_INFO)
            if hideEntityData[1] == "ZONE":
                # detail index 2 is the current zone; the tag value is
                # where the card is moving to.
                LogManager.PrintLog("HideEntityObserver", "CheckHideEntity",
                                    "Card state " + hideEntityData[0][2] +
                                    " -> " + hideEntityData[2],
                                    DefineManager.LOG_LEVEL_INFO)
                return hideEntityData
            else:
                # Typo fix: "Unkown" -> "Unknown" in the warning text.
                LogManager.PrintLog("HideEntityObserver", "CheckHideEntity",
                                    "Unknown tag",
                                    DefineManager.LOG_LEVEL_WARN)
                return None
        else:
            LogManager.PrintLog("HideEntityObserver", "CheckHideEntity",
                                "wrong entity accepted",
                                DefineManager.LOG_LEVEL_WARN)
            return None
def CrawlPriceChangedPercent(self):
    """Crawl the day's price-change percentage string.

    The percentage is rendered as one <span> per character, so the span
    texts are joined into a single string.  Returns None on failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        stockElements = webDriver.find_element_by_class_name(
            DefineManager.STOCK_PRICE_ELEMENTS_CLASS_NAME)
        priceChangedPercent = stockElements.find_elements_by_tag_name(
            DefineManager.TAG_EM)[
            DefineManager.CHANGED_PRICE_PERCENT_SAVED_POINT]
        # join() instead of repeated string concatenation in a loop.
        priceChangedPercentStr = "".join(
            element.text
            for element in priceChangedPercent.find_elements_by_tag_name(
                DefineManager.TAG_SPAN))
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlPriceChangedPercent",
            "crawl price changed percent successfully: " + priceChangedPercentStr,
            DefineManager.LOG_LEVEL_INFO)
        return priceChangedPercentStr
    except Exception:
        # Narrowed from a bare except; failures degrade to None.
        LogManager.PrintLogMessage("CrawlBasicInfo", "CrawlPriceChangedPercent",
                                   "crawl price changed percent failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def CrawlStockPrice(self):
    """Crawl the current stock price string.

    The price digits are rendered as one <span> per character, so the
    span texts are joined.  Returns None on failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        stockElements = webDriver.find_element_by_class_name(
            DefineManager.STOCK_PRICE_ELEMENTS_CLASS_NAME)
        stockPrice = stockElements.find_element_by_class_name(
            DefineManager.STOCK_NUMBER_CLASS_NAME)
        # join() instead of repeated string concatenation in a loop.
        stockPriceStr = "".join(
            spanNumber.text
            for spanNumber in stockPrice.find_elements_by_tag_name(
                DefineManager.TAG_SPAN))
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlStockPrice",
            "crawl stock price successfully: " + stockPriceStr,
            DefineManager.LOG_LEVEL_INFO)
        return stockPriceStr
    except Exception:
        # Narrowed from a bare except; failures degrade to None.
        LogManager.PrintLogMessage("CrawlBasicInfo", "CrawlStockPrice",
                                   "crawl stock price failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def CrawlHighestStockPrice(self):
    """Crawl the day's highest stock price string.

    Navigates row/column indices of the high-low price table, then joins
    the per-character <span> texts.  Returns None on failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        highLowPriceTable = webDriver.find_element_by_class_name(
            DefineManager.STOCK_HIGH_LOW_PRICE_INFO_TABLE)
        highPriceTableRow = highLowPriceTable.find_elements_by_tag_name(
            DefineManager.TAG_TR)[
            DefineManager.HIGHEST_PRICE_SAVED_ROW_POINT]
        highPriceTableCol = highPriceTableRow.find_elements_by_tag_name(
            DefineManager.TAG_TD)[
            DefineManager.HIGHEST_PRICE_SAVED_COL_POINT]
        highPrice = highPriceTableCol.find_element_by_class_name(
            DefineManager.STOCK_HIGH_NUMBER_CLASS_NAME)
        # join() instead of repeated string concatenation in a loop.
        highPriceStr = "".join(
            spanNumber.text
            for spanNumber in highPrice.find_elements_by_tag_name(
                DefineManager.TAG_SPAN))
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlHighestStockPrice",
            "crawl highest stock price successfully: " + highPriceStr,
            DefineManager.LOG_LEVEL_INFO)
        return highPriceStr
    except Exception:
        # Narrowed from a bare except; also fixed the misspelled method
        # name ("CrawlStockHighestPrice") in the error log.
        LogManager.PrintLogMessage("CrawlBasicInfo", "CrawlHighestStockPrice",
                                   "crawl highest stock price failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def StaticLoader(targetFilePath=DefineManager.DEFAULT_LOG_FILE_SAVED_PATH):
    """Replay a saved Hearthstone log file, feeding every line to the parser.

    Opens the file, initializes the deck observer, and forwards each line
    to DeckObserver.ParseShowEntity until end of file.
    """
    LogManager.PrintLog("FileIO", "StaticLoader",
                        "Load file path: " + targetFilePath,
                        DefineManager.LOG_LEVEL_INFO)
    # "with" closes the log file even if the parser raises (the original
    # never closed the handle).
    with open(targetFilePath) as hearthStoneLogFile:
        DeckObserver.GameObservingInit()
        for logMessage in hearthStoneLogFile:
            DeckObserver.ParseShowEntity(logMessage, targetFilePath)
    LogManager.PrintLog("FileIO", "StaticLoader", "File read process ended",
                        DefineManager.LOG_LEVEL_INFO)
def DetectTurns(logMessage):
    """React to STEP tag changes to track turn boundaries.

    MAIN_READY resets the global fieldCardsInfo for the new turn;
    MAIN_START prints the field and triggers the field observer; any
    other STEP value is logged as a warning.  Non-matching lines are
    ignored.
    """
    global fieldCardsInfo
    tagChangeEntityData = re.search(
        "TAG_CHANGE Entity=(.+?) tag=STEP value=(.+?) \n", logMessage)
    # Guard clause replaces the original trailing "else: return".
    if tagChangeEntityData is None:
        return
    LogManager.PrintLog(
        "TagChangeEntityObserver", "DetectTurns",
        "entity: " + tagChangeEntityData.group(1) +
        " value: " + tagChangeEntityData.group(2),
        DefineManager.LOG_LEVEL_INFO)
    stepValue = tagChangeEntityData.group(2)
    if stepValue == "MAIN_READY":
        # New turn: drop the previous turn's field snapshot.
        fieldCardsInfo = {}
        LogManager.PrintLog("TagChangeEntityObserver", "DetectTurns",
                            "Print field status",
                            DefineManager.LOG_LEVEL_INFO)
    elif stepValue == "MAIN_START":
        AdvancedPrintManager.PrintFieldStatus(fieldCardsInfo)
        FieldObserver.FieldMainObserver(fieldCardsInfo)
        LogManager.PrintLog("TagChangeEntityObserver", "DetectTurns",
                            "MAIN_START_TRIGGERS",
                            DefineManager.LOG_LEVEL_INFO)
    else:
        LogManager.PrintLog("TagChangeEntityObserver", "DetectTurns",
                            "not rdy tag value",
                            DefineManager.LOG_LEVEL_WARN)
def StartCrawl(self):
    """Crawl one company's basic and detail info into self.crawlDataArray.

    Builds a dict of crawled fields (each crawl method may return None,
    normalized to "") and appends it to the accumulated data array.
    """
    LogManager.PrintLogMessage("CrawlRoutineManager", "StartCrawl",
                               "crawl data", DefineManager.LOG_LEVEL_INFO)
    crawlDataDic = {}
    crawlBasicInfo = CrawlBasicInfo.CrawlBasicInfo(self.webCrawler,
                                                   self.targetUrl)
    crawlDataDic["Name"] = crawlBasicInfo.CrawlCompanyName() or ""
    crawlDataDic["Code"] = crawlBasicInfo.CrawlCompanyStockCode() or ""
    crawlDataDic["Price"] = crawlBasicInfo.CrawlStockPrice() or ""
    crawlDataDic["D_PRH"] = crawlBasicInfo.CrawlHighestStockPrice() or ""
    crawlDataDic["D_PRL"] = crawlBasicInfo.CrawlLowestStockPrice() or ""
    crawlDataDic["Y_PRH"] = crawlBasicInfo.CrawlBestYearPrice() or ""
    crawlDataDic["Y_PRL"] = crawlBasicInfo.CrawlWorstYearPrice() or ""
    crawlDataDic["D_IV"] = crawlBasicInfo.CrawlDividendYield() or ""
    crawlDataDic["Change"] = crawlBasicInfo.CrawlPriceChangedPercent() or ""
    crawlDataDic["Value"] = crawlBasicInfo.CrawlMarketCapitalization() or ""
    crawlDataDic["Beta"] = crawlBasicInfo.CrawlYearBeta() or ""
    crawlDataDic["PER"] = crawlBasicInfo.CrawlPER() or ""
    crawlDataDic["PBR"] = crawlBasicInfo.CrawlPBR() or ""
    crawlDataDic["EPS"] = crawlBasicInfo.CrawlEPS() or ""
    crawlDetailInfo = CrawlDetailInfo.CrawlDetailInfo(self.webCrawler,
                                                      self.targetDetailUrl)
    crawlDataDic["SALEQ2"] = crawlDetailInfo.Crawl3YearsBeforeSale() or ""
    crawlDataDic["SALEQ1"] = crawlDetailInfo.Crawl2YearsBeforeSale() or ""
    crawlDataDic["SALEQ0"] = crawlDetailInfo.Crawl1YearsBeforeSale() or ""
    crawlDataDic["NIQ2"] = crawlDetailInfo.Crawl3YearsBeforeNetIncome() or ""
    crawlDataDic["NIQ1"] = crawlDetailInfo.Crawl2YearsBeforeNetIncome() or ""
    crawlDataDic["NIQ0"] = crawlDetailInfo.Crawl1YearsBeforeNetIncome() or ""
    crawlDataDic["ACT"] = crawlDetailInfo.CrawlActQ3() or ""
    crawlDataDic["DPT"] = crawlDetailInfo.CrawlDptQ3() or ""
    crawlDataDic["CAP"] = crawlDetailInfo.CrawlCapQ3() or ""
    # .items() avoids a second lookup per key inside the loop.
    for key, value in crawlDataDic.items():
        LogManager.PrintLogMessage("CrawlRoutineManager", "StartCrawl",
                                   "" + key + ": " + value,
                                   DefineManager.LOG_LEVEL_DEBUG)
    self.crawlDataArray.append(crawlDataDic)
    LogManager.PrintLogMessage(
        "CrawlRoutineManager", "StartCrawl",
        "saved crawl data size: " + str(len(self.crawlDataArray)),
        DefineManager.LOG_LEVEL_INFO)
def CloseDriver(self):
    """Quit the Chrome driver and update self.driverStatus accordingly.

    driverStatus stays True when the browser could not be closed.
    """
    LogManager.PrintLogMessage("WebCrawler", "CloseDriver",
                               "close chrome browser",
                               DefineManager.LOG_LEVEL_INFO)
    try:
        self.driver.quit()
        self.driverStatus = False
    except Exception:
        # Narrowed from a bare except: quit() failures are logged and the
        # driver is still considered alive.
        LogManager.PrintLogMessage("WebCrawler", "CloseDriver",
                                   "cannot close chrome browser",
                                   DefineManager.LOG_LEVEL_ERROR)
        self.driverStatus = True
def RunCrawling(self):
    """Crawl every stored company code, one StartCrawl pass per code."""
    LogManager.PrintLogMessage("CrawlRoutineManager", "RunCrawling",
                               "running company stock price crawling",
                               DefineManager.LOG_LEVEL_INFO)
    for companyCode in self.companyCodes:
        # Point both target URLs at the current company before crawling.
        self.targetUrl = ("http://finance.naver.com/item/main.nhn?code="
                          + companyCode)
        self.targetDetailUrl = ("http://finance.naver.com/item/coinfo.nhn?code="
                                + companyCode + "&target=finsum_more")
        LogManager.PrintLogMessage("CrawlRoutineManager", "RunCrawling",
                                   "start crawling company: " + companyCode,
                                   DefineManager.LOG_LEVEL_INFO)
        self.StartCrawl()
def TakePicture(self, url, savePath="../Src/test.png"):
    """Navigate to url and save a full-page screenshot.

    savePath generalizes the previously hard-coded output file; the
    default preserves the old behavior, so existing callers are
    unaffected.  Returns True on success, False when the driver is down
    or navigation fails.
    """
    if not self.driverStatus:
        LogManager.PrintLogMessage("WebCrawler", "TakePicture",
                                   "chrome browser not working",
                                   DefineManager.LOG_LEVEL_WARN)
        return False
    if not self.SetDriverUrl(url):
        return False
    LogManager.PrintLogMessage("WebCrawler", "TakePicture",
                               "taking shot screen url: " + url,
                               DefineManager.LOG_LEVEL_INFO)
    self.driver.get_screenshot_as_file(savePath)
    return True
def SetDriverUrl(self, url):
    """Point the browser at url; return True on success, False on failure."""
    LogManager.PrintLogMessage("WebCrawler", "SetDriverUrl",
                               "moving on " + url,
                               DefineManager.LOG_LEVEL_INFO)
    try:
        self.driver.get(url)
        # NOTE(review): 3 seconds is hard-coded here while ClickElement
        # uses DefineManager.DELAY — consider unifying; left as-is to
        # avoid a behavior change.
        self.driver.implicitly_wait(3)
        return True
    except Exception:
        # Narrowed from a bare except; navigation errors degrade to False.
        LogManager.PrintLogMessage("WebCrawler", "SetDriverUrl",
                                   "connection failed " + url,
                                   DefineManager.LOG_LEVEL_ERROR)
        return False
def SwitchToFrame(self, frameTarget):
    """Switch the driver context into frameTarget (an iframe element).

    Returns the driver on success, None on failure.
    """
    try:
        self.driver.switch_to.frame(frameTarget)
        LogManager.PrintLogMessage("WebCrawler", "SwitchToFrame",
                                   "frame switched",
                                   DefineManager.LOG_LEVEL_INFO)
        return self.driver
    except Exception:
        # Narrowed from a bare except; switch failures degrade to None.
        LogManager.PrintLogMessage("WebCrawler", "SwitchToFrame",
                                   "frame not switched",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def SwitchToDefault(self):
    """Switch the driver context back to the top-level document.

    Returns the driver on success, None on failure.
    """
    try:
        self.driver.switch_to.default_content()
        LogManager.PrintLogMessage("WebCrawler", "SwitchToDefault",
                                   "frame switched",
                                   DefineManager.LOG_LEVEL_INFO)
        return self.driver
    except Exception:
        # Narrowed from a bare except; switch failures degrade to None.
        LogManager.PrintLogMessage("WebCrawler", "SwitchToDefault",
                                   "frame not switched",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def __init__(self):
    """Open a Chrome browser; set self.driverStatus to reflect success."""
    LogManager.PrintLogMessage("WebCrawler", "__init__",
                               "open chrome browser",
                               DefineManager.LOG_LEVEL_INFO)
    try:
        options = webdriver.ChromeOptions()
        options.add_argument('window-size=1920x1080')
        # NOTE(review): chrome_options= is deprecated in newer Selenium
        # (use options=) — kept for compatibility with the pinned version.
        self.driver = webdriver.Chrome(chrome_options=options)
        self.driverStatus = True
    except Exception:
        # Narrowed from a bare except: startup failures are logged and
        # recorded in driverStatus instead of being silently swallowed.
        LogManager.PrintLogMessage("WebCrawler", "__init__",
                                   "cannot open chrome browser",
                                   DefineManager.LOG_LEVEL_ERROR)
        self.driverStatus = False
def ClickElement(self, clickTarget):
    """Click clickTarget and wait DefineManager.DELAY implicitly.

    Failures are logged, never raised.
    """
    LogManager.PrintLogMessage("WebCrawler", "ClickElement",
                               "try to click target",
                               DefineManager.LOG_LEVEL_INFO)
    try:
        clickTarget.click()
        self.driver.implicitly_wait(DefineManager.DELAY)
        LogManager.PrintLogMessage("WebCrawler", "ClickElement",
                                   "target clicked",
                                   DefineManager.LOG_LEVEL_INFO)
    except Exception:
        # Narrowed from a bare except; click failures are only logged.
        LogManager.PrintLogMessage("WebCrawler", "ClickElement",
                                   "cannot click target",
                                   DefineManager.LOG_LEVEL_ERROR)
def CalculateProcess(fieldData):
    """Run the best-card-swap computation for both players' fields."""
    LogManager.PrintLog("FieldHelper", "CalculateProcess",
                        "player field data accepted",
                        DefineManager.LOG_LEVEL_INFO)
    for playerIndex in (0, 1):
        BestCardSwap(fieldData, playerIndex)
def __init__(self, targetUrl="", targetDetailUrl=""):
    """Initialize an empty crawl routine for the given target URLs."""
    self.targetUrl = targetUrl
    self.targetDetailUrl = targetDetailUrl
    # Accumulators: company codes to visit and the crawled rows.
    self.companyCodes = []
    self.crawlDataArray = []
    LogManager.PrintLogMessage("CrawlRoutineManager", "__init__",
                               "init routine manager",
                               DefineManager.LOG_LEVEL_INFO)
def FindSquareObjectFromContourData(contourDatas):
    """Return the first contour whose polygon approximation is a square.

    Each contour is approximated with 2% of its perimeter as tolerance;
    the first one with SQUARE_CORNER_NUM corners is returned.  None when
    no square-like contour exists.
    """
    for contour in contourDatas:
        perimeter = cv2.arcLength(contour, True)
        approximated = cv2.approxPolyDP(contour, 0.02 * perimeter, True)
        if len(approximated) == Setting.DefineManager.SQUARE_CORNER_NUM:
            LogManager.PrintLog("ObjectDetect",
                                "FindSquareObjectFromContourData",
                                "Square Contour Data Founded",
                                DefineManager.LOG_LEVEL_INFO)
            return approximated
    LogManager.PrintLog("ObjectDetect", "FindSquareObjectFromContourData",
                        "Square Contour Data Not Founded",
                        DefineManager.LOG_LEVEL_WARN)
    return None
def CrawlCompanyName(self):
    """Crawl the company name from the page header block.

    Returns the name text on success, None on any failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        companyElements = webDriver.find_element_by_class_name(
            DefineManager.COMPANY_INFO_ELEMENTS_CLASS_NAME)
        companyName = companyElements.find_element_by_tag_name("a").text
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlCompanyName",
            "crawl company name successfully: " + companyName,
            DefineManager.LOG_LEVEL_INFO)
        return companyName
    except Exception:
        # Narrowed from a bare except; lookup failures degrade to None.
        LogManager.PrintLogMessage("CrawlBasicInfo", "CrawlCompanyName",
                                   "crawl company name failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def __init__(self, webCrawler, crawlUrl):
    """Bind the shared crawler and navigate it to the detail-page URL."""
    self.webCrawler = webCrawler
    self.crawlUrl = crawlUrl
    # Navigate first, then read the driver status, matching the original
    # call order.
    urlStatus = str(self.webCrawler.SetDriverUrl(crawlUrl))
    crawlerStatus = str(self.webCrawler.GetDriverStatus())
    statusMessage = ("web driver status: " + crawlerStatus +
                     " url status: " + urlStatus)
    LogManager.PrintLogMessage("CrawlDetailInfo", "__init__", statusMessage,
                               DefineManager.LOG_LEVEL_INFO)
def RealtimeLoader(targetFilePath=DefineManager.DEFAULT_HEARTH_STONE_LOG_FILES_PATH):
    """Tail the newest Hearthstone log file, feeding each line to the parser.

    Spawns `tail -F` on the latest log file found under targetFilePath
    and forwards every line to DeckObserver.ParseShowEntity.
    """
    targetFilePath = DirectoryManager.FindLatestLogFile(targetFilePath)
    LogManager.PrintLog("FileIO", "RealtimeLoader",
                        "Load file path: " + targetFilePath,
                        DefineManager.LOG_LEVEL_INFO)
    # NOTE(review): without text=True, readline() yields bytes — confirm
    # ParseShowEntity handles bytes input.
    hearthStoneLogFile = subprocess.Popen(['tail', '-F', targetFilePath],
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE)
    try:
        while True:
            logMessage = hearthStoneLogFile.stdout.readline()
            if not logMessage:
                LogManager.PrintLog("FileIO", "RealtimeLoader",
                                    "File read process ended",
                                    DefineManager.LOG_LEVEL_INFO)
                break
            DeckObserver.ParseShowEntity(logMessage, targetFilePath)
    finally:
        # The original leaked the tail process; make sure it is stopped
        # whether the loop ends normally or the parser raises.
        hearthStoneLogFile.terminate()
def GetBestSwap(playerNumber):
    """Log the best swap found for playerNumber versus the other player.

    Reads the module-level maxScore and bestCardSwap computed elsewhere.
    """
    global maxScore
    global bestCardSwap
    # The opponent of player 0 is 1 and vice versa.
    defenderNumber = (playerNumber + 1) % 2
    cardPlayerField = ', '.join(str(e) for e in bestCardSwap[playerNumber])
    cardDefenderField = ', '.join(str(e) for e in bestCardSwap[defenderNumber])
    # Fixed the missing separator: the original printed
    # "max score: 5player: ..." with the score and label fused.
    LogManager.PrintLog(
        "FieldHelper", "GetBestSwap",
        "max score: " + str(maxScore) + " player: " + cardPlayerField +
        "\ndefender: " + cardDefenderField,
        DefineManager.LOG_LEVEL_INFO)
def CrawlEPS(self):
    """Crawl the EPS value from the item-analysis fundamentals table.

    Clicks into the analysis sub-tab, switches into the "coinfo_cp"
    iframe, reads the EPS row, then restores the default frame and the
    total-info tab.  Returns None on any failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        tabSubMenu = webDriver.find_element_by_class_name(
            DefineManager.STOCK_TAB_SUB_MENUS_CLASS_NAME)
        menuItems = tabSubMenu.find_elements_by_tag_name(DefineManager.TAG_A)
        self.webCrawler.ClickElement(
            menuItems[DefineManager.ITEM_ANALYSIS_POINT])
        # The fundamentals table lives inside a separate iframe.
        subHtmlIframe = webDriver.find_element_by_id("coinfo_cp")
        webDriver = self.webCrawler.SwitchToFrame(subHtmlIframe)
        fundamentalTable = webDriver.find_element_by_class_name(
            DefineManager.FUNDAMENTAL_TABLE_CLASS_NAME)
        fundamentalRows = fundamentalTable.find_elements_by_tag_name(
            DefineManager.TAG_TR)
        fundamentalEpsRow = fundamentalRows[
            DefineManager.FUNDAMENTAL_EPS_ROW_POINT]
        fundamentalEpsStr = fundamentalEpsRow.find_elements_by_tag_name(
            DefineManager.TAG_TD)[DefineManager.TABLE_RIGHT_SIDE].text
        # Fixed copy-paste log attribution: this is CrawlEPS, not CrawlPBR.
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlEPS",
            "crawl EPS successfully: " + fundamentalEpsStr,
            DefineManager.LOG_LEVEL_INFO)
        # Restore the page state for the crawl methods that follow.
        webDriver = self.webCrawler.SwitchToDefault()
        tabSubMenu = webDriver.find_element_by_class_name(
            DefineManager.STOCK_TAB_SUB_MENUS_CLASS_NAME)
        menuItems = tabSubMenu.find_elements_by_tag_name(DefineManager.TAG_A)
        self.webCrawler.ClickElement(
            menuItems[DefineManager.TOTAL_INFO_POINT])
        return fundamentalEpsStr
    except Exception:
        # Narrowed from a bare except; failures degrade to None.
        LogManager.PrintLogMessage("CrawlBasicInfo", "CrawlEPS",
                                   "crawl EPS failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None
def DetectGameStatus(logMessage):
    """Extract [entity, playState] from a PLAYSTATE TAG_CHANGE line.

    Returns None when the line does not carry a PLAYSTATE change.
    """
    playStateMatch = re.search(
        "TAG_CHANGE Entity=(.+?) tag=PLAYSTATE value=(.+?)\n", logMessage)
    if playStateMatch is None:
        return None
    LogManager.PrintLog(
        "TagChangeEntityObserver", "DetectGameStatus",
        "entity: " + playStateMatch.group(1) +
        " value: " + playStateMatch.group(2),
        DefineManager.LOG_LEVEL_INFO)
    return [playStateMatch.group(1), playStateMatch.group(2)]
def IsGameComplete(logMessage):
    """Return True when the log line reports a final WON/LOST play state."""
    statusValue = DetectGameStatus(logMessage)
    if statusValue is None:
        return False
    if statusValue[1] in ("LOST", "WON"):
        LogManager.PrintLog(
            "TagChangeEntityObserver", "IsGameComplete",
            "End of Game, Player " + " ".join(statusValue),
            DefineManager.LOG_LEVEL_INFO)
        return True
    return False
def CrawlMarketCapitalization(self):
    """Crawl the market-capitalization text from its summary table.

    Returns the text on success, None on any failure.
    """
    try:
        webDriver = self.webCrawler.GetDriver()
        marketCapitalizationTable = webDriver.find_element_by_class_name(
            DefineManager.MARKET_CAPITALIZATION_CLASS_NAME)
        marketCapitalizationStr = marketCapitalizationTable.find_element_by_tag_name(
            DefineManager.TAG_TD).text
        LogManager.PrintLogMessage(
            "CrawlBasicInfo", "CrawlMarketCapitalization",
            "crawl market capitalization successfully: " + marketCapitalizationStr,
            DefineManager.LOG_LEVEL_INFO)
        return marketCapitalizationStr
    except Exception:
        # Narrowed from a bare except; lookup failures degrade to None.
        LogManager.PrintLogMessage("CrawlBasicInfo",
                                   "CrawlMarketCapitalization",
                                   "crawl market capitalization failed",
                                   DefineManager.LOG_LEVEL_ERROR)
        return None