def __init__(self, input_list, parent=None):
    """Build the input table with one row per scraped form input.

    Args:
        input_list: list of dicts with keys "tag", "id", "name", "class",
            "innerHTML", "original_value" and "value" (presumably one dict
            per form element found by the scraper -- confirm against the
            caller that supplies it).
        parent: optional Qt parent widget.
    """
    super(InputTable, self).__init__(parent)
    self.setColumnCount(7)
    header = ("Tag", "Id", "Name", "Class", "innerHTML",
              "Original Value", "Value")
    self.setHorizontalHeaderLabels(header)
    self.setRowCount(len(input_list))
    # Mode 1 == QHeaderView.Stretch: columns share the available width.
    self.horizontalHeader().setSectionResizeMode(1)
    self.rowcount = 0
    for inp in input_list:
        itemtype = QTableWidgetItem(inp["tag"])
        itemid = QTableWidgetItem(inp["id"])
        itemname = QTableWidgetItem(inp["name"])
        itemhtml = QTableWidgetItem(inp["innerHTML"])
        item_class = QTableWidgetItem(normalize_string(inp["class"]))
        item_original_value = QTableWidgetItem(inp["original_value"])
        # Make Tag/Id/Name read-only.  Two fixes versus the old code:
        # 1) each item clears its OWN flags (itemid and itemname used to
        #    copy itemtype's flags by mistake);
        # 2) Qt.TextEditable is NOT an ItemFlag -- it is a
        #    TextInteractionFlag whose bit value collides with
        #    Qt.ItemIsUserCheckable, so ANDing it out silently stripped
        #    checkability.  Clearing ItemIsEditable alone is correct.
        for read_only in (itemtype, itemid, itemname):
            read_only.setFlags(
                read_only.flags() & ~PyQt5.QtCore.Qt.ItemIsEditable)
        self.setItem(self.rowcount, 0, itemtype)
        self.setItem(self.rowcount, 1, itemid)
        self.setItem(self.rowcount, 2, itemname)
        self.setItem(self.rowcount, 3, item_class)
        self.setItem(self.rowcount, 4, itemhtml)
        self.setItem(self.rowcount, 5, item_original_value)
        self.setItem(self.rowcount, 6, QTableWidgetItem(inp["value"]))
        self.rowcount += 1
def execute_all_click(self):
    """Run the submit flow once in a live browser, then open a report window.

    Drives the shared scraper's browser through ``self.url`` with
    ``self.list_of_input``, waits for the URL to change (navigation after
    submit), copies the browser cookies into the scraper's requests
    session, and -- whether or not the wait timed out -- collects the
    result and shows it in a ReportWindow.
    """
    print("executed")
    description = ""
    # Imported lazily; presumably avoids a circular import with the
    # scraper module -- confirm before hoisting to module level.
    from scraper import scraper
    scraper.browser = scraper.dive_plus(self.url, self.list_of_input)
    wait = WebDriverWait(scraper.browser, GlobalPreferences.setting["timeout"])
    try:
        # Success condition: the browser navigated away from self.url.
        page_loaded = wait.until_not(
            lambda browser: browser.current_url == self.url)
        print("Page is ready!")
        # Mirror the browser's cookies into the requests session so
        # subsequent scraper.session calls are authenticated.
        cookies = scraper.browser.get_cookies()
        for cookie in cookies:
            print(cookie['name'], " : ", cookie['value'])
            scraper.session.cookies.set(cookie['name'], cookie['value'])
        # loginResult = scraper.scrape(self.expected["url_after"])
        # self.browser_shower.setText(str(loginResult))
    except TimeoutException:
        print("Timeout")
        description = "Timeout\n"
    finally:
        # Report is produced even on timeout; description records the
        # timeout so the report reflects it.
        result = {
            "url_after": scraper.browser.current_url,
            "text_found": scraper.find_text(
                GlobalPreferences.setting["expected"]["text_after"]),
            "element_found": scraper.find_element(
                GlobalPreferences.setting["expected"]["element_after"])
        }
        data = {
            "result": result,
            "expected": GlobalPreferences.setting["expected"],
            "id": str(get_uuid()),
            "date": get_today(),
            "title": "Skreepy",
            "description": description,
            "tester": GlobalPreferences.setting["tester"],
            "inputs": self.list_of_input,
            # NOTE(review): a fresh uuid per call -- unlike
            # execute_alternate, which shares one master_id across a
            # whole batch.  Confirm this single-run case is meant to get
            # its own master id.
            "master_test_id": normalize_string(util.get_uuid())
        }
        if GlobalPreferences.setting["close_browser_after_test"]:
            scraper.browser.close()
        # Lazy import, presumably to avoid a UI import cycle.
        from ui.report_window import ReportWindow
        o = ReportWindow(800, 680, data=data, parent=self)
        o.setVisible(True)
def get_master_tests(self):
    """Return every row of master_tests as a list of dicts.

    Each dict carries the keys id, test_date, tester_name and
    test_title; every value is passed through normalize_string.
    """
    sql = """ SELECT id,test_date,tester_name,test_title FROM master_tests """
    self.open_connection()
    cursor = self.get_cursor()
    rows = cursor.execute(sql).fetchall()
    cursor.close()
    columns = ("id", "test_date", "tester_name", "test_title")
    data_list = [
        {column: normalize_string(value)
         for column, value in zip(columns, row)}
        for row in rows
    ]
    self.close_connection()
    return data_list
def insert_inputs(self, test_id, input_data):
    """Persist one scraped input row, linked to *test_id*.

    A fresh uuid is generated for the row's primary key; every value is
    passed through normalize_string before binding.
    """
    sql = """ INSERT INTO test_inputs(id, test_id, tag, input_id,name,inner_html,original_value,value,class) VALUES (?,?,?,?,?,?,?,?,?) """
    raw_values = (
        util.get_uuid(),
        test_id,
        input_data["tag"],
        input_data["id"],
        input_data["name"],
        input_data["innerHTML"],
        input_data["original_value"],
        input_data["value"],
        input_data["class"],
    )
    bound = tuple(normalize_string(value) for value in raw_values)
    self.open_connection()
    cursor = self.get_cursor()
    cursor.execute(sql, bound)
    self.commit()
    cursor.close()
    self.close_connection()
def insert_data(self, data):
    """Append one row to the table from a scraped-input dict.

    Args:
        data: dict with keys "tag", "id", "name", "class", "innerHTML",
            "original_value" and "value" (same shape consumed by the
            table's constructor).
    """
    rowPosition = self.rowCount()
    self.insertRow(rowPosition)
    itemtype = QTableWidgetItem(data["tag"])
    itemid = QTableWidgetItem(data["id"])
    itemname = QTableWidgetItem(data["name"])
    item_class = QTableWidgetItem(normalize_string(data["class"]))
    itemhtml = QTableWidgetItem(data["innerHTML"])
    item_original_value = QTableWidgetItem(data["original_value"])
    # Make Tag/Id/Name read-only.  Two fixes versus the old code:
    # 1) each item clears its OWN flags (itemid and itemname used to
    #    copy itemtype's flags by mistake);
    # 2) Qt.TextEditable is NOT an ItemFlag -- it is a
    #    TextInteractionFlag whose bit value collides with
    #    Qt.ItemIsUserCheckable, so ANDing it out silently stripped
    #    checkability.  Clearing ItemIsEditable alone is correct.
    for read_only in (itemtype, itemid, itemname):
        read_only.setFlags(
            read_only.flags() & ~PyQt5.QtCore.Qt.ItemIsEditable)
    self.setItem(rowPosition, 0, itemtype)
    self.setItem(rowPosition, 1, itemid)
    self.setItem(rowPosition, 2, itemname)
    self.setItem(rowPosition, 3, item_class)
    self.setItem(rowPosition, 4, itemhtml)
    self.setItem(rowPosition, 5, item_original_value)
    self.setItem(rowPosition, 6, QTableWidgetItem(data["value"]))
def execute_alternate(self):
    """Run the submit flow once per input combination and show a batch report.

    For each combination produced from ``self.list_of_input``, a fresh
    Scraper and browser are created, driven through ``self.url``, and the
    outcome is recorded.  All per-combination results share one
    ``master_test_id`` and are shown together in a MasterReportWindow.
    """
    input_combinations = Combination(
        self.list_of_input).get_result_reversed()
    master_data = []
    # One master id for the whole batch (contrast with execute_all_click,
    # which mints a fresh one per run).
    master_id = normalize_string(util.get_uuid())
    for com in input_combinations:
        description = ""
        scr = Scraper()
        browser = scr.dive_plus(self.url, com)
        wait = WebDriverWait(browser, GlobalPreferences.setting["timeout"])
        try:
            # Success condition: the browser navigated away from self.url.
            page_loaded = wait.until_not(
                lambda browser: browser.current_url == self.url)
        except TimeoutException:
            print("Timeout")
            description = "Timeout\n"
        finally:
            # Result is captured even on timeout; description records it.
            result = {
                "url_after": browser.current_url,
                "text_found": scr.find_text_in_browser(
                    GlobalPreferences.setting["expected"]["text_after"]),
                "element_found": scr.find_element_in_browser(
                    GlobalPreferences.setting["expected"]["element_after"])
            }
            data = {
                "result": result,
                "expected": GlobalPreferences.setting["expected"],
                "id": str(get_uuid()),
                "date": get_today(),
                "title": "Skreepy",
                "description": description,
                "tester": GlobalPreferences.setting["tester"],
                "inputs": com,
                "master_test_id": master_id
            }
            master_data.append(data)
            # Browser is closed unconditionally here, unlike
            # execute_all_click which honours close_browser_after_test.
            browser.close()
    MasterReportWindow(master_data, self).show()