def get_page_info(plan, app, device):
    """Dump the current UI hierarchy from the device and parse it into a DOM.

    NOTE(review): this definition is superseded by a later, more complete
    `get_page_info` in this same module (the later `def` wins at import
    time); its defects are repaired here anyway so the file parses and the
    function is usable on its own.

    Parameters:
        plan: crawl plan object; only ``plan.logPath`` is used here.
        app: application descriptor (unused in this version).
        device: device handle providing ``logPath``, ``beginCrawlTime`` and
            UI actions.

    Returns:
        The (still empty) ``PageInfo.Page``, or ``None`` when the 'Limit'
        time budget is exhausted.
    """
    # Abort the whole crawl once the configured time budget is spent.
    if Setting.TimeModel == 'Limit':
        time_now = datetime.datetime.now()
        if (time_now - device.beginCrawlTime).seconds > (Setting.LimitTime * 60):
            Saver.save_crawler_log_both(
                plan.logPath, device.logPath,
                "Step : crawl time out , finish crawl.")
            del time_now
            return None
    Saver.save_crawler_log(device.logPath, "get all nodes in this page")
    page = PageInfo.Page()
    result = False
    # BUG FIX: was `time = 0`, which shadowed the stdlib `time` module used
    # elsewhere in this file; renamed to `attempts`.
    attempts = 0
    while not result:
        try:
            if attempts > 2:
                # Too many failures: press BACK (the foreground window may
                # be un-dumpable) and take one last dump before giving up.
                appController.click_back(device)
                get_uidump_xml_file(device)
                break
            get_uidump_xml_file(device)
            dom = xml.dom.minidom.parse(device.logPath + '/Uidump.xml')
            result = True
        # BUG FIX: was Python-2-only `except Exception, e`, a SyntaxError on
        # Python 3; the rest of the file already uses the `as` form.
        except Exception as e:
            attempts += 1
            print(str(e))
            result = False
    # BUG FIX: the original fell off the end and implicitly returned None
    # even on success; return the page so callers receive a usable value.
    return page
def init_application(plan, app, device):
    """Bring the application into a crawlable state.

    Launches the app (when ``Setting.RunInitNodes`` is on), pages left
    through the launcher until a screen with clickable nodes appears, runs
    the init-node crawl on it, optionally runs the init cases, and finally
    clears the pending node queue.
    """
    Saver.save_crawler_log_both(plan.logPath, device.logPath, "Step : init application")
    if Setting.RunInitNodes:
        appController.start_activity(device, app.packageName, app.launcherActivity)
        launcher_page = PageInfo.Page()
        # Keep paging left until a screen with clickable nodes shows up.
        while True:
            launcher_page = pageController.get_page_info(plan, app, device)
            if launcher_page.clickableNodesNum != 0:
                Saver.save_crawler_log_both(plan.logPath, device.logPath, 'stop scroll')
                break
            Saver.save_crawler_log_both(plan.logPath, device.logPath, 'scroll to left')
            appController.drag_screen_to_left(device)
        Saver.save_crawler_log_both(plan.logPath, device.logPath, 'Step : init nodes run begin')
        crawl_init_nodes(plan, app, device, launcher_page)
        del launcher_page
    if Setting.RunInitCase:
        run_init_cases(plan, app, device)
    # When we go into mainActivity its nodes get added to
    # device.unCrawledNodes; if the main nodes were crawled already, those
    # entries cannot be re-added to the page after restarting mainActivity
    # and would leave an un-crawlable page — so reset the queue here.
    device.unCrawledNodes = []
    del plan, app, device
def crawl_init_nodes(plan, app, device, page_before_run):
    """Run the "init nodes" pass over *page_before_run*.

    When the page is already the target app's main activity nothing is
    crawled and the page is handed back unchanged; otherwise every
    clickable, long-clickable and EditText node on it is exercised and the
    page the device ends up on is returned.
    """
    Saver.save_crawler_log_both(plan.logPath, device.logPath, "Step : run init nodes")
    device.update_uncrawled_nodes(page_before_run)
    # Guard clause: nothing to do when we are already on the main activity
    # of the target package (same condition as the original, De Morgan'd).
    if (page_before_run.currentActivity == app.mainActivity
            and page_before_run.package == app.packageName):
        Saver.save_crawler_log_both(plan.logPath, device.logPath, 'Is in ' + app.mainActivity)
        del plan, app, device
        return page_before_run
    landing_page = PageInfo.Page()
    if page_before_run.clickableNodesNum != 0:
        device.update_crawl_page(page_before_run.nodesInfoList)
    # Click, long-click and text-entry passes, in that order; each pass
    # replaces landing_page with wherever the device ended up.
    if page_before_run.clickableNodesNum > 0:
        landing_page = crawl_clickable_nodes(plan, app, device, page_before_run, landing_page, True)
    if page_before_run.longClickableNodesNum > 0:
        landing_page = crawl_longclickable_nodes(plan, app, device, page_before_run, landing_page, True)
    if page_before_run.editTextsNum > 0:
        landing_page = crawl_edittext(plan, app, device, page_before_run, landing_page, False)
    del plan, app, device, page_before_run
    return landing_page
def get_page_info(plan, app, device):
    """Dump the device's current UI hierarchy and build a Page of its nodes.

    Parameters:
        plan: crawl plan object; only ``plan.logPath`` is used here.
        app: application descriptor; ``app.firstClickViews`` lists
            resource-ids that must be tapped as soon as they are seen.
        device: device handle providing ``logPath``, ``beginCrawlTime``,
            screenshot saving and UI actions.

    Returns:
        ``None`` only when the 'Limit' time budget is spent; otherwise a
        ``PageInfo.Page`` (possibly empty if dumping/parsing failed).
    """
    # Stop crawling altogether once the configured time budget is exceeded.
    if Setting.TimeModel == 'Limit':
        time_now = datetime.datetime.now()
        if (time_now - device.beginCrawlTime).seconds > (Setting.LimitTime * 60):
            Saver.save_crawler_log_both(
                plan.logPath, device.logPath,
                "Step : crawl time out , finish crawl.")
            del plan, app, device, time_now
            return None
    Saver.save_crawler_log(device.logPath, "get all nodes in this page")
    page = PageInfo.Page()
    result = False
    t = 0  # number of failed dump/parse attempts so far
    # Retry the UI dump up to 3 times; after that press BACK (the foreground
    # window may be un-dumpable), take one last dump and bail out.
    while not result:
        try:
            if t > 2:
                Saver.save_crawler_log(
                    device.logPath,
                    "get page error after 3 times , click back .")
                appController.click_back(device)
                time.sleep(1)
                get_uidump_xml_file(device)
                # NOTE(review): breaking here without parsing can leave
                # `dom` unbound below; the second try/except then absorbs
                # the resulting NameError and the empty page is returned.
                break
            get_uidump_xml_file(device)
            dom = xml.dom.minidom.parse(device.logPath + '/Uidump.xml')
            result = True
        except Exception as e:
            t += 1
            print(str(e))
            result = False
    try:
        root = dom.documentElement
        nodes = root.getElementsByTagName('node')
        Saver.save_crawler_log(device.logPath, len(nodes))
        info = get_top_activity_info(device)
        for node in nodes:
            n = NodeInfo.Node(node)
            n.update_current_activity(info['activity'])
            # Views listed in firstClickViews are screenshotted, tapped
            # immediately, and the page is re-read recursively after the
            # tap (the recursion can return None on timeout; the outer
            # except below would then catch the AttributeError).
            if n.resource_id in app.firstClickViews:
                device.save_screen(n, False)
                appController.tap_node(device, n)
                page = get_page_info(plan, app, device)
            # NOTE(review): placement assumed from the flattened source —
            # every node is added to the page, not only firstClickViews
            # ones; confirm against the original indentation.
            page.add_node(device, app, n)
            del node, n
        page = appController.close_sys_alert(plan, app, device, page)
        del result, dom, root, nodes, info, plan, app, device, t
        return page
    except Exception as e:
        # Best-effort: any parse/build failure is logged to stdout and the
        # page built so far (possibly empty) is returned instead of raising.
        print(str(e))
        del plan, app, device, t
        return page
def crawl_main_nodes(plan, app, device, page_before_run):
    """Exercise all interactive nodes on a main-flow page.

    Skips the work entirely when ``pageController.page_is_crawlable``
    rejects the page; always returns the page the device is left on (a
    fresh empty Page when nothing ran).
    """
    device.update_uncrawled_nodes(page_before_run)
    landing_page = PageInfo.Page()
    if pageController.page_is_crawlable(app, device, page_before_run):
        device.update_crawl_page(page_before_run.nodesInfoList)
        # Click, long-click and text-entry passes, in that order; each pass
        # replaces landing_page with wherever the device ended up.
        if page_before_run.clickableNodesNum > 0:
            landing_page = crawl_clickable_nodes(plan, app, device, page_before_run, landing_page, False)
        if page_before_run.longClickableNodesNum > 0:
            landing_page = crawl_longclickable_nodes(plan, app, device, page_before_run, landing_page, False)
        if page_before_run.editTextsNum > 0:
            landing_page = crawl_edittext(plan, app, device, page_before_run, landing_page, False)
    del plan, app, device, page_before_run
    return landing_page