def runGraph(self):
    controller = ControllerManager().getController(self.controllerKey)
    self.uuidIndex = {node.uuid: node for node in controller.nodes}
    data = controller.getData()
    config_data = single_file_export(data)
    if self.process and self.process.is_alive():
        self.killChildProcess()
    # Start the crawler child process
    from run import crawl
    self.parent_conn.send(self.spinBoxValue)  # module interval time
    self.process = multiprocessing.Process(target=crawl, args=(config_data, self.child_conn))
    self.startSlot()  # initial setup
    self.process.start()
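# --- Illustrative sketch only (assumed wiring, not part of the original file) ---
# runGraph() above sends self.spinBoxValue through self.parent_conn and passes
# self.child_conn to the crawl() child process. One plausible way to create that
# connection pair is a standard multiprocessing.Pipe; the crawl_stub name below
# is hypothetical and only stands in for the real crawl() entry point.
import multiprocessing

def crawl_stub(config_data, conn):
    # Child side: receive the module interval sent by the GUI before crawling.
    interval = conn.recv()
    print('crawl stub got interval:', interval, 'config:', config_data)

if __name__ == '__main__':
    parent_conn, child_conn = multiprocessing.Pipe()  # duplex pipe: GUI keeps parent_conn
    proc = multiprocessing.Process(target=crawl_stub, args=({'demo': True}, child_conn))
    proc.start()
    parent_conn.send(5)  # module interval time
    proc.join()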
def runGraph(self):
    controller = ControllerManager().getController(self.controllerKey)
    data = controller.getData()
    config_data = single_file_export(data)
    # self.thread = CrawlThread(config_data)  # create thread
    # self.thread.started.connect(lambda: print('=========== Starting Crawl =========='))
    # self.thread.start()
    # self.thread.finished.connect(lambda: print("============ Done ================"))
    # Start the crawler child process
    from crawler_graph.crawler import crawl
    import multiprocessing
    try:
        process = multiprocessing.Process(target=crawl, args=(config_data,))
        process.start()
    except Exception as e:
        print(f'Crawler Error: {e}')
    # Start the crawler script as an external QProcess
    obj_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'crawler_graph', 'TestOne.py')
    # obj_file = resource_path(obj_file)
    self.process = QProcess(self)
    self.process.readyReadStandardOutput.connect(self.stdoutReady)
    self.process.readyReadStandardError.connect(self.stderrReady)
    start_time = datetime.now()  # needed by the finished handler below
    self.process.started.connect(
        lambda: print('********* Started! **********'))
    self.process.finished.connect(
        lambda: print('********** Finished! *** Timer: {} *********'.format(datetime.now() - start_time)))
    self.process.start('python', [obj_file, str(config_data)])
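# --- Illustrative sketch only (assumed implementation, not from the original file) ---
# runGraph() above connects readyReadStandardOutput / readyReadStandardError to
# self.stdoutReady / self.stderrReady, which are not shown here. Minimal handlers
# for those slots, assuming self.process is the QProcess created above, could
# look like this:
def stdoutReady(self):
    # Decode whatever the child python process has written to stdout so far.
    text = bytes(self.process.readAllStandardOutput()).decode('utf-8', errors='replace')
    print(text, end='')

def stderrReady(self):
    # Surface crawler errors in the host application's console.
    text = bytes(self.process.readAllStandardError()).decode('utf-8', errors='replace')
    print('[stderr]', text, end='')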
def getScriptJsonData(self):
    controller = ControllerManager().getController(self.controllerKey)
    data = controller.getData()
    return data