def test_tasks_finished_some_failed(self):
    """A finished batch with two failures re-queues the failed tasks."""
    # Arrange
    statuses = [
        TaskMessageType.TASK_PROCESSED,
        TaskMessageType.TASK_FAILED,
        TaskMessageType.TASK_FAILED,
        TaskMessageType.TASK_PROCESSED,
    ]
    tasks = []
    for task_id, status in enumerate(statuses, start=1):
        task = Task(task_id, "", [""], None, "", "")
        task.message_type = status
        task.job_id = 1234
        self.task_manager.in_progress[task.task_id] = ConnectedTask(task, "")
        tasks.append(task)

    # Act
    self.task_manager.tasks_finished(tasks)

    # Assert
    assert self.task_manager.finished_tasks.qsize() == 2
    assert self.task_manager.status_manager.status.num_tasks_done == 2
    assert not self.task_manager.status_manager.is_job_done()
    assert len(self.task_manager.in_progress) == 0
    # The two failed tasks go back to the available queue, FIFO order.
    assert self.task_manager.available_tasks.qsize() == 2
    assert self.task_manager.available_tasks.get() == tasks[1]
    assert self.task_manager.available_tasks.get() == tasks[2]
def add_new_available_task(self, task: Task, job_id: int):
    """
    Put *task* on the Available Tasks queue, tagged with *job_id*.

    :param task: task to make available
    :param job_id: job the task belongs to
    :return:
    """
    # Mark the task as raw (unprocessed) and bind it to its job before queueing.
    task.set_job(job_id)
    task.set_message_type(TaskMessageType.TASK_RAW)
    self.available_tasks.put(task)
    logger.log_trace(f'{self.log_prefix}New Available Task {task.task_id}')
def test_task_finished_queue(self):
    """A processed in-progress task lands on the finished_tasks queue."""
    # Arrange
    done = Task(1, "", [""], None, "", "")
    done.job_id = 1234
    done.message_type = TaskMessageType.TASK_PROCESSED
    self.task_manager.in_progress[done.task_id] = ConnectedTask(done, "")
    # Act
    self.task_manager.task_finished(done)
    # Assert
    assert self.task_manager.finished_tasks.qsize() == 1
    assert self.task_manager.finished_tasks.get() == done
def test_new_available_tasks(self):
    """add_new_available_tasks enqueues every task in submission order."""
    # Arrange
    fresh = [Task(task_id, "", [""], None, "", "") for task_id in (1, 2)]
    for task in fresh:
        task.job_id = 1234
    # Act
    self.task_manager.add_new_available_tasks(fresh, 1234)
    # Assert
    assert self.task_manager.available_tasks.qsize() == 2
    assert self.task_manager.available_tasks.get() == fresh[0]
    assert self.task_manager.available_tasks.get() == fresh[1]
def get_tasks(job_id: int, num_tasks: int):
    """
    Fetch up to *num_tasks* tasks from the queue and return them,
    pickled and compressed, as a binary response to the slave.
    :param job_id: Integer - id of the job the slave is working on
    :param num_tasks: Integer - maximum number of tasks to hand out
    :return Any: binary response on success, error Response otherwise
    """
    try:
        # The connection id travels in a cookie set by the connecting slave.
        conn_id = request.cookies.get('id')
        job_check(job_id)
        tasks: List[Task] = self.task_manager.connect_available_tasks(
            num_tasks, conn_id)
        # Serialize then compress before shipping over the wire.
        pickled_tasks = pickle_dumps(tasks)
        compressed_data = compress(pickled_tasks)
        return create_binary_resp(compressed_data,
                                  f'tasks_job_{self.job.job_id}')
    except NoMoreTasks:
        # Nothing left to hand out: either the whole job is done (send a
        # JOB_END sentinel task) or retrieval genuinely failed.
        if self.status_manager.is_job_done():
            job_finished_task = Task(-1, "", [], None, "", "")
            job_finished_task.set_message_type(TaskMessageType.JOB_END)
            pickled_tasks = pickle_dumps([job_finished_task])
            compressed_data = compress(pickled_tasks)
            return create_binary_resp(compressed_data,
                                      f'job_{self.job.job_id}_done')
        logger.log_error('Unable to retrieve tasks from manager')
        return Response(status=500)
    except JobNotInitialized:
        return Response(response="Job Not Initialized", status=403)
    except WrongJob:
        # The slave asked a master that does not own this job.
        return Response(response="Wrong Master", status=403)
    except PicklingError as error:
        logger.log_error(f'Unable to pickle tasks\n{error}')
        return Response(status=500)
    except CompressionException as error:
        logger.log_error(f'Unable to compress pickled tasks\n{error}')
        return Response(status=500)
    except Exception as error:
        # Catch-all boundary: log the unexpected error and answer 501.
        logger.log_error(f'{type(error)} {error}')
        return Response(status=501)
def start_task():
    """Handle a task-execution request: build the Task, run its worker,
    and report a Result. Returns a (body, status) tuple for the framework.
    """
    try:
        task = Task.from_request(request)
    except (BadRequestError, TooEarlyError) as e:
        return e.message, e.code
    worker_class = workers.find(task.worker_class)
    # Copy so global settings can be merged in without mutating the task.
    worker_params = task.worker_params.copy()
    for setting in worker_class.GLOBAL_SETTINGS:
        worker_params[setting] = task.general_settings[setting]
    worker = worker_class(worker_params, task.pipeline_id, task.job_id)
    try:
        workers_to_enqueue = worker.execute()
    except WorkerException as e:
        # Known failure type: report an unsuccessful result, no retry.
        worker.log_error('Execution failed: %s: %s', e.__class__.__name__, e)
        result = Result(task.name, task.job_id, False)
        result.report()
    except Exception as e:  # pylint: disable=broad-except
        # Unexpected failure: re-enqueue for retry until MAX_ATTEMPTS,
        # then give up and report an unsuccessful result.
        worker.log_error('Unexpected error %s', format_exc())
        if task.attempts < worker.MAX_ATTEMPTS:
            task.reenqueue()
        else:
            worker.log_error('Giving up after %i attempt(s)', task.attempts)
            result = Result(task.name, task.job_id, False)
            result.report()
    else:
        # Success path: report the result with any follow-up workers to enqueue.
        result = Result(task.name, task.job_id, True, workers_to_enqueue)
        result.report()
    # Always 200 so the task queue does not retry on its own; retries are
    # driven explicitly via task.reenqueue() above.
    return 'OK', 200
def GetTaskFile(self, request, context):
    """gRPC handler: return the script file associated with the requested task.

    Rejects task_id == 0 with an error response; otherwise reads the task's
    script from disk and returns its bytes.
    """
    request_task = request.task
    # Rebuild a domain Task from the incoming protobuf message.
    task: Task = Task(task_id=request_task.task_id,
                      name=request_task.name,
                      create_time=request_task.create_time,
                      start_time=request_task.start_time,
                      end_time=request_task.end_time,
                      union_train=request_task.union_train,
                      edge_nodes=request_task.edge_nodes,
                      file=request_task.file,
                      status=0)
    if request_task.task_id == 0:
        return task_runtime_pb2.GetTaskFileResp(
            resp=task_runtime_pb2.Response(
                code=10001, message="task id can't be empty"),
            script=b'',
            config=b'')
    file_path = get_script_path(task)
    # Fix: use a context manager so the file handle is closed even if
    # read() raises (the original open/read/close leaked on exceptions).
    with open(file_path, 'rb') as script_file:
        file_bytes = script_file.read()
    return task_runtime_pb2.GetTaskFileResp(resp=task_runtime_pb2.Response(
        code=0, message='success'),
                                            script=file_bytes,
                                            config=b'')
def main():
    """Run a measurement *times_per_minute* times, pacing one minute total.

    argv: [1] runs per minute, [2] '|'-joined sub-command, [3] operation
    string, [4] binary selector (scamper vs dig).
    """
    times_per_minute = int(sys.argv[1])
    # Parse argv once; these values do not change between iterations.
    sub_cmd = sys.argv[2].split("|")
    operation_str = sys.argv[3]
    binary = sys.argv[4]
    run_measurement = run_scamper if binary == SCAMPER_BINARY else run_dig
    # Fix: the original created a new Client on every iteration but closed
    # only the last one (its own comment flagged this). One client suffices.
    client = Client(address)
    for _ in range(times_per_minute):
        start = time.time()
        task = Task(str(uuid.uuid4()))
        run_measurement(task, sub_cmd)
        # Sleep the remainder of this run's time slice (never negative).
        time.sleep(max(60 / times_per_minute - int(time.time() - start), 0))
        end_task(operation_str, task, client)
    client.close()
def start(self):
    """Main transmit loop: read finished (operation, task) pairs from the
    communicator and forward them to the server until a STOP message arrives.
    """
    data = self.communicator.read_transmit()
    # data[0] is the message kind; data[1]/data[2] carry the payload dicts.
    while data[0] != STOP:
        operation_data = data[1]
        task_data = data[2]
        task = Task(task_data["code"])
        operation = Operation(
            operation_data["id"], operation_data["params"],
            operation_data["credits"], operation_data["cron"],
            operation_data["times_per_minute"], operation_data["stop_time"],
            operation_data["binary"])
        print("Got finished task: ", task.code, " for operation: ",
              operation.id)

        def ack(operation_id):
            # If operation was successfully saved in the server
            # NOTE(review): closes over the current `operation`/`task`; if
            # send_results invokes this callback asynchronously after the
            # loop advances, late binding would ack the wrong pair — confirm.
            if operation_id == operation.id:
                print("Successfully sent task: ", task.code,
                      " for operation: ", operation.id)
                self.communicator.sent_task(operation, task)

        # Send operation to server
        self.send_results(operation, task, ack)
        data = self.communicator.read_transmit()
    print("Transmit manager ending its work...")
def StopTask(self, request, context):
    """Persist the stop time of a task and acknowledge success."""
    stopped = Task(
        task_id=request.task_id,
        end_time=request.stop_time,
    )
    self.db.stop_task(stopped)
    return data_manager_pb2.StopTaskResp(
        resp=data_manager_pb2.Response(code=0, message="success"))
def test_new_available_task(self):
    """add_new_available_task puts the task on the available queue."""
    # Arrange
    fresh = Task(1, "", [""], None, "", "")
    # Act
    self.task_manager.add_new_available_task(fresh, 1234)
    # Assert
    assert self.task_manager.available_tasks.qsize() == 1
    assert self.task_manager.available_tasks.get() == fresh
def test_task_finished_in_progress(self):
    """Finishing one of two connected tasks leaves the other in progress."""
    # Arrange
    done = Task(1, "", [""], None, "", "")
    still_running = Task(2, "", [""], None, "", "")
    for candidate in (done, still_running):
        self.task_manager.add_new_available_task(candidate, 1234)
    self.task_manager.connect_available_tasks(2, "")
    done.message_type = TaskMessageType.TASK_PROCESSED
    # Act
    self.task_manager.task_finished(done)
    # Assert
    assert len(self.task_manager.in_progress) == 1
    assert self.task_manager.in_progress[
        still_running.task_id].task == still_running
    assert self.task_manager.finished_tasks.qsize() == 1
    assert self.task_manager.finished_tasks.get() == done
    assert self.task_manager.status_manager.status.num_tasks_done == 1
    assert not self.task_manager.status_manager.is_job_done()
def test_connect_available_task_no_available_tasks(self):
    """Connecting with an empty available queue raises NoMoreAvailableTasks."""
    # Arrange — one task already in progress, none available.
    busy = ConnectedTask(Task(1, "", [""], None, "", ""), "")
    self.task_manager.in_progress[busy.task.task_id] = busy
    # Act & Assert
    with pytest.raises(NoMoreAvailableTasks):
        assert self.task_manager.connect_available_task("")
def test_tasks_finished(self):
    """A fully processed batch marks the whole job as done."""
    # Arrange
    done = []
    for task_id in (1, 2, 3):
        task = Task(task_id, "", [""], None, "", "")
        task.job_id = 1234
        task.message_type = TaskMessageType.TASK_PROCESSED
        self.task_manager.in_progress[task.task_id] = ConnectedTask(task, "")
        done.append(task)
    # Act
    self.task_manager.tasks_finished(done)
    # Assert
    assert self.task_manager.finished_tasks.qsize() == 3
    assert self.task_manager.status_manager.status.num_tasks_done == 3
    assert self.task_manager.status_manager.is_job_done()
def test_flush_finished_tasks(self):
    """flush_finished_tasks drains and returns every finished task."""
    # Arrange
    tasks = []
    for task_id in (1, 2, 3):
        task = Task(task_id, "", [""], None, "", "")
        task.set_job(1234)
        task.message_type = TaskMessageType.TASK_PROCESSED
        self.task_manager.in_progress[task.task_id] = ConnectedTask(task, "")
        tasks.append(task)
    self.task_manager.tasks_finished(tasks.copy())
    # Act
    flushed = self.task_manager.flush_finished_tasks()
    # Assert
    assert flushed == tasks
def FinishTask(self, request, context):
    """Record a task's finish time in the database and acknowledge."""
    print("finish task")
    finished = Task(
        task_id=request.task_id,
        end_time=request.finish_time,
    )
    self.db.finish_task(finished)
    return data_manager_pb2.FinishTaskResp(
        resp=data_manager_pb2.Response(code=0, message="success"))
def StartTask(self, request, context):
    """Record a task's start time in the database and acknowledge."""
    print("start task:" + str(request.task_id))
    print("time:" + str(request.start_time))
    started = Task(
        task_id=request.task_id,
        start_time=request.start_time,
    )
    self.db.start_task(started)
    return data_manager_pb2.StartTaskResp(
        resp=data_manager_pb2.Response(code=0, message="success"))
def test_connect_available_tasks_less_than_available(self):
    """Requesting fewer tasks than available connects only that many."""
    # Arrange
    first = Task(1, "", [""], None, "", "")
    second = Task(2, "", [""], None, "", "")
    self.task_manager.add_new_available_task(first, 1234)
    self.task_manager.add_new_available_task(second, 1234)
    connection_id = "conn_1"
    expected = ConnectedTask(first, connection_id)
    # Act
    tasks = self.task_manager.connect_available_tasks(1, connection_id)
    # Assert
    assert first in tasks
    assert second not in tasks
    assert len(self.task_manager.in_progress) == 1
    actual = self.task_manager.in_progress.get(first.task_id)
    assert actual.task == expected.task
    assert actual.connection_id == expected.connection_id
def test_update_status(self):
    """End-to-end status update: insert a job with one task, update both
    statuses, and verify they read back correctly via every accessor."""
    # Remove previous jobs & tasks so the test starts from a clean slate.
    self.manager.remove_all_jobs_and_tasks()
    # Create a sample task (ooanalyzer invocation with typical arguments).
    program_name = "ooanalyzer"
    input_file_args = {
        "-f": "oo.exe"
    }
    input_text_args = {
        "--timeout": "300"
    }
    input_flag_args = [
        "-v",
    ]
    output_file_args = {
        "-j": "output.json",
        "-F": "facts",
        "-R": "results"
    }
    new_task = Task()
    new_task.program_name = program_name
    new_task.input_file_args = input_file_args
    new_task.input_text_args = input_text_args
    new_task.input_flag_args = input_flag_args
    new_task.output_file_args = output_file_args
    # Create a sample job wrapping the task.
    job_name = "Test_job"
    job_comment = "Just for test"
    created_time = datetime.now()
    new_job = Job(job_name, job_comment, created_time)
    new_job.tasks = [new_task]
    # Insert the sample job with the sample task.
    job_id, tasks_id = self.manager.insert_new_job(new_job)
    task_id = tasks_id[0]
    # Update the statuses — deliberately different values for job and task
    # so a mix-up between the two update paths would be caught below.
    self.manager.update_job_status(job_id, Status.Failed)
    self.manager.update_task_status(task_id, Status.Successful)
    # Retrieve the status of the task (get_status returns the enum *name*).
    self.assertEqual(self.manager.get_status(task_id),
                     Status.Successful.name)
    # Rebuild the job object and check the status.
    rebuilt_job = self.manager.db_manager.get_job_by_id_without_tasks(job_id)
    self.assertEqual(rebuilt_job.status, Status.Failed)
    # Rebuild the task object and check the status.
    rebuilt_task = self.manager.db_manager.get_task_by_id(task_id)
    self.assertEqual(rebuilt_task.status, Status.Successful)
    # Remove the inserted job & task after the test.
    self.manager.remove_all_jobs_and_tasks()
def test_connection_dropped(self):
    """Dropping a connection returns its tasks to the available queue."""
    # Arrange
    conn_a = "connection_1"
    conn_b = "connection_2"
    first, second, third = (Task(task_id, "", [""], None, "", "")
                            for task_id in (1, 2, 3))
    for task in (first, second, third):
        self.task_manager.add_new_available_task(task, 1234)
    self.task_manager.connect_available_tasks(2, conn_a)
    self.task_manager.connect_available_task(conn_b)
    # Act
    self.task_manager.connection_dropped(conn_a)
    # Assert — only conn_b's task stays in progress; conn_a's two tasks
    # are re-queued in their original order.
    assert len(self.task_manager.in_progress) == 1
    assert self.task_manager.in_progress[third.task_id].task == third
    assert self.task_manager.available_tasks.qsize() == 2
    assert self.task_manager.available_tasks.get(
    ).task_id == first.task_id
    assert self.task_manager.available_tasks.get(
    ).task_id == second.task_id
def test_connect_available_tasks(self):
    """Requesting more tasks than available connects everything queued."""
    # Arrange
    first = Task(1, "", [""], None, "", "")
    second = Task(2, "", [""], None, "", "")
    self.task_manager.add_new_available_task(first, 1234)
    self.task_manager.add_new_available_task(second, 1234)
    connection_id = "conn_1"
    expected_first = ConnectedTask(first, connection_id)
    expected_second = ConnectedTask(second, connection_id)
    # Act — ask for 3 although only 2 exist.
    tasks = self.task_manager.connect_available_tasks(3, connection_id)
    # Assert
    assert first in tasks
    assert second in tasks
    assert len(self.task_manager.in_progress) == 2
    assert self.task_manager.in_progress[
        first.task_id].task == expected_first.task
    assert self.task_manager.in_progress[
        second.task_id].task == expected_second.task
    assert self.task_manager.in_progress[
        first.task_id].connection_id == connection_id
    assert self.task_manager.in_progress[
        second.task_id].connection_id == connection_id
class ControllerWorkload(WorkloadStorable):
    '''
    Workload management through the Controller (master node).
    '''
    def __init__(self, host, sources = None):
        # Pooled HTTP client towards the controller host.
        self.__client = HttpClientPool(host, timeout = 1000, maxsize = 10)
        self.__sources = sources
        self.__sem = threading.Semaphore()
        self.__tasks = []
        self.__tasks_status = []
        #self.__timer = timer.Timer(TASK_TIME_SPAN, self.get_workloads)
        #self.__timer.start()
        # Periodically report completed workloads back to the master.
        self.__timer2 = timer.Timer(COMPLETE_TIME_SPAN, self.complete_workloads)
        self.__timer2.start()

    def add_workload(self, task):
        # No-op: workloads are pulled from the master, never pushed in.
        pass

    def get_workloads(self):
        '''
        Fetch a batch of workloads from the master.
        Polled every TASK_TIME_SPAN seconds, topping up to TASK_COUNT tasks.
        Returns None when already full, False on fetch/parse error,
        True on success.
        '''
        task_length = TASK_COUNT - len(self.__tasks)
        if task_length <= 0 :
            return None
        logger.info('Need %d New Tasks'%task_length)
        url = "/workload?count=" + str(task_length)
        result = self.__client.get(url)
        if result == None or result == []:
            return False
        try:
            # NOTE(review): eval() on the controller's response executes
            # arbitrary expressions — this assumes a fully trusted master;
            # json.loads/ast.literal_eval would be safer. Confirm trust model.
            result = result.strip('\0').strip()
            tasks = eval(result)
        except Exception,e:
            logger.info('GET TASKS ERROR: '+str(e))
            return False
        logger.info('Get %d New Tasks From Master'%len(tasks))
        for task in tasks:
            #logger.info("parse string is : %s" % str(task))
            # Round-trip each entry through JSON into the Task domain object.
            self.__tasks.append(Task.parse(json.dumps(task)))
        return True
def test_connect_available_task(self):
    """Connecting a single task returns it and records the connection."""
    # Arrange
    fresh = Task(1, "", [""], None, "", "")
    self.task_manager.add_new_available_task(fresh, 1234)
    connection_id = "conn_1"
    expected = ConnectedTask(fresh, connection_id)
    # Act
    task = self.task_manager.connect_available_task(connection_id)
    # Assert
    assert fresh == task
    assert len(self.task_manager.in_progress) == 1
    connected = self.task_manager.in_progress.get(fresh.task_id)
    assert connected.task == expected.task
    assert connected.connection_id == expected.connection_id
def setUp(self):
    """Submit a one-task dummy job and remember its ids and timestamp."""
    print("Test with command\n%s\n" % self.command_line_input)
    print("Submitting job...")
    now = datetime.now()
    job = Job(self.job_name, self.comments, now)
    job.tasks = [Task(self.location, self.tool_type, self.command_line_input)]
    self.job_id, self.tasks_id = JobManager().submit_job(job)
    # Truncate to millisecond precision — presumably matching the storage
    # granularity used when the timestamp is read back for comparison.
    self.now = now.replace(microsecond=now.microsecond // 1000 * 1000)
def AddTask(self, request, context):
    """Insert the task from *request* into the DB; reply with its new id."""
    print(request.task)
    t: Task = request.task
    # Re-wrap the protobuf message as a domain Task before persisting.
    record = Task(
        task_id=t.task_id,
        name=t.name,
        create_time=t.create_time,
        start_time=t.start_time,
        end_time=t.end_time,
        union_train=t.union_train,
        edge_nodes=t.edge_nodes,
        file=t.file,
    )
    task_id = self.db.add_task(record)
    return data_manager_pb2.AddTaskResp(resp=data_manager_pb2.Response(
        code=0, message=json.dumps(task_id)))
def SendFile(self, request, context):
    """Forward a task's script and config files to the data runtime."""
    src = request.task
    # Rebuild the domain Task from the incoming protobuf message.
    task: Task = Task(task_id=src.task_id,
                      name=src.name,
                      create_time=src.create_time,
                      start_time=src.start_time,
                      end_time=src.end_time,
                      union_train=src.union_train,
                      edge_nodes=src.edge_nodes,
                      file=src.file,
                      status=0)
    resp = self.dr.upload_task(task, request.script, request.config).resp
    # Echo the runtime's response code/message back to the caller.
    return message_hub_pb2.SendFileResp(resp=message_hub_pb2.Response(
        code=resp.code, message=resp.message))
def UpdateTask(self, request, context):
    """Overwrite the stored task with the fields from *request*."""
    incoming: Task = request.task
    updated = Task(
        task_id=incoming.task_id,
        name=incoming.name,
        create_time=incoming.create_time,
        start_time=incoming.start_time,
        end_time=incoming.end_time,
        union_train=incoming.union_train,
        edge_nodes=incoming.edge_nodes,
        file=incoming.file,
        status=incoming.status,
    )
    self.db.update_task(updated)
    return data_manager_pb2.UpdateTaskResp(
        resp=data_manager_pb2.Response(code=0, message="success"))
def handle_tasks(self, tasks: List[Task]):
    """
    Handle a batch of tasks: write each payload to disk, execute them,
    then build result tasks marked PROCESSED or FAILED.
    :param tasks: batch to execute; a leading JOB_END task short-circuits
    :return (Boolean, List[Task]): (success flag, handled result tasks)
    """
    try:
        handled_tasks: List[Task] = []
        # If job done, set flag and return True with no handled tasks.
        if tasks[0].message_type == TaskMessageType.JOB_END:
            self.job_done = True
            return True, handled_tasks
        # For each task make a file containing the task content.
        for task in tasks:
            self.save_processed_data(task.payload_filename, task.payload)
            logger.log_info(
                f"Creating '{task.payload_filename}' file to use during execution"
            )
        failed_tasks = self.execute_tasks(tasks)
        for task in tasks:
            if task not in failed_tasks:
                # Success: read the result file produced by execution.
                with open(
                        f'{self.job_path}/{self.job_id}/'
                        f'{task.result_filename}', 'rb') as file:
                    payload = file.read()
                task_status = TaskMessageType.TASK_PROCESSED
            else:
                payload = None
                task_status = TaskMessageType.TASK_FAILED
            # Build a fresh result task mirroring the original's identity.
            handled_task: Task = Task(task.task_id, task.program,
                                      task.arg_file_names, payload,
                                      task.result_filename,
                                      task.payload_filename)
            handled_task.message_type = task_status
            handled_task.job_id = self.job_id
            handled_tasks.append(handled_task)
        return True, handled_tasks
    except Exception as error:
        # Broad guard: any unexpected failure is logged and reported as
        # (False, []) so the caller can decide how to recover.
        logger.log_error(f'Handle_tasks broad exception\n{error}')
        return False, []
def request(params): task = Task() task.source = 'source_100' result = {'result': '0', 'task': []} try: task.error = '12' req_tasks = eval(urllib.unquote(params.get('req'))) task.req_qid = params.get('qid') task.req_uid = params.get('uid') except Exception, e: logger.error('get request params error: ' + str(task.source) + str(e)) result['err_code'] = 'Not enough arguments' return json.dumps(result)
def AddTask(self, request, context):
    """Register a new task: cache it locally, then persist via the DB service."""
    src = request.task
    # Rebuild the domain Task from the incoming protobuf message.
    task: Task = Task(task_id=src.task_id,
                      name=src.name,
                      create_time=src.create_time,
                      start_time=src.start_time,
                      end_time=src.end_time,
                      union_train=src.union_train,
                      edge_nodes=src.edge_nodes,
                      file=src.file,
                      status=0)
    self.tasks.append(task)
    resp = self.db.add_task(task).resp
    print("add task:" + resp.message)
    # Propagate the DB service's response code/message to the caller.
    return task_controller_pb2.AddTaskResp(
        resp=task_controller_pb2.Response(
            code=resp.code,
            message=resp.message,
        ))
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = expedia_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = expediaFlightParser() task = Task() task.content = '' task.source = 'expediaFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'expediaFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = ceair_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = ceairParser() task = Task() task.content = '' task.source = 'ceairFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'ceairFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = easyjet_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = easyjetFlightParser() task = Task() task.content = '' task.source = 'easyjetFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'easyjetFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = elong_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = elongFlightParser() task = Task() task.content = '' task.source = 'elongFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'elongFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = lcairRound_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = lcairParser() task = Task() task.content = '' task.source = 'lcairRoundFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'lcairRoundFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = ryanair_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = ryanairParser() task = Task() task.content = '' task.source = 'ryanairFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'ryanairFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = jijitong_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = jijitongParser() task = Task() task.content = '' task.source = 'jijitongFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'jijitongFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = feifan_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = feifanParser() task = Task() task.content = 'beijing-paris&20140602' task.source = 'feifanFlight' result = Parser.parse(task) task2 = Task() task2.content = 'AY058_AY735-PVG-AGP|20140521_09:20|feifan::feifan' task2.source = 'feifanFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = wego_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = wegoParser() task = Task() task.content = 'PEK&CDG&20140602' task.source = 'wegoFlight' result = Parser.parse(task) task2 = Task() task2.content = 'AY058_AY735-PVG-AGP|20140521_09:20|wego::ticket24' task2.source = 'wegoFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
def request(self, task):
    # Parse a space-separated 5-field query from the task content and
    # return the matched tickets serialized as JSON ("" on bad input).
    print task
    strs = task.content.split(" ")
    if len(strs) != 5:
        return ""
    tickets = Parse(strs[0], strs[1], strs[2], strs[3], strs[4])
    return jsonlib.write(tickets)


if __name__ == "__main__":
    import sys
    # NOTE(review): len(sys.argv) < 1 is never true (argv always contains
    # the script name), so this usage check is dead code — confirm intent.
    if len(sys.argv) < 1:
        print "Usage: %s " %sys.argv[0]
        sys.exit()
    # Smoke test (Python 2): exercise parse() and request() once each.
    from common.task import Task
    from common.task import RequestTask
    ryanair_parser = RyanairParser()
    task = Task()
    task.source = "ryanair"
    task.content = "OneWay STN DUB 2014-05-10 2014-05-25"
    ryanair_parser.parse(task)
    task = RequestTask()
    task.content = "OneWay STN DUB 2014-04-20 2014-04-25"
    task.source = "ryanair"
    print ryanair_parser.request(task)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = ctripFlight_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = ctripFlightParser() task = Task() task.content = 'BJS&PAR&20140602' task.source = 'ctripFlight' result = Parser.parse(task) task2 = Task() task2.content = 'AY058_AY735-PVG-AGP|20140620_09:20|ctrip::ctrip' task2.source = 'ctripFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = smartfares_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = smartfaresRoundParser() task = Task() task.content = '' task.source = 'feiquanqiuFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'feiquanqiuFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)
result = -1 #request重复抓两遍,失败则返回-1 for i in range(2): result = vueling_request_parser(content) if result != -1: break return result if __name__ == "__main__": Parser = vuelingParser() task = Task() task.content = '' task.source = 'vuelingFlight' result = Parser.parse(task) task2 = Task() task2.content = '' task2.source = 'vuelingFlight' result2 = Parser.request(task2.content) print str(result) print str(result2)