async def handler(request, task_id):
    task = await Task.find_one({'_id': ObjectId(task_id)})
    if not task:
        return json(response_message(ENOENT, 'Task not found'))
    if not task.upload_dir:
        return json(response_message(NO_TASK_RESOURCES), status=204)

    upload_root = get_upload_files_root(task)
    result_root = await get_test_result_path(task)
    if not await async_exists(result_root / TARBALL_TEMP):
        await aiofiles.os.mkdir(result_root / TARBALL_TEMP)

    upload_file = request.args.get('file', None)
    if upload_file:
        return await file(upload_root / upload_file)

    tarball = await pack_files(task_id, upload_root, result_root / TARBALL_TEMP)
    if not tarball:
        return json(response_message(EIO, 'Packing task resource files failed'))
    tarball = os.path.basename(tarball)
    return await file(result_root / TARBALL_TEMP / tarball)
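# Note: pack_files is referenced by the download handlers in this section but not
# defined here. A minimal sketch of what it plausibly does, assuming it bundles the
# task's upload directory into '<pack_dir>/<task_id>.tar.gz' and returns the tarball
# path, or None on failure (the naming scheme and error handling are assumptions):
import asyncio
import tarfile
from pathlib import Path

async def pack_files_sketch(task_id, src_dir, pack_dir):
    def _pack():
        tarball_path = Path(pack_dir) / f'{task_id}.tar.gz'
        try:
            with tarfile.open(tarball_path, 'w:gz') as tar:
                # archive the whole directory under a single top-level folder
                tar.add(src_dir, arcname=str(task_id))
            return str(tarball_path)
        except OSError:
            return None
    # run the blocking tarfile work in a thread pool so the event loop stays free
    return await asyncio.get_running_loop().run_in_executor(None, _pack)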
def get(self, task_id):
    try:
        task = Task.objects(pk=task_id).get()
    except ValidationError as e:
        print(e)
        return error_message(EINVAL, 'Task ID incorrect'), 400
    except Task.DoesNotExist:
        return error_message(ENOENT, 'Task not found'), 404

    if not task.upload_dir:
        return error_message(SUCCESS, 'Upload directory is empty'), 406

    upload_root = get_upload_files_root(task)
    result_root = get_test_result_path(task)

    if request.args.get('file', None):
        return send_from_directory(Path(os.getcwd()) / upload_root,
                                   request.args['file'])

    tarball = pack_files(task_id, upload_root, result_root / TARBALL_TEMP)
    if not tarball:
        return error_message(EIO, 'Packing task resource files failed'), 401
    tarball = os.path.basename(tarball)
    return send_from_directory(Path(os.getcwd()) / result_root / TARBALL_TEMP,
                               tarball)
def get(self, task_id):
    """
    Return the test result files.

    If a file name is specified, that file from the upload directory is
    returned; otherwise the bundled file containing all result files is
    returned.
    """
    try:
        task = Task.objects(pk=task_id).get()
    except ValidationError as e:
        current_app.logger.exception(e)
        return response_message(EINVAL, 'Task ID incorrect'), 400
    except Task.DoesNotExist:
        return response_message(ENOENT, 'Task not found'), 404

    if not task.upload_dir:
        return response_message(SUCCESS, 'Upload directory is empty'), 406

    upload_root = get_upload_files_root(task)
    result_root = get_test_result_path(task)

    upload_file = request.args.get('file', None)
    if upload_file:
        return send_from_directory(Path(os.getcwd()) / upload_root, upload_file)

    tarball = pack_files(task_id, upload_root, result_root / TARBALL_TEMP)
    if not tarball:
        return response_message(EIO, 'Packing task resource files failed'), 401
    tarball = os.path.basename(tarball)
    return send_from_directory(Path(os.getcwd()) / result_root / TARBALL_TEMP,
                               tarball)
def post(self, **kwargs):
    organization = kwargs['organization']
    team = kwargs['team']
    found = False

    temp_id = request.form.get('resource_id', None)
    if not temp_id:
        temp_id = str(ObjectId())
        os.mkdir(UPLOAD_DIR / temp_id)
    upload_root = UPLOAD_DIR / temp_id

    for name, file in request.files.items():
        found = True
        filename = upload_root / file.filename
        file.save(str(filename))

    files = request.form.getlist('file')
    if len(files) > 0:
        retrigger_task_id = request.form.get('retrigger_task', None)
        if not retrigger_task_id:
            return error_message(EINVAL, 'Field retrigger_task is required'), 400
        retrigger_task = Task.objects(pk=retrigger_task_id).first()
        if not retrigger_task:
            return error_message(ENOENT, 'Re-trigger task not found'), 404
        if retrigger_task.test.organization != organization or retrigger_task.test.team != team:
            return error_message(
                EINVAL,
                'Re-triggering a task not belonging to your organization/team is not allowed'
            ), 403

        retrigger_task_upload_root = get_upload_files_root(retrigger_task)
        if not os.path.exists(retrigger_task_upload_root):
            return error_message(
                ENOENT, 'Re-trigger task upload directory does not exist'), 404

        for f in files:
            try:
                shutil.copy(retrigger_task_upload_root / f, upload_root)
                found = True
            except FileNotFoundError:
                shutil.rmtree(upload_root)
                return error_message(
                    ENOENT,
                    'File {} used in the re-triggered task not found'.format(f)), 404

    if not found:
        return error_message(ENOENT, 'No files are found in the request'), 404

    return error_message(SUCCESS, resource_id=temp_id), 200
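# get_upload_files_root is used by every handler in this section but not defined
# here. A plausible one-line sketch, assuming task.upload_dir holds the resource
# directory name created by the upload endpoint above, under the same UPLOAD_DIR root:
def get_upload_files_root_sketch(task):
    return UPLOAD_DIR / str(task.upload_dir)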
def get(self, **kwargs):
    task = kwargs['task']
    if not task.upload_dir:
        return []
    upload_root = get_upload_files_root(task)
    if not os.path.exists(upload_root):
        return error_message(ENOENT, 'Task upload directory does not exist'), 404
    return os.listdir(upload_root)
def get(self, **kwargs):
    """Get the file list in the upload directory"""
    task = kwargs['task']
    if not task.upload_dir:
        return []
    upload_root = get_upload_files_root(task)
    if not os.path.exists(upload_root):
        return response_message(ENOENT, 'Task upload directory does not exist'), 404
    return os.listdir(upload_root)
async def handler(request):
    task = request.ctx.task
    if not task.upload_dir:
        return []
    upload_root = get_upload_files_root(task)
    if not await async_exists(upload_root):
        return json(response_message(ENOENT, 'Task upload directory does not exist'))
    return json(response_message(SUCCESS,
                                 files=await async_wraps(path_to_dict)(upload_root)))
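# path_to_dict is wrapped with async_wraps above so the blocking directory walk runs
# off the event loop. A minimal synchronous sketch, assuming it serializes a directory
# tree into nested dicts (the exact key names here are assumptions):
import os

def path_to_dict_sketch(path):
    node = {'label': os.path.basename(str(path)), 'type': 'directory', 'children': []}
    if os.path.isdir(path):
        for entry in sorted(os.listdir(path)):
            node['children'].append(path_to_dict_sketch(os.path.join(str(path), entry)))
    else:
        node['type'] = 'file'
        del node['children']
    return node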
async def process_task_per_endpoint(app, endpoint, organization=None, team=None):
    global ROBOT_PROCESSES, TASKS_CACHED
    if not organization and not team:
        logger.error('Arguments organization and team must not both be None')
        return
    room_id = get_room_id(str(organization.pk), str(team.pk) if team else '')
    taskqueues = await TaskQueue.find({
        'organization': organization.pk,
        'team': team.pk if team else None,
        'endpoint': endpoint.pk
    }).to_list(len(QUEUE_PRIORITY))
    if len(taskqueues) == 0:
        logger.error('Taskqueue not found')
        return
    # taskqueues = [q for q in taskqueues]
    # A query becomes stale if the document it points to gets changed elsewhere;
    # use the document instead of the query to perform the deletion
    taskqueue_first = taskqueues[0]

    endpoint_id = str(endpoint.pk)
    endpoint_uid = endpoint.uid
    if team and not organization:
        organization = await team.organization.fetch()
    org_name = (organization.name + '-' + team.name) if team else organization.name

    while True:
        await taskqueue_first.reload()
        if taskqueue_first.to_delete:
            for taskqueue in taskqueues:
                await taskqueue.delete()
            await endpoint.delete()
            logger.info('Abort the task loop: {} @ {}'.format(org_name, endpoint_uid))
            break

        # TODO: lower priority tasks will take precedence if a higher priority queue
        # is empty first but gets filled while the thread is searching for tasks in
        # the lower priority task queues
        for priority in QUEUE_PRIORITY:
            exit_task = False
            for taskqueue in taskqueues:
                await taskqueue.reload()
                if taskqueue.to_delete:
                    exit_task = True
                    break
                if taskqueue.priority == priority:
                    break
            else:
                logger.error('Found task queue with unknown priority')
                continue
            if exit_task:
                break

            # "continue" to search for tasks in the lower priority task queues
            # "break" to start over to search for tasks from the top priority task queue
            task = await taskqueue.pop()
            if not task:
                continue
            task_id = str(task.pk)

            if isinstance(task, DBRef):
                logger.warning('task {} has been deleted, ignore it'.format(task_id))
                taskqueue.running_task = None
                await taskqueue.commit()
                break

            if task.kickedoff != 0 and not task.parallelization:
                logger.info('task has been taken over by other threads, do nothing')
                taskqueue.running_task = None
                await taskqueue.commit()
                break

            await task.collection.find_one_and_update({'_id': task.pk},
                                                      {'$inc': {'kickedoff': 1}})
            await task.reload()
            if task.kickedoff != 1 and not task.parallelization:
                logger.warning('a race condition happened')
                taskqueue.running_task = None
                await taskqueue.commit()
                break

            test = await task.test.fetch()
            logger.info('Start to run task {} in the thread {}'.format(
                task_id, threading.current_thread().name))

            result_dir = await get_test_result_path(task)
            scripts_dir = await get_user_scripts_root(task)
            await async_makedirs(result_dir)

            args = ['--loglevel', 'debug', '--outputdir', str(result_dir),
                    '--consolecolors', 'on', '--consolemarkers', 'on']
            if hasattr(task, 'testcases'):
                for t in task.testcases:
                    args.extend(['-t', t])
            if hasattr(task, 'variables') and task.variables:
                variable_file = result_dir / 'variablefile.py'
                convert_json_to_robot_variable(task.variables, test.variables, variable_file)
                args.extend(['--variablefile', str(variable_file)])
            addr, port = '127.0.0.1', 8270
            args.extend(['-v', f'address_daemon:{addr}', '-v', f'port_daemon:{port}',
                         '-v', f'task_id:{task_id}', '-v', f'endpoint_uid:{endpoint_uid}'])
            args.append(os.path.join(scripts_dir, test.path, test.test_suite + '.md'))
            logger.info('Arguments: ' + str(args))

            p = await asyncio.create_subprocess_exec(
                'robot', *args,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.STDOUT)
            ROBOT_PROCESSES[str(task.pk)] = p

            task.status = 'running'
            task.run_date = datetime.datetime.utcnow()
            task.endpoint_run = endpoint
            await task.commit()
            await sio.emit('task started', {'task_id': task_id}, room=room_id)

            log_msg = StringIO()
            if room_id not in ROOM_MESSAGES:
                ROOM_MESSAGES[room_id] = {task_id: log_msg}
            elif task_id not in ROOM_MESSAGES[room_id]:
                ROOM_MESSAGES[room_id][task_id] = log_msg

            ss = b''
            msg_q = asyncio.Queue()

            async def _read_log():
                nonlocal msg_q, ss, log_msg
                while True:
                    c = await p.stdout.read(1)
                    if not c:
                        await msg_q.put(None)
                        break
                    try:
                        c = c.decode()
                    except UnicodeDecodeError:
                        ss += c
                    else:
                        c = '\r\n' if c == '\n' else c
                        log_msg.write(c)
                        await msg_q.put(c)
            asyncio.create_task(_read_log())

            async def _emit_log():
                nonlocal msg_q
                msg = ''
                while True:
                    try:
                        c = msg_q.get_nowait()
                    except asyncio.QueueEmpty:
                        if msg:
                            await sio.emit('test report',
                                           {'task_id': task_id, 'message': msg},
                                           room=room_id)
                            msg = ''
                        c = await msg_q.get()
                    finally:
                        if c:
                            msg += c
                        else:
                            break
            emit_log_task = asyncio.create_task(_emit_log())
            await emit_log_task

            del ROBOT_PROCESSES[str(task.pk)]

            if ss != b'':
                log_msg_all = StringIO()
                log_msg_all.write(log_msg.getvalue())
                try:
                    ss = ss.decode(chardet.detect(ss)['encoding'])
                except UnicodeDecodeError:
                    try:
                        logger.warning(
                            f'chardet error: {ss.decode("unicode_escape").encode("latin-1")}')
                    except UnicodeEncodeError:
                        pass
                else:
                    log_msg_all.write(ss)
                logger.info('\n' + log_msg_all.getvalue())
                await sio.emit('test report',
                               {'task_id': task_id, 'message': ss},
                               room=room_id)
            else:
                logger.info('\n' + log_msg.getvalue())

            await p.wait()
            if p.returncode == 0:
                task.status = 'successful'
            else:
                await task.reload()
                if task.status != 'cancelled':
                    task.status = 'failed'
            await task.commit()
            await sio.emit('task finished',
                           {'task_id': task_id, 'status': task.status},
                           room=room_id)

            ROOM_MESSAGES[room_id][task_id].close()
            del ROOM_MESSAGES[room_id][task_id]
            if task_id in TASKS_CACHED:
                del TASKS_CACHED[task_id]

            del taskqueue.running_task
            await taskqueue.commit()

            endpoint.last_run_date = datetime.datetime.utcnow()
            await endpoint.commit()

            if task.upload_dir:
                resource_dir_tmp = get_upload_files_root(task)
                if await async_exists(resource_dir_tmp):
                    await make_tarfile_from_dir(str(result_dir / 'resource.tar.gz'),
                                                resource_dir_tmp)

            result_dir_tmp = result_dir / 'temp'
            if await async_exists(result_dir_tmp):
                await async_rmtree(result_dir_tmp)

            await notification_chain_call(task)
            TASK_PER_ENDPOINT[endpoint_id] = 1
            break
        else:
            if TASK_PER_ENDPOINT[endpoint_id] != 1:
                TASK_PER_ENDPOINT[endpoint_id] = 1
                logger.info('Run the recently scheduled task')
                continue
            # del TASK_PER_ENDPOINT[endpoint_id]
            logger.info('task processing finished, exiting the process loop')
            break
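# convert_json_to_robot_variable is called above with the task's variable values, the
# test's variable definitions, and a target file that is then passed to Robot Framework
# via --variablefile. A minimal sketch, assuming the task values simply override the
# test defaults and both are plain dicts (the merge rule is an assumption):
def convert_json_to_robot_variable_sketch(task_variables, test_variables, variable_file):
    merged = {**(test_variables or {}), **(task_variables or {})}
    with open(variable_file, 'w') as f:
        for name, value in merged.items():
            # repr() keeps strings quoted and lists/dicts as Python literals, which a
            # Robot Framework variable file picks up as module-level variables
            f.write(f'{name} = {value!r}\n')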
def task_loop_per_endpoint(endpoint_address, organization=None, team=None):
    global ROBOT_TASKS
    if not organization and not team:
        print('Arguments organization and team must not both be None')
        return
    taskqueues = TaskQueue.objects(organization=organization,
                                   team=team,
                                   endpoint_address=endpoint_address)
    if taskqueues.count() == 0:
        print('Taskqueue not found')
        return
    # A query becomes stale if the document it points to gets changed elsewhere;
    # use documents instead of the query to perform the deletion
    taskqueues = [q for q in taskqueues]
    taskqueue_first = taskqueues[0]

    endpoints = Endpoint.objects(endpoint_address=endpoint_address,
                                 organization=organization,
                                 team=team)
    if endpoints.count() == 0:
        print('Endpoint not found')
        return
    org_name = team.organization.name + '-' + team.name if team else organization.name
    print('Start task loop: {} @ {}'.format(org_name, endpoint_address))

    while True:
        taskqueue_first.reload('to_delete')
        if taskqueue_first.to_delete:
            for taskqueue in taskqueues:
                taskqueue.delete()
            endpoints.delete()
            print('Exit task loop: {} @ {}'.format(org_name, endpoint_address))
            break

        # TODO: lower priority tasks will take precedence if a higher priority queue
        # is empty first but gets filled while the thread is searching for tasks in
        # the lower priority task queues
        for priority in (QUEUE_PRIORITY_MAX, QUEUE_PRIORITY_DEFAULT, QUEUE_PRIORITY_MIN):
            for taskqueue in taskqueues:
                if taskqueue.priority == priority:
                    break
            else:
                print('Error: Found task queue with unknown priority')

            # "continue" to search for tasks in the lower priority task queue
            # "break" to start over to search for tasks from the top priority task queue
            task = taskqueue.pop()
            if not task:
                continue

            if isinstance(task, DBRef):
                print('task {} has been deleted, ignore it'.format(task.id))
                break

            if task.kickedoff != 0 and not task.parallelization:
                print('task has been taken over by other threads, do nothing')
                break

            task.modify(inc__kickedoff=1)
            if task.kickedoff != 1 and not task.parallelization:
                print('a race condition happened')
                break

            print('\nStart to run task {} ...'.format(task.id))
            task.status = 'running'
            task.run_date = datetime.datetime.utcnow()
            task.endpoint_run = endpoint_address
            task.save()

            result_dir = get_test_result_path(task)
            args = ['--loglevel', 'debug', '--outputdir', str(result_dir),
                    '--extension', 'md', '--log', 'NONE', '--report', 'NONE']
            # args = ['--outputdir', str(result_dir), '--extension', 'md']
            os.makedirs(result_dir)

            if hasattr(task, 'testcases'):
                for t in task.testcases:
                    args.extend(['-t', t])
            if hasattr(task, 'variables'):
                variable_file = Path(result_dir) / 'variablefile.py'
                convert_json_to_robot_variable(args, task.variables, variable_file)

            addr, port = endpoint_address.split(':')
            args.extend(['-v', 'address_daemon:{}'.format(addr),
                         '-v', 'port_daemon:{}'.format(port),
                         '-v', 'port_test:{}'.format(int(port) + 1),
                         '-v', 'task_id:{}'.format(task.id)])
            args.append(task.test.path)
            taskqueue.modify(running_task=task)
            print('Arguments: ' + str(args))

            proc_queue_read = multiprocessing.Queue()
            proc = multiprocessing.Process(target=run_robot_task,
                                           args=(proc_queue_read, args))
            proc.daemon = True
            proc.start()
            ROBOT_TASKS.append({
                'task_id': task.id,
                'process': proc,
                'queue_read': proc_queue_read
            })
            proc.join()

            try:
                ret = proc_queue_read.get(timeout=1)
            except queue.Empty:
                pass
            else:
                if ret == 0:
                    task.status = 'successful'
                else:
                    task.status = 'failed'
            task.save()
            taskqueue.modify(running_task=None)

            endpoint = Endpoint.objects(endpoint_address=endpoint_address,
                                        organization=organization,
                                        team=team).first()
            if not endpoint:
                print('No endpoint found with the address {}'.format(endpoint_address))
            else:
                endpoint.last_run_date = datetime.datetime.utcnow()
                endpoint.save()

            if task.upload_dir:
                resource_dir_tmp = get_upload_files_root(task)
                if os.path.exists(resource_dir_tmp):
                    make_tarfile(str(result_dir / 'resource.tar.gz'), resource_dir_tmp)

            result_dir_tmp = result_dir / 'temp'
            if os.path.exists(result_dir_tmp):
                shutil.rmtree(result_dir_tmp)

            notification_chain_call(task)
            break
        time.sleep(1)
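# run_robot_task is the multiprocessing target used in the loop above; it is not defined
# in this section. A minimal sketch, assuming it runs Robot Framework in-process and
# reports the return code back through the queue:
import robot

def run_robot_task_sketch(queue_write, args):
    # robot.run_cli mirrors the 'robot' command line; exit=False makes it return the
    # return code instead of calling sys.exit()
    rc = robot.run_cli(args, exit=False)
    queue_write.put(rc)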
async def post(self, request):
    organization = request.ctx.organization
    team = request.ctx.team
    found = False

    temp_id = request.form.get('resource_id', None)
    if not temp_id:
        temp_id = str(ObjectId())
        await aiofiles.os.mkdir(request.app.config.UPLOAD_ROOT / temp_id)
    upload_root = request.app.config.UPLOAD_ROOT / temp_id

    for name, file in request.files.items():
        if not is_path_secure(file.name):
            return json(response_message(EINVAL, 'saving file with an illegal file name'))
        found = True
        async with aiofiles.open(upload_root / file.name, 'wb') as f:
            await f.write(file.body)

    files = request.form.getlist('file')
    if len(files) > 0:
        retrigger_task_id = request.form.get('retrigger_task', None)
        if not retrigger_task_id:
            return json(response_message(EINVAL, 'Field retrigger_task is required'))
        retrigger_task = await Task.find_one({'_id': ObjectId(retrigger_task_id)})
        if not retrigger_task:
            return json(response_message(ENOENT, 'Re-trigger task not found'))
        test = await retrigger_task.test.fetch()
        if test.organization != organization or test.team != team:
            return json(response_message(
                EINVAL,
                'Re-triggering a task not belonging to your organization/team is not allowed'))

        retrigger_task_upload_root = get_upload_files_root(retrigger_task)
        if not await async_exists(retrigger_task_upload_root):
            return json(response_message(
                ENOENT, 'Re-trigger task upload directory does not exist'))

        for f in files:
            try:
                await async_copy(retrigger_task_upload_root / f, upload_root)
                found = True
            except FileNotFoundError:
                await async_rmtree(upload_root)
                return json(response_message(
                    ENOENT,
                    'File {} used in the re-triggered task not found'.format(f)))

    if not found:
        return json(response_message(ENOENT, 'No files are found in the request'))

    return json(response_message(SUCCESS, resource_id=temp_id))
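# is_path_secure guards the upload endpoint above against path traversal in
# client-supplied file names. A minimal sketch, assuming a name is safe only when it
# contains no directory components (no separators, no '..', not absolute):
import os

def is_path_secure_sketch(filename):
    return bool(filename) \
        and filename not in ('.', '..') \
        and filename == os.path.basename(filename)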
def process_task_per_endpoint(app, endpoint, organization=None, team=None):
    global ROBOT_PROCESSES, TASKS_CACHED
    if not organization and not team:
        app.logger.error('Arguments organization and team must not both be None')
        return
    room_id = get_room_id(str(organization.id), str(team.id) if team else '')
    taskqueues = TaskQueue.objects(organization=organization,
                                   team=team,
                                   endpoint=endpoint)
    if taskqueues.count() == 0:
        app.logger.error('Taskqueue not found')
        return
    # taskqueues = [q for q in taskqueues]
    # A query becomes stale if the document it points to gets changed elsewhere;
    # use the document instead of the query to perform the deletion
    taskqueue_first = taskqueues.first()

    endpoint_id = str(endpoint.id)
    endpoint_uid = endpoint.uid
    org_name = team.organization.name + '-' + team.name if team else organization.name

    while True:
        taskqueue_first.reload('to_delete')
        if taskqueue_first.to_delete:
            taskqueues.delete()
            endpoint.delete()
            app.logger.info('Abort the task loop: {} @ {}'.format(org_name, endpoint_uid))
            break

        # TODO: lower priority tasks will take precedence if a higher priority queue
        # is empty first but gets filled while the thread is searching for tasks in
        # the lower priority task queues
        for priority in QUEUE_PRIORITY:
            exit_task = False
            for taskqueue in taskqueues:
                taskqueue.reload('to_delete')
                if taskqueue.to_delete:
                    exit_task = True
                    break
                if taskqueue.priority == priority:
                    break
            else:
                app.logger.error('Found task queue with unknown priority')
                continue
            if exit_task:
                break

            # "continue" to search for tasks in the lower priority task queues
            # "break" to start over to search for tasks from the top priority task queue
            task = taskqueue.pop()
            if not task:
                continue
            task_id = str(task.id)

            if isinstance(task, DBRef):
                app.logger.warning('task {} has been deleted, ignore it'.format(task_id))
                taskqueue.modify(running_task=None)
                break

            if task.kickedoff != 0 and not task.parallelization:
                app.logger.info('task has been taken over by other threads, do nothing')
                taskqueue.modify(running_task=None)
                break

            task.modify(inc__kickedoff=1)
            if task.kickedoff != 1 and not task.parallelization:
                app.logger.warning('a race condition happened')
                taskqueue.modify(running_task=None)
                break

            app.logger.info('Start to run task {} in the thread {}'.format(
                task_id, threading.current_thread().name))

            result_dir = get_test_result_path(task)
            scripts_dir = get_user_scripts_root(task)

            args = ['robot', '--loglevel', 'debug', '--outputdir', str(result_dir),
                    '--extension', 'md', '--consolecolors', 'on', '--consolemarkers', 'on']
            os.makedirs(result_dir)
            if hasattr(task, 'testcases'):
                for t in task.testcases:
                    args.extend(['-t', t])
            if hasattr(task, 'variables'):
                variable_file = Path(result_dir) / 'variablefile.py'
                convert_json_to_robot_variable(args, task.variables, variable_file)
            addr, port = '127.0.0.1', 8270
            args.extend(['-v', f'address_daemon:{addr}', '-v', f'port_daemon:{port}',
                         '-v', f'task_id:{task_id}', '-v', f'endpoint_uid:{endpoint_uid}'])
            args.append(os.path.join(scripts_dir, task.test.path, task.test.test_suite + '.md'))
            app.logger.info('Arguments: ' + str(args))

            p = subprocess.Popen(
                args,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                bufsize=0,
                creationflags=subprocess.CREATE_NEW_PROCESS_GROUP if os.name == 'nt' else 0)
            ROBOT_PROCESSES[task.id] = p

            task.status = 'running'
            task.run_date = datetime.datetime.utcnow()
            task.endpoint_run = endpoint
            task.save()
            RPC_SOCKET.emit('task started', {'task_id': task_id}, room=room_id)

            log_msg = StringIO()
            if room_id not in ROOM_MESSAGES:
                ROOM_MESSAGES[room_id] = {task_id: log_msg}
            elif task_id not in ROOM_MESSAGES[room_id]:
                ROOM_MESSAGES[room_id][task_id] = log_msg

            ss = b''
            while True:
                c = p.stdout.read(1)
                if not c:
                    break
                try:
                    c = c.decode(encoding=sys.getdefaultencoding())
                except UnicodeDecodeError:
                    ss += c
                else:
                    c = '\r\n' if c == '\n' else c
                    log_msg.write(c)
                    RPC_SOCKET.emit('test report',
                                    {'task_id': task_id, 'message': c},
                                    room=room_id)
            del ROBOT_PROCESSES[task.id]

            if ss != b'':
                ss = ss.decode(chardet.detect(ss)['encoding'])
                log_msg_all = StringIO()
                log_msg_all.write(ss)
                log_msg_all.write(log_msg.getvalue())
                app.logger.info('\n' + log_msg_all.getvalue())
                RPC_SOCKET.emit('test report',
                                {'task_id': task_id, 'message': ss},
                                room=room_id)
            else:
                app.logger.info('\n' + log_msg.getvalue())
                # app.logger.info('\n' + log_msg.getvalue().replace('\r\n', '\n'))

            p.wait()
            if p.returncode == 0:
                task.status = 'successful'
            else:
                task.reload('status')
                if task.status != 'cancelled':
                    task.status = 'failed'
            task.save()
            RPC_SOCKET.emit('task finished',
                            {'task_id': task_id, 'status': task.status},
                            room=room_id)

            ROOM_MESSAGES[room_id][task_id].close()
            del ROOM_MESSAGES[room_id][task_id]
            if task_id in TASKS_CACHED:
                del TASKS_CACHED[task_id]

            taskqueue.modify(running_task=None)
            endpoint.modify(last_run_date=datetime.datetime.utcnow())

            if task.upload_dir:
                resource_dir_tmp = get_upload_files_root(task)
                if os.path.exists(resource_dir_tmp):
                    make_tarfile_from_dir(str(result_dir / 'resource.tar.gz'),
                                          resource_dir_tmp)

            result_dir_tmp = result_dir / 'temp'
            if os.path.exists(result_dir_tmp):
                shutil.rmtree(result_dir_tmp)

            notification_chain_call(task)
            TASK_LOCK.acquire()
            TASK_THREADS[endpoint_id] = 1
            TASK_LOCK.release()
            break
        else:
            TASK_LOCK.acquire()
            if TASK_THREADS[endpoint_id] != 1:
                TASK_THREADS[endpoint_id] = 1
                TASK_LOCK.release()
                app.logger.info('Run the recently scheduled task')
                continue
            del TASK_THREADS[endpoint_id]
            TASK_LOCK.release()
            break