def task_log_buffer(_id):
    """Return the buffered log lines for task *_id*.

    Query params:
        page: int, buffer offset to read from (default 0).

    Returns a JSON envelope with the log lines, the page echoed back,
    and the task's current state; 400 on a malformed id or page,
    404 when the task record does not exist.
    """
    if not ObjectId.is_valid(_id):
        return jsonify({
            'message': 'invalid id',
            'code': 104000
        }), 400
    query = request.args
    record = TaskModel.find_by_id(_id)
    if not record:
        return jsonify({
            'message': 'record not found',
            'code': 104040
        }), 404
    # FIX: a non-numeric ?page= used to raise ValueError and surface as a 500;
    # reject it explicitly with a 400 instead.
    try:
        start = int(query.get('page', 0))
    except (TypeError, ValueError):
        return jsonify({
            'message': 'invalid page',
            'code': 104000
        }), 400
    end = -1  # -1 = read through to the end of the buffer
    reporter = Reporter(task_id=_id)
    buffer = reporter.get_buffer(start=start, end=end)
    return jsonify({
        'message': 'ok',
        'code': 0,
        'data': {
            'list': buffer,
            'page': start,
            'state': record.get('state')
        }
    })
def get_tasks_by_job(job_id, offset=0, limit=20, sort=None):
    """Return a page of tasks for *job_id*, newest first by default.

    Yields an empty list when the job has no backing queue; otherwise
    delegates pagination and ordering to TaskModel.find.
    """
    if not sort:
        sort = [('_id', -1)]
    if not get_queue_by_job(job_id):
        return []
    return TaskModel.find(
        {'job_id': job_id},
        skip=offset,
        limit=limit,
        sort=sort,
    )
def task_log_buffer(_id):
    """Return buffered log lines for a task or book record *_id*.

    Query params:
        type: 'task' looks up TaskModel, 'book' looks up Perform.
        page: int, buffer offset to read from (default 0).

    Falls back to the record's stored ``trace`` when the live buffer is
    empty. 400 on malformed id/page, 404 when no record matches.
    """
    if not ObjectId.is_valid(_id):
        return jsonify({
            'message': 'invalid id',
            'code': 104000
        }), 400
    query = request.args
    record = None
    record_type = query.get('type')  # bind once instead of reading twice
    if record_type == 'task':
        record = TaskModel.find_by_id(_id)
    elif record_type == 'book':
        # @todo
        record = Perform.find_by_id(_id)
    if not record:
        return jsonify({
            'message': 'record not found',
            'code': 104040
        }), 404
    # FIX: a non-numeric ?page= used to raise ValueError and surface as a 500;
    # reject it explicitly with a 400 instead.
    try:
        start = int(query.get('page', 0))
    except (TypeError, ValueError):
        return jsonify({
            'message': 'invalid page',
            'code': 104000
        }), 400
    end = -1  # -1 = read through to the end of the buffer
    reporter = Reporter(task_id=_id)
    buffer = reporter.get_buffer(start=start, end=end)
    # When the live buffer is gone (e.g. task already finished and
    # buffer expired), serve the persisted trace instead.
    if not buffer and record.get('trace'):
        buffer = [record.get('trace')]
    return jsonify({
        'message': 'ok',
        'code': 0,
        'data': {
            'list': buffer,
            'page': start,
            'state': record.get('state'),
            'record': record
        }
    })
def get_job(_id):
    """Return full detail for job *_id* owned by the logged-in user.

    Assembles the job record plus a rendered inventory preview, the
    host list, the playbook roles, and the latest task log, then
    returns them in one JSON envelope. Adhoc jobs short-circuit with
    only the record and inventory preview.
    400 when the id is missing or no job matches the user.
    """
    username = login_user.get('username')
    if not _id:
        return jsonify({'message': 'invalid id', 'code': 154000}), 400
    # Only maintainers of the job may see it.
    job = Job.find_one({
        '_id': ObjectId(_id),
        'maintainer': {
            '$in': [username]
        }
    })
    # @todo job status
    if not job:
        return jsonify({
            'message': 'invalid id',
            'code': 154001,
        }), 400
    # NOTE(review): template is read without a None check — assumes every
    # job document carries a 'template' sub-document; confirm upstream.
    template = job.get('template')
    inventory_type = template.get('inventory_type')
    inventory = template.get('inventory')
    if job.get('type') == 'adhoc':
        # Adhoc jobs have no playbook/hosts/roles — return early with
        # just the record and the CMDB inventory preview.
        inventory_content = parse_cmdb_inventory(inventory)
        return jsonify({
            'message': 'ok',
            'code': 0,
            'data': {
                'record': job,
                'previewContent': inventory_content,
            },
        })
    # Playbook jobs: inventory preview comes either from an uploaded
    # file or from the CMDB.
    if inventory_type == 'file':
        inventory_content = parse_file_inventory(inventory)
    else:
        inventory_content = parse_cmdb_inventory(inventory)
    # Side effect: validates/prepares the playbook before host lookup.
    check_playbook(job['book_id'])
    if inventory_type == 'file':
        book = Book.find_one({'_id': ObjectId(job['book_id'])})
        if not book:
            hosts = []
        else:
            hosts = get_inventory_by_book(book.get('_id'), book_name=book.get('name'))
    else:
        hosts = get_inventory_from_cmdb()
    # Collect role directories: children of the book's top-level
    # 'roles' directory in the Playbook tree.
    roles = []
    # NOTE(review): this lookup stringifies book_id but the child query
    # below uses the raw value — possible type mismatch; confirm how
    # Playbook stores 'book_id'.
    condition = {
        'book_id': str(job['book_id']),
        'role': 'roles',
        'is_dir': True
    }
    parent = Playbook.find_one(condition)
    if parent:
        where = {
            'book_id': job['book_id'],
            'is_dir': True,
            'parent': parent.get('path')
        }
        cursor = Playbook.find(where)
        roles = list(cursor)
    # Latest task's log message, if any task has run for this job.
    logs = None
    task = Task.find_one({'job_id': _id})
    if task:
        log = db.collection('logs').find_one({'task_id': str(task['_id'])})
        if log:
            logs = log.get('message')
    return jsonify({
        'message': 'ok',
        'code': 0,
        'data': {
            'record': job,
            'previewContent': inventory_content,
            'hosts': hosts,
            'roles': roles,
            'logs': logs,
        },
    })
def run_playbook_task(_id, request_id, username, history_id, **kwargs):
    """Execute an ansible-playbook run for job *_id*.

    Loads the job (or a frozen copy from build_history when *history_id*
    is given), installs any integration app, materializes the playbook
    workspace, runs it, archives the workspace into build_history, and
    always persists the task state and captured output.

    Returns False when no task record matches *request_id*; otherwise
    returns None — outcome is recorded on the task document.
    """
    db = Mongo()
    record = Job.find_by_id(ObjectId(_id))
    task_record = TaskModel.find_one({'request_id': request_id})
    if not task_record:
        return False
    start_at = time()
    state = 'progressing'
    result = ''
    task_id = task_record.get('_id')
    job_id = task_record.get('job_id')
    # Capture all stdout/stderr produced during the run into the
    # task's Reporter buffer; restored in the finally block.
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stderr = sys.stdout = temp_stdout = Reporter(str(task_id))
    try:
        if history_id:
            # Re-run a historical build: use the job info and kwargs
            # frozen at build time, not the current job document.
            history = db.collection('build_history').find_one(
                {'_id': ObjectId(history_id)})
            record = history['job_info']
            kwargs = task_record.get('kwargs')
        template = record.get('template')
        body = {
            'template': record.get('template'),
            'extra': record.get('extra')
        }
        payload = load_ansible_playbook(body)
        # FIX: was `is not 'ok'` — identity comparison with a string
        # literal (SyntaxWarning on CPython >= 3.8 and semantically wrong).
        if payload.get('message') != 'ok':
            raise Exception('load ansible options error: ' +
                            payload.get('message'))
        app_id = template.get('app')
        if app_id:
            # Install the integration application (runtime dependency)
            # before the playbook runs.
            app_info = Application.find_by_id(ObjectId(app_id))
            if not app_info:
                raise Exception('app not found: {}'.format(app_id))
            app_type = app_info.get('type')
            app_params = app_info.get('params')
            if kwargs:
                app_params.update(kwargs)
            integration = Integration(app_type, app_params)
            integration.install()
        data = payload.get('data')
        options = data.get('options')
        private_key = data.get('private_key')
        wk = Workspace()
        roles = data.get('roles')
        if history_id:
            bookspace = wk.build_book(history_id)
        else:
            bookname = data.get('book_name')
            bookspace = wk.load_book_from_db(name=bookname, roles=roles,
                                             build_id=task_id)
        if not bookspace or not os.path.isdir(bookspace):
            raise Exception('install playbook failed, book name: {}'.format(
                data.get('book_name')))
        entry = os.path.join(bookspace, data.get('entry'))
        # delete=False: the key file must outlive this with-block so
        # ansible can read it during the run.
        with NamedTemporaryFile('w+t', delete=False) as fd:
            if private_key:
                key_text = get_credential_content_by_id(
                    private_key, 'private_key')
                if not key_text:
                    raise Exception('invalid private_key')
                fd.write(key_text)
                fd.seek(0)
                options['private-key'] = fd.name
        # NOTE(review): tags are hard-coded to ['uptime'] — looks like
        # leftover debug filtering that limits every run to one tag;
        # confirm intent.
        options['tags'] = ['uptime']
        options['verbosity'] = 2
        inventory = data.get('inventory')
        logger.info('ansible-playbook run load inventory: \n{}'.format(
            yaml.safe_dump(inventory)))
        play = PlayBookRunner(inventory, options, job_id=job_id)
        play.run(entry)
        result = play.get_result()
        builds = db.collection('build_history').count({'job_id': _id})
        state = 'finish'
        # @todo
        if builds > cache_result_numer:
            # Evict the oldest cached build (and its stored zip) once
            # the per-job cap is exceeded.
            last_one = db.collection('build_history').find_one(
                {'job_id': _id}, sort=[('_id', 1)])
            if last_one:
                db.fs().delete(last_one.get('file_id'))
                db.collection('build_history').delete_one(
                    {'_id': last_one['_id']})
        # Archive the executed workspace into GridFS + build_history so
        # the run can be reproduced later.
        with TemporaryDirectory() as dir_name:
            bookname = data.get('book_name')
            zip_file = os.path.join(dir_name, bookname)
            zip_file = make_zip(bookspace, zip_file)
            with open(zip_file, mode='rb') as stream:
                filename = bookname + '.zip'
                file_id = db.save_file(filename=filename, fileobj=stream)
            store_info = {
                'task_id': str(task_id),
                'file_id': str(file_id),
                'job_id': str(_id),
                'job_info': record,
                'filename': filename,
                'created_at': time(),
                'kwargs': kwargs,
            }
            db.collection('build_history').insert_one(store_info)
        shutil.rmtree(bookspace)
    except Exception as e:
        result = str(e)
        extra = {'task_id': task_id}
        logger.error('run task with exception: {}'.format(str(e)),
                     extra=extra)
        state = 'error'
        extra_options = record.get('extra')
        user = User.find_one({'username': username})
        if user:
            user_id = str(user['_id'])
            notification = extra_options.get('notification')
            message = '[error]run job: {}, message: {}'.format(
                record.get('name'), str(e))
            sys.stdout.write(message)
            # FIX: isinstance instead of `type(notification) == list`.
            if notification and isinstance(notification, list):
                Notify().dispatch(user_id=user_id, msg_type='task',
                                  content=message, channel=notification)
    finally:
        # Always restore std streams and persist outcome + captured log,
        # whatever happened above.
        content = temp_stdout.getvalue()
        temp_stdout.close(True)
        sys.stdout = old_stdout
        sys.stderr = old_stderr
        finish_at = time()
        update = {
            '$set': {
                'start_at': start_at,
                'finish_at': finish_at,
                'state': state,
                'duration': finish_at - start_at,
                'result': result,
            }
        }
        TaskModel.update_one({'_id': task_id}, update=update)
        trace = {
            'task_id': str(task_id),
            'request_id': request_id,
            'username': username,
            'content': str(content),
            'created_at': time(),
        }
        db.collection('task_logs').insert_one(trace)
def monitor():
    """Dashboard endpoint: queue stats, scheduler jobs, and task charts.

    :return: json response with per-queue stats grouped by base queue
             name, the scheduler's job list (stringified fields), a
             task histogram, and pie-chart breakdowns by job/run type.
    """
    queue_stats = tiger.get_queue_stats()
    sorted_stats = sorted(queue_stats.items(), key=lambda k: k[0])
    queues = dict()
    for queue, stats in sorted_stats:
        # Queues named "<base>.<job_id>" belong to a specific job;
        # resolve the job's display name for the dashboard.
        queue_list = queue.split('.')
        if len(queue_list) == 2:
            queue_base, job_id = queue_list
            job = db.collection('jobs').find_one({'_id': ObjectId(job_id)})
            job_name = job.get('name') if job else None
        else:
            queue_base = queue_list[0]
            job_id = None
            job_name = None
        if queue_base not in queues:
            queues[queue_base] = []
        queues[queue_base].append({
            'queue': queue,
            'job_id': job_id,
            'job_name': job_name,
            'stats': stats,
            'total': tiger.get_total_queue_size(queue),
            'lock': tiger.get_queue_system_lock(queue)
        })
    # Scheduler jobs: stringify every state field so it is JSON-safe.
    schedule_jobs = scheduler.get_jobs()
    schedules = []
    for job in schedule_jobs:
        stats = job.__getstate__()
        item = {}
        for field, value in stats.items():
            item[field] = str(value)
        schedules.append(item)
    # FIX: removed a dead inline aggregation pipeline over 'tasks' whose
    # result was never used — the histogram actually served comes from
    # TaskModel.histogram() below.
    task_model = TaskModel()
    task_histogram = task_model.histogram()
    task_state_pies = task_model.state_pies()
    task_pies = {
        'jobType': [
            {
                'name': 'adhoc',
                'count': db.collection('jobs').count({'type': 'adhoc'})
            },
            {
                'name': 'playbook',
                'count': db.collection('jobs').count({'type': 'playbook'})
            }
        ],
        'runType': [
            {
                'name': 'schedule',
                'count': db.collection('jobs').count(
                    {'template.schedule': {'$exists': True}})
            },
            {
                'name': 'trigger',
                'count': db.collection('jobs').count(
                    {'template.runType': {'$exists': False}})
            }
        ],
    }
    return jsonify({
        'message': 'ok',
        'code': 0,
        'data': {
            'queues': queues,
            'taskHistogram': list(task_histogram),
            'taskPies': task_pies,
            'taskStatePies': task_state_pies,
            'schedule': schedules,
        },
    })