import shutil
from glob import glob
from os.path import basename, join
from typing import List, Optional

import pandas as pd

# LOG_FOLDER, LogProvider, LogStatus, ComponentType, PaginatorOptions, Log,
# Step, Task, log_name and to_snake are assumed to be provided by the
# surrounding project; their import paths are omitted here.


def logs(statuses, folder: Optional[str] = None):
    # Copy the raw log files into the target folder, if one was given.
    if folder is not None:
        for file in glob(join(LOG_FOLDER, '*')):
            shutil.copy(file, join(folder, basename(file)))
        print('logs formed')

    # If the status table already contains ERROR rows, stop here and skip
    # the database dump.
    if statuses.query('status == "ERROR"').shape[0] > 0:
        return

    log_provider = LogProvider()
    errors = log_provider.last(count=1000, levels=[LogStatus.Error.value])
    service_components = [
        ComponentType.Supervisor.value,
        ComponentType.API.value,
        ComponentType.WorkerSupervisor.value
    ]
    services = log_provider.last(count=1000, components=service_components)

    rows = []
    for log, _ in errors + services:
        rows.append({
            'status': to_snake(LogStatus(log.level).name),
            'component': to_snake(ComponentType(log.component).name),
            'time': log.time,
            'message': log.message,
        })

    df = pd.DataFrame(rows)
    # Only write the CSV when a folder was provided; the original called
    # join(folder, ...) unconditionally, which fails for folder=None.
    if folder is not None:
        df.to_csv(join(folder, 'logs_db.csv'), index=False)
    return df
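
# Hypothetical usage sketch (not part of the original module): the `statuses`
# frame and the report folder below are made-up values illustrating how
# `logs` might be called after a pipeline run.
def _example_dump_logs():  # pragma: no cover
    import os
    os.makedirs('/tmp/report', exist_ok=True)
    statuses = pd.DataFrame([{'status': 'SUCCESS'}, {'status': 'QUEUED'}])
    df = logs(statuses, folder='/tmp/report')
    if df is not None:
        print(df.head())
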
def describe_logs(
    dag: int,
    axis,
    max_log_text: Optional[int] = None,
    log_count: int = 5,
    col_widths: Optional[List[float]] = None
):
    # Render the last `log_count` log records of a DAG as a matplotlib table
    # drawn on `axis`; error records are collected and returned.
    columns = ['Component', 'Level', 'Task', 'Time', 'Text']
    provider = LogProvider()
    logs = provider.last(log_count, dag=dag)

    res = []
    cells = []
    cells_colours = []
    for log, task_id in logs:
        component = to_snake(ComponentType(log.component).name)
        level = {10: 'debug', 20: 'info', 30: 'warning'}.get(log.level, 'error')

        message = log.message
        if max_log_text:
            message = message[:max_log_text]

        cells.append([
            component,
            level,
            str(task_id),
            log.time.strftime('%m.%d %H:%M:%S'),
            message
        ])

        level_color = {
            'info': 'lightblue',
            'warning': 'lightyellow',
            'error': 'red'
        }.get(level, 'white')
        cells_colours.append(['white', level_color, 'white', 'white', 'white'])

        if level == 'error':
            res.append(log)

    col_widths = col_widths or [0.2, 0.1, 0.25, 0.2, 0.45]
    if len(cells) > 0:
        table = axis.table(
            cellText=cells,
            colLabels=columns,
            cellColours=cells_colours,
            cellLoc='center',
            colWidths=col_widths,
            bbox=[0, 0, 1, 1.0],
            loc='center'
        )
        table.auto_set_font_size(False)
        table.set_fontsize(14)

    axis.set_xticks([])
    axis.axis('off')
    axis.set_title('Logs')
    return res
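
# Hypothetical usage sketch: the dag id and output path are assumptions made
# for illustration; they show how `describe_logs` might be used to render the
# log table into a report figure.
def _example_describe_logs():  # pragma: no cover
    import matplotlib.pyplot as plt

    fig, ax = plt.subplots(figsize=(12, 3))
    errors = describe_logs(dag=1, axis=ax, max_log_text=80, log_count=5)
    print(f'{len(errors)} error record(s) in the table')
    fig.savefig('/tmp/dag_logs.png', bbox_inches='tight')
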
def get(self, filter: dict, options: PaginatorOptions):
    # Join Step and Task so the result can be filtered by and serialized with
    # their fields; outer joins keep logs that have no step/task attached.
    query = self.query(Log, Step, Task). \
        join(Step, Step.id == Log.step, isouter=True). \
        join(Task, Task.id == Log.task, isouter=True)

    if filter.get('message'):
        query = query.filter(Log.message.contains(filter['message']))

    if filter.get('dag'):
        query = query.filter(Task.dag == filter['dag'])

    if filter.get('task'):
        # Include the task itself as well as all of its child tasks.
        child_tasks = self.query(Task.id). \
            filter(Task.parent == filter['task']).all()
        child_tasks = [c[0] for c in child_tasks]
        child_tasks.append(filter['task'])
        query = query.filter(Task.id.in_(child_tasks))

    if len(filter.get('components', [])) > 0:
        query = query.filter(Log.component.in_(filter['components']))

    if filter.get('computer'):
        # The original filtered on Computer.name without joining the Computer
        # table; Log.computer holds the computer name, so filter on it directly.
        query = query.filter(Log.computer == filter['computer'])

    if len(filter.get('levels', [])) > 0:
        query = query.filter(Log.level.in_(filter['levels']))

    if filter.get('task_name'):
        query = query.filter(Task.name.like(f'%{filter["task_name"]}%'))

    if filter.get('step_name'):
        query = query.filter(Step.name.like(f'%{filter["step_name"]}%'))

    if filter.get('step'):
        query = query.filter(Step.id == filter['step'])

    total = query.count()
    data = []
    for log, step, task in self.paginator(query, options):
        item = {
            'id': log.id,
            'message': log.message.split('\n'),
            'module': log.module,
            'line': log.line,
            'time': self.serializer.serialize_datetime(log.time),
            'level': log_name(log.level),
            'component': to_snake(ComponentType(log.component).name),
            'computer': log.computer,
            'step': self.to_dict(step) if step else None,
            'task': self.to_dict(task, rules=('-additional_info', ))
            if task else None
        }
        data.append(item)

    return {'total': total, 'data': data}
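
# Hypothetical usage sketch: it assumes this `get` method belongs to
# LogProvider and that PaginatorOptions accepts page_number/page_size; both
# are assumptions, adjust them to the actual classes in this project.
def _example_query_logs():  # pragma: no cover
    provider = LogProvider()
    page = provider.get(
        filter={'dag': 1, 'levels': [30, 40], 'message': 'CUDA'},
        options=PaginatorOptions(page_number=0, page_size=20)
    )
    print(page['total'])
    for row in page['data']:
        print(row['time'], row['level'], row['message'][0])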