def search(self, marker=0, nodes=None, owner=None, time_from=None,
           time_to=None, **kwargs):
    """Search cached task runs and return the matching tasks.

    :param marker: pagination/scan marker for the cache search (coerced to int)
    :param nodes: node-name filter; a list, or a string separated by
        whitespace, commas, or semicolons
    :param owner: restrict results to this owner
    :param time_from: lower bound of the time window (epoch seconds,
        coerced to float when possible)
    :param time_to: upper bound of the time window (epoch seconds,
        coerced to float when possible)
    :param kwargs: ``filter`` carries the free-text search pattern
    :returns: ``O.tasks(...)`` with no groups when nothing matched,
        otherwise the result of :meth:`all` restricted to the matched
        run UUIDs
    """
    pattern = kwargs.get('filter')
    marker = int(marker)
    if nodes:
        if not isinstance(nodes, list):
            # Split on any run of whitespace/comma/semicolon separators and
            # drop empty entries so inputs like "a, b" don't yield '' nodes.
            nodes = [n for n in re.split(r'[\s,;]+', nodes) if n]
    cache = CacheRegistry()
    filtered_uuids = []
    # Best-effort coercion of the time window; on failure the original
    # values are passed through unchanged (cache layer decides what to do).
    try:
        if time_from:
            time_from = float(time_from)
        if time_to:
            time_to = float(time_to)
    except (TypeError, ValueError):
        pass
    with cache.reader(request.user.org) as c:
        marker, filtered_uuids = c.search(
            marker=marker, nodes=nodes, owner=owner,
            pattern=pattern, start=time_from, end=time_to)
    if not filtered_uuids:
        # Nothing matched: return an empty result set with a reset etag.
        return O.tasks(etag=0, marker=marker, groups=[])
    # filtered_uuids maps run UUID -> score; only the keys are needed here.
    return self.all(run_uuids=filtered_uuids.keys(), marker=marker)
def all(self, nodes=None, run_uuids=None, etag=None, marker=None,
        page=None, deployment=None, **kwargs):
    """Return one page of visible task groups with their serialized tasks.

    Filters are combined by intersection: explicit ``run_uuids``, runs
    belonging to ``deployment``, an ``etag`` time cutoff, and ``nodes``.

    :param nodes: comma-separated node names (validated via
        ``valid_node_name``)
    :param run_uuids: run UUIDs to restrict to (list or comma-separated
        string)
    :param etag: millisecond timestamp; only tasks created at/after it
        are returned and the cache is consulted for newer run UUIDs
    :param marker: opaque pagination marker, echoed back in the response
    :param page: 1-based page number; invalid values fall back to page 1
    :param deployment: restrict to runs of this deployment name
    :returns: ``O.tasks(...)`` with etag, marker, page count, and task
        groups sorted by creation time (newest first)
    """
    etag = int(etag) if etag else 0
    start, end = 0, PAGE_SIZE
    # Best-effort paging: a malformed `page` keeps the first-page window.
    try:
        start = max(0, (int(page or 1) - 1) * PAGE_SIZE)
        end = start + PAGE_SIZE
    except (TypeError, ValueError):
        pass
    cache = CacheRegistry()
    run_uuids = run_uuids or []
    max_score = 0
    uuids = []
    total_logs = 0
    with cache.reader(request.user.org) as c:
        if etag:
            # Only runs newer than the client's etag are of interest.
            max_score, uuids = c.get_uuid_by_score(min_score=etag)
        total_logs = c.log_count()
    if run_uuids:
        if not isinstance(run_uuids, list):
            run_uuids = run_uuids.split(",")
        # Intersect with the cache's etag-filtered UUIDs when present.
        if uuids:
            uuids = set(run_uuids).intersection(set(uuids))
        else:
            uuids = run_uuids
    if deployment:
        # [''] sentinel forces an empty match instead of "no filter"
        # when the deployment has no runs.
        script_uuids = request.db.query(distinct(Run.uuid)).join(
            Task, TaskGroup, Deployment).filter(
            Deployment.name == deployment).all() or ['']
        if uuids:
            uuids = set(script_uuids).intersection(set(uuids))
        else:
            uuids = script_uuids
    groups = TaskGroup.unique(request).order_by(Task.created_at.desc())
    if etag:
        # etag is in milliseconds; convert to a naive UTC datetime.
        # NOTE(review): utcfromtimestamp is deprecated in 3.12+ — kept for
        # behavior compatibility with the rest of the file.
        ts = datetime.utcfromtimestamp(etag / 1000.0)
        groups = groups.filter(Task.created_at >= ts)
    if uuids:
        groups = groups.filter(Run.uuid.in_(uuids))
    if nodes:
        nodes = nodes.split(',')
        # valid_node_name returns a single name for one arg, a list for many.
        if len(nodes) == 1:
            nodes = [valid_node_name(*nodes)]
        else:
            nodes = valid_node_name(*nodes)
        if nodes:
            groups = groups.filter(RunNode.name.in_(nodes))
    group_ids = groups.all()[start:end]
    tasks = Task.visible(request)
    if group_ids:
        tasks = tasks.filter(Task.taskgroup_id.in_(
            [g[0] for g in group_ids])).all()
    else:
        tasks = []
    task_list = []
    task_map = {}

    def serialize(t):
        # Serialize a task, flattening the related objects the UI needs.
        return t.serialize(
            skip=['owner_id', 'revision_id', 'full_script',
                  'timeout', 'taskgroup_id'],
            rel=[('taskgroup_id', 'group'),
                 ('script_content.version', 'revision'),
                 ('owner.username', 'owner'),
                 ('runs', 'nodes', map_nodes)])

    def walk(t):
        # Depth-first walk that attaches subtasks under their parent's
        # serialized dict; root tasks go straight into task_list.
        ser = serialize(t)
        task_map[t.id] = ser
        if not t.parent_id:
            if t.group.deployment:
                ser['deployment'] = t.group.deployment.serialize(
                    skip=["id", "content", "owner_id"])
            else:
                ser['deployment'] = dict(name=t.script_name)
            task_list.append(ser)
        else:
            parent = task_map.get(t.parent_id)
            # Guard against attaching the same subtask twice.
            if parent and not any(s for s in parent.get("subtasks", [])
                                  if s['id'] == ser['id']):
                parent.setdefault("subtasks", []).append(ser)
        for sub in t.children:
            walk(sub)

    for t in tasks:
        walk(t)
    pages = int(total_logs / PAGE_SIZE)
    return O.tasks(etag=max_score, marker=marker, pages=pages,
                   groups=sorted(task_list,
                                 key=lambda t: t['created_at'],
                                 reverse=True))