Example #1
def cleanup_task(task: Task, force: bool = False):
    """
    Validate task deletion and delete/modify all its output.
    :param task: task object
    :param force: whether to delete a task that has published outputs
    :return: counts of deleted and modified items
    """
    models, child_tasks = get_outputs_for_deletion(task, force)
    deleted_task_id = trash_task_id(task.id)
    if child_tasks:
        with TimingContext("mongo", "update_task_children"):
            updated_children = child_tasks.update(parent=deleted_task_id)
    else:
        updated_children = 0

    if models.draft:
        with TimingContext("mongo", "delete_models"):
            deleted_models = models.draft.objects().delete()
    else:
        deleted_models = 0

    if models.published:
        with TimingContext("mongo", "update_task_models"):
            updated_models = models.published.objects().update(task=deleted_task_id)
    else:
        updated_models = 0

    event_bll.delete_task_events(task.company, task.id, allow_locked=force)

    return CleanupResult(
        deleted_models=deleted_models,
        updated_children=updated_children,
        updated_models=updated_models,
    )
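A minimal caller sketch for the example above, assuming the same mongoengine Task document and the CleanupResult fields shown; the helper name and printout are illustrative only:

# Hypothetical caller: fetch a task, clean it up, and report the resulting counts.
def delete_task_and_report(task_id: str) -> None:
    task = Task.objects(id=task_id).first()  # same Task document model as above
    if not task:
        return
    result = cleanup_task(task, force=True)  # force=True also handles published outputs
    print(
        f"deleted models: {result.deleted_models}, "
        f"updated children: {result.updated_children}, "
        f"updated models: {result.updated_models}"
    )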
Example #2
def verify_task_children_and_ouptuts(task: Task,
                                     force: bool) -> TaskOutputs[Model]:
    if not force:
        with TimingContext("mongo", "count_published_children"):
            published_children_count = Task.objects(
                parent=task.id, status=TaskStatus.published).count()
            if published_children_count:
                raise errors.bad_request.TaskCannotBeDeleted(
                    "has children, use force=True",
                    task=task.id,
                    children=published_children_count,
                )

    with TimingContext("mongo", "get_task_models"):
        models = TaskOutputs(
            attrgetter("ready"),
            Model,
            Model.objects(task=task.id).only("id", "task", "ready"),
        )
        if not force and models.published:
            raise errors.bad_request.TaskCannotBeDeleted(
                "has output models, use force=True",
                task=task.id,
                models=len(models.published),
            )

    if task.models and task.models.output:
        with TimingContext("mongo", "get_task_output_model"):
            model_ids = [m.model for m in task.models.output]
            for output_model in Model.objects(id__in=model_ids):
                if output_model.ready:
                    if not force:
                        raise errors.bad_request.TaskCannotBeDeleted(
                            "has output model, use force=True",
                            task=task.id,
                            model=output_model.id,
                        )
                    models.published.append(output_model)
                else:
                    models.draft.append(output_model)

    if models.draft:
        with TimingContext("mongo", "get_execution_models"):
            model_ids = models.draft.ids
            dependent_tasks = Task.objects(
                models__input__model__in=model_ids).only("id", "models")
            input_models = {
                m.model
                for m in chain.from_iterable(
                    t.models.input for t in dependent_tasks if t.models)
            }
            if input_models:
                models.draft = DocumentGroup(
                    Model,
                    (m for m in models.draft if m.id not in input_models))

    return models
Example #3
def _validate_and_get_task_from_call(call: APICall, **kwargs) -> Tuple[Task, dict]:
    with translate_errors_context(
        field_does_not_exist_cls=errors.bad_request.ValidationError
    ), TimingContext("code", "parse_call"):
        fields = prepare_create_fields(call, **kwargs)
        task = task_bll.create(call, fields)

    with TimingContext("code", "validate"):
        task_bll.validate(task)

    return task, fields
Example #4
    def scroll_task_events(
        self,
        company_id: str,
        task_id: str,
        order: str,
        event_type: EventType,
        batch_size=10000,
        scroll_id=None,
    ):
        if scroll_id == self.empty_scroll:
            return [], scroll_id, 0

        if scroll_id:
            with translate_errors_context(), TimingContext(
                    "es", "task_log_events"):
                es_res = self.es.scroll(scroll_id=scroll_id, scroll="1h")
        else:
            size = min(batch_size, 10000)
            if check_empty_data(self.es,
                                company_id=company_id,
                                event_type=event_type):
                return [], None, 0

            es_req = {
                "size": size,
                "sort": {
                    "timestamp": {
                        "order": order
                    }
                },
                "query": {
                    "bool": {
                        "must": [{
                            "term": {
                                "task": task_id
                            }
                        }]
                    }
                },
            }

            with translate_errors_context(), TimingContext(
                    "es", "scroll_task_events"):
                es_res = search_company_events(
                    self.es,
                    company_id=company_id,
                    event_type=event_type,
                    body=es_req,
                    scroll="1h",
                )

        events, total_events, next_scroll_id = self._get_events_from_es_res(
            es_res)
        return events, next_scroll_id, total_events
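A hedged usage sketch of the scroll pattern above: keep calling with the returned scroll_id until a page comes back empty (the generator and its name are illustrative, not part of the original code):

# Hypothetical helper: page through all events of a task using the scroll API.
def iterate_task_events(event_bll, company_id: str, task_id: str, event_type: EventType):
    scroll_id = None  # first call opens the scroll, later calls resume it
    while True:
        events, scroll_id, _total = event_bll.scroll_task_events(
            company_id=company_id,
            task_id=task_id,
            order="asc",
            event_type=event_type,
            batch_size=1000,
            scroll_id=scroll_id,
        )
        if not events:
            break
        yield from events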
Example #5
    def get_last_iters(
        self, company_id: str, event_type: EventType, task_id: str, iters: int
    ):
        if check_empty_data(self.es, company_id=company_id, event_type=event_type):
            return []

        es_req: dict = {
            "size": 0,
            "aggs": {
                "iters": {
                    "terms": {
                        "field": "iter",
                        "size": iters,
                        "order": {"_key": "desc"},
                    }
                }
            },
            "query": {"bool": {"must": [{"term": {"task": task_id}}]}},
        }

        with translate_errors_context(), TimingContext("es", "task_last_iter"):
            es_res = search_company_events(
                self.es, company_id=company_id, event_type=event_type, body=es_req
            )

        if "aggregations" not in es_res:
            return []

        return [b["key"] for b in es_res["aggregations"]["iters"]["buckets"]]
Example #6
    def edit_configuration(
        cls,
        company_id: str,
        task_id: str,
        configuration: Sequence[Configuration],
        replace_configuration: bool,
        force: bool,
    ) -> int:
        with TimingContext("mongo", "edit_configuration"):
            task = get_task_for_update(company_id=company_id,
                                       task_id=task_id,
                                       force=force)

            update_cmds = dict()
            configuration = {
                ParameterKeyEscaper.escape(c.name):
                ConfigurationItem(**c.to_struct())
                for c in configuration
            }
            if replace_configuration:
                update_cmds["set__configuration"] = configuration
            else:
                for name, value in configuration.items():
                    update_cmds[
                        f"set__configuration__{mongoengine_safe(name)}"] = value

            return update_task(task, update_cmds=update_cmds)
Example #7
def _delete_tasks(company: str, projects: Sequence[str]) -> Tuple[int, Set, Set]:
    """
    Delete only the tasks themselves and their non-published versions.
    Child models under the same project are deleted separately.
    Child tasks should be deleted in the same API call.
    If any child entities are left in other projects, update their parent task to None.
    """
    tasks = Task.objects(project__in=projects).only("id", "execution__artifacts")
    if not tasks:
        return 0, set(), set()

    task_ids = {t.id for t in tasks}
    with TimingContext("mongo", "delete_tasks_update_children"):
        Task.objects(parent__in=task_ids, project__nin=projects).update(parent=None)
        Model.objects(task__in=task_ids, project__nin=projects).update(task=None)

    event_urls, artifact_urls = set(), set()
    for task in tasks:
        event_urls.update(collect_debug_image_urls(company, task.id))
        event_urls.update(collect_plot_image_urls(company, task.id))
        if task.execution and task.execution.artifacts:
            artifact_urls.update(
                {
                    a.uri
                    for a in task.execution.artifacts.values()
                    if a.mode == ArtifactModes.output and a.uri
                }
            )

    event_bll.delete_multi_task_events(company, list(task_ids))
    deleted = tasks.delete()
    return deleted, event_urls, artifact_urls
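A brief caller sketch for the helper above, assuming it runs as one step of a project deletion flow; the wrapper name and what happens to the collected URLs are assumptions:

# Hypothetical project-deletion step: delete the tasks, then hand the collected
# event/artifact URLs to whatever cleanup removes the stored files.
def delete_project_tasks(company: str, project_ids: Sequence[str]):
    deleted_count, event_urls, artifact_urls = _delete_tasks(company, project_ids)
    urls_to_remove = event_urls | artifact_urls
    return deleted_count, urls_to_remove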
Example #8
    def add_or_update_artifacts(
        cls,
        company_id: str,
        task_id: str,
        artifacts: Sequence[ApiArtifact],
        force: bool,
    ) -> int:
        with TimingContext("mongo", "update_artifacts"):
            task = get_task_for_update(
                company_id=company_id,
                task_id=task_id,
                force=force,
            )

            artifacts = {
                get_artifact_id(a): Artifact(**a)
                for a in (api_artifact.to_struct()
                          for api_artifact in artifacts)
            }

            update_cmds = {
                f"set__execution__artifacts__{mongoengine_safe(name)}": value
                for name, value in artifacts.items()
            }
            return update_task(task, update_cmds=update_cmds)
Example #9
    def move_under_project(
        cls,
        entity_cls: Type[Document],
        user: str,
        company: str,
        ids: Sequence[str],
        project: str = None,
        project_name: str = None,
    ):
        """
        Move a batch of entities to `project` or a project named `project_name` (created if it does not exist)
        """
        with TimingContext("mongo", "move_under_project"):
            project = cls.find_or_create(
                user=user,
                company=company,
                project_id=project,
                project_name=project_name,
                description="Auto-generated during move",
            )
            extra = ({
                "set__last_change": datetime.utcnow()
            } if hasattr(entity_cls, "last_change") else {})
            entity_cls.objects(company=company,
                               id__in=ids).update(set__project=project,
                                                  **extra)

            return project
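A short usage sketch, assuming the class above is exposed as ProjectBLL (the name is an assumption) and that Task documents are the entities being moved:

# Move a batch of tasks into a project by name, creating the project if needed.
def move_tasks_to_project(user_id: str, company_id: str, task_ids: Sequence[str]) -> str:
    return ProjectBLL.move_under_project(  # ProjectBLL name assumed here
        entity_cls=Task,
        user=user_id,
        company=company_id,
        ids=task_ids,
        project_name="Moved experiments",
    )  # returns the target project id found or created by find_or_create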
Example #10
    def get_by_id(
        company_id,
        task_id,
        required_status=None,
        only_fields=None,
        allow_public=False,
    ):
        if only_fields:
            if isinstance(only_fields, string_types):
                only_fields = [only_fields]
            else:
                only_fields = list(only_fields)
            only_fields = only_fields + ["status"]

        with TimingContext("mongo", "task_by_id_all"):
            tasks = Task.get_many(
                company=company_id,
                query=Q(id=task_id),
                allow_public=allow_public,
                override_projection=only_fields,
                return_dicts=False,
            )
            task = None if not tasks else tasks[0]

        if not task:
            raise errors.bad_request.InvalidTaskId(id=task_id)

        if required_status and not task.status == required_status:
            raise errors.bad_request.InvalidTaskStatus(
                expected=required_status)

        return task
Example #11
    def assert_exists(company_id,
                      task_ids,
                      only=None,
                      allow_public=False,
                      return_tasks=True) -> Optional[Sequence[Task]]:
        task_ids = [task_ids] if isinstance(task_ids,
                                            six.string_types) else task_ids
        with translate_errors_context(), TimingContext("mongo", "task_exists"):
            ids = set(task_ids)
            q = Task.get_many(
                company=company_id,
                query=Q(id__in=ids),
                allow_public=allow_public,
                return_dicts=False,
            )
            if only:
                # Make sure to reset fields filters (some fields are excluded by default) since this
                # is an internal call and specific fields were requested.
                q = q.all_fields().only(*only)

            if q.count() != len(ids):
                raise errors.bad_request.InvalidTaskId(ids=task_ids)

            if return_tasks:
                return list(q)
Example #12
    def get_active_users(
        cls,
        company,
        project_ids: Sequence[str],
        user_ids: Optional[Sequence[str]] = None,
    ) -> Set[str]:
        """
        Get the set of user ids that created tasks/models in the given projects.
        If project_ids is empty then all projects are examined.
        If user_ids are passed then only a subset of these users is returned.
        """
        with TimingContext("mongo", "active_users_in_projects"):
            query = Q(company=company)
            if user_ids:
                query &= Q(user__in=user_ids)

            projects_query = query
            if project_ids:
                project_ids = _ids_with_children(project_ids)
                query &= Q(project__in=project_ids)
                projects_query &= Q(id__in=project_ids)

            res = set(Project.objects(projects_query).distinct(field="user"))
            for cls_ in (Task, Model):
                res |= set(cls_.objects(query).distinct(field="user"))

            return res
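A small caller sketch for the query above; the ProjectBLL class name is an assumption:

def users_active_in(company_id: str, project_ids: Sequence[str]) -> Set[str]:
    # Ids of users who created tasks or models under these projects
    # (sub-projects are included by the method itself via _ids_with_children).
    return ProjectBLL.get_active_users(company=company_id, project_ids=project_ids)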
Example #13
    def edit_params(
        cls,
        company_id: str,
        task_id: str,
        hyperparams: Sequence[HyperParamItem],
        replace_hyperparams: str,
        force: bool,
    ) -> int:
        with TimingContext("mongo", "edit_hyperparams"):
            properties_only = cls._normalize_params(hyperparams)
            task = get_task_for_update(
                company_id=company_id,
                task_id=task_id,
                allow_all_statuses=properties_only,
                force=force,
            )

            update_cmds = dict()
            hyperparams = cls._db_dicts_from_list(hyperparams)
            if replace_hyperparams == ReplaceHyperparams.all:
                update_cmds["set__hyperparams"] = hyperparams
            elif replace_hyperparams == ReplaceHyperparams.section:
                for section, value in hyperparams.items():
                    update_cmds[
                        f"set__hyperparams__{mongoengine_safe(section)}"] = value
            else:
                for section, section_params in hyperparams.items():
                    for name, value in section_params.items():
                        update_cmds[
                            f"set__hyperparams__{section}__{mongoengine_safe(name)}"] = value

            return update_task(task,
                               update_cmds=update_cmds,
                               set_last_update=not properties_only)
Example #14
    def delete_task_events(self, company_id, task_id, allow_locked=False):
        with translate_errors_context():
            extra_msg = None
            query = Q(id=task_id, company=company_id)
            if not allow_locked:
                query &= Q(status__nin=LOCKED_TASK_STATUSES)
                extra_msg = "or task published"
            res = Task.objects(query).only("id").first()
            if not res:
                raise errors.bad_request.InvalidTaskId(extra_msg,
                                                       company=company_id,
                                                       id=task_id)

        es_req = {"query": {"term": {"task": task_id}}}
        with translate_errors_context(), TimingContext("es",
                                                       "delete_task_events"):
            es_res = delete_company_events(
                es=self.es,
                company_id=company_id,
                event_type=EventType.all,
                body=es_req,
                refresh=True,
            )

        return es_res.get("deleted", 0)
Example #15
    def get_vector_metrics_per_iter(self, company_id, task_id, metric, variant):
        event_type = EventType.metrics_vector
        if check_empty_data(self.es, company_id=company_id, event_type=event_type):
            return [], []

        es_req = {
            "size": 10000,
            "query": {
                "bool": {
                    "must": [
                        {"term": {"task": task_id}},
                        {"term": {"metric": metric}},
                        {"term": {"variant": variant}},
                    ]
                }
            },
            "_source": ["iter", "value"],
            "sort": ["iter"],
        }
        with translate_errors_context(), TimingContext("es", "task_stats_vector"):
            es_res = search_company_events(
                self.es, company_id=company_id, event_type=event_type, body=es_req
            )

        vectors = []
        iterations = []
        for hit in es_res["hits"]["hits"]:
            vectors.append(hit["_source"]["value"])
            iterations.append(hit["_source"]["iter"])

        return iterations, vectors
Example #16
    def update(cls, company: str, project_id: str, **fields):
        with TimingContext("mongo", "projects_update"):
            project = Project.get_for_writing(company=company, id=project_id)
            if not project:
                raise errors.bad_request.InvalidProjectId(id=project_id)

            new_name = fields.pop("name", None)
            if new_name:
                new_name, new_location = _validate_project_name(new_name)
                old_name, old_location = _validate_project_name(project.name)
                if new_location != old_location:
                    raise errors.bad_request.CannotUpdateProjectLocation(
                        name=new_name)
                fields["name"] = new_name

            fields["last_update"] = datetime.utcnow()
            updated = project.update(upsert=False, **fields)

            if new_name:
                old_name = project.name
                project.name = new_name
                children = _get_sub_projects([project.id],
                                             _only=("id", "name",
                                                    "path"))[project.id]
                _update_subproject_names(project=project,
                                         children=children,
                                         old_name=old_name)

            return updated
Example #17
    def get_queue_metrics(
        self,
        company_id: str,
        from_date: float,
        to_date: float,
        interval: int,
        queue_ids: Sequence[str],
    ) -> dict:
        """
        Get the company queue metrics in the specified time range, returned as
        date histograms of average values per queue and metric type.
        The from_date is extended by 'metrics_before_from_date' seconds from
        queues.conf to compensate for a possibly small number of points; the
        default extension is 3600s. If no queue ids are specified, the average
        across all the company queues is calculated for each metric.
        """
        # self._log_current_metrics(company, queue_ids=queue_ids)

        if from_date >= to_date:
            raise bad_request.FieldsValueError(
                "from_date must be less than to_date")

        seconds_before = config.get("services.queues.metrics_before_from_date",
                                    3600)
        must_terms = [
            QueryBuilder.dates_range(from_date - seconds_before, to_date)
        ]
        if queue_ids:
            must_terms.append(QueryBuilder.terms("queue", queue_ids))

        es_req = {
            "size": 0,
            "query": {
                "bool": {
                    "must": must_terms
                }
            },
            "aggs": self._get_dates_agg(interval),
        }

        with translate_errors_context(), TimingContext("es",
                                                       "get_queue_metrics"):
            res = self._search_company_metrics(company_id, es_req)

        if "aggregations" not in res:
            return {}

        date_metrics = [
            dict(
                timestamp=d["key"],
                queue_metrics=self._extract_queue_metrics(
                    d["queues"]["buckets"]),
            ) for d in res["aggregations"]["dates"]["buckets"]
            if d["doc_count"] > 0
        ]
        if queue_ids:
            return self._datetime_histogram_per_queue(date_metrics)

        return self._average_datetime_histogram(date_metrics)
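A hedged usage sketch of the metrics query above, requesting the last 24 hours in 5-minute buckets; the queue BLL instance name is illustrative:

import time

def last_day_queue_metrics(queue_bll, company_id: str, queue_ids: Sequence[str]) -> dict:
    # Date histogram of average queue metrics over the past 24 hours,
    # bucketed every 300 seconds.
    now = time.time()
    return queue_bll.get_queue_metrics(
        company_id=company_id,
        from_date=now - 24 * 3600,
        to_date=now,
        interval=300,
        queue_ids=queue_ids,
    )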
Example #18
def get_output_model(task, force=False):
    with TimingContext("mongo", "get_task_output_model"):
        output_model = Model.objects(id=task.output.model).first()
    if output_model and output_model.ready and not force:
        raise errors.bad_request.TaskCannotBeDeleted(
            "has output model, use force=True", task=task.id, model=task.output.model
        )
    return output_model
Example #19
    def get_last_iterations_per_event_metric_variant(
        self,
        company_id: str,
        task_id: str,
        num_last_iterations: int,
        event_type: EventType,
    ):
        if check_empty_data(self.es, company_id=company_id, event_type=event_type):
            return []

        es_req: dict = {
            "size": 0,
            "aggs": {
                "metrics": {
                    "terms": {
                        "field": "metric",
                        "size": EventSettings.max_metrics_count,
                        "order": {"_key": "asc"},
                    },
                    "aggs": {
                        "variants": {
                            "terms": {
                                "field": "variant",
                                "size": EventSettings.max_variants_count,
                                "order": {"_key": "asc"},
                            },
                            "aggs": {
                                "iters": {
                                    "terms": {
                                        "field": "iter",
                                        "size": num_last_iterations,
                                        "order": {"_key": "desc"},
                                    }
                                }
                            },
                        }
                    },
                }
            },
            "query": {"bool": {"must": [{"term": {"task": task_id}}]}},
        }

        with translate_errors_context(), TimingContext(
            "es", "task_last_iter_metric_variant"
        ):
            es_res = search_company_events(
                self.es, company_id=company_id, event_type=event_type, body=es_req
            )

        if "aggregations" not in es_res:
            return []

        return [
            (metric["key"], variant["key"], iter["key"])
            for metric in es_res["aggregations"]["metrics"]["buckets"]
            for variant in metric["variants"]["buckets"]
            for iter in variant["iters"]["buckets"]
        ]
Example #20
    def _get(self,
             company: str,
             user: str = "*",
             worker_id: str = "*") -> Sequence[WorkerEntry]:
        """Get worker entries matching the company and the user/worker patterns"""
        match = self._get_worker_key(company, user, worker_id)
        with TimingContext("redis", "workers_get_all"):
            res = self.redis.scan_iter(match)
        return [WorkerEntry.from_json(self.redis.get(r)) for r in res]
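A tiny caller sketch: because _get matches by Redis key pattern, passing only the company lists every worker for it (the worker BLL instance name is illustrative):

def list_company_workers(worker_bll, company_id: str) -> Sequence[WorkerEntry]:
    # The default "*" user and worker_id keep the Redis key pattern fully wildcarded.
    return worker_bll._get(company_id)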
Example #21
    def get_metrics_and_variants(self, company_id: str, task_id: str,
                                 event_type: EventType):
        if check_empty_data(self.es,
                            company_id=company_id,
                            event_type=event_type):
            return {}

        es_req = {
            "size": 0,
            "aggs": {
                "metrics": {
                    "terms": {
                        "field": "metric",
                        "size": EventSettings.max_metrics_count,
                        "order": {
                            "_key": "asc"
                        },
                    },
                    "aggs": {
                        "variants": {
                            "terms": {
                                "field": "variant",
                                "size": EventSettings.max_variants_count,
                                "order": {
                                    "_key": "asc"
                                },
                            }
                        }
                    },
                }
            },
            "query": {
                "bool": {
                    "must": [{
                        "term": {
                            "task": task_id
                        }
                    }]
                }
            },
        }

        with translate_errors_context(), TimingContext(
                "es", "events_get_metrics_and_variants"):
            es_res = search_company_events(self.es,
                                           company_id=company_id,
                                           event_type=event_type,
                                           body=es_req)

        metrics = {}
        for metric_bucket in es_res["aggregations"]["metrics"].get("buckets"):
            metric = metric_bucket["key"]
            metrics[metric] = [
                b["key"] for b in metric_bucket["variants"].get("buckets")
            ]

        return metrics
Example #22
def create(call: APICall, company_id, req_model: CreateRequest):
    task, fields = _validate_and_get_task_from_call(call)

    with translate_errors_context(), TimingContext("mongo", "save_task"):
        task.save()
        _update_cached_tags(company_id, project=task.project, fields=fields)
        update_project_time(task.project)

    call.result.data_model = IdResponse(id=task.id)
Example #23
def get_by_id_ex(call: APICall, company_id, _):
    conform_tag_fields(call, call.data)
    with translate_errors_context():
        with TimingContext("mongo", "models_get_by_id_ex"):
            models = Model.get_many_with_join(company=company_id,
                                              query_dict=call.data,
                                              allow_public=True)
        conform_output_tags(call, models)
        call.result.data = {"models": models}
Example #24
def get_all_ex(call: APICall, company_id: str, request: ProjectsGetRequest):
    conform_tag_fields(call, call.data)
    allow_public = not request.non_public
    data = call.data
    requested_ids = data.get("id")
    with TimingContext("mongo", "projects_get_all"):
        data = call.data
        if request.active_users:
            ids = project_bll.get_projects_with_active_user(
                company=company_id,
                users=request.active_users,
                project_ids=requested_ids,
                allow_public=allow_public,
            )
            if not ids:
                call.result.data = {"projects": []}
                return
            data["id"] = ids

        _adjust_search_parameters(data, shallow_search=request.shallow_search)

        projects = Project.get_many_with_join(
            company=company_id,
            query_dict=data,
            allow_public=allow_public,
        )

        if request.check_own_contents and requested_ids:
            existing_requested_ids = {
                project["id"]
                for project in projects if project["id"] in requested_ids
            }
            if existing_requested_ids:
                contents = project_bll.calc_own_contents(
                    company=company_id,
                    project_ids=list(existing_requested_ids))
                for project in projects:
                    project.update(**contents.get(project["id"], {}))

        conform_output_tags(call, projects)
        if not request.include_stats:
            call.result.data = {"projects": projects}
            return

        project_ids = {project["id"] for project in projects}
        stats, children = project_bll.get_project_stats(
            company=company_id,
            project_ids=list(project_ids),
            specific_state=request.stats_for_state,
        )

        for project in projects:
            project["stats"] = stats[project["id"]]
            project["sub_projects"] = children[project["id"]]

        call.result.data = {"projects": projects}
Example #25
    def delete_metadata(cls, obj: Document, keys: Sequence[str],
                        **more_updates) -> int:
        with TimingContext("mongo", "delete_metadata"):
            return obj.update(
                **{
                    f"unset__metadata__{ParameterKeyEscaper.escape(key)}": 1
                    for key in set(keys)
                },
                **more_updates,
            )
Example #26
def get_by_id_ex(call: APICall, company_id, _):
    conform_tag_fields(call, call.data)
    Metadata.escape_query_parameters(call)
    with TimingContext("mongo", "models_get_by_id_ex"):
        models = Model.get_many_with_join(company=company_id,
                                          query_dict=call.data,
                                          allow_public=True)
    conform_output_tags(call, models)
    unescape_metadata(call, models)
    call.result.data = {"models": models}
Example #27
    def _get_valid_tasks(company_id, task_ids: Set, allow_locked_tasks=False) -> Set:
        """Verify that task exists and can be updated"""
        if not task_ids:
            return set()

        with translate_errors_context(), TimingContext("mongo", "task_by_ids"):
            query = Q(id__in=task_ids, company=company_id)
            if not allow_locked_tasks:
                query &= Q(status__nin=LOCKED_TASK_STATUSES)
            res = Task.objects(query).only("id")
            return {r.id for r in res}
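A short sketch of how the validation helper above might be used before a bulk update; in the original it is a method on the same class, shown here as a plain call with an assumed wrapper name:

def split_updatable_tasks(company_id: str, task_ids: Set[str]) -> Tuple[Set[str], Set[str]]:
    # Partition the requested ids into those that can be updated and those that
    # are missing or locked (published), so the caller can report failures.
    valid_ids = _get_valid_tasks(company_id, task_ids)
    return valid_ids, task_ids - valid_ids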
Example #28
def get_outputs_for_deletion(task, force=False):
    with TimingContext("mongo", "get_task_models"):
        models = TaskOutputs(
            attrgetter("ready"),
            Model,
            Model.objects(task=task.id).only("id", "task", "ready"),
        )
        if not force and models.published:
            raise errors.bad_request.TaskCannotBeDeleted(
                "has output models, use force=True",
                task=task.id,
                models=len(models.published),
            )

    if task.output.model:
        output_model = get_output_model(task, force)
        if output_model:
            if output_model.ready:
                models.published.append(output_model)
            else:
                models.draft.append(output_model)

    if models.draft:
        with TimingContext("mongo", "get_execution_models"):
            model_ids = [m.id for m in models.draft]
            dependent_tasks = Task.objects(execution__model__in=model_ids).only(
                "id", "execution.model"
            )
            busy_models = [t.execution.model for t in dependent_tasks]
            models.draft[:] = [m for m in models.draft if m.id not in busy_models]

    with TimingContext("mongo", "get_task_children"):
        tasks = Task.objects(parent=task.id).only("id", "parent", "status")
        published_tasks = [
            task for task in tasks if task.status == TaskStatus.published
        ]
        if not force and published_tasks:
            raise errors.bad_request.TaskCannotBeDeleted(
                "has children, use force=True", task=task.id, children=published_tasks
            )
    return models, tasks
Example #29
    def _query_aggregation_for_task_metrics(
        self,
        company_id: str,
        event_type: EventType,
        aggs: dict,
        task_id: str,
        metrics: Sequence[Tuple[str, str]],
    ) -> dict:
        """
        Return the result of the Elasticsearch query for the given aggregation,
        filtered by the given task_id and metrics
        """
        must = [{"term": {"task": task_id}}]
        if metrics:
            should = [{
                "bool": {
                    "must": [
                        {
                            "term": {
                                "metric": metric
                            }
                        },
                        {
                            "term": {
                                "variant": variant
                            }
                        },
                    ]
                }
            } for metric, variant in metrics]
            must.append({"bool": {"should": should}})

        es_req = {
            "size": 0,
            "query": {
                "bool": {
                    "must": must
                }
            },
            "aggs": aggs,
        }

        with translate_errors_context(), TimingContext("es",
                                                       "task_stats_scalar"):
            es_res = search_company_events(
                self.es,
                company_id=company_id,
                event_type=event_type,
                body=es_req,
            )

        return es_res.get("aggregations")
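A hedged sketch of driving the aggregation helper above; the aggs body (last reported iteration per metric), the events BLL instance name, and the EventType.metrics_scalar value are assumptions:

def last_iter_per_metric(events_bll, company_id: str, task_id: str) -> dict:
    # Ask ES for the maximum reported iteration of each metric of this task.
    aggs = {
        "metrics": {
            "terms": {"field": "metric", "size": 100, "order": {"_key": "asc"}},
            "aggs": {"last_iter": {"max": {"field": "iter"}}},
        }
    }
    return events_bll._query_aggregation_for_task_metrics(
        company_id=company_id,
        event_type=EventType.metrics_scalar,  # assumed scalar event type
        aggs=aggs,
        task_id=task_id,
        metrics=[],  # empty: do not filter by (metric, variant) pairs
    )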
Example #30
def get_by_id_ex(call: APICall, company_id, _):
    conform_tag_fields(call, call.data)

    escape_execution_parameters(call)

    with translate_errors_context():
        with TimingContext("mongo", "task_get_by_id_ex"):
            tasks = Task.get_many_with_join(
                company=company_id, query_dict=call.data, allow_public=True,
            )

        unprepare_from_saved(call, tasks)
        call.result.data = {"tasks": tasks}