Esempio n. 1
0
    def get_train_values(
            cls, field: DocumentField, train_data_project_ids: Optional[List],
            field_values_only: Set[str],
            use_only_confirmed_field_values: bool) -> List[Dict[str, Any]]:
        """
        Collect per-document field-value dicts used as ML train data for ``field``.

        Two selection modes:
        - when ``train_data_project_ids`` is given and unconfirmed values are
          allowed, values are taken from any document of those projects;
        - otherwise only "confirmed" documents are used: those with manually
          modified values unioned with those whose review is finished.

        :param field: field whose detector is being trained
        :param train_data_project_ids: optional project ids narrowing the train set
        :param field_values_only: keep only documents whose value for
               ``field.code`` is contained in this set
        :param use_only_confirmed_field_values: force the confirmed-documents path
        :return: list of {field_code: python_value} dicts, one per selected document
        """
        repo = DocumentFieldRepository()
        fd_repo = FieldDetectionRepository()

        if train_data_project_ids and not use_only_confirmed_field_values:
            # Unconfirmed values allowed: any document of the requested projects.
            return [
                field_values for _doc_id, field_values in
                repo.get_field_code_to_python_value_multiple_docs(
                    document_type_id=field.document_type_id,
                    project_ids=train_data_project_ids,
                    doc_limit=settings.ML_TRAIN_DATA_SET_GROUP_LEN)
                if field_values.get(field.code) in field_values_only
            ]
        else:
            # Confirmed data only: documents with manually modified values
            # plus documents whose review is finished.
            qs_modified_document_ids = fd_repo.get_qs_active_modified_document_ids(
                field, train_data_project_ids)

            qs_finished_document_ids = fd_repo.get_qs_finished_document_ids(
                field.document_type, train_data_project_ids)

            qs_train_doc_ids = qs_modified_document_ids.union(
                qs_finished_document_ids)
            return [
                field_values for _doc_id, field_values in
                repo.get_field_code_to_python_value_multiple_docs(
                    document_type_id=field.document_type_id,
                    doc_ids=qs_train_doc_ids,
                    doc_limit=settings.ML_TRAIN_DATA_SET_GROUP_LEN)
                if field_values.get(field.code) in field_values_only
            ]
Esempio n. 2
0
    def get_document_fields(
            data: DocumentNotificationSource, document_type: DocumentType,
            subscription: DocumentNotificationSubscription) -> None:
        """
        Fill ``data.field_values`` with values stored on the Document itself
        plus the extra fields requested by ``subscription``.

        Field codes are converted to the RawDB naming "pattern" (e.g.
        "document_name" instead of "name", "assignee_name" instead of
        "assignee.name") because existing notification templates reference
        those RawDB codes.
        :param data: notification's data
        :param document_type: DocumentType for associated document
        :param subscription: DocumentNotificationSubscription (refers to extra fields to obtain)
        """
        repo = DocumentFieldRepository()
        if subscription:
            wanted_codes = {f.code for f in subscription.user_fields.all()
                            if f.document_type == document_type}
        else:
            wanted_codes = set()
        wanted_codes.update(ALL_DOCUMENT_FIELD_CODES)

        values_by_code = repo.get_document_own_and_field_values(
            data.document, wanted_codes)
        values_by_code.update(data.field_values)

        # rename codes to their RawDB equivalents, never overwriting existing entries
        for code, value in values_by_code.items():
            raw_db_code = DOC_FIELD_TO_CACHE_FIELD.get(code) or code
            data.field_values.setdefault(raw_db_code, value)
def detect_field_value(log: ProcessLogger,
                       doc: Document,
                       field: DocumentField,
                       save: bool = False) -> Optional[FieldValueDTO]:
    """
    Run the field's configured detection strategy on ``doc`` and optionally
    persist the detected value.

    :param log: process logger for the strategy to report into
    :param doc: document to detect the field value for
    :param field: field whose value should be detected
    :param save: when True and a value was detected, store it on the document
    :return: detected value DTO or None when the strategy found nothing
    """
    repo = DocumentFieldRepository()

    if field.value_detection_strategy:
        strategy = FIELD_DETECTION_STRATEGY_REGISTRY[field.value_detection_strategy]
    else:
        strategy = STRATEGY_DISABLED

    dependency_values = None
    dependency_codes = set(field.get_depends_on_codes())
    if dependency_codes:
        # the strategy may need current values of the fields this one depends on
        dependency_values = repo.get_field_code_to_python_value(
            document_type_id=doc.document_type_id,
            doc_id=doc.pk,
            field_codes_only=dependency_codes)

    dto = strategy.detect_field_value(log, doc, field, dependency_values)
    if save and dto is not None:
        repo.update_field_value_with_dto(document=doc,
                                         field=field,
                                         field_value_dto=dto,
                                         user=None)
    return dto
    def _validate_choice_values_removed(self, context: dict) -> None:
        """
        Raise a ValidationError when the config being imported would drop choice
        values (or unset the "allow other values" flag) while annotations with
        now-invalid values still exist for this field.

        :param context: import context used to look up the previously saved field
        :raises ValidationError: when invalid values exist and no auto-fix is forced
        """
        saved_field = self._get_saved_field(context)
        # only relevant when both the stored and the imported field are choice fields
        if not saved_field or not TypedField.by(saved_field).is_choice_field \
                or not TypedField.by(self.object).is_choice_field:
            return
        err_msg = ''
        invalid_choices = self._get_invalid_choices(saved_field)
        if self._is_allow_values_not_specified_in_choices_was_unset(
                saved_field):
            # fixed message typo: was '... in the the config being imported'
            err_msg += '"Allow values not specified in choices" flag is unset in the config being imported. '
        if invalid_choices:
            invalid_choices = [
                '"{0}"'.format(invalid_choice)
                for invalid_choice in invalid_choices
            ]
            err_msg += 'The following choice values are missing in the config being imported: {0}. ' \
                .format(', '.join(invalid_choices))

        if err_msg:
            # count annotations whose value became invalid, split into
            # user-entered vs automatically detected for the error message
            invalid_values_count = self.object.get_invalid_choice_annotations(
            ).count()
            user_values_count = 0
            detected_values_count = 0
            if invalid_values_count > 0:
                field_repo = DocumentFieldRepository()
                user_values_count = field_repo.get_invalid_choice_vals_count(
                    self.object)
                detected_values_count = self._get_detected_values_count(
                    invalid_values_count, user_values_count)
            err_msg += 'Number of invalid values: user entered values {0}, automatically detected values {1}.' \
                       ' You need to set force auto-fixes option to continue (this option will remove all invalid' \
                       ' values) or make manual updates.'.format(user_values_count, detected_values_count)
            err_msg = 'Unable to update field #{0} "{1}". {2}'.format(
                self.pk, self.object.code, err_msg)
            raise ValidationError(err_msg)
Esempio n. 5
0
def do_save_document_field_value(
        field_val: FieldValueDTO,
        document_id: int,
        field_id: str,
        user) -> \
        Tuple[Document, DocumentField, Dict]:
    """
    Store a posted field value and return the affected document, field and a
    plain dict describing the saved annotation (for API responses / messaging).

    :param field_val: field value DTO to store
    :param document_id: pk of the target document
    :param field_id: uid of the target field
    :param user: user performing the change
    :return: (document, field, summary-dict) tuple
    """
    field_repo = DocumentFieldRepository()
    document = DocumentRepository().get_document_by_id(document_id)
    field = field_repo.get_document_field_by_id(field_id)

    saved_value, annotations = field_repo.update_field_value_with_dto(
        document=document, field=field, field_value_dto=field_val, user=user)
    first_ant = annotations[0]

    summary = {
        'document': document.pk,
        'document_name': document.name,
        'field': field.uid,
        'field_name': field.code,
        'value': saved_value.value,
        'pk': first_ant.pk,
        'project': document.project.name,
        'location_start': first_ant.location_start,
        'location_end': first_ant.location_end,
        'location_text': first_ant.location_text,
        'modified_by': first_ant.modified_by.pk,
        'modified_date': first_ant.modified_date
    }
    return document, field, summary
Esempio n. 6
0
def clean_up_assignees(instance, removed_user_ids):
    """
    Unassign users removed from a project: clear assignee on their documents
    (scheduling rawdb re-indexing of the "assignee" system field) and on their
    field annotations (both true and false-match).

    :param instance: project the users were removed from
    :param removed_user_ids: ids of the removed users
    """
    affected_docs = instance.document_set.filter(
        assignee_id__in=removed_user_ids)
    if affected_docs.exists():
        from apps.rawdb.tasks import plan_reindex_tasks_in_chunks
        from apps.document.repository.document_field_repository import DocumentFieldRepository
        from apps.document.constants import DocumentSystemField

        # capture ids before the bulk update clears the assignee filter match
        doc_ids = list(affected_docs.values_list('pk', flat=True))
        affected_docs.update(assignee=None)
        DocumentFieldRepository().update_docs_assignee(doc_ids, None)
        plan_reindex_tasks_in_chunks(
            doc_ids,
            None,
            cache_system_fields=[DocumentSystemField.assignee.value],
            cache_generic_fields=False,
            cache_user_fields=False)

    from apps.document.models import FieldAnnotation, FieldAnnotationFalseMatch
    for annotation_model in (FieldAnnotation, FieldAnnotationFalseMatch):
        annotation_model.objects.filter(
            document__project=instance,
            assignee_id__in=removed_user_ids).update(assignee=None)
Esempio n. 7
0
def update_documents_assignee_impl(sender, signal, doc_ids: List[int],
                                   new_assignee_id: int,
                                   changed_by_user: User):
    """
    Signal-handler implementation: set the assignee on the given documents and
    schedule rawdb re-indexing of the "assignee" system field.

    :param sender: signal sender (unused)
    :param signal: signal object (unused)
    :param doc_ids: documents to reassign
    :param new_assignee_id: id of the new assignee user
    :param changed_by_user: user who made the change (recorded on the reindex plan)
    """
    from apps.rawdb.tasks import plan_reindex_tasks_in_chunks

    DocumentFieldRepository().update_docs_assignee(doc_ids, new_assignee_id)

    plan_reindex_tasks_in_chunks(
        doc_ids,
        changed_by_user.pk,
        cache_system_fields=[DocumentSystemField.assignee.value],
        cache_generic_fields=False,
        cache_user_fields=False)
Esempio n. 8
0
    def process(self, **kwargs):
        """
        Apply a FieldAnnotationStatus to a set of annotations (by uid).

        A rejected status deletes the matching true annotations (updating the
        owning field values); any other status is written onto the annotations,
        and previously rejected (false-match) annotations are restored.
        Finally, document statuses are recalculated from their annotations.

        Expected kwargs: ``ids`` (annotation uids), ``status_id``, ``user_id``.
        """
        # single import point instead of the previous per-branch duplicated
        # imports of the same repository class in two different styles
        from apps.document.repository.document_field_repository import DocumentFieldRepository

        ant_uids = kwargs.get('ids')
        status_id = kwargs.get('status_id')

        # for preventing "connection already closed"
        TaskUtils.prepare_task_execution()
        ann_status = FieldAnnotationStatus.objects.get(pk=status_id)
        user = User.objects.get(pk=kwargs.get('user_id'))

        true_annotations = FieldAnnotation.objects.filter(uid__in=ant_uids)
        false_annotations = FieldAnnotationFalseMatch.objects.filter(uid__in=ant_uids)

        field_repo = DocumentFieldRepository()
        if ann_status.is_rejected:
            for ant in true_annotations:
                field_repo.delete_field_annotation_and_update_field_value(ant, user)
        else:
            field_repo.update_field_annotations_by_ant_ids(
                ant_uids, [(f'{FIELD_CODE_STATUS_ID}', status_id)])

            if false_annotations:
                # previously rejected annotations get restored under the new status
                for false_ant in false_annotations:
                    field_repo.restore_field_annotation_and_update_field_value(
                        false_ant, status_id, user)

        # recalculate status of every document touched by either kind of annotation
        ant_docs = set(FieldAnnotation.objects.filter(
            uid__in=ant_uids).values_list('document_id', flat=True))
        false_ant_docs = set(FieldAnnotationFalseMatch.objects.filter(
            uid__in=ant_uids).values_list('document_id', flat=True))
        ant_docs.update(false_ant_docs)
        Document.reset_status_from_annotations(ann_status=ann_status,
                                               document_ids=list(ant_docs))
Esempio n. 9
0
class FieldValueViewSet(viewsets.ModelViewSet):
    """
    list: Annotation (Document Field Value) List
    retrieve: Retrieve Annotation (Document Field Value)
    create: Create Annotation (Document Field Value)
    update: Update Annotation (Document Field Value)
    delete: Delete Annotation (Document Field Value)
    """
    # NOTE(review): despite the schema docstring above, only PUT/PATCH are
    # actually exposed (see http_method_names) — list/retrieve/create/delete
    # appear disabled; confirm whether the docstring is intentionally kept
    # for documentation generation.
    # Repository instantiated once at class-definition (import) time and
    # shared by all requests.
    field_repo = DocumentFieldRepository()
    queryset = field_repo.get_all_docfieldvalues()
    serializer_class = FieldValueDTOSerializer
    http_method_names = ['put', 'patch']

    @action(detail=True, methods=['patch'])
    def patch_fields(self, request):
        """
        http.patch
        {
          "field_code1": { "field_value": ....,
                           "annotations": [ {"location_start": ...,
                                             "location_end": ....,
                                             "annotation_value": ....}]
                         },
        }
        """
        res = self.field_repo.update_field_values(request.data)
        return Response(res)

    def update(self, request, pk=None):
        # $http.put()
        # Delegates to the same bulk update as patch_fields; `pk` is not used —
        # the payload itself identifies which field values to update.
        res = self.field_repo.update_field_values(request.data)
        return Response(res)
 def _maybe_save_reverse_similarity_value(self,
                                          log: ProcessLogger,
                                          field: DocumentField,
                                          document: Document,
                                          other_doc_id) -> bool:
     """
     Store the reverse similarity link (other_doc -> document) unless it is
     already present, re-caching the field on the other document.

     :param log: process logger passed through to the field cache
     :param field: similarity field being populated
     :param document: document the similarity was detected for
     :param other_doc_id: pk of the similar document to link back from
     :return: True when a new reverse value was written, False when it already
              existed (the original was annotated ``-> bool`` but returned None).
     """
     field_repo = DocumentFieldRepository()
     if field_repo.field_value_exists(other_doc_id, field.pk, [document.pk]):
         # reverse link already stored - nothing to do
         return False
     other_document = Document.all_objects.get(pk=other_doc_id)
     field_repo.update_field_value_with_dto(document=other_document,
                                            field=field,
                                            field_value_dto=FieldValueDTO(field_value=[document.pk]),
                                            merge=True)
     cache_document_fields(log=log,
                           document=other_document,
                           cache_system_fields=False,
                           cache_generic_fields=False,
                           cache_user_fields=[field.code])
     return True
Esempio n. 11
0
def cleanup_document_relations(document):
    """
    Remove everything related to a document being deleted: history records,
    associated tasks (unless disabled via document metadata) and the stored
    source file.

    :param document: document whose related data should be cleaned up
    """
    # 1. history entries for the document and its field values
    DocumentRepository().delete_document_history_by_ids([document.pk])
    DocumentFieldRepository().delete_document_history_values(document.pk)

    # INFO: skip "delete step" (set delete=False) since we clean tasks periodically now
    # 2. delete Tasks, Task history, TaskResults, child tasks
    cascade_tasks = document.metadata and document.metadata.get('cascade_delete_tasks', True)
    if cascade_tasks:
        task_kwargs = {'file_name': document.name}
        if document.upload_session_id:
            task_kwargs['session_id'] = str(document.upload_session_id)
        for file_task in Task.objects.main_tasks().filter_metadata(**task_kwargs):
            purge_task(file_task.id, delete=False)

    # 3. Remove the stored source file, if any
    if file_storage.document_exists(document.source_path):
        file_storage.delete_document(document.source_path)
Esempio n. 12
0
def _process_documents_status_changed(task: ExtendedTask, doc_ids: List, new_status_id: int, changed_by_user_id: int):
    """
    Handle a bulk review-status change: purge hidden field values for
    documents that became inactive, then fire the status-changed signal.

    :param task: celery task acting as the event sender
    :param doc_ids: pks of the documents whose status changed
    :param new_status_id: pk of the new ReviewStatus
    :param changed_by_user_id: pk of the acting user, or None
    """
    from apps.document.repository.document_field_repository import DocumentFieldRepository

    field_repo = DocumentFieldRepository()
    status = ReviewStatus.objects.get(pk=new_status_id)  # type: ReviewStatus
    documents = Document.objects.filter(pk__in=doc_ids)
    user = User.objects.get(pk=changed_by_user_id) if changed_by_user_id is not None else None

    if not status.is_active:
        # inactive statuses may hide some fields - purge their values
        for document in documents:
            field_repo.delete_hidden_field_values_if_needed(document, event_sender=task)

    fire_documents_status_changed(sender=task,
                                  documents=documents,
                                  new_status_id=new_status_id,
                                  changed_by_user=user)
Esempio n. 13
0
def _notify_field_value_saved(instance: FieldValue, deleted=False):
    """
    Push a websocket notification that a FieldValue was saved (or deleted).

    Skips documents that are not fully processed. The related field may
    already be deleted by the time the signal fires, so the access to
    ``instance.field`` is guarded (matching the hardened variant of this
    handler elsewhere in the codebase).

    :param instance: saved/deleted FieldValue
    :param deleted: True when the value was deleted rather than saved
    """
    if not instance.document.processed:
        return

    from django.core.exceptions import ObjectDoesNotExist
    try:
        field_value = {
            'document': instance.document_id,
            'project_id': instance.document.project_id,
            'field__code': instance.field.code,
            'value': instance.value if not deleted else None
        }
    except ObjectDoesNotExist:
        # the related field was deleted concurrently - nothing to notify about
        import logging
        logging.getLogger(__name__).warning(
            '_notify_field_value_saved is called for field %s, that was probably deleted',
            instance.field_id)
        return

    annotation_stats = DocumentFieldRepository(
    ).get_annotation_stats_by_field_value(instance)

    message = ChannelMessage(
        message_types.CHANNEL_MSG_TYPE_FIELD_VALUE_SAVED, {
            'field_value': field_value,
            'annotation_stats': annotation_stats,
            'user': _get_user_dto(instance)
        })
    notify_on_document_changes(instance.document.pk, message)
Esempio n. 14
0
def process_document_changed(task: ExtendedTask,
                             doc_id: int,
                             system_fields_changed: FieldSpec = True,
                             generic_fields_changed: FieldSpec = True,
                             user_fields_changed: bool = True,
                             changed_by_user_id: int = None):
    """
    React to a single document change: purge hidden field values when the
    status system field changed, then re-fire the document-changed signal.

    :param task: celery task acting as the event sender
    :param doc_id: pk of the changed document
    :param system_fields_changed: which system fields changed (spec or bool)
    :param generic_fields_changed: which generic fields changed (spec or bool)
    :param user_fields_changed: whether user fields changed
    :param changed_by_user_id: pk of the acting user, or None
    """
    from apps.document.repository.document_field_repository import DocumentFieldRepository

    document = Document.objects.get(pk=doc_id)  # type: Document
    user = None
    if changed_by_user_id is not None:
        user = User.objects.get(pk=changed_by_user_id)

    if DocumentSystemField.status.specified_in(system_fields_changed):
        # a status flip can hide fields - drop values of fields no longer visible
        DocumentFieldRepository().delete_hidden_field_values_if_needed(
            document, event_sender=task)

    fire_document_changed(sender=task,
                          log=CeleryTaskLogger(task),
                          document=document,
                          changed_by_user=user,
                          document_initial_load=False,
                          system_fields_changed=system_fields_changed,
                          generic_fields_changed=generic_fields_changed,
                          user_fields_changed=user_fields_changed)
Esempio n. 15
0
def _notify_field_value_saved(instance: FieldValue, deleted=False):
    """
    Send a websocket message about a saved (or deleted) FieldValue.

    The related field may already be gone when the signal fires; in that case
    a warning is logged and no notification is sent.

    :param instance: saved/deleted FieldValue
    :param deleted: True when the value was deleted rather than saved
    """
    try:
        payload = {
            'document': instance.document_id,
            'project_id': instance.document.project_id,
            'field__code': instance.field.code,
            'value': None if deleted else instance.value
        }
    except ObjectDoesNotExist:
        # accessing instance.field raised: it was probably deleted concurrently
        get_django_logger().warning(f'_notify_field_value_saved is called for '
                                    f'field {instance.field_id}, that was probably deleted')
        return

    stats = DocumentFieldRepository().get_annotation_stats_by_field_value(instance)

    message = ChannelMessage(
        message_types.CHANNEL_MSG_TYPE_FIELD_VALUE_SAVED, {
            'field_value': payload,
            'annotation_stats': stats,
            'user': _get_user_dto(instance)
        })
    notify_on_document_changes(instance.document.pk, message)
    def get_document_fields(data: DocumentNotificationSource,
                            document_type: DocumentType,
                            subscription: DocumentNotificationSubscription,
                            updated_values: Optional[Dict[str, Any]]) -> None:
        """
        Get document field values - those stored in Document object itself
        and those listed in subscription. Fills data.field_values dictionary.

        Make document fields codes follow RawDB "pattern": i.e.
        "document_name" instead of "name" or "assignee_name" instead of "assignee.name".
        We rename field codes because existing templates reference these "RawDB codes"
        :param data: notification's data
        :param document_type: DocumentType for associated document
        :param subscription: DocumentNotificationSubscription (refers to extra fields to obtain)
        :param updated_values: values that actually changed
        """
        updated_values = updated_values or {}
        repo = DocumentFieldRepository()
        fields_to_get = set()
        if subscription.user_fields:
            fields_to_get = {
                f.code
                for f in subscription.user_fields.all()
                if f.document_type == document_type
            }
        if not fields_to_get:
            # no explicit subscription fields: fall back to the changed fields
            # of this document type
            doc_user_field_dict = repo.get_document_field_code_by_id(
                document_type.pk)
            if updated_values:
                fields_to_get = {
                    doc_user_field_dict[f]
                    for f in doc_user_field_dict
                    if doc_user_field_dict[f] in updated_values
                }
        if subscription.event == DocumentAssignedEvent.code:
            fields_to_get.update({
                DOCUMENT_FIELD_CODE_ASSIGNEE, DOCUMENT_FIELD_CODE_ASSIGNEE_ID,
                DOCUMENT_FIELD_CODE_ASSIGNEE_NAME
            })
        if subscription.event == DocumentLoadedEvent.code:
            obligatory_fields = {
                DOCUMENT_FIELD_CODE_NAME, DOCUMENT_FIELD_CODE_PROJECT,
                DOCUMENT_FIELD_CODE_PROJECT_NAME
            }
            updated_values.update({v: None for v in obligatory_fields})
            fields_to_get.update(obligatory_fields)

        # updated field is not chosen among fields to show
        if not any(code in fields_to_get for code in updated_values):
            data.field_values = {}
            return

        # default generic fields are always included; the subscription may add
        # more (the original if/else repeated the default-fields update in both
        # branches - collapsed here)
        if subscription.generic_fields:
            fields_to_get.update(subscription.generic_fields)
        fields_to_get.update(NotificationRenderer.default_generic_fields)

        doc_field_values = repo.get_document_own_and_field_values(
            data.document, fields_to_get)
        doc_field_values.update(data.field_values)
        # keep only the explicitly requested codes on both dicts
        doc_field_values = {
            f: doc_field_values[f]
            for f in doc_field_values if f in fields_to_get
        }
        data.field_values = {
            f: data.field_values[f]
            for f in data.field_values if f in fields_to_get
        }

        # make field codes "RawDB style"
        for key in doc_field_values:
            new_key = DOC_FIELD_TO_CACHE_FIELD.get(key) or key
            if new_key not in data.field_values:
                data.field_values[new_key] = doc_field_values[key]
Esempio n. 17
0
    def get_value(self,
                  log: ProcessLogger,
                  field: DocumentField,
                  doc: Document,
                  cur_field_code_to_value: Dict[str, Any],
                  location_text: Optional[str],
                  location_start: int = 0,
                  location_end: int = 0) -> Optional[FieldValueDTO]:
        """
        Detect documents similar to ``doc`` by cosine similarity of feature
        vectors built from the fields this field depends on, persist reverse
        similarity links, and return the smallest similar document pk as the
        field value (None when no candidate passes the threshold).

        ``location_text`` / ``location_start`` / ``location_end`` are part of
        the common strategy interface and are not used by this detector.
        """

        # per-field similarity configuration is optional; fall back to defaults
        try:
            conf = getattr(field, DST_FIELD_SIMILARITY_CONFIG_ATTR
                           )  # type: Optional[DocumentSimilarityConfig]
        except DocumentSimilarityConfig.DoesNotExist:
            conf = None

        if conf:
            conf.self_validate()

        similarity_threshold = conf.similarity_threshold if conf else DEFAULT_SIMILARITY_TRESHOLD
        feature_vector_fields = field.depends_on_fields.all()
        date_constraint_field_code = conf.date_constraint_field.code if conf and conf.date_constraint_field else None
        date_constraint_days = conf.date_constraint_days if conf else DEFAULT_DATE_CONSTRAINT_DAYS
        document_type = doc.document_type

        feature_vector_field_codes = {f.code for f in feature_vector_fields}

        # this document's values, tagged with its own pk for later identification
        doc_field_values = dict(cur_field_code_to_value)
        doc_field_values[FIELD_CODE_DOC_ID] = doc.pk

        # Candidate documents: same type, within a +/- date_constraint_days
        # window, excluding this document itself.
        if date_constraint_field_code:
            # window centered on the value of the configured date field
            doc_date = doc_field_values.get(date_constraint_field_code)
            date_start = doc_date - timedelta(days=date_constraint_days)
            date_end = doc_date + timedelta(days=date_constraint_days)

            doc_ids_query = FieldValue.objects \
                .filter(field__code=date_constraint_field_code) \
                .filter(value__gte=date_start) \
                .filter(value__lte=date_end) \
                .filter(document__document_type_id=document_type.pk) \
                .exclude(document_id=doc.pk) \
                .values_list('document_id', flat=True)
        else:
            # no date field configured: window centered on the document's
            # latest history date, matched against creation ('+') history rows
            doc_date = doc.history.last().history_date
            date_start = doc_date - timedelta(days=date_constraint_days)
            date_end = doc_date + timedelta(days=date_constraint_days)

            doc_ids_query = Document.history \
                .filter(history_type='+',
                        history_date__gte=date_start,
                        history_date__lte=date_end,
                        document_type_id=document_type.pk) \
                .exclude(id=doc.pk) \
                .values_list('pk', flat=True)

        try:
            vectorizer = document_feature_vector_pipeline(
                feature_vector_fields, use_field_codes=True)

            field_repo = DocumentFieldRepository()

            field_values_list = list()

            # one {field_code: value} dict per candidate doc, tagged with its pk
            for doc_id, field_values in field_repo \
                    .get_field_code_to_python_value_multiple_docs(document_type_id=document_type.pk,
                                                                  doc_ids=doc_ids_query,
                                                                  field_codes_only=feature_vector_field_codes):
                d = dict(field_values)
                d[FIELD_CODE_DOC_ID] = doc_id
                field_values_list.append(d)

            if not field_values_list:
                return None

            # this document goes first so its vector is feature_vectors[0]
            field_values_list = [doc_field_values] + field_values_list
            feature_vectors = vectorizer.fit_transform(field_values_list)
            doc_feature_vectors = feature_vectors[0]
        except ValueError as ve:
            # "empty vocabulary" means no document had any value in the
            # feature vector fields - not an error, just nothing to compare
            if 'empty vocabulary' in str(ve):
                log.info(
                    f'Similarity: {field.code}: Vectorization got "empty vocabulary" probably no one of the docs '
                    f'contains any value in the feature vector fields.')
                return None
            raise ve

        # row 0 of the matrix = similarity of this doc against every candidate
        similarities = cosine_similarity(doc_feature_vectors, feature_vectors)

        # TODO: Think about removing usage of other_field_values_list here and switching it to generator
        # to avoid storing the list of all field values. We only need feature vectors but they have no doc id.
        res = set()  # type: Set[int]
        for y, field_values in enumerate(field_values_list):
            other_doc_pk = field_values[FIELD_CODE_DOC_ID]
            if doc.pk == other_doc_pk:
                continue
            similarity = similarities[0, y]
            if similarity < similarity_threshold:
                continue
            res.add(other_doc_pk)
            # also persist the mirror link other_doc -> doc
            self._maybe_save_reverse_similarity_value(
                log=log, field=field, document=doc, other_doc_id=other_doc_pk)

        if res:
            # deterministic choice: the smallest similar document pk
            field_value = sorted(res)[0]
            return FieldValueDTO(field_value)
        return None
Esempio n. 18
0
 def get_docfieldrepo_methods() -> List[str]:
     """Return fully-qualified dotted names of all DocumentFieldRepository methods."""
     prefix = 'apps.document.repository.document_field_repository.DocumentFieldRepository.'
     method_names = ModelStatsSetsManager.get_class_methods(DocumentFieldRepository())
     return [prefix + name for name in method_names]
Esempio n. 19
0
class DocumentWithFieldsDTOSerializer(BaseDocumentSerializer):
    """
    Serializer for document review page with detailed document field values
    """
    # repository shared by all serializer instances (created at import time)
    field_repo = DocumentFieldRepository()

    field_values = serializers.SerializerMethodField()
    notes = DocumentNoteDetailSerializer(source='documentnote_set', many=True)
    prev_id = serializers.SerializerMethodField()
    next_id = serializers.SerializerMethodField()
    sections = serializers.SerializerMethodField()

    class Meta:
        model = Document
        fields = [
            'pk', 'name', 'document_type', 'file_size', 'status',
            'status_data', 'available_statuses_data', 'assignee',
            'assign_date', 'assignee_data', 'available_assignees_data',
            'description', 'title', 'full_text', 'notes', 'field_values',
            'prev_id', 'next_id', 'sections', 'cluster_id'
        ]

    def get_neighbours(self, document, use_saved_filter=True):
        """
        Return (prev_id, next_id) of the document within the user's current
        document list, optionally honoring the user's saved filter.
        """
        prev_id = next_id = None
        user = self.context['request'].user
        project = document.project
        from apps.rawdb.api.v1 import DocumentsAPIView

        ids = DocumentsAPIView.simulate_get(user,
                                            project,
                                            use_saved_filter=use_saved_filter)

        if document.pk in ids:
            pos = ids.index(document.pk)
        elif use_saved_filter:
            # the saved filter may hide this document - retry without it
            return self.get_neighbours(document, use_saved_filter=False)
        else:
            # not visible even without the saved filter; the original code
            # recursed infinitely here - treat as "no neighbours" instead
            return prev_id, next_id

        prev_ids = ids[:pos]
        if prev_ids:
            prev_id = prev_ids[-1]
        next_ids = ids[pos + 1:]
        if next_ids:
            next_id = next_ids[0]
        return prev_id, next_id

    def get_prev_id(self, obj):
        return self.get_neighbours(obj)[0]

    def get_next_id(self, obj):
        return self.get_neighbours(obj)[1]

    def get_sections(self, obj):
        # sections are stored in document metadata when available
        if isinstance(obj.metadata, dict) and 'sections' in obj.metadata:
            return obj.metadata['sections']

    def get_field_values(self, doc: Document):
        """Return the document's field value DTOs converted to plain dicts."""
        fvals = self.field_repo.get_document_field_val_dtos(doc_id=doc.pk)
        for code in fvals:
            fvals[code] = dataclasses.asdict(fvals[code])
        return fvals

    def update(self, instance: Document, validated_data):
        """
        Save document changes: maintain assign_date on assignee changes, mark
        dependent fields dirty on active/inactive status flips and schedule
        rawdb re-caching of the changed system fields.
        """
        with transaction.atomic():
            system_fields_changed = []

            new_status = validated_data.get('status')
            if new_status is not None and new_status.pk != instance.status_id:
                is_active = instance.status and instance.status.is_active
                if new_status.is_active != is_active:
                    # active <-> inactive transition may change field visibility,
                    # so mark all fields that have values as dirty
                    field_ids = self.field_repo.get_doc_field_ids_with_values(
                        instance.pk)
                    DocumentField.objects \
                        .filter(document_type_id=instance.document_type_id, pk__in=Subquery(field_ids)) \
                        .update(dirty=True)
                system_fields_changed.append(DocumentSystemField.status.value)

            user = self.context['request'].user  # type: User
            new_assignee = validated_data.get('assignee')
            prev_assignee = instance.assignee
            if new_assignee is None and prev_assignee is not None:
                # unassigned: clear the assign date
                validated_data['assign_date'] = None
                system_fields_changed.append(
                    DocumentSystemField.assignee.value)
            elif new_assignee is not None and (
                    prev_assignee is None
                    or new_assignee.pk != prev_assignee.pk):
                # newly assigned or reassigned: stamp assign date in the
                # acting user's timezone
                validated_data['assign_date'] = datetime.datetime.now(
                    tz=user.get_time_zone())
                system_fields_changed.append(
                    DocumentSystemField.assignee.value)

            res = super().update(instance, validated_data)

            plan_process_document_changed(
                doc_id=instance.pk,
                system_fields_changed=system_fields_changed,
                generic_fields_changed=False,
                user_fields_changed=False,
                changed_by_user_id=user.pk)
            return res