Ejemplo n.º 1
0
    def handle(self, *args, **options):
        interactive = options.get('interactive')
        if interactive:
            confirm = input("""
You have requested a flush of the search index.
This will IRREVERSIBLY DESTROY all data currently indexed by Elasticsearch.
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """)
        else:
            confirm = 'yes'

        if confirm != 'yes':
            sys.exit(1)

        start_reindex = datetime.datetime.now()
        logger.info('Reindex starting at %s' % start_reindex)

        call_command('delete_index', **options)
        call_command('create_index', **options)
        call_command('set_mappings', **options)

        logger.info('Preparing index data')

        classes = get_all_revision_classes()
        for class_ in classes:
            revisions = class_.objects \
                .filter(metadata__document__is_indexable=True) \
                .select_related()

            actions = []
            logger.info(
                'Starting gathering data for documents of type {}'.format(
                    class_.__name__))
            for revision in revisions:
                actions.append(build_index_data(revision))

            bulk(elastic,
                 actions,
                 chunk_size=settings.ELASTIC_BULK_SIZE,
                 request_timeout=600)

        end_reindex = datetime.datetime.now()
        logger.info('Reindex ending at %s' % end_reindex)
Ejemplo n.º 2
0
    def handle(self, *args, **options):
        interactive = options.get('interactive')
        if interactive:
            confirm = input("""
You have requested a flush of the search index.
This will IRREVERSIBLY DESTROY all data currently indexed by Elasticsearch.
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """)
        else:
            confirm = 'yes'

        if confirm != 'yes':
            sys.exit(1)

        start_reindex = datetime.datetime.now()
        logger.info('Reindex starting at %s' % start_reindex)

        call_command('delete_index', **options)
        call_command('create_index', **options)
        call_command('set_mappings', **options)

        logger.info('Preparing index data')

        classes = get_all_revision_classes()
        for class_ in classes:
            revisions = class_.objects \
                .filter(metadata__document__is_indexable=True) \
                .select_related()

            actions = []
            logger.info('Starting gathering data for documents of type {}'.format(class_.__name__))
            for revision in revisions:
                actions.append(build_index_data(revision))

            bulk(
                elastic,
                actions,
                chunk_size=settings.ELASTIC_BULK_SIZE,
                request_timeout=600)

        end_reindex = datetime.datetime.now()
        logger.info('Reindex ending at %s' % end_reindex)
Ejemplo n.º 3
0
    def link_to_revisions(self, revisions):
        """Set the given revisions as related documents.

        The revisions MUST be valid:
         - belong to the same category
         - be transmittable objects

        """
        exported = []
        revision_ids = []
        es_actions = []
        for rev in revisions:
            exported.append(
                ExportedRevision(
                    document=rev.document,
                    transmittal=self,
                    revision=rev.revision,
                    title=rev.document.title,
                    status=rev.status,
                    return_code=rev.get_final_return_code(),
                    comments=rev.trs_comments))
            revision_ids.append(rev.id)

            # Refresh the ES document so the "can_be_transmitted" filter
            # stays in sync with the new transmittal state.
            action = build_index_data(rev)
            action['_source']['can_be_transmitted'] = False
            es_actions.append(action)

        with transaction.atomic():
            ExportedRevision.objects.bulk_create(exported)

            # Flag every linked revision as already transmitted.
            revision_class = type(revisions[0])
            revision_class.objects \
                .filter(id__in=revision_ids) \
                .update(already_transmitted=True)

            bulk_actions(es_actions)
Ejemplo n.º 4
0
    def link_to_revisions(self, revisions):
        """Set the given revisions as related documents.

        The revisions MUST be valid:
         - belong to the same category
         - be transmittable objects

        """
        ids = []
        index_data = []
        for revision in revisions:
            ids.append(revision.id)

            # Update ES index to make sure the "can_be_transmitted"
            # filter is up to date
            index_datum = build_index_data(revision)
            index_datum['_source']['can_be_transmitted'] = False
            index_datum['_source']['last_review_closed'] = False
            index_data.append(index_datum)
        with transaction.atomic():
            today = timezone.now()
            later = today + datetime.timedelta(days=self.EXTERNAL_REVIEW_DURATION)

            # Mark revisions as transmitted.
            # Reuse `today` for the sent date so it is exactly the instant
            # the review due date is computed from (a second timezone.now()
            # call would yield a slightly later, inconsistent timestamp).
            Revision = type(revisions[0])
            Revision.objects \
                .filter(id__in=ids) \
                .update(
                    transmittal=self,
                    transmittal_sent_date=today,
                    external_review_due_date=Case(
                        When(purpose_of_issue='FR', then=Value(later)),
                        When(purpose_of_issue='FI', then=Value(None)),
                    ))
            for rev in Revision.objects.filter(id__in=ids):
                rev.transmittals.add(self)
            bulk_actions(index_data)
Ejemplo n.º 5
0
    def link_to_revisions(self, revisions):
        """Set the given revisions as related documents.

        The revisions MUST be valid:
         - belong to the same category
         - be transmittable objects

        """
        ids = []
        index_data = []
        for revision in revisions:
            ids.append(revision.id)

            # Update ES index to make sure the "can_be_transmitted"
            # filter is up to date
            index_datum = build_index_data(revision)
            index_datum['_source']['can_be_transmitted'] = False
            index_datum['_source']['last_review_closed'] = False
            index_data.append(index_datum)
        with transaction.atomic():
            today = timezone.now()
            later = today + datetime.timedelta(days=self.EXTERNAL_REVIEW_DURATION)

            # Mark revisions as transmitted.
            # Reuse `today` for the sent date so it is exactly the instant
            # the review due date is computed from (a second timezone.now()
            # call would yield a slightly later, inconsistent timestamp).
            Revision = type(revisions[0])
            Revision.objects \
                .filter(id__in=ids) \
                .update(
                    transmittal=self,
                    transmittal_sent_date=today,
                    external_review_due_date=Case(
                        When(purpose_of_issue='FR', then=Value(later)),
                        When(purpose_of_issue='FI', then=Value(None)),
                    ))
            for rev in Revision.objects.filter(id__in=ids):
                rev.transmittals.add(self)
            bulk_actions(index_data)
Ejemplo n.º 6
0
    def link_to_revisions(self, revisions):
        """Set the given revisions as related documents.

        The revisions MUST be valid:
         - belong to the same category
         - be transmittable objects

        """
        revision_ids = []
        es_actions = []
        for rev in revisions:
            revision_ids.append(rev.id)

            # Refresh the ES document so the "can_be_transmitted" filter
            # stays in sync with the new transmittal state.
            action = build_index_data(rev)
            action['_source']['can_be_transmitted'] = False
            action['_source']['last_review_closed'] = False
            es_actions.append(action)
        with transaction.atomic():
            revision_class = type(revisions[0])
            for rev in revision_class.objects.filter(id__in=revision_ids):
                rev.transmittals.add(self)
            bulk_actions(es_actions)