def handle(self, **options):
        """Resume processing of a partially-built export archive and upload it.

        Pops ``export_id``, ``export_path``, ``processes`` and
        ``force_upload`` from ``options``.  If no (existing) archive path was
        supplied, offers to download the latest archive for the export
        instance instead.

        :raises CommandError: if run without ``python -O``, or if no usable
            archive path is available and the user declines the download.
        """
        # Inverted assert-guard: this command must run under ``python -O``
        # (which sets __debug__ to False), presumably so asserts in the
        # export pipeline are stripped -- TODO confirm.
        if __debug__:
            raise CommandError("You should run this with 'python -O'")

        export_id = options.pop('export_id')
        export_archive_path = options.pop('export_path')
        processes = options.pop('processes')
        force_upload = options.pop('force_upload')

        export_instance = get_properly_wrapped_export_instance(export_id)

        if not export_archive_path or not os.path.exists(export_archive_path):
            confirm = input("""
                No export archive provided. Do you want to download the latest one? [y/N]
                """)
            # Accept 'y'/'Y' with surrounding whitespace; anything else aborts.
            if confirm.strip().lower() != "y":
                raise CommandError(
                    "Export path missing: {}".format(export_archive_path))

            export_archive_path = self._download_export(export_instance)

        extract_to = tempfile.mkdtemp()
        try:
            total_docs, unprocessed_pages = self._get_unprocessed_pages(
                export_archive_path, extract_to)

            print('{} pages still to process'.format(len(unprocessed_pages)))

            exporter = MultiprocessExporter(export_instance, total_docs, processes)
            error_pages, successful_pages = self._process_pages(
                exporter, unprocessed_pages)

            final_path = self.compile_final_zip(error_pages, export_archive_path,
                                                export_instance, successful_pages)

            if force_upload or not error_pages:
                print('Uploading final archive',
                      '(forced)' if force_upload and error_pages else '')
                exporter.upload(final_path, clean=not error_pages)
                # Only claim success when the payload was actually uploaded;
                # previously this message also fired on the error branch below.
                self.stdout.write(
                    self.style.SUCCESS('Rebuild Complete and payload uploaded'))
            else:
                print(
                    self.style.ERROR(
                        'Not all pages processed successfully.\n'
                        'You can re-run the command on the final archive to try again: {}\n'
                        'NOTE: final archive not uploaded. '
                        'Use --force-upload to upload even with errors'.format(
                            final_path)))
        finally:
            # Clean up the temp extraction dir even if processing raised.
            shutil.rmtree(extract_to)
    def handle(self, **options):
        """Resume processing of a partially-built export archive and upload it.

        Pops ``export_id``, ``export_path``, ``processes`` and
        ``force_upload`` from ``options``.  If no (existing) archive path was
        supplied, offers to download the latest archive for the export
        instance instead.

        :raises CommandError: if run without ``python -O``, or if no usable
            archive path is available and the user declines the download.
        """
        # Inverted assert-guard: this command must run under ``python -O``
        # (which sets __debug__ to False), presumably so asserts in the
        # export pipeline are stripped -- TODO confirm.
        if __debug__:
            raise CommandError("You should run this with 'python -O'")

        export_id = options.pop('export_id')
        export_archive_path = options.pop('export_path')
        processes = options.pop('processes')
        force_upload = options.pop('force_upload')

        export_instance = get_properly_wrapped_export_instance(export_id)

        if not export_archive_path or not os.path.exists(export_archive_path):
            confirm = input(
                """
                No export archive provided. Do you want to download the latest one? [y/N]
                """
            )
            # Accept 'y'/'Y' with surrounding whitespace; anything else aborts.
            if confirm.strip().lower() != "y":
                raise CommandError("Export path missing: {}".format(export_archive_path))

            export_archive_path = self._download_export(export_instance)

        extract_to = tempfile.mkdtemp()
        try:
            total_docs, unprocessed_pages = self._get_unprocessed_pages(export_archive_path, extract_to)

            print('{} pages still to process'.format(len(unprocessed_pages)))

            exporter = MultiprocessExporter(export_instance, total_docs, processes)
            error_pages, successful_pages = self._process_pages(
                exporter, unprocessed_pages
            )

            final_path = self.compile_final_zip(
                error_pages, export_archive_path, export_instance, successful_pages
            )

            if force_upload or not error_pages:
                print('Uploading final archive', '(forced)' if force_upload and error_pages else '')
                exporter.upload(final_path, clean=not error_pages)
                # Only claim success when the payload was actually uploaded;
                # previously this message also fired on the error branch below.
                self.stdout.write(self.style.SUCCESS('Rebuild Complete and payload uploaded'))
            else:
                print(self.style.ERROR(
                    'Not all pages processed successfully.\n'
                    'You can re-run the command on the final archive to try again: {}\n'
                    'NOTE: final archive not uploaded. '
                    'Use --force-upload to upload even with errors'.format(final_path))
                )
        finally:
            # Clean up the temp extraction dir even if processing raised.
            shutil.rmtree(extract_to)
    def handle(self, export_id, **options):
        """Incrementally extend an existing form export archive.

        Downloads the current archive, reads where the previous run stopped,
        then exports only forms received after that point, appending pages
        after the last page number of the previous run.

        :param export_id: id of the export instance to extend
        :raises CommandError: if the export is not a form export, or if it
            already carries a date filter (in which case a full rebuild is
            required instead).
        """
        export_instance = get_properly_wrapped_export_instance(export_id)

        # Incremental rebuild is only supported for form exports.
        if export_instance.type != FORM_EXPORT:
            raise CommandError("Unsupported export type: %s" % export_instance.type)

        filters = export_instance.get_filters()
        # A pre-existing date filter would conflict with the one appended
        # below, so refuse and require a full rebuild.
        if any(isinstance(filter_, FormExportFilterBuilder.date_filter_class) for filter_ in filters):
            raise CommandError("Export already has a date filter and so must be fully rebuilt.")

        export_archive_path = download_export(export_instance, download_path=options.get('download_path'))
        # Metadata from the previous run: id of the last exported form, its
        # received_on timestamp, and the last page number written.
        last_run_meta = get_last_run_meta(export_instance, export_archive_path)
        last_form_id, last_form_received_on, last_page_number = last_run_meta

        print("Exporting data since '%s'" % last_form_received_on)
        # Restrict to forms strictly newer than the last exported timestamp.
        filters.append(FormExportFilterBuilder.date_filter_class(gt=last_form_received_on))
        if last_form_id:
            # Also exclude the last form by id -- NOTE(review): presumably a
            # guard against timestamp ties or inclusive date boundaries; the
            # strict gt filter above would otherwise seem sufficient. Confirm.
            filters.append(NOT(TermFilter('_id', last_form_id)))
        total_docs = get_export_size(export_instance, filters)
        exporter = MultiprocessExporter(
            export_instance, total_docs, options['processes'],
            existing_archive_path=options['download_path'], keep_file=True
        )
        # Resume page numbering immediately after the previous run's pages.
        paginator = OutputPaginator(export_id, last_page_number + 1)

        logger.info('Starting data dump of {} docs'.format(total_docs))
        # 1000000 -- NOTE(review): magic number; appears to be the per-page
        # document count passed to the exporter. Confirm against callee.
        run_multiprocess_exporter(exporter, filters, paginator, 1000000)