Example #1
def rebuild_export(config, schema, output_dir, last_access_cutoff=None, filter=None):
    if output_dir == "couch":
        saved = get_saved_export_and_delete_copies(config.index)
        if last_access_cutoff and saved and saved.last_accessed and \
                saved.last_accessed < last_access_cutoff:
            # ignore exports that haven't been accessed since last_access_cutoff
            return

    try:
        files = schema.get_export_files(format=config.format, filter=filter)
    except SchemaMismatchException:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(config.index)
        raise ExportRebuildError(u'Schema mismatch for {}. Rebuilding tables...'.format(config.filename))

    with files:
        payload = files.file.payload
        if output_dir == "couch":
            if not saved:
                saved = SavedBasicExport(configuration=config)
            else:
                saved.configuration = config

            if saved.last_accessed is None:
                saved.last_accessed = datetime.utcnow()
            saved.last_updated = datetime.utcnow()
            saved.save()
            saved.set_payload(payload)
        else:
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)
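As a usage note for Example #1: output_dir is either the literal string "couch" (store the payload on the SavedBasicExport document) or a filesystem directory to write config.filename into. A minimal invocation might look like the sketch below; the config and schema objects are placeholders for the export configuration and schema used above and are not defined in these examples.

# Hypothetical call of Example #1's rebuild_export; `config` and `schema` are
# assumed to come from the surrounding export code and are not shown here.
from datetime import datetime, timedelta

rebuild_export(
    config,
    schema,
    "couch",  # or a directory path to write config.filename to disk
    last_access_cutoff=datetime.utcnow() - timedelta(days=7),
)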
Example #2
def export_for_group(export_id_or_group, output_dir):
    if isinstance(export_id_or_group, basestring):
        try:
            config = GroupExportConfiguration.get(export_id_or_group)
        except ResourceNotFound:
            raise Exception("Couldn't find an export with id %s" %
                            export_id_or_group)
    else:
        config = export_id_or_group

    for config, schema in config.all_exports:
        try:
            files = schema.get_export_files(format=config.format)
        except SchemaMismatchException:
            # fire off a delayed force update to prevent this from happening again
            rebuild_schemas.delay(config.index)
            continue

        payload = files.file.payload
        if output_dir == "couch":
            saved = SavedBasicExport.view("couchexport/saved_exports",
                                          key=json.dumps(config.index),
                                          include_docs=True,
                                          reduce=False).one()
            if not saved:
                saved = SavedBasicExport(configuration=config)
            else:
                saved.configuration = config
            saved.last_updated = datetime.utcnow()
            saved.save()
            saved.set_payload(payload)

        else:
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)
Example #3
def export_for_group(export_id_or_group, output_dir):
    if isinstance(export_id_or_group, basestring):
        try:
            config = GroupExportConfiguration.get(export_id_or_group)
        except ResourceNotFound:
            raise Exception("Couldn't find an export with id %s" % export_id_or_group)
    else:
        config = export_id_or_group

    for config, schema in config.all_exports:
        try:
            tmp, _ = schema.get_export_files(format=config.format)
        except SchemaMismatchException:
            # fire off a delayed force update to prevent this from happening again
            rebuild_schemas.delay(config.index)
            continue

        payload = Temp(tmp).payload
        if output_dir == "couch":
            saved = SavedBasicExport.view("couchexport/saved_exports", 
                                          key=json.dumps(config.index),
                                          include_docs=True,
                                          reduce=False).one()
            if not saved: 
                saved = SavedBasicExport(configuration=config)
            else:
                saved.configuration = config
            saved.last_updated = datetime.utcnow()
            saved.save()
            saved.set_payload(payload)

        else:
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)
Example #4
    def generate_bulk_file(self, update_progress=None):
        update_progress = update_progress or (lambda x: x)
        configs = list()
        schemas = list()
        checkpoints = list()

        for export_object in self.export_objects:
            config, schema, checkpoint = export_object.get_export_components(
                filter=self.export_filter)
            configs.append(config)
            schemas.append(schema)
            checkpoints.append(checkpoint)

        writer = get_writer(self.format)

        # generate the headers for the bulk excel file
        headers = self.generate_table_headers(schemas, checkpoints)

        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'wb') as tmp:
            writer.open(headers, tmp)

            # now that the headers are set, lets build the rows
            for i, config in enumerate(configs):
                try:
                    for doc in config.get_docs():
                        if self.export_objects[i].transform:
                            doc = self.export_objects[i].transform(doc)
                        table = get_formatted_rows(doc,
                                                   schemas[i],
                                                   separator=self.separator,
                                                   include_headers=isinstance(
                                                       self, CustomBulkExport))
                        if isinstance(self, CustomBulkExport):
                            table = self.export_objects[i].trim(table, doc)
                        if table and table[0]:
                            # if an export only contains data from repeats and a form has no repeats
                            # then the table list will be empty
                            table = self.export_objects[i].parse_tables(table)
                            writer.write(table)
                except SchemaMismatchException:
                    # fire off a delayed force update to prevent this from happening again
                    rebuild_schemas.delay(self.export_objects[i].index)
                    writer.write([(self.export_objects[i].table_name, [
                        FormattedRow(
                            [ugettext(
                                'There was an error generating this export. '
                                'If the problem persists please report an issue.'
                            )],
                            separator=self.separator,
                        )
                    ])])
                update_progress(i + 1)
            writer.close()
        return path
Example #5
    def generate_bulk_file(self, update_progress=None):
        update_progress = update_progress or (lambda x: x)
        configs = list()
        schemas = list()
        checkpoints = list()

        for export_object in self.export_objects:
            config, schema, checkpoint = export_object.get_export_components(filter=self.export_filter)
            configs.append(config)
            schemas.append(schema)
            checkpoints.append(checkpoint)

        writer = get_writer(self.format)

        # generate the headers for the bulk excel file
        headers = self.generate_table_headers(schemas, checkpoints)

        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'wb') as tmp:
            writer.open(headers, tmp)

            # now that the headers are set, lets build the rows
            for i, config in enumerate(configs):
                try:
                    for doc in config.get_docs():
                        if self.export_objects[i].transform:
                            doc = self.export_objects[i].transform(doc)
                        table = get_formatted_rows(
                            doc, schemas[i], separator=self.separator,
                            include_headers=isinstance(self, CustomBulkExport))
                        if isinstance(self, CustomBulkExport):
                            table = self.export_objects[i].trim(table, doc)
                        if table and table[0]:
                            # if an export only contains data from repeats and a form has no repeats
                            # then the table list will be empty
                            table = self.export_objects[i].parse_tables(table)
                            writer.write(table)
                except SchemaMismatchException:
                    # fire off a delayed force update to prevent this from happening again
                    rebuild_schemas.delay(self.export_objects[i].index)
                    writer.write(
                        [(self.export_objects[i].table_name, [
                            FormattedRow([
                                ugettext(
                                    'There was an error generating this export. '
                                    'If the problem persists please report an issue.'
                                )],
                                separator=self.separator)
                        ])]
                    )
                update_progress(i+1)
            writer.close()
        return path
Example #6
def rebuild_export(config, schema, last_access_cutoff=None, filter=None):

    saved_export = get_saved_export_and_delete_copies(config.index)
    if _should_not_rebuild_export(saved_export, last_access_cutoff):
        return

    try:
        files = schema.get_export_files(format=config.format, filter=filter)
    except SchemaMismatchException:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(config.index)
        raise ExportRebuildError('Schema mismatch for {}. Rebuilding tables...'.format(config.filename))

    with files:
        _save_export_payload(files, saved_export, config, is_safe=schema.is_safe)
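Example #6 factors the cutoff check and the save step from Example #1 into two helpers, _should_not_rebuild_export and _save_export_payload, which are not shown on this page. A rough reconstruction based on the inline logic in Example #1 is sketched below; the bodies are assumptions inferred from the call sites, not the actual source.

from datetime import datetime

from couchexport.models import SavedBasicExport  # import path assumed


def _should_not_rebuild_export(saved, last_access_cutoff=None):
    # Assumed behaviour, mirroring the inline check in Example #1: skip exports
    # that have not been accessed since last_access_cutoff.
    return bool(
        last_access_cutoff
        and saved
        and saved.last_accessed
        and saved.last_accessed < last_access_cutoff
    )


def _save_export_payload(files, saved, config, is_safe=False):
    # Assumed behaviour, mirroring the "couch" branch of Example #1: create or
    # update the SavedBasicExport document and attach the generated payload.
    # `is_safe` is accepted only to match the call site in Example #6; what it
    # controls is not visible in these examples.
    payload = files.file.payload
    if not saved:
        saved = SavedBasicExport(configuration=config)
    else:
        saved.configuration = config
    if saved.last_accessed is None:
        saved.last_accessed = datetime.utcnow()
    saved.last_updated = datetime.utcnow()
    saved.save()
    saved.set_payload(payload)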
Example #7
    if async:
        return export_object.export_data_async(
            filter=filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=format,
            max_column_size=max_column_size,
        )
    else:
        if not next:
            next = export.ExcelExportReport.get_url(domain=domain)
        try:
            resp = export_object.download_data(format, filter=filter, limit=limit)
        except SchemaMismatchException:
            rebuild_schemas.delay(export_object.index)
            messages.error(
                request,
                "Sorry, the export failed for %s, please try again later"
                % export_object.name
            )
            return HttpResponseRedirect(next)
        if resp:
            return resp
        else:
            messages.error(request, "Sorry, there was no data found for the tag '%s'." % export_object.name)
            return HttpResponseRedirect(next)

@login_or_digest
@require_form_export_permission
@require_GET
Example #8
        return export_object.export_data_async(
            filter=filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=format,
            max_column_size=max_column_size,
        )
    else:
        if not next:
            next = export.ExcelExportReport.get_url(domain=domain)
        try:
            resp = export_object.download_data(format,
                                               filter=filter,
                                               limit=limit)
        except SchemaMismatchException:
            rebuild_schemas.delay(export_object.index)
            messages.error(
                request,
                "Sorry, the export failed for %s, please try again later"
                % export_object.name
            )
            return HttpResponseRedirect(next)
        if resp:
            return resp
        else:
            messages.error(
                request, "Sorry, there was no data found for the tag '%s'." %
                export_object.name)
            return HttpResponseRedirect(next)

Example #9
def _export_default_or_custom_data(request, domain, export_id=None, bulk_export=False, safe_only=False):
    async = request.GET.get('async') == 'true'
    next = request.GET.get("next", "")
    format = request.GET.get("format", "")
    export_type = request.GET.get("type", "form")
    previous_export_id = request.GET.get("previous_export", None)
    filename = request.GET.get("filename", None)
    max_column_size = int(request.GET.get("max_column_size", 2000))

    filter = util.create_export_filter(request, domain, export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(request.GET.get("is_custom", "false"))
            export_tags = json.loads(request.GET.get("export_tags", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()

        export_helper = (CustomBulkExportHelper if is_custom else ApplicationBulkExportHelper)(
            domain=domain,
            safe_only=safe_only
        )

        return export_helper.prepare_export(export_tags, filter)

    elif export_id:
        # this is a custom export
        try:
            export_object = CustomExportHelper.make(request, export_type, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
    elif safe_only:
        return HttpResponseForbidden()
    else:
        if not async:
            # this function doesn't support synchronous export without a custom export object
            # if we ever want that (i.e. for HTML Preview) then we just need to give
            # FakeSavedExportSchema a download_data function (called below)
            return HttpResponseBadRequest()
        try:
            export_tag = json.loads(request.GET.get("export_tag", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        assert(export_tag[0] == domain)
        # hack - also filter instances here rather than mess too much with trying to make this
        # look more like a FormExportSchema
        if export_type == 'form':
            filter &= SerializableFunction(instances)
        export_object = FakeSavedExportSchema(index=export_tag)

    if not filename:
        filename = export_object.name
    filename += ' ' + date.today().isoformat()

    if async:
        return export_object.export_data_async(
            filter=filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=format,
            max_column_size=max_column_size,
        )
    else:
        if not next:
            next = export.ExcelExportReport.get_url(domain=domain)
        try:
            resp = export_object.download_data(format, filter=filter)
        except SchemaMismatchException:
            rebuild_schemas.delay(export_object.index)
            messages.error(
                request,
                "Sorry, the export failed for %s, please try again later"
                % export_object.name
            )
            return HttpResponseRedirect(next)
        if resp:
            return resp
        else:
            messages.error(request, "Sorry, there was no data found for the tag '%s'." % export_object.name)
            return HttpResponseRedirect(next)