Example #1
def _config(self,
            col_names=None,
            search_column=None,
            case_type=None,
            search_field='case_id',
            named_columns=False,
            create_new_cases=True,
            type_fields=None):
    # Fall back to the test case's defaults for anything not supplied.
    col_names = col_names or self.default_headers
    case_type = case_type or self.default_case_type
    search_column = search_column or col_names[0]
    # Unless told otherwise, import every column as a plain (untyped) field.
    type_fields = type_fields if type_fields is not None else [
        'plain'
    ] * len(col_names)
    return ImporterConfig(
        couch_user_id=self.couch_user._id,
        case_type=case_type,
        excel_fields=col_names,
        case_fields=[''] * len(col_names),
        custom_fields=col_names,
        type_fields=type_fields,
        search_column=search_column,
        search_field=search_field,
        named_columns=named_columns,
        create_new_cases=create_new_cases,
        key_column='',
        value_column='',
    )
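This first snippet is a test helper that builds an ImporterConfig, falling back to the test case's default headers, case type and couch user whenever an argument is omitted. A minimal sketch of how such a helper might be called from a test method follows; the method name and column names are made up for illustration, and the attribute-style access on the returned config assumes ImporterConfig exposes its constructor arguments as attributes.

# Hypothetical test method; the column names below are placeholders, and
# attribute access on the returned config is an assumption about
# ImporterConfig, not something shown in the snippet above.
def test_lookup_by_external_id(self):
    config = self._config(
        col_names=['external_id', 'name', 'age'],
        search_column='external_id',
        create_new_cases=False,
    )
    self.assertEqual(config.search_column, 'external_id')
    self.assertFalse(config.create_new_cases)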
Example #2
    def handle(self, *args, **options):
        if len(args) != 4:
            raise CommandError('Usage is import_cases %s' % self.args)

        start = datetime.now()
        export_file, config_file, domain, user_id = args
        # The user may be identified by username (email-style) or by couch id.
        if '@' in user_id:
            user = WebUser.get_by_username(user_id)
        else:
            user = WebUser.get(user_id)
        if not user.is_member_of(domain):
            raise CommandError("%s can't access %s" % (user, domain))

        with open(config_file, 'r') as f:
            config = ImporterConfig.from_json(f.read())

        config.couch_user_id = user._id
        spreadsheet = ExcelFile(export_file, True)
        print(json.dumps(do_import(spreadsheet, config, domain)))
        print('finished in %s seconds' % (datetime.now() - start).seconds)
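The second snippet is the handle() method of an old optparse-style Django management command (import_cases) that reads a spreadsheet and a JSON-serialised ImporterConfig from disk, resolves the user by username or couch id, and runs the import synchronously. Assuming that old-style command wiring, it could also be driven from code with Django's call_command; the file names, domain and username below are placeholders, not values from the example.

# Hypothetical programmatic invocation; positional arguments only reach
# handle(*args) like this under the old optparse-style command API assumed
# here. All values below are placeholders.
from django.core.management import call_command

call_command(
    'import_cases',
    'cases_export.xlsx',       # export_file: spreadsheet of case rows
    'importer_config.json',    # config_file: ImporterConfig as JSON
    'my-project-space',        # domain the cases belong to
    'admin@example.com',       # user_id: username or couch user id
)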
Example #3
def excel_commit(request, domain):
    config = ImporterConfig.from_request(request)

    excel_id = request.session.get(EXCEL_SESSION_ID)

    excel_ref = DownloadBase.get(excel_id)
    spreadsheet = importer_util.get_spreadsheet(excel_ref, config.named_columns)

    if not spreadsheet:
        return _spreadsheet_expired(request, domain)

    if spreadsheet.has_errors:
        messages.error(request, _('The session containing the file you '
                                  'uploaded has expired - please upload '
                                  'a new one.'))
        return HttpResponseRedirect(base.ImportCases.get_url(domain=domain) + "?error=cache")

    # Run the import asynchronously and track its progress through a
    # DownloadBase handle that the status page can poll.
    download = DownloadBase()
    download.set_task(bulk_import_async.delay(
        download.download_id,
        config,
        domain,
        excel_id,
    ))

    # The task owns the cached upload now, so drop it from the session.
    try:
        del request.session[EXCEL_SESSION_ID]
    except KeyError:
        pass

    return render(request, "importer/excel_commit.html", {
        'download_id': download.download_id,
        'template': 'importer/partials/import_status.html',
        'domain': domain,
        'report': {
            'name': 'Import: Completed',
        },
        'slug': base.ImportCases.slug,
    })
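The last snippet is the view that commits an upload: it rebuilds the ImporterConfig from the submitted form, looks up the cached spreadsheet by the download id stored in the session, starts bulk_import_async as a background task tracked through a DownloadBase handle, and renders a status page. A sketch of how such a view might be wired into a URL configuration is shown below; the import path, URL pattern and name are assumptions, and any permission decorators the real project applies are omitted.

# Hypothetical URL wiring (old-style django.conf.urls.url); the module path,
# pattern and name are assumptions, not taken from the example above.
from django.conf.urls import url

from corehq.apps.importer.views import excel_commit  # assumed module path

urlpatterns = [
    url(r'^(?P<domain>[\w.-]+)/importer/excel_commit/$',
        excel_commit, name='excel_commit'),
]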