Esempio n. 1
0
 def test_validate_call_columns(self):
     """An upload with no call columns should produce the call-column error."""
     path = self.get_path('missing_call_columns_case_upload', 'xlsx')
     with get_spreadsheet(path) as spreadsheet:
         errors = CallColumnsValidator.run(spreadsheet)
     self.assertEqual(
         errors[0], 'Need at least one Call column for Calls 1-6')
Esempio n. 2
0
 def test_unexpected_sheet(self):
     """A sheet missing both identifier columns is rejected as unexpected."""
     path = self.get_path('unexpected_sheet_case_upload', 'xlsx')
     with get_spreadsheet(path) as spreadsheet:
         errors = RequiredColumnsValidator.run(spreadsheet)
     expected = [
         'Unexpected sheet uploaded. Either Rch_id or admission_id should be present'
     ]
     self.assertEqual(errors, expected)
Esempio n. 3
0
    def post(self, request, *args, **kwargs):
        """Handle an uploaded Excel file of multimedia path renames.

        Validates the file extension and spreadsheet format, applies the
        old-path -> new-path mapping to the app, and reports successes and
        warnings back to the user via Django messages. Always re-renders
        the page via ``self.get(...)``.
        """
        handle = request.FILES['bulk_upload_file']
        # Normalize the extension: drop the leading dot, trim, lowercase.
        extension = os.path.splitext(handle.name)[1][1:].strip().lower()
        if extension not in valid_extensions:
            messages.error(request, _("Please choose a file with one of the following extensions: "
                                      "{}").format(", ".join(valid_extensions)))
            return self.get(request, *args, **kwargs)

        # Persist the upload to transient storage so it can be re-read below.
        meta = transient_file_store.write_file(handle, handle.name, self.domain)
        file_id = meta.identifier

        f = transient_file_store.get_tempfile_ref_for_contents(file_id)
        try:
            # Cheap sanity check that the file really is a spreadsheet.
            open_spreadsheet_download_ref(f)
        except SpreadsheetFileExtError:
            messages.error(request, _("File does not appear to be an Excel file. Please choose another file."))
            return self.get(request, *args, **kwargs)

        from corehq.apps.app_manager.views.media_utils import interpolate_media_path
        from corehq.apps.hqmedia.view_helpers import validate_multimedia_paths_rows, update_multimedia_paths

        # Get rows, filtering out header, no-ops, and any extra "Usages" columns
        rows = []
        with get_spreadsheet(f) as spreadsheet:
            for row in list(spreadsheet.iter_rows())[1:]:
                if row[1]:
                    rows.append(row[:2])

        # NOTE(review): the validator's warnings are bound here but are
        # overwritten below before being shown — confirm that is intended.
        (errors, warnings) = validate_multimedia_paths_rows(self.app, rows)
        if len(errors):
            for msg in errors:
                messages.error(request, msg, extra_tags='html')
            return self.get(request, *args, **kwargs)

        # Map each old path to its normalized replacement.
        paths = {
            row[0]: interpolate_media_path(row[1]) for row in rows if row[1]
        }
        successes = update_multimedia_paths(self.app, paths)
        self.app.save()

        # Force all_media to reset
        self.app.all_media.reset_cache(self.app)
        self.app.all_media_paths.reset_cache(self.app)

        # Warn if any old paths remain in app (because they're used in a place this function doesn't know about)
        warnings = []
        self.app.remove_unused_mappings()
        app_paths = {m.path: True for m in self.app.all_media()}
        for old_path, new_path in paths.items():
            if old_path in app_paths:
                warnings.append(_("Could not completely update path <code>{}</code>, "
                                  "please check app for remaining references.").format(old_path))

        for msg in successes:
            messages.success(request, msg, extra_tags='html')
        for msg in warnings:
            messages.warning(request, msg, extra_tags='html')
        return self.get(request, *args, **kwargs)
Esempio n. 4
0
    def handle(self, export_file, config_file, domain, **options):
        """Run a case import from an export file and print the JSON result."""
        started_at = datetime.utcnow()

        with open(config_file, 'r', encoding='utf-8') as config_fh:
            config = ImporterConfig.from_json(config_fh.read())

        with get_spreadsheet(export_file) as spreadsheet:
            result = do_import(spreadsheet, config, domain)
            print(json.dumps(result, default=json_handler))
            elapsed = (datetime.utcnow() - started_at).seconds
            print('finished in %s seconds' % elapsed)
Esempio n. 5
0
    def test_validate_required_columns(self):
        """Both RCH and SNCU uploads must report every missing required column."""
        cases = [
            ('missing_columns_rch_case_upload', set(REQUIRED_COLUMNS)),
            # SNCU sheets additionally require the newborn weight column.
            ('missing_columns_sncu_case_upload',
             set(REQUIRED_COLUMNS) | {NEWBORN_WEIGHT_COLUMN}),
        ]
        for filename, expected_missing in cases:
            with get_spreadsheet(
                    self.get_path(filename, 'xlsx')) as spreadsheet:
                errors = RequiredColumnsValidator.run(spreadsheet)
            # extract missing columns from message
            reported_missing = set(
                errors[0].removeprefix('Missing columns ').split(', '))
            self.assertEqual(reported_missing, expected_missing)
Esempio n. 6
0
    def handle(self, export_file, config_file, domain, user_id, **options):
        """Run a case import as the given user and print the JSON result.

        Raises CommandError when the user is not a member of the domain.
        """
        started_at = datetime.utcnow()

        # user_id may be a username (contains '@') or a user document id.
        lookup = WebUser.get_by_username if '@' in user_id else WebUser.get
        user = lookup(user_id)
        if not user.is_member_of(domain):
            raise CommandError("%s can't access %s" % (user, domain))

        with open(config_file, 'r', encoding='utf-8') as config_fh:
            config = ImporterConfig.from_json(config_fh.read())

        config.couch_user_id = user._id
        with get_spreadsheet(export_file) as spreadsheet:
            result = do_import(spreadsheet, config, domain)
            print(json.dumps(result, default=json_handler))
            elapsed = (datetime.utcnow() - started_at).seconds
            print('finished in %s seconds' % elapsed)
Esempio n. 7
0
 def _assert_missing_values_for_sheet(self, spreadsheet_name,
                                      required_values):
     """Assert every data row of the sheet fails required-value validation.

     Each non-header row should yield exactly one error titled
     'Missing required column(s)' listing ``required_values``.
     """
     path = self.get_path(spreadsheet_name, 'xlsx')
     with get_spreadsheet(path) as spreadsheet:
         for row_num, raw_row in enumerate(spreadsheet.iter_row_dicts()):
             if row_num == 0:
                 # skip the header row
                 continue
             fields_to_update = raw_row.copy()
             # Whichever beneficiary identifier is present becomes external_id.
             for identifier in (RCH_BENEFICIARY_IDENTIFIER,
                                SNCU_BENEFICIARY_IDENTIFIER):
                 if identifier in fields_to_update:
                     fields_to_update['external_id'] = (
                         fields_to_update.pop(identifier))
                     break
             fields_to_update, errors = RequiredValueValidator.run(
                 row_num, raw_row, fields_to_update, {})
             self.assertEqual([error.title for error in errors],
                              ['Missing required column(s)'])
             self.assertEqual(
                 [error.message for error in errors],
                 [f"Required columns are {', '.join(required_values)}"])
Esempio n. 8
0
    def handle(self, *args, **options):
        """Import cases from an Excel export file as a given user.

        Usage: import_cases <export_file> <config_file> <domain> <user_id>

        Prints the import result as JSON followed by the elapsed time.
        Raises CommandError on a wrong argument count or when the user is
        not a member of the target domain.
        """
        if len(args) != 4:
            raise CommandError('Usage is import_cases %s' % self.args)

        start = datetime.utcnow()
        export_file, config_file, domain, user_id = args
        # user_id may be a username (contains '@') or a user document id.
        if '@' in user_id:
            user = WebUser.get_by_username(user_id)
        else:
            user = WebUser.get(user_id)
        if not user.is_member_of(domain):
            raise CommandError("%s can't access %s" % (user, domain))

        # Explicit encoding for cross-platform stability (matches the other
        # import commands in this file).
        with open(config_file, 'r', encoding='utf-8') as f:
            config = ImporterConfig.from_json(f.read())

        config.couch_user_id = user._id
        with get_spreadsheet(export_file) as spreadsheet:
            # Python 3 print() calls — the original used Python 2 print
            # statements, which are a SyntaxError on Python 3.
            print(json.dumps(do_import(spreadsheet, config, domain),
                             default=json_handler))
            print('finished in %s seconds' % (datetime.utcnow() - start).seconds)
Esempio n. 9
0
    def handle(self, *args, **options):
        """Import cases from an Excel export file as a given user.

        Usage: import_cases <export_file> <config_file> <domain> <user_id>

        Prints the import result as JSON followed by the elapsed time.
        Raises CommandError on a wrong argument count or when the user is
        not a member of the target domain.
        """
        if len(args) != 4:
            raise CommandError('Usage is import_cases %s' % self.args)

        start = datetime.utcnow()
        export_file, config_file, domain, user_id = args
        # user_id may be a username (contains '@') or a user document id.
        if '@' in user_id:
            user = WebUser.get_by_username(user_id)
        else:
            user = WebUser.get(user_id)
        if not user.is_member_of(domain):
            raise CommandError("%s can't access %s" % (user, domain))

        # Explicit encoding for cross-platform stability (matches the other
        # import commands in this file).
        with open(config_file, 'r', encoding='utf-8') as f:
            config = ImporterConfig.from_json(f.read())

        config.couch_user_id = user._id
        with get_spreadsheet(export_file) as spreadsheet:
            # Python 3 print() calls — the original used Python 2 print
            # statements, which are a SyntaxError on Python 3.
            print(json.dumps(do_import(spreadsheet, config, domain),
                             default=json_handler))
            print('finished in %s seconds' % (datetime.utcnow() -
                                              start).seconds)
Esempio n. 10
0
def update_multimedia_paths(request, domain, app_id):
    """Apply an uploaded Excel sheet of multimedia path renames to an app.

    Returns a JSON response: an ``error`` on a bad upload, otherwise
    ``complete`` with per-path ``successes`` and ``warnings``.
    """
    if not request.FILES:
        return json_response(
            {'error': _("Please choose an Excel file to import.")})

    handle = request.FILES['file']

    # Normalize the extension: drop the leading dot, trim, lowercase.
    extension = os.path.splitext(handle.name)[1][1:].strip().lower()
    if extension not in ALLOWED_EXTENSIONS:
        return json_response({
            'error':
            _("Please choose a file with one of the following extensions: "
              "{}").format(", ".join(ALLOWED_EXTENSIONS))
        })

    # Persist the upload to transient storage so it can be re-read below.
    meta = transient_file_store.write_file(handle, handle.name, domain)
    file_id = meta.identifier

    f = transient_file_store.get_tempfile_ref_for_contents(file_id)
    try:
        # Cheap sanity check that the file really is a spreadsheet.
        open_spreadsheet_download_ref(f)
    except SpreadsheetFileExtError:
        return json_response({
            'error':
            _("File does not appear to be an Excel file. Please choose another file."
              )
        })

    app = get_app(domain, app_id)
    from corehq.apps.app_manager.views.media_utils import interpolate_media_path
    from corehq.apps.hqmedia.view_helpers import validate_multimedia_paths_rows, update_multimedia_paths

    # Get rows, filtering out header, no-ops, and any extra "Usages" columns
    rows = []
    with get_spreadsheet(f) as spreadsheet:
        for row in list(spreadsheet.iter_rows())[1:]:
            if row[1]:
                rows.append(row[:2])

    # NOTE(review): the validator's warnings are bound here but are
    # overwritten below before being returned — confirm that is intended.
    (errors, warnings) = validate_multimedia_paths_rows(app, rows)
    if len(errors):
        return json_response({
            'complete': 1,
            'errors': errors,
        })

    # Map each old path to its normalized replacement.
    paths = {row[0]: interpolate_media_path(row[1]) for row in rows if row[1]}
    successes = update_multimedia_paths(app, paths)
    app.save()

    # Force all_media to reset
    app.all_media.reset_cache(app)
    app.all_media_paths.reset_cache(app)

    # Warn if any old paths remain in app (because they're used in a place this function doesn't know about)
    warnings = []
    app.remove_unused_mappings()
    app_paths = {m.path: True for m in app.all_media()}
    for old_path, new_path in paths.items():
        if old_path in app_paths:
            warnings.append(
                _("Could not completely update path <code>{}</code>, "
                  "please check app for remaining references.").format(
                      old_path))

    return json_response({
        'complete': 1,
        'successes': successes,
        'warnings': warnings,
    })
Esempio n. 11
0
 def get_spreadsheet(self):
     """Open this object's temp file with the module-level get_spreadsheet."""
     tempfile_ref = self.get_tempfile()
     return get_spreadsheet(tempfile_ref)
Esempio n. 12
0
def update_multimedia_paths(request, domain, app_id):
    """Apply an uploaded Excel sheet of multimedia path renames to an app.

    Returns a JSON response: an ``error`` on a bad upload, otherwise
    ``complete`` with per-path ``successes`` and ``warnings``.
    """
    if not request.FILES:
        return json_response({
            'error': _("Please choose an Excel file to import.")
        })

    handle = request.FILES['file']

    # Normalize the extension: drop the leading dot, trim, lowercase.
    extension = os.path.splitext(handle.name)[1][1:].strip().lower()
    if extension not in ALLOWED_EXTENSIONS:
        return json_response({
            'error': _("Please choose a file with one of the following extensions: "
                       "{}").format(", ".join(ALLOWED_EXTENSIONS))
        })

    # Persist the upload to transient storage so it can be re-read below.
    meta = transient_file_store.write_file(handle, handle.name, domain)
    file_id = meta.identifier

    f = transient_file_store.get_tempfile_ref_for_contents(file_id)
    try:
        # Cheap sanity check that the file really is a spreadsheet.
        open_spreadsheet_download_ref(f)
    except SpreadsheetFileExtError:
        return json_response({
            'error': _("File does not appear to be an Excel file. Please choose another file.")
        })

    app = get_app(domain, app_id)
    from corehq.apps.app_manager.views.media_utils import interpolate_media_path
    from corehq.apps.hqmedia.view_helpers import validate_multimedia_paths_rows, update_multimedia_paths

    # Get rows, filtering out header, no-ops, and any extra "Usages" columns
    rows = []
    with get_spreadsheet(f) as spreadsheet:
        for row in list(spreadsheet.iter_rows())[1:]:
            if row[1]:
                rows.append(row[:2])

    # NOTE(review): the validator's warnings are bound here but are
    # overwritten below before being returned — confirm that is intended.
    (errors, warnings) = validate_multimedia_paths_rows(app, rows)
    if len(errors):
        return json_response({
            'complete': 1,
            'errors': errors,
        })

    # Map each old path to its normalized replacement.
    paths = {
        row[0]: interpolate_media_path(row[1]) for row in rows if row[1]
    }
    successes = update_multimedia_paths(app, paths)
    app.save()

    # Force all_media to reset
    app.all_media.reset_cache(app)
    app.all_media_paths.reset_cache(app)

    # Warn if any old paths remain in app (because they're used in a place this function doesn't know about)
    warnings = []
    app.remove_unused_mappings()
    app_paths = {m.path: True for m in app.all_media()}
    # dict.items() replaces six.iteritems — equivalent on Python 3 and
    # consistent with the other copy of this view in the file.
    for old_path, new_path in paths.items():
        if old_path in app_paths:
            warnings.append(_("Could not completely update path <code>{}</code>, "
                              "please check app for remaining references.").format(old_path))

    return json_response({
        'complete': 1,
        'successes': successes,
        'warnings': warnings,
    })
Esempio n. 13
0
 def get_spreadsheet(self):
     """Return a spreadsheet opened from this object's temporary file."""
     # Resolves to the module-level get_spreadsheet, not this method.
     return get_spreadsheet(self.get_tempfile())