Example #1
def process_ui_translation_upload(app, trans_file):

    workbook = WorkbookJSONReader(trans_file)
    translations = workbook.get_worksheet(title='translations')

    commcare_ui_strings = load_translations('en', 2).keys()
    default_trans = get_default_translations_for_download(app)
    lang_with_defaults = app.langs[get_index_for_defaults(app.langs)]

    trans_dict = defaultdict(dict)
    # Use this to hard fail and not update any translations
    error_properties = []
    # Use this to pass warnings without failing hard
    warnings = []
    for row in translations:
        if row["property"] not in commcare_ui_strings:
            # Add a warning for unknown properties, but still add them to the translation dict
            warnings.append(row["property"] + " is not a known CommCare UI string, but we added it anyway")
        for lang in app.langs:
            if row.get(lang):
                all_parameters = re.findall(r"\$.*?}", row[lang])
                for param in all_parameters:
                    if not re.match(r"\$\{[0-9]+}", param):
                        error_properties.append(row["property"] + ' - ' + row[lang])
                if not (lang_with_defaults == lang and
                        row[lang] == default_trans.get(row["property"], "")):
                    trans_dict[lang].update({row["property"]: row[lang]})

    return trans_dict, error_properties, warnings
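
A hedged sketch of how the helper above might be driven, assuming `app` exposes a per-language `translations` dict (that attribute is not shown in the example and is assumed here):

def apply_ui_translation_upload(app, trans_file):
    # Illustrative wrapper only. Any malformed ${n} parameter collected in
    # error_properties blocks the whole update (the "hard fail" noted above),
    # while warnings are returned for display without stopping the upload.
    trans_dict, error_properties, warnings = process_ui_translation_upload(app, trans_file)
    if error_properties:
        return {'success': False, 'errors': error_properties, 'warnings': warnings}
    for lang, properties in trans_dict.items():
        app.translations.setdefault(lang, {}).update(properties)  # assumed attribute
    return {'success': True, 'errors': [], 'warnings': warnings}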
Example #2
    def test_archive_forms_wrong_domain(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, BASIC_XLSX))

        response = archive_forms_old('wrong_domain', 'user1', self.username,
                                     list(uploaded_file.get_worksheet()))

        self.assertEqual(len(response['errors']), len(self.xforms),
                         "Error when wrong domain")
Example #3
    def test_archive_forms_basic(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, BASIC_XLSX))

        response = archive_forms_old(self.domain_name, self.user, list(uploaded_file.get_worksheet()))

        # Need to re-get instance from DB to get updated attributes
        for key, _id in self.XFORMS.iteritems():
            self.assertEqual(XFormInstance.get(_id).doc_type, 'XFormArchived')

        self.assertEqual(len(response['success']), len(self.xforms))
Example #4
class FixtureWorkbook(object):
    """
    Helper class for working with the fixture workbook
    """
    def __init__(self, file_or_filename):
        try:
            self.workbook = WorkbookJSONReader(file_or_filename)
        except AttributeError:
            raise FixtureUploadError(
                _("Error processing your Excel (.xlsx) file"))
        except InvalidFileException:
            raise FixtureUploadError(
                _("Invalid file-format. Please upload a valid xlsx file."))

    def get_types_sheet(self):
        try:
            return self.workbook.get_worksheet(title='types')
        except WorksheetNotFound as e:
            raise FixtureUploadError(
                _("Workbook does not contain a sheet called '%(title)s'") %
                {'title': e.title})

    def get_data_sheet(self, data_type):
        return self.workbook.get_worksheet(data_type.tag)

    def get_all_type_sheets(self):
        type_sheets = []
        seen_tags = set()
        for number_of_fixtures, dt in enumerate(self.get_types_sheet()):
            table_definition = FixtureTableDefinition.from_row(dt)
            if table_definition.table_id in seen_tags:
                raise DuplicateFixtureTagException(
                    _(FAILURE_MESSAGES['duplicate_tag']).format(
                        tag=table_definition.table_id))

            seen_tags.add(table_definition.table_id)
            type_sheets.append(table_definition)
        return type_sheets

    def validate(self):
        self.get_types_sheet()
        self.get_all_type_sheets()
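
A minimal usage sketch for the class above; `data_types` is assumed to be an iterable of objects with a `tag` attribute, which is what get_data_sheet() expects:

def read_fixture_tables(file_or_filename, data_types):
    # Sketch only: validate first, then collect the table definitions and each table's rows.
    workbook = FixtureWorkbook(file_or_filename)
    workbook.validate()  # raises FixtureUploadError or DuplicateFixtureTagException on bad input
    table_ids = [t.table_id for t in workbook.get_all_type_sheets()]
    rows_by_tag = {dt.tag: list(workbook.get_data_sheet(dt)) for dt in data_types}
    return table_ids, rows_by_tag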
Example #5
    def test_archive_forms_missing(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, MISSING_XLSX))

        response = archive_forms_old(self.domain_name, self.user, list(uploaded_file.get_worksheet()))

        for key, _id in self.XFORMS.iteritems():
            self.assertEqual(XFormInstance.get(_id).doc_type, 'XFormArchived')

        self.assertEqual(len(response['success']), len(self.xforms))
        self.assertEqual(len(response['errors']), 1,
                         "One error for trying to archive a missing form")
Example #6
class _FixtureWorkbook(object):
    """
    Helper class for working with the fixture workbook
    """
    def __init__(self, file_or_filename):
        try:
            self.workbook = WorkbookJSONReader(file_or_filename)
        except AttributeError:
            # todo: I don't know what would cause this error and it's a bad message
            raise FixtureUploadError(
                [_("Error processing your Excel (.xlsx) file")])
        except InvalidExcelFileException:
            raise FixtureUploadError([FAILURE_MESSAGES['not_excel_file']])
        except HeaderValueError as e:
            raise FixtureUploadError([unicode(e)])
        except JSONReaderError as e:
            raise FixtureUploadError([unicode(e)])

    def get_types_sheet(self):
        try:
            return self.workbook.get_worksheet(title='types')
        except WorksheetNotFound:
            raise FixtureUploadError([FAILURE_MESSAGES['no_types_sheet']])

    def get_data_sheet(self, data_type_tag):
        return self.workbook.get_worksheet(data_type_tag)

    def get_all_type_sheets(self):
        type_sheets = []
        seen_tags = set()
        for number_of_fixtures, dt in enumerate(self.get_types_sheet()):
            table_definition = _FixtureTableDefinition.from_row(dt)
            if table_definition.table_id in seen_tags:
                raise FixtureUploadError([
                    _(FAILURE_MESSAGES['duplicate_tag']).format(
                        tag=table_definition.table_id)
                ])

            seen_tags.add(table_definition.table_id)
            type_sheets.append(table_definition)
        return type_sheets
Example #7
    def test_archive_forms_basic(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, BASIC_XLSX))

        with drop_connected_signals(xform_archived):
            response = archive_forms_old(DOMAIN_NAME, 'user1', self.username, list(uploaded_file.get_worksheet()))
            print response

        # Need to re-get instance from DB to get updated attributes
        for key, _id in self.XFORMS.iteritems():
            self.assertTrue(FormAccessors(DOMAIN_NAME).get_form(_id).is_archived)

        self.assertEqual(len(response['success']), len(self.xforms))
Example #8
    def test_archive_forms_missing(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, MISSING_XLSX))

        with drop_connected_signals(xform_archived):
            response = archive_forms_old(DOMAIN_NAME, 'user1', self.username, list(uploaded_file.get_worksheet()))

        for key, _id in self.XFORMS.iteritems():
            self.assertTrue(FormAccessors(DOMAIN_NAME).get_form(_id).is_archived)

        self.assertEqual(len(response['success']), len(self.xforms))
        self.assertEqual(len(response['errors']), 1,
                         "One error for trying to archive a missing form")
Example #9
class FixtureWorkbook(object):
    """
    Helper class for working with the fixture workbook
    """

    def __init__(self, file_or_filename):
        try:
            self.workbook = WorkbookJSONReader(file_or_filename)
        except AttributeError:
            raise FixtureUploadError(_("Error processing your Excel (.xlsx) file"))
        except InvalidFileException:
            raise FixtureUploadError(_("Invalid file-format. Please upload a valid xlsx file."))

    def get_types_sheet(self):
        try:
            return self.workbook.get_worksheet(title="types")
        except WorksheetNotFound as e:
            raise FixtureUploadError(_("Workbook does not contain a sheet called '%(title)s'") % {"title": e.title})

    def get_data_sheet(self, data_type):
        return self.workbook.get_worksheet(data_type.tag)

    def get_all_type_sheets(self):
        type_sheets = []
        seen_tags = set()
        for number_of_fixtures, dt in enumerate(self.get_types_sheet()):
            table_definition = FixtureTableDefinition.from_row(dt)
            if table_definition.table_id in seen_tags:
                raise DuplicateFixtureTagException(
                    _(FAILURE_MESSAGES["duplicate_tag"]).format(tag=table_definition.table_id)
                )

            seen_tags.add(table_definition.table_id)
            type_sheets.append(table_definition)
        return type_sheets

    def validate(self):
        self.get_types_sheet()
        self.get_all_type_sheets()
Example #10
    def test_archive_forms_basic(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, BASIC_XLSX))

        with drop_connected_signals(xform_archived):
            response = archive_forms_old(DOMAIN_NAME, 'user1', self.username,
                                         list(uploaded_file.get_worksheet()))
            print response

        # Need to re-get instance from DB to get updated attributes
        for key, _id in self.XFORMS.iteritems():
            self.assertTrue(
                FormAccessors(DOMAIN_NAME).get_form(_id).is_archived)

        self.assertEqual(len(response['success']), len(self.xforms))
Example #11
    def test_archive_forms_missing(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, MISSING_XLSX))

        with drop_connected_signals(xform_archived):
            response = archive_forms_old(DOMAIN_NAME, 'user1', self.username,
                                         list(uploaded_file.get_worksheet()))

        for key, _id in self.XFORMS.iteritems():
            self.assertTrue(
                FormAccessors(DOMAIN_NAME).get_form(_id).is_archived)

        self.assertEqual(len(response['success']), len(self.xforms))
        self.assertEqual(len(response['errors']), 1,
                         "One error for trying to archive a missing form")
Example #12
def process_ui_translation_upload(app, trans_file):

    workbook = WorkbookJSONReader(trans_file)
    translations = workbook.get_worksheet(title='translations')

    default_trans = get_default_translations_for_download(app)
    lang_with_defaults = app.langs[get_index_for_defaults(app.langs)]
    trans_dict = defaultdict(dict)
    error_properties = []
    for row in translations:
        for lang in app.langs:
            if row.get(lang):
                all_parameters = re.findall(r"\$.*?}", row[lang])
                for param in all_parameters:
                    if not re.match(r"\$\{[0-9]+}", param):
                        error_properties.append(row["property"] + ' - ' + row[lang])
                if not (lang_with_defaults == lang
                        and row[lang] == default_trans.get(row["property"], "")):
                    trans_dict[lang].update({row["property"]: row[lang]})
    return trans_dict, error_properties
Example #13
class UploadCommCareUsers(BaseManageCommCareUserView):
    template_name = 'users/upload_commcare_users.html'
    urlname = 'upload_commcare_users'
    page_title = ugettext_noop("Bulk Upload Mobile Workers")

    @method_decorator(
        requires_privilege_with_fallback(privileges.BULK_USER_MANAGEMENT))
    def dispatch(self, request, *args, **kwargs):
        return super(UploadCommCareUsers,
                     self).dispatch(request, *args, **kwargs)

    @property
    def page_context(self):
        request_params = self.request.GET if self.request.method == 'GET' else self.request.POST
        context = {
            'bulk_upload': {
                "help_site": {
                    "address": BULK_MOBILE_HELP_SITE,
                    "name": _("CommCare Help Site"),
                },
                "download_url":
                reverse("download_commcare_users", args=(self.domain, )),
                "adjective":
                _("mobile worker"),
                "plural_noun":
                _("mobile workers"),
            },
            'show_secret_settings': request_params.get("secret", False),
        }
        context.update({
            'bulk_upload_form': get_bulk_upload_form(context),
        })
        return context

    def post(self, request, *args, **kwargs):
        """View's dispatch method automatically calls this"""
        upload = request.FILES.get('bulk_upload_file')
        try:
            self.workbook = WorkbookJSONReader(upload)
        except (InvalidFileException, BadZipfile):
            try:
                csv.DictReader(
                    io.StringIO(upload.read().decode('ascii'), newline=None))
                return HttpResponseBadRequest(
                    "CommCare HQ no longer supports CSV upload. "
                    "Please convert to Excel 2007 or higher (.xlsx) "
                    "and try again.")
            except UnicodeDecodeError:
                return HttpResponseBadRequest("Unrecognized format")
        except JSONReaderError as e:
            messages.error(request,
                           'Your upload was unsuccessful. %s' % e.message)
            return self.get(request, *args, **kwargs)
        except HeaderValueError as e:
            return HttpResponseBadRequest(
                "Upload encountered a data type error: %s" % e.message)

        try:
            self.user_specs = self.workbook.get_worksheet(title='users')
        except WorksheetNotFound:
            try:
                self.user_specs = self.workbook.get_worksheet()
            except WorksheetNotFound:
                return HttpResponseBadRequest("Workbook has no worksheets")

        try:
            self.group_specs = self.workbook.get_worksheet(title='groups')
        except WorksheetNotFound:
            self.group_specs = []

        self.location_specs = []
        if Domain.get_by_name(self.domain).commtrack_enabled:
            try:
                self.location_specs = self.workbook.get_worksheet(
                    title='locations')
            except WorksheetNotFound:
                # if there is no sheet for locations (since this was added
                # later and is optional) we don't error
                pass

        try:
            check_headers(self.user_specs)
        except UserUploadError as e:
            messages.error(request, _(e.message))
            return HttpResponseRedirect(
                reverse(UploadCommCareUsers.urlname, args=[self.domain]))

        # convert to list here because iterator destroys the row once it has
        # been read the first time
        self.user_specs = list(self.user_specs)

        for user_spec in self.user_specs:
            try:
                user_spec['username'] = enforce_string_type(
                    user_spec['username'])
            except StringTypeRequiredError:
                messages.error(
                    request,
                    _("Error: Expected username to be a Text type for username {0}"
                      ).format(user_spec['username']))
                return HttpResponseRedirect(
                    reverse(UploadCommCareUsers.urlname, args=[self.domain]))

        try:
            check_existing_usernames(self.user_specs, self.domain)
        except UserUploadError as e:
            messages.error(request, _(e.message))
            return HttpResponseRedirect(
                reverse(UploadCommCareUsers.urlname, args=[self.domain]))

        try:
            check_duplicate_usernames(self.user_specs)
        except UserUploadError as e:
            messages.error(request, _(e.message))
            return HttpResponseRedirect(
                reverse(UploadCommCareUsers.urlname, args=[self.domain]))

        task_ref = expose_cached_download(payload=None,
                                          expiry=1 * 60 * 60,
                                          file_extension=None)
        task = bulk_upload_async.delay(self.domain, self.user_specs,
                                       list(self.group_specs),
                                       list(self.location_specs))
        task_ref.set_task(task)
        return HttpResponseRedirect(
            reverse(UserUploadStatusView.urlname,
                    args=[self.domain, task_ref.download_id]))
Example #14
class UploadCommCareUsers(BaseManageCommCareUserView):
    template_name = 'users/upload_commcare_users.html'
    urlname = 'upload_commcare_users'
    page_title = ugettext_noop("Bulk Upload Mobile Workers")

    @method_decorator(requires_privilege_with_fallback(privileges.BULK_USER_MANAGEMENT))
    def dispatch(self, request, *args, **kwargs):
        return super(UploadCommCareUsers, self).dispatch(request, *args, **kwargs)

    @property
    def page_context(self):
        request_params = expect_GET(self.request)
        context = {
            'bulk_upload': {
                "help_site": {
                    "address": BULK_MOBILE_HELP_SITE,
                    "name": _("CommCare Help Site"),
                },
                "download_url": reverse(
                    "download_commcare_users", args=(self.domain,)),
                "adjective": _("mobile worker"),
                "plural_noun": _("mobile workers"),
            },
            'show_secret_settings': request_params.get("secret", False),
        }
        context.update({
            'bulk_upload_form': get_bulk_upload_form(context),
        })
        return context

    def post(self, request, *args, **kwargs):
        """View's dispatch method automatically calls this"""
        upload = request.FILES.get('bulk_upload_file')
        try:
            self.workbook = WorkbookJSONReader(upload)
        except (InvalidFileException, BadZipfile):
            try:
                csv.DictReader(io.StringIO(upload.read().decode('ascii'),
                                           newline=None))
                return HttpResponseBadRequest(
                    "CommCare HQ no longer supports CSV upload. "
                    "Please convert to Excel 2007 or higher (.xlsx) "
                    "and try again."
                )
            except UnicodeDecodeError:
                return HttpResponseBadRequest("Unrecognized format")
        except JSONReaderError as e:
            messages.error(request,
                           'Your upload was unsuccessful. %s' % e.message)
            return self.get(request, *args, **kwargs)
        except HeaderValueError as e:
            return HttpResponseBadRequest("Upload encountered a data type error: %s"
                                          % e.message)

        try:
            self.user_specs = self.workbook.get_worksheet(title='users')
        except WorksheetNotFound:
            try:
                self.user_specs = self.workbook.get_worksheet()
            except WorksheetNotFound:
                return HttpResponseBadRequest("Workbook has no worksheets")

        try:
            self.group_specs = self.workbook.get_worksheet(title='groups')
        except WorksheetNotFound:
            self.group_specs = []

        self.location_specs = []
        if Domain.get_by_name(self.domain).commtrack_enabled:
            try:
                self.location_specs = self.workbook.get_worksheet(title='locations')
            except WorksheetNotFound:
                # if there is no sheet for locations (since this was added
                # later and is optional) we don't error
                pass

        try:
            check_headers(self.user_specs)
        except UserUploadError as e:
            messages.error(request, _(e.message))
            return HttpResponseRedirect(reverse(UploadCommCareUsers.urlname, args=[self.domain]))

        task_ref = expose_cached_download(payload=None, expiry=1*60*60, file_extension=None)
        task = bulk_upload_async.delay(
            self.domain,
            list(self.user_specs),
            list(self.group_specs),
            list(self.location_specs)
        )
        task_ref.set_task(task)
        return HttpResponseRedirect(
            reverse(
                UserUploadStatusView.urlname,
                args=[self.domain, task_ref.download_id]
            )
        )
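
For reference, a hedged sketch of a workbook that would get past the sheet lookups in post() above, built with openpyxl (any tool that writes .xlsx works; 'username' is the only user column the examples reference, so real uploads need more columns than shown):

import openpyxl

wb = openpyxl.Workbook()
users = wb.active
users.title = 'users'              # required sheet; post() falls back to the first worksheet
users.append(['username'])         # only column referenced in the examples above
users.append(['worker1'])
wb.create_sheet(title='groups')    # optional; a missing sheet becomes an empty list
# A 'locations' sheet is also optional and only read for commtrack-enabled domains.
wb.save('bulk_users.xlsx')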
Example #15
    def clean_message_bank_file(self):
        value = self.cleaned_data.get("message_bank_file")

        if not value:
            raise ValidationError(_("Please choose a file."))

        try:
            workbook = WorkbookJSONReader(value)
        except InvalidFileException:
            raise ValidationError(_("Invalid format. Please convert to Excel 2007 or higher (.xlsx) and try again."))

        try:
            worksheet = workbook.get_worksheet()
        except WorksheetNotFound:
            raise ValidationError(_("Workbook has no worksheets."))

        message_ids = {}
        messages = []
        row_num = 2
        for row in worksheet:
            if "ID" not in row:
                raise ValidationError(_("Column 'ID' not found."))
            if "Message" not in row:
                raise ValidationError(_("Column 'Message' not found."))

            msg_id = row.get("ID")
            text = row.get("Message")

            try:
                assert isinstance(msg_id, basestring)
                msg_id = msg_id.strip()
                assert len(msg_id) > 1
                assert msg_id[0].upper() in "ABCDEFGH"
            except Exception:
                raise ValidationError(_("Invalid ID at row %(row_num)s") % {"row_num" : row_num})

            if msg_id in message_ids:
                raise ValidationError(_("Duplicate ID at row %(row_num)s") % {"row_num" : row_num})

            try:
                assert isinstance(text, basestring)
                text = text.strip()
                assert len(text) > 0
            except Exception:
                raise ValidationError(_("Invalid Message at row %(row_num)s") % {"row_num" : row_num})

            try:
                msg_id.encode("ascii")
            except Exception:
                raise ValidationError(_("ID at row %(row_num)s contains invalid character(s)") % {"row_num" : row_num})

            try:
                text.encode("ascii")
            except Exception:
                raise ValidationError(_("Message at row %(row_num)s contains invalid character(s)") % {"row_num" : row_num})

            if len(text) > 160:
                raise ValidationError(_("Message at row %(row_num)s is longer than 160 characters.") % {"row_num" : row_num})

            messages.append({
                "msg_id" : msg_id,
                "text" : text,
            })
            message_ids[msg_id] = True
            row_num += 1

        return messages
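
For illustration, rows shaped like the following (dicts keyed by the worksheet header row) would pass the checks above: IDs must be ASCII, at least two characters, and start with a letter A through H; messages must be non-empty ASCII of at most 160 characters:

valid_rows = [
    {"ID": "A1", "Message": "Thank you for registering."},
    {"ID": "B2", "Message": "Please visit the clinic tomorrow."},
]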
Example #16
    def test_archive_forms_wrong_domain(self):
        uploaded_file = WorkbookJSONReader(join(BASE_PATH, BASIC_XLSX))

        response = archive_forms_old('wrong_domain', 'user1', self.username, list(uploaded_file.get_worksheet()))

        self.assertEqual(len(response['errors']), len(self.xforms), "Error when wrong domain")
Example #17
    def test_excel_formula_reading(self):
        formula_filepath = self.get_path('formula_sheet', 'xlsx')
        workbook = WorkbookJSONReader(formula_filepath)
        results = list(workbook.get_worksheet('Sheet1'))

        self.assertEqual(results[0]['formula'], 2)  # Instead of =SUM(1,1)
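
As the assertion suggests, WorkbookJSONReader yields each row as a dict keyed by the header row, and formula cells come back as their last computed value rather than the formula text. A minimal reading sketch; the import path is an assumption (the examples above never show it) and may differ between CommCare HQ versions:

from corehq.util.workbook_json.excel import WorkbookJSONReader  # assumed import path

workbook = WorkbookJSONReader('formula_sheet.xlsx')   # hypothetical local file
for row in workbook.get_worksheet('Sheet1'):
    print(row['formula'])   # e.g. 2, not '=SUM(1,1)'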
Example #18
    def clean_message_bank_file(self):
        value = self.cleaned_data.get("message_bank_file")

        if not value:
            raise ValidationError(_("Please choose a file."))

        try:
            workbook = WorkbookJSONReader(value)
        except InvalidExcelFileException:
            raise ValidationError(_("Invalid format. Please convert to Excel 2007 or higher (.xlsx) and try again."))

        try:
            worksheet = workbook.get_worksheet()
        except WorksheetNotFound:
            raise ValidationError(_("Workbook has no worksheets."))

        message_ids = {}
        messages = []
        row_num = 2
        for row in worksheet:
            if "ID" not in row:
                raise ValidationError(_("Column 'ID' not found."))
            if "Message" not in row:
                raise ValidationError(_("Column 'Message' not found."))

            msg_id = row.get("ID")
            text = row.get("Message")

            try:
                assert isinstance(msg_id, basestring)
                msg_id = msg_id.strip()
                assert len(msg_id) > 1
                assert msg_id[0].upper() in "ABCDEFGH"
            except Exception:
                raise ValidationError(_("Invalid ID at row %(row_num)s") % {"row_num" : row_num})

            if msg_id in message_ids:
                raise ValidationError(_("Duplicate ID at row %(row_num)s") % {"row_num" : row_num})

            try:
                assert isinstance(text, basestring)
                text = text.strip()
                assert len(text) > 0
            except Exception:
                raise ValidationError(_("Invalid Message at row %(row_num)s") % {"row_num" : row_num})

            try:
                msg_id.encode("ascii")
            except Exception:
                raise ValidationError(_("ID at row %(row_num)s contains invalid character(s)") % {"row_num" : row_num})

            try:
                text.encode("ascii")
            except Exception:
                raise ValidationError(_("Message at row %(row_num)s contains invalid character(s)") % {"row_num" : row_num})

            if len(text) > 160:
                raise ValidationError(_("Message at row %(row_num)s is longer than 160 characters.") % {"row_num" : row_num})

            messages.append({
                "msg_id" : msg_id,
                "text" : text,
            })
            message_ids[msg_id] = True
            row_num += 1

        return messages