Example #1
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    type_sheets = workbook.get_all_type_sheets()
    total_tables = len(type_sheets)
    return_val.number_of_fixtures = total_tables

    def _update_progress(table_count, item_count, items_in_table):
        if task:
            processed = table_count * 10 + (10 * item_count / items_in_table)
            DownloadBase.set_progress(task, processed, 10 * total_tables)

    with CouchTransaction() as transaction:
        for table_number, table_def in enumerate(type_sheets):
            data_type, delete, err = _create_data_type(domain, table_def,
                                                       replace, transaction)
            return_val.errors.extend(err)
            if delete:
                continue
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {
                    field.field_name: _process_item_field(field, di)
                    for field in type_fields
                }

                item_attributes = di.get('property', {})
                old_data_item, delete, err = _process_data_item(
                    domain, replace, data_type, di, item_fields,
                    item_attributes, sort_key)
                return_val.errors.extend(err)
                if delete:
                    old_data_item.recursive_delete(transaction)
                    continue
                transaction.save(old_data_item)

                err = _process_group_ownership(di, old_data_item,
                                               group_memoizer, transaction)
                return_val.errors.extend(err)

                err = _process_user_ownership(di, old_data_item, transaction)
                return_val.errors.extend(err)

                err = _process_location_ownership(di, old_data_item,
                                                  get_location, transaction)
                return_val.errors.extend(err)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
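
Example #1 is the most refactored version: the per-table and per-row work is pushed into helpers (_create_data_type, _process_data_item, _process_group_ownership, _process_user_ownership, _process_location_ownership) that are not shown on this page. As a rough sketch, _process_group_ownership might look like the following, reconstructed from the inlined group-handling logic in Examples #4 and #5 below; only the signature comes from the call site above, the body is an assumption:

def _process_group_ownership(di, old_data_item, group_memoizer, transaction):
    # Reset the item's group links, then re-add the groups named in the row.
    # Unknown names are reported but do not block the row (see Example #4).
    errors = []
    for group in old_data_item.groups:
        old_data_item.remove_group(group)
    for group_name in di.get('group', []):
        group = group_memoizer.by_name(group_name)
        if group:
            old_data_item.add_group(group, transaction=transaction)
        else:
            errors.append(
                _("Unknown group: '%(name)s'. But the row is successfully added")
                % {'name': group_name})
    return errors
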
Example #2
    def _run_upload(self, request, workbook):
        group_memoizer = GroupMemoizer(self.domain)

        data_types = workbook.get_worksheet(title='types')

        def _get_or_raise(container, attr, message):
            try:
                return container[attr]
            except KeyError:
                raise Exception(message.format(attr=attr))
        with CouchTransaction() as transaction:
            for dt in data_types:
                err_msg = "Workbook 'types' has no column '{attr}'"
                data_type = FixtureDataType(
                    domain=self.domain,
                    name=_get_or_raise(dt, 'name', err_msg),
                    tag=_get_or_raise(dt, 'tag', err_msg),
                    fields=_get_or_raise(dt, 'field', err_msg),
                )
                transaction.save(data_type)
                data_items = workbook.get_worksheet(data_type.tag)
                for sort_key, di in enumerate(data_items):
                    data_item = FixtureDataItem(
                        domain=self.domain,
                        data_type_id=data_type.get_id,
                        fields=di['field'],
                        sort_key=sort_key
                    )
                    transaction.save(data_item)
                    for group_name in di.get('group', []):
                        group = group_memoizer.by_name(group_name)
                        if group:
                            data_item.add_group(group, transaction=transaction)
                        else:
                            messages.error(request, "Unknown group: %s" % group_name)
                    for raw_username in di.get('user', []):
                        username = normalize_username(raw_username, self.domain)
                        user = CommCareUser.get_by_username(username)
                        if user:
                            data_item.add_user(user)
                        else:
                            messages.error(request, "Unknown user: %s" % raw_username)
            for data_type in transaction.preview_save(cls=FixtureDataType):
                for duplicate in FixtureDataType.by_domain_tag(domain=self.domain, tag=data_type.tag):
                    duplicate.recursive_delete(transaction)
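
All of these versions share the GroupMemoizer(domain) helper, used only through by_name(group_name), which returns the group or None. A minimal sketch of such a memoizer; the Group.by_name lookup is an assumption, not the actual corehq implementation:

class GroupMemoizer(object):
    # Cache name -> group lookups so each name hits the database once
    # per upload, no matter how many rows reference it.
    def __init__(self, domain):
        self.domain = domain
        self._groups_by_name = {}

    def by_name(self, group_name):
        if group_name not in self._groups_by_name:
            # Group.by_name(domain, name) is assumed to return None on a miss
            self._groups_by_name[group_name] = Group.by_name(self.domain, group_name)
        return self._groups_by_name[group_name]
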
Example #3
    def _run_upload(self, request, workbook):
        group_memoizer = GroupMemoizer(self.domain)

        data_types = workbook.get_worksheet(title='types')

        with CouchTransaction() as transaction:
            for dt in data_types:
                data_type = FixtureDataType(
                    domain=self.domain,
                    name=dt['name'],
                    tag=dt['tag'],
                    fields=dt['field'],
                )
                transaction.save(data_type)
                data_items = workbook.get_worksheet(data_type.tag)
                for di in data_items:
                    data_item = FixtureDataItem(
                        domain=self.domain,
                        data_type_id=data_type.get_id,
                        fields=di['field']
                    )
                    transaction.save(data_item)
                    for group_name in di.get('group', []):
                        group = group_memoizer.by_name(group_name)
                        if group:
                            data_item.add_group(group, transaction=transaction)
                        else:
                            messages.error(request, "Unknown group: %s" % group_name)
                    for raw_username in di.get('user', []):
                        username = normalize_username(raw_username, self.domain)
                        user = CommCareUser.get_by_username(username)
                        if user:
                            data_item.add_user(user)
                        else:
                            messages.error(request, "Unknown user: %s" % raw_username)
            for data_type in transaction.preview_save(cls=FixtureDataType):
                for duplicate in FixtureDataType.by_domain_tag(domain=self.domain, tag=data_type.tag):
                    duplicate.recursive_delete(transaction)
Example #4
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid}
                    )
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(field_prop_combos[x]),
                                properties={prop: six.text_type(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']}
                        )
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name}
                        )

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username}
                        )
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username}
                        )

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
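
Examples #1, #4, #5, and #7 resolve location names through get_location = get_memoized_location_getter(domain) and read is_error, message, and location off the result. The record shape is inferred from those call sites; the following is a sketch under the assumption that locations are SQLLocation rows keyed by site_code:

from collections import namedtuple

LocationCache = namedtuple('LocationCache', 'is_error location message')

def get_memoized_location_getter(domain):
    cache = {}

    def get_location(name):
        if name not in cache:
            try:
                # SQLLocation / site_code is an assumed backing model
                location = SQLLocation.objects.get(domain=domain, site_code=name)
                cache[name] = LocationCache(False, location, None)
            except SQLLocation.DoesNotExist:
                cache[name] = LocationCache(
                    True, None,
                    _("Unknown location: '%(name)s'") % {'name': name})
        return cache[name]

    return get_location
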
Example #5
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10 * item_count /
                                                items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(
                                    field_prop_combos[x]),
                                properties={
                                    prop: six.text_type(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
Example #6
def run_upload(request, domain, workbook):
    return_val = {
        "unknown_groups": [], 
        "unknown_users": [], 
        "number_of_fixtures": 0,
    }
    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise Exception("Workbook 'types' has no column '{attr}'".format(attr=attr))
   
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        for number_of_fixtures, dt in enumerate(data_types):
            tag = _get_or_raise(dt, 'tag')
            type_definition_fields = _get_or_raise(dt, 'field')

            new_data_type = FixtureDataType(
                    domain=domain,
                    is_global=dt.get('is_global', False),
                    name=_get_or_raise(dt, 'name'),
                    tag=_get_or_raise(dt, 'tag'),
                    fields=type_definition_fields,
            )
            try:
                if dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                data_type.fields = type_definition_fields
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    messages.error(request, _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields = di['field']
                for field in type_definition_fields:
                    if field not in item_fields:
                        raise Exception(_("Workbook '%(tag)s' does not contain the column " +
                                          "'%(field)s' specified in its 'types' definition") % {'tag': tag, 'field': field})
                item_fields_list = di['field'].keys()
                for field in item_fields_list:
                    if field not in type_definition_fields:
                        raise Exception(_("Workbook '%(tag)s' has an extra column " +
                                          "'%(field)s' that's not defined in its 'types' definition") % {'tag': tag, 'field': field})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    sort_key=sort_key
                )
                try:
                    if di['UID']:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = di['field']   
                    if old_data_item.domain != domain:
                        old_data_item = new_data_item
                        messages.error(request, _("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID'] })
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    assert old_data_item.data_type_id == data_type.get_id
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue               
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.get_groups()
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.get_users()
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(raw_username, domain)
                    except ValidationError:
                        messages.error(request, _("Invalid username: '%(name)s'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, _("Unknown user: '%(name)s'. But the row is successfully added") % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
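
Every version funnels raw usernames through normalize_username(raw_username, domain) before the CommCareUser lookup and treats ValidationError as a bad row. Roughly, and only as an assumption about the real helper, it qualifies the name into the usual CommCare address form:

from django.core.validators import validate_email

def normalize_username(username, domain=None):
    # Qualify 'jdoe' into 'jdoe@domain.commcarehq.org' and validate it;
    # validate_email raises ValidationError, which the callers catch.
    username = str(username).lower()
    if domain:
        username = '%s@%s.commcarehq.org' % (username, domain)
    validate_email(username)
    return username
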
Example #7
def run_upload(domain, workbook, replace=False, task=None):
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location(domain)

    def diff_lists(list_a, list_b):
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10. * item_count /
                                                items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that this item has all the properties listed in its 'types' definition
                item_attributes_list = di['property'].keys(
                ) if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_no_property"]
                            ).format(tag=tag,
                                     property=not_in_sheet[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_extra_property"]
                            ).format(tag=tag,
                                     property=not_in_types[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(
                                FAILURE_MESSAGES["invalid_field_with_property"]
                            ).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(
                                    FAILURE_MESSAGES["invalid_property"]
                                ).format(field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES[
                                    "wrong_field_property_combos"]).format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    return return_val
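
The diff_lists helper in Examples #7 and #8 returns (items only in list_b, items only in list_a), which the callers unpack as (not_in_sheet, not_in_types). A small illustration with hypothetical column names:

# list_a: columns found in the item sheet; list_b: fields declared in 'types'
not_in_sheet, not_in_types = diff_lists(['name', 'extra'], ['name', 'price'])
assert not_in_sheet == ['price']   # declared in 'types' but missing from the sheet
assert not_in_types == ['extra']   # present in the sheet but not declared
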
Example #8
def run_upload(domain, workbook, replace=False, task=None):
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)

    pre_populate_location_groups(group_memoizer, domain)

    def diff_lists(list_a, list_b):
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10. * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(_("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(FAILURE_MESSAGES["has_no_field_column"]).format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(FAILURE_MESSAGES["has_extra_column"]).format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that this item has all the properties listed in its 'types' definition
                item_attributes_list = di['property'].keys() if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(FAILURE_MESSAGES["has_no_field_column"]).format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(FAILURE_MESSAGES["has_extra_column"]).format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(FAILURE_MESSAGES["sheet_has_no_property"]).format(
                                tag=tag,
                                property=not_in_sheet[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(FAILURE_MESSAGES["sheet_has_extra_property"]).format(
                                tag=tag,
                                property=not_in_types[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(error_message)
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(FAILURE_MESSAGES["invalid_field_with_property"]).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(FAILURE_MESSAGES["invalid_property"]).format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES["wrong_field_property_combos"]).format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={prop: unicode(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(_("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(_("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(_("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(_("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return return_val
Example #9
def run_upload_api(request, domain, workbook):
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise Exception("Workbook 'types' has no column '{attr}'".format(attr=attr))
   
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        for number_of_fixtures, dt in enumerate(data_types):
            tag = _get_or_raise(dt, 'tag')
            data_type_results = FixtureDataType.by_domain_tag(domain, tag)
            if len(data_type_results) == 0:
                data_type = FixtureDataType(
                    domain=domain,
                    name=_get_or_raise(dt, 'name'),
                    tag=_get_or_raise(dt, 'tag'),
                    fields=_get_or_raise(dt, 'field'),
                )
                transaction.save(data_type)
            else:
                for x in data_type_results:
                    data_type = x

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                try:
                    old_data_item = FixtureDataItem.get(di['UID'])
                    assert old_data_item.domain == domain
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    assert old_data_item.data_type_id == data_type.get_id
                    if di.get(DELETE_HEADER) in ("Y", "y"):
                        old_data_item.recursive_delete(transaction)
                        continue
                    old_data_item.fields = di['field']
                    transaction.save(old_data_item)
                except (AttributeError, KeyError, ResourceNotFound,
                        AssertionError):
                    old_data_item = new_data_item
                    transaction.save(old_data_item)

                old_groups = old_data_item.get_groups()
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.get_users()
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)

                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
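
Example #9 is notable for its control flow: every sanity check inside the try block is an assert, so a missing UID, a foreign domain, a wrong doc type, or a mismatched data type all funnel into the same except clause, which falls back to saving the freshly built item. The pattern, stripped to its core (names here are illustrative only):

def reuse_or_create(uid, build_new, checks):
    # Reuse the stored doc when it loads and passes every check;
    # otherwise fall back to a newly built one, as Example #9 does.
    try:
        doc = FixtureDataItem.get(uid)      # ResourceNotFound on a miss
        for check in checks:
            assert check(doc)               # AssertionError on any failure
        return doc
    except (AttributeError, KeyError, ResourceNotFound, AssertionError):
        return build_new()
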
Example #10
def run_upload(request, domain, workbook, replace=False):
    return_val = {
        "unknown_groups": [], 
        "unknown_users": [], 
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column": "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column": "Excel-sheet '{tag}' does not contain the column '{field}' "
                               "as specified in its 'types' definition",
        "has_extra_column": "Excel-sheet '{tag}' has an extra column " +
                            "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax": "Properties should be specified as 'field 1: property 1'. In 'types' sheet, " +
                            "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property": "Excel-sheet '{tag}' does not contain property " +
                            "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property": "Excel-sheet '{tag}' has an extra property " +
                            "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property": "Fields with attributes should be numbered as 'field: {field} integer'",
        "invalid_property": "Attribute should be written as '{field}: {prop} integer'",
        "wrong_field_property_combos": "Number of values for field '{field}' and attribute '{prop}' should be the same",
        "replace_with_UID": "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }

    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(_(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)
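    # e.g. diff_lists(['a', 'b'], ['b', 'c']) -> (['c'], ['a'])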
   
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        fixtures_tags = []
        type_sheets = []
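        # First pass: collect all tags up front so duplicate 'table_id's fail
        # fast, before any document in the transaction is touched.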
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(_(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)
        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                messages.info(request, _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."))
                tag = _get_or_raise(dt, 'tag')

            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                prop_key = "field " + str(count + 1)
                if prop_key in dt:
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages["wrong_property_syntax"].format(
                            prop_key=prop_key,
                            field=field
                        )
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(
                    field_name=field,
                    properties=property_list
                )
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
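            # Resolve the data type to update, in priority order: an existing
            # type registered under this tag, then a legacy 'UID' lookup, and
            # finally the freshly built type as a fallback.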
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    messages.error(request, _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that the field definitions in the 'types' sheet match the columns in the item sheet
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages["has_no_field_column"].format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages["has_extra_column"].format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # Check that the properties in the 'types' sheet match those in the item sheet
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages["sheet_has_no_property"].format(
                                tag=tag,
                                property=not_in_sheet[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages["sheet_has_extra_property"].format(
                                tag=tag,
                                property=not_in_types[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(_(error_message))
                        # check that fields with properties are numbered
                        if not isinstance(di['field'][field.field_name], list):
                            error_message = failure_messages["invalid_field_with_property"].format(field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if not isinstance(sheet_props[prop], list):
                                error_message = failure_messages["invalid_property"].format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages["wrong_field_property_combos"].format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(_(error_message))

                # Excel format checks are complete by this point, so we can make assumptions about the data below.
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # unicode() casts ints and multi-language strings to text
                                field_value=unicode(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
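                        # e.g. field 'price' -> ['10', '12'] paired with
                        # properties {'currency': ['USD', 'EUR']} yields two
                        # FixtureItemField values: (10, USD) and (12, EUR).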
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={prop: unicode(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or old_data_item.data_type_id != data_type.get_id:
                        old_data_item = new_data_item
                        messages.error(request, _("'%(UID)s' is not a valid UID; a new item was created instead.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] in ("Y", "y"):
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(raw_username, domain)
                    except ValidationError:
                        messages.error(request, _("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, _("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
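
For context, a hedged sketch of the 'types' and item rows run_upload expects, reconstructed from the key accesses above; the exact nesting and the column behind DELETE_HEADER are assumptions, not a documented spec:

# Hypothetical 'types' sheet row (shapes assumed from the code above).
example_type_row = {
    'table_id': 'price_list',               # legacy sheets may use 'tag' instead
    'is_global': True,
    'field': ['name', 'price'],
    'field 2': {'property': ['currency']},  # properties of the 2nd field
}

# Hypothetical item-sheet row for that type.
example_item_row = {
    'UID': None,
    'field': {'name': 'rice', 'price': ['10', '12']},
    'price': {'currency': ['USD', 'EUR']},  # one property value per field value
    'group': ['field-team-1'],
    'user': ['ajones'],
}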
Example #11
0
def run_upload(request, domain, workbook, replace=False):
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column":
        "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column":
        "Excel-sheet '{tag}' does not contain the column '{field}' "
        "as specified in its 'types' definition",
        "has_extra_column":
        "Excel-sheet '{tag}' has an extra column "
        "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax":
        "Properties should be specified as 'field 1: property 1'. In 'types' sheet, "
        "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property":
        "Excel-sheet '{tag}' does not contain property "
        "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property":
        "Excel-sheet '{tag}' has an extra property "
        "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property":
        "Fields with attributes should be numbered as 'field: {field} integer'",
        "invalid_property":
        "Attribute should be written as '{field}: {prop} integer'",
        "wrong_field_property_combos":
        "Number of values for field '{field}' and attribute '{prop}' should be the same",
        "replace_with_UID":
        "Rows shouldn't contain UIDs while using the replace option. Excel sheet '{tag}' contains a UID in a row.",
    }

    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(
                _(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        fixtures_tags = []
        type_sheets = []
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(
                    _(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)
        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                messages.info(
                    request,
                    _("The Excel header 'tag' has been renamed to 'table_id', "
                      "and the 'name' header is no longer needed."))
                tag = _get_or_raise(dt, 'tag')

            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                prop_key = "field " + str(count + 1)
                if prop_key in dt:
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages[
                            "wrong_property_syntax"].format(prop_key=prop_key,
                                                            field=field)
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(field_name=field,
                                                   properties=property_list)
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    messages.error(
                        request,
                        _("'%(UID)s' is not a valid UID; a new type was created instead.")
                        % {'UID': dt['UID']})
                if dt[DELETE_HEADER] in ("Y", "y"):
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that the field definitions in the 'types' sheet match the columns in the item sheet
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages[
                        "has_no_field_column"].format(tag=tag,
                                                      field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages[
                        "has_extra_column"].format(tag=tag,
                                                   field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # Check that the properties in the 'types' sheet match those in the item sheet
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages[
                                "sheet_has_no_property"].format(
                                    tag=tag,
                                    property=not_in_sheet[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages[
                                "sheet_has_extra_property"].format(
                                    tag=tag,
                                    property=not_in_types[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        # check that fields with properties are numbered
                        if not isinstance(di['field'][field.field_name], list):
                            error_message = failure_messages[
                                "invalid_field_with_property"].format(
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if not isinstance(sheet_props[prop], list):
                                error_message = failure_messages[
                                    "invalid_property"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages[
                                    "wrong_field_property_combos"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))

                # Excel format checks are complete by this point, so we can make assumptions about the data below.
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # unicode() casts ints and multi-language strings to text
                                field_value=unicode(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                new_data_item = FixtureDataItem(domain=domain,
                                                data_type_id=data_type.get_id,
                                                fields=item_fields,
                                                sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or old_data_item.data_type_id != data_type.get_id:
                        old_data_item = new_data_item
                        messages.error(
                            request,
                            _("'%(UID)s' is not a valid UID; a new item was created instead.")
                            % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] in ("Y", "y"):
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(
                            request,
                            _("Unknown group: '%(name)s'. The row was still added.")
                            % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        messages.error(
                            request,
                            _("Invalid username: '%(name)s'. The row was not added.")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(
                            request,
                            _("Unknown user: '%(name)s'. The row was still added.")
                            % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
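
Finally, a minimal, hypothetical driver showing how a view might call run_upload and surface its validation errors; the function name and error handling are illustrative assumptions, not part of the examples above:

from django.contrib import messages

def handle_fixture_upload(request, domain, workbook, replace=False):
    # Run the upload and convert the malformed-workbook exceptions raised
    # above into user-facing error messages instead of an unhandled error.
    try:
        result = run_upload(request, domain, workbook, replace=replace)
    except (ExcelMalformatException, DuplicateFixtureTagException) as e:
        messages.error(request, unicode(e))
        return None
    return result["number_of_fixtures"]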