def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Import lookup tables and their rows from an uploaded workbook.

    Delegates per-table and per-row work to the ``_create_data_type`` /
    ``_process_data_item`` / ``_process_*_ownership`` helpers; all writes are
    batched through a single CouchTransaction and committed when the ``with``
    block exits.

    :param domain: domain name the fixtures belong to
    :param workbook: parsed upload workbook (provides type sheets and data sheets)
    :param replace: when True, existing tables/rows are replaced rather than updated
    :param task: optional celery task used only for progress reporting
    :returns: FixtureUploadResult with counts and accumulated non-fatal errors
    """
    # Imported here, not at module level — presumably to avoid a circular
    # import with the users app; TODO confirm.
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    type_sheets = workbook.get_all_type_sheets()
    total_tables = len(type_sheets)
    return_val.number_of_fixtures = total_tables

    def _update_progress(table_count, item_count, items_in_table):
        # Progress is scaled so each table contributes 10 units regardless of
        # its row count. Only called from inside the row loop, so
        # items_in_table is always >= 1 (no division by zero).
        if task:
            processed = table_count * 10 + (10 * item_count / items_in_table)
            DownloadBase.set_progress(task, processed, 10 * total_tables)

    with CouchTransaction() as transaction:
        for table_number, table_def in enumerate(type_sheets):
            data_type, delete, err = _create_data_type(domain, table_def, replace, transaction)
            return_val.errors.extend(err)
            if delete:
                # Table marked for deletion: the helper already queued the
                # recursive delete; skip its rows entirely.
                continue
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {
                    field.field_name: _process_item_field(field, di)
                    for field in type_fields
                }

                item_attributes = di.get('property', {})
                old_data_item, delete, err = _process_data_item(
                    domain, replace, data_type, di, item_fields, item_attributes,
                    sort_key)
                return_val.errors.extend(err)
                if delete:
                    old_data_item.recursive_delete(transaction)
                    continue
                transaction.save(old_data_item)

                # Re-assign row ownership (groups, users, locations); each
                # helper returns non-fatal error strings to surface to the user.
                err = _process_group_ownership(di, old_data_item, group_memoizer, transaction)
                return_val.errors.extend(err)

                err = _process_user_ownership(di, old_data_item, transaction)
                return_val.errors.extend(err)

                err = _process_location_ownership(di, old_data_item, get_location, transaction)
                return_val.errors.extend(err)

    # Cache invalidation happens after the transaction commits so readers
    # cannot repopulate the cache with stale pre-commit data.
    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
def update_tables(request, domain, data_type_id, test_patch=None):
    """
    receives a JSON-update patch like following
    {
        "_id":"0920fe1c6d4c846e17ee33e2177b36d6",
        "tag":"growth",
        "view_link":"/a/gsid/fixtures/view_lookup_tables/?table_id:0920fe1c6d4c846e17ee33e2177b36d6",
        "is_global":false,
        "fields":{"genderr":{"update":"gender"},"grade":{}}
    }

    GET returns the table, DELETE removes it (and its items), POST creates a
    new table, PUT applies the field patch to the table and its items.

    :param test_patch: optional patch dict used by tests instead of the
        request body. Defaults to None; a fresh empty dict is substituted per
        call (a ``test_patch={}`` default would be shared across calls —
        the classic mutable-default-argument bug).
    """
    if test_patch is None:
        test_patch = {}
    if data_type_id:
        try:
            data_type = FixtureDataType.get(data_type_id)
        except ResourceNotFound:
            raise Http404()
        # Authorization checks must not be `assert`s: asserts are stripped
        # under `python -O`, which would expose other domains' tables.
        if data_type.doc_type != FixtureDataType._doc_type:
            raise Http404()
        if data_type.domain != domain:
            raise Http404()

        if request.method == 'GET':
            return json_response(strip_json(data_type))

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                data_type.recursive_delete(transaction)
            return json_response({})

        elif not request.method == 'PUT':
            return HttpResponseBadRequest()

    if request.method == 'POST' or request.method == "PUT":
        # An empty test_patch is falsy, so real requests fall through to the
        # request body exactly as before.
        fields_update = test_patch or _to_kwargs(request)
        fields_patches = fields_update["fields"]
        data_tag = fields_update["tag"]
        is_global = fields_update["is_global"]
        with CouchTransaction() as transaction:
            if data_type_id:
                data_type = update_types(
                    fields_patches, domain, data_type_id, data_tag, is_global, transaction)
                update_items(fields_patches, domain, data_type_id, transaction)
            else:
                if FixtureDataType.fixture_tag_exists(domain, data_tag):
                    return HttpResponseBadRequest("DuplicateFixture")
                else:
                    data_type = create_types(
                        fields_patches, domain, data_tag, is_global, transaction)
        return json_response(strip_json(data_type))
def obj_delete(self, bundle, **kwargs):
    """Delete the lookup-table item identified by ``kwargs['pk']``.

    Raises NotFound when the document does not exist; otherwise deletes the
    item and everything it owns inside a single CouchTransaction and
    returns an HTTP 202 response.
    """
    pk = kwargs['pk']
    try:
        item = FixtureDataItem.get(pk)
    except ResourceNotFound:
        raise NotFound('Lookup table item not found')

    with CouchTransaction() as tx:
        item.recursive_delete(tx)

    return ImmediateHttpResponse(response=HttpAccepted())
def add_owner(self, owner, owner_type, transaction=None):
    """Create and save a FixtureOwnership linking this item to *owner*.

    :param owner: group/user/location document; must be in the same domain
    :param owner_type: ownership kind string stored on the FixtureOwnership
    :param transaction: optional CouchTransaction to batch the save into;
        a fresh one is created (and committed on exit) when omitted
    :returns: the new FixtureOwnership document
    """
    assert (owner.domain == self.domain)
    # NOTE(review): when a caller passes an existing transaction, this
    # `with` re-enters and exits it here — whether that commits the caller's
    # transaction early depends on CouchTransaction's nesting semantics,
    # which are not visible in this file. Confirm before relying on it.
    with transaction or CouchTransaction() as transaction:
        o = FixtureOwnership(
            domain=self.domain,
            owner_type=owner_type,
            owner_id=owner.get_id,
            data_item_id=self.get_id)
        transaction.save(o)
    return o
def data_types(request, domain, data_type_id):
    """REST-style view over FixtureDataType documents.

    With ``data_type_id``: GET returns the table, PUT overwrites its
    tag/name/fields from the request body, DELETE removes it recursively.
    Without ``data_type_id``: POST creates a table, GET lists all tables in
    the domain, DELETE removes every table in the domain.
    Anything else returns HTTP 400.
    """
    if data_type_id:
        try:
            data_type = FixtureDataType.get(data_type_id)
        except ResourceNotFound:
            raise Http404()
        # These were `assert` statements; asserts are stripped under
        # `python -O`, which would let requests read/modify another domain's
        # documents. 404 also avoids leaking document existence.
        if data_type.doc_type != FixtureDataType._doc_type:
            raise Http404()
        if data_type.domain != domain:
            raise Http404()

        if request.method == 'GET':
            return json_response(strip_json(data_type))

        elif request.method == 'PUT':
            new = FixtureDataType(domain=domain, **_to_kwargs(request))
            for attr in 'tag', 'name', 'fields':
                setattr(data_type, attr, getattr(new, attr))
            data_type.save()
            return json_response(strip_json(data_type))

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                data_type.recursive_delete(transaction)
            return json_response({})

    elif data_type_id is None:
        if request.method == 'POST':
            data_type = FixtureDataType(domain=domain, **_to_kwargs(request))
            data_type.save()
            return json_response(strip_json(data_type))

        elif request.method == 'GET':
            return json_response([
                strip_json(x) for x in FixtureDataType.by_domain(domain)])

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                for data_type in FixtureDataType.by_domain(domain):
                    data_type.recursive_delete(transaction)
            return json_response({})

    return HttpResponseBadRequest()
def data_items(request, domain, data_type_id, data_item_id):
    """REST-style view over FixtureDataItem documents within one table.

    Without ``data_item_id``: POST creates a row, GET lists rows sorted by
    ``sort_key`` (optionally expanding ``?groups=true`` / ``?users=true``).
    With ``data_item_id``: GET returns the row, PUT replaces its fields,
    DELETE removes it recursively. Anything else returns HTTP 400.
    """
    def prepare_item(item):
        # Serialize a row, optionally inlining its group/user ownership.
        ret = strip_json(item, disallow=['data_type_id'])
        if request.GET.get('groups') == 'true':
            ret['groups'] = []
            for group in item.get_groups():
                ret['groups'].append(strip_json(group))
        if request.GET.get('users') == 'true':
            ret['users'] = []
            for user in item.get_users():
                ret['users'].append(prepare_user(user))
        return ret

    if request.method == 'POST' and data_item_id is None:
        o = FixtureDataItem(domain=domain, data_type_id=data_type_id, **_to_kwargs(request))
        o.save()
        return json_response(strip_json(o, disallow=['data_type_id']))
    elif request.method == 'GET' and data_item_id is None:
        return json_response([
            prepare_item(x)
            for x in sorted(FixtureDataItem.by_data_type(domain, data_type_id),
                            key=lambda x: x.sort_key)
        ])
    elif request.method == 'GET' and data_item_id:
        try:
            o = FixtureDataItem.get(data_item_id)
        except ResourceNotFound:
            raise Http404()
        # Was an `assert`; asserts are stripped under `python -O`, which
        # would expose rows from other domains/tables. 404 keeps existence
        # hidden as well.
        if o.domain != domain or o.data_type.get_id != data_type_id:
            raise Http404()
        return json_response(prepare_item(o))
    elif request.method == 'PUT' and data_item_id:
        # NOTE(review): unlike GET/DELETE, this branch performs no
        # domain/table ownership check on `original` — looks like a
        # cross-domain write hole; confirm and fix separately.
        original = FixtureDataItem.get(data_item_id)
        new = FixtureDataItem(domain=domain, **_to_kwargs(request))
        for attr in 'fields',:
            setattr(original, attr, getattr(new, attr))
        original.save()
        return json_response(strip_json(original, disallow=['data_type_id']))
    elif request.method == 'DELETE' and data_item_id:
        o = FixtureDataItem.get(data_item_id)
        # Same assert -> 404 conversion as the GET branch above.
        if o.domain != domain or o.data_type.get_id != data_type_id:
            raise Http404()
        with CouchTransaction() as transaction:
            o.recursive_delete(transaction)
        return json_response({})
    else:
        return HttpResponseBadRequest()
def _run_upload(self, request, workbook):
    """Import lookup tables from the 'types' worksheet and one data worksheet
    per table, all inside a single CouchTransaction.

    Unknown groups/users are reported through Django messages rather than
    aborting the upload. Before the transaction commits, any pre-existing
    table with the same domain+tag as a newly saved one is queued for
    recursive deletion, so the upload replaces duplicates.
    """
    group_memoizer = GroupMemoizer(self.domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr, message):
        # Missing workbook column -> user-facing error naming the column.
        try:
            return container[attr]
        except KeyError:
            raise Exception(message.format(attr=attr))

    with CouchTransaction() as transaction:
        for dt in data_types:
            err_msg = "Workbook 'types' has no column '{attr}'"
            data_type = FixtureDataType(
                domain=self.domain,
                name=_get_or_raise(dt, 'name', err_msg),
                tag=_get_or_raise(dt, 'tag', err_msg),
                fields=_get_or_raise(dt, 'field', err_msg),
            )
            transaction.save(data_type)
            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # sort_key preserves the spreadsheet row order.
                data_item = FixtureDataItem(
                    domain=self.domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                transaction.save(data_item)
                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)
                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, self.domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        # NOTE(review): add_group above is batched into the
                        # transaction but add_user is not — so user ownership
                        # is written outside the batch. Confirm whether this
                        # asymmetry is intentional.
                        data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)
        # Replace duplicates: delete any existing table that shares a tag
        # with one of the tables queued for save in this transaction.
        for data_type in transaction.preview_save(cls=FixtureDataType):
            for duplicate in FixtureDataType.by_domain_tag(domain=self.domain,
                                                           tag=data_type.tag):
                duplicate.recursive_delete(transaction)
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Import lookup tables and rows from a workbook (inline variant).

    Table/row reconciliation (match by tag, then by legacy 'UID', else
    create new), deletion flags, and group/user/location ownership are all
    handled inline; non-fatal problems are accumulated on the returned
    FixtureUploadResult instead of aborting the upload.
    """
    # Imported here, not at module level — presumably to avoid a circular
    # import with the users app; TODO confirm.
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units regardless of row
            # count; only called inside the row loop so items_in_table >= 1.
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                # Reconcile against an existing table: prefer tag match,
                # then legacy UID lookup, else treat as brand new.
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    # Replace mode: drop the matched table (and its rows)
                    # and start from the freshly built definition.
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID resolved to a different domain's doc: fall back to
                    # creating a new table and record a warning.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                # UID lookup failed (or sheet lacked a column) — create new.
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        # Field with attributes: the sheet carries parallel
                        # lists of values and per-attribute values.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(
                                    field_prop_combos[x]),
                                properties={
                                    prop: six.text_type(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    # Reconcile the row: reuse the doc behind 'UID' unless
                    # replacing; otherwise this is a new row.
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        # UID pointed at another domain/table — create new
                        # and record a warning.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership before re-adding what the sheet specifies.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        # NOTE(review): the literal below has no %(name)s
                        # placeholder, so the % formatting is a no-op —
                        # looks like the placeholder was lost; confirm.
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    # Invalidate caches only after the transaction has committed.
    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
def update_tables(request, domain, data_type_id, test_patch=None):
    """
    receives a JSON-update patch like following
    {
        "_id":"0920fe1c6d4c846e17ee33e2177b36d6",
        "tag":"growth",
        "view_link":"/a/gsid/fixtures/view_lookup_tables/?table_id:0920fe1c6d4c846e17ee33e2177b36d6",
        "is_global":false,
        "fields":{"genderr":{"update":"gender"},"grade":{}}
    }

    GET returns the table, DELETE removes it, POST creates a new table, and
    PUT applies the patch to the table and its items after validating the
    tag and field identifiers.
    """
    if test_patch is None:
        test_patch = {}
    if data_type_id:
        try:
            data_type = FixtureDataType.get(data_type_id)
        except ResourceNotFound:
            raise Http404()
        # Authorization must not rely on `assert`: asserts are stripped
        # under `python -O`, exposing other domains' tables.
        if data_type.doc_type != FixtureDataType._doc_type:
            raise Http404()
        if data_type.domain != domain:
            raise Http404()

        if request.method == 'GET':
            return json_response(strip_json(data_type))

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                data_type.recursive_delete(transaction)
            return json_response({})

        elif not request.method == 'PUT':
            return HttpResponseBadRequest()

    if request.method == 'POST' or request.method == "PUT":
        fields_update = test_patch or _to_kwargs(request)
        fields_patches = fields_update["fields"]
        data_tag = fields_update["tag"]
        is_global = fields_update["is_global"]

        # validate tag and fields
        validation_errors = []
        if is_identifier_invalid(data_tag):
            validation_errors.append(data_tag)
        for field_name, options in fields_update['fields'].items():
            method = options.keys()
            if 'update' in method:
                # Renames are validated against the *new* name.
                field_name = options['update']
            if is_identifier_invalid(field_name) and 'remove' not in method:
                validation_errors.append(field_name)
        # BUGFIX: this was `map(lambda e: ..., validation_errors)`. Under
        # Python 3 `map` returns a lazy iterator that is always truthy, so
        # `if validation_errors:` below rejected every request even when
        # nothing was invalid. A list comprehension keeps the truthiness
        # check meaningful.
        validation_errors = [
            _("\"%s\" cannot include special characters or "
              "begin with \"xml\" or a number.") % e
            for e in validation_errors
        ]
        if validation_errors:
            return json_response({
                'validation_errors': validation_errors,
                'error_msg': _(
                    "Could not update table because field names were not "
                    "correctly formatted"),
            })

        with CouchTransaction() as transaction:
            if data_type_id:
                data_type = update_types(
                    fields_patches, domain, data_type_id, data_tag, is_global, transaction)
                update_items(fields_patches, domain, data_type_id, transaction)
            else:
                if FixtureDataType.fixture_tag_exists(domain, data_tag):
                    return HttpResponseBadRequest("DuplicateFixture")
                else:
                    data_type = create_types(
                        fields_patches, domain, data_tag, is_global, transaction)
        return json_response(strip_json(data_type))
def run_upload(domain, workbook, replace=False, task=None):
    """Import lookup tables and rows from a workbook, validating each row's
    columns and properties against the 'types' sheet definitions.

    Malformed sheets raise ExcelMalformatException; non-fatal problems
    (bad UIDs, unknown groups/users) are accumulated on the returned
    FixtureUploadResult. All writes go through one CouchTransaction.
    """
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location(domain)

    def diff_lists(list_a, list_b):
        """Return (elements only in b, elements only in a)."""
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUGFIX: was `set_a.difference(set_a)`, which is always empty —
        # so `not_in_a` never reported anything and every
        # "has_extra_column" / "sheet_has_extra_property" check below was
        # dead. The intended value is the elements of b missing from a.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # 10 progress units per table; `10.` forces float division on
            # Python 2. Only called inside the row loop (items_in_table >= 1).
            if task:
                processed = table_count * 10 + (10. * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                # Reconcile with an existing table: tag match first, then
                # legacy 'UID', else brand new.
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                # UID lookup failed — treat as a new table.
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)

                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that this item has all the properties listed in its 'types' definition
                item_attributes_list = di['property'].keys(
                ) if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_no_property"]
                            ).format(tag=tag, property=not_in_sheet[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_extra_property"]
                            ).format(tag=tag, property=not_in_types[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(
                                FAILURE_MESSAGES["invalid_field_with_property"]
                            ).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(
                                    FAILURE_MESSAGES["invalid_property"]
                                ).format(field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES[
                                    "wrong_field_property_combos"]).format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    # Reuse the row behind 'UID' unless replacing.
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership before re-adding what the sheet specifies.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    return return_val
def update_tables(request, domain, data_type_id):
    """
    receives a JSON-update patch like following
    {
        "_id":"0920fe1c6d4c846e17ee33e2177b36d6",
        "tag":"growth",
        "view_link":"/a/gsid/fixtures/view_lookup_tables/?table_id:0920fe1c6d4c846e17ee33e2177b36d6",
        "is_global":false,
        "fields":{"genderr":{"update":"gender"},"grade":{}}
    }

    GET returns the table, DELETE removes it (invalidating the fixture
    cache), POST creates a new table, PUT applies the field patch to the
    table and its items after validating identifiers.
    """
    if data_type_id:
        try:
            data_type = FixtureDataType.get(data_type_id)
        except ResourceNotFound:
            raise Http404()

        # 404 (not 403) for a foreign domain: hides document existence.
        if data_type.domain != domain:
            raise Http404()

        if request.method == 'GET':
            return json_response(strip_json(data_type))

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                data_type.recursive_delete(transaction)
            # Invalidate after the transaction commits.
            clear_fixture_cache(domain)
            return json_response({})

        elif not request.method == 'PUT':
            return HttpResponseBadRequest()

    if request.method == 'POST' or request.method == "PUT":
        fields_update = _to_kwargs(request)
        fields_patches = fields_update["fields"]
        data_tag = fields_update["tag"]
        is_global = fields_update["is_global"]
        description = fields_update["description"]

        # validate tag and fields
        validation_errors = []
        # The tag is validated with a "_list" suffix appended — presumably
        # because that is how the tag appears in generated XML; confirm.
        if is_identifier_invalid("{}_list".format(data_tag)):
            validation_errors.append(data_tag)
        for field_name, options in fields_update['fields'].items():
            method = list(options.keys())
            if 'update' in method:
                # Renames are validated against the new field name.
                field_name = options['update']
            if is_identifier_invalid(field_name) and 'remove' not in method:
                validation_errors.append(field_name)

        validation_errors = [
            _("\"%s\" cannot include special characters, begin or end with a space, "
              "or begin with \"xml\" or a number") % e
            for e in validation_errors
        ]
        if len(data_tag) < 1 or len(data_tag) > 31:
            validation_errors.append(
                _("Table ID must be between 1 and 31 characters."))

        if validation_errors:
            return json_response({
                'validation_errors': validation_errors,
                'error_msg': _("Could not update table because field names were not "
                               "correctly formatted"),
            })

        with CouchTransaction() as transaction:
            if data_type_id:
                data_type = _update_types(
                    fields_patches, domain, data_type_id, data_tag, is_global,
                    description, transaction)
                _update_items(fields_patches, domain, data_type_id, transaction)
            else:
                if FixtureDataType.fixture_tag_exists(domain, data_tag):
                    return HttpResponseBadRequest("DuplicateFixture")
                else:
                    data_type = _create_types(
                        fields_patches, domain, data_tag, is_global,
                        description, transaction)
        clear_fixture_cache(domain)
        return json_response(strip_json(data_type))
def run_upload(request, domain, workbook, replace=False):
    """Import lookup tables and rows from a workbook (legacy variant).

    Validates each row against the 'types' sheet, raising
    ExcelMalformatException / DuplicateFixtureTagException on malformed
    input; unknown groups/users are reported via Django messages. Returns a
    summary dict including the number of tables processed.
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column":
            "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column":
            "Excel-sheet '{tag}' does not contain the column '{field}' "
            "as specified in its 'types' definition",
        "has_extra_column":
            "Excel-sheet '{tag}' has an extra column" +
            "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax":
            "Properties should be specified as 'field 1: property 1'. In 'types' sheet, " +
            "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property":
            "Excel-sheet '{tag}' does not contain property " +
            "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property":
            "Excel-sheet '{tag}'' has an extra property " +
            "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property":
            "Fields with attributes should be numbered as 'field: {field} integer",
        "invalid_property":
            "Attribute should be written as '{field}: {prop} interger'",
        "wrong_field_property_combos":
            "Number of values for field '{field}' and attribute '{prop}' should be same",
        "replace_with_UID":
            "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }
    group_memoizer = GroupMemoizer(domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # Missing workbook column -> user-facing ExcelMalformatException.
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(
                _(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        """Return (elements only in b, elements only in a)."""
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUGFIX: was `set_a.difference(set_a)` (always empty), which made
        # every "has_extra_column"/"sheet_has_extra_property" check below
        # unreachable. The intended value is elements of b missing from a.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        # First pass: reject duplicate table ids before touching anything.
        fixtures_tags = []
        type_sheets = []
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                # Fall back to the legacy 'tag' header.
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(
                    _(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)

        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                messages.info(
                    request,
                    _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."
                      ))
                tag = _get_or_raise(dt, 'tag')

            # Build the field definitions, attaching any 'field N: property'
            # columns to the Nth field.
            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                prop_key = "field " + str(count + 1)
                # `in` instead of the Python-2-only dict.has_key().
                if prop_key in dt:
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages[
                            "wrong_property_syntax"].format(prop_key=prop_key,
                                                            field=field)
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(field_name=field,
                                                   properties=property_list)
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                # Reconcile with an existing table: tag match first, then
                # legacy 'UID', else brand new.
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type

                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    messages.error(
                        request,
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages[
                        "has_no_field_column"].format(tag=tag,
                                                      field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages[
                        "has_extra_column"].format(tag=tag,
                                                   field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages[
                                "sheet_has_no_property"].format(
                                    tag=tag,
                                    property=not_in_sheet[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages[
                                "sheet_has_extra_property"].format(
                                    tag=tag,
                                    property=not_in_types[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = failure_messages[
                                "invalid_field_with_property"].format(
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = failure_messages[
                                    "invalid_property"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages[
                                    "wrong_field_property_combos"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                new_data_item = FixtureDataItem(domain=domain,
                                                data_type_id=data_type.get_id,
                                                fields=item_fields,
                                                sort_key=sort_key)
                try:
                    # Reuse the row behind 'UID' unless replacing.
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        messages.error(
                            request,
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset group/user ownership before re-adding from the sheet.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(
                            request,
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        messages.error(
                            request,
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(
                            request,
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

    # enumerate() left the zero-based index of the last table processed.
    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val