def make_data_item(location_name, cost):
    """Make a fixture data item and assign it to location_name.

    Uses ``cls`` and ``data_type`` from the enclosing scope.
    """
    def _single_value(value):
        # Every field here holds exactly one untranslated value.
        return FieldList(field_list=[FixtureItemField(field_value=value, properties={})])

    item = FixtureDataItem(
        domain=cls.domain,
        data_type_id=data_type.get_id,
        fields={
            "cost": _single_value(cost),
            "location_name": _single_value(location_name),
        },
        item_attributes={},
    )
    item.save()
    ownership = FixtureOwnership(
        domain=cls.domain,
        owner_id=cls.locations[location_name].location_id,
        owner_type='location',
        data_item_id=item.get_id,
    )
    ownership.save()
def test_fixture_data_item_to_dict():
    """A populated item maps each field name to its first field value."""
    def _field(value, properties):
        return FixtureItemField(
            doc_type='FixtureItemField', field_value=value, properties=properties)

    item = FixtureDataItem(
        domain='test-domain',
        data_type_id='123456',
        fields={
            'id': FieldList(doc_type='FieldList', field_list=[
                _field('789abc', {}),
            ]),
            'name': FieldList(doc_type='FieldList', field_list=[
                _field('John', {'lang': 'en'}),
                _field('Jan', {'lang': 'nld'}),
                _field('Jean', {'lang': 'fra'}),
            ]),
        })
    result = fixture_data_item_to_dict(item)
    assert_equal(result, {'id': '789abc', 'name': 'John'})
def _process_data_item(domain, replace, data_type, di, item_fields, item_attributes, sort_key):
    """Processes a FixtureDataItem from its excel upload.

    Returns a tuple with
    - (unsaved) FixtureDataItem instance
    - boolean flag to indicate if the data item was deleted
    - a list of errors
    """
    delete = False
    errors = []
    new_data_item = FixtureDataItem(
        domain=domain,
        data_type_id=data_type.get_id,
        fields=item_fields,
        item_attributes=item_attributes,
        sort_key=sort_key)
    try:
        if di['UID'] and not replace:
            # Reuse the existing couch doc so its _id/_rev are preserved.
            old_data_item = FixtureDataItem.get(di['UID'])
        else:
            old_data_item = new_data_item
        old_data_item.fields = item_fields
        old_data_item.item_attributes = item_attributes
        if (old_data_item.domain != domain
                or old_data_item.data_type_id != data_type.get_id):
            # The UID points at a doc from another domain/table;
            # fall back to creating a brand-new item and report it.
            old_data_item = new_data_item
            errors.append(
                _("'%(UID)s' is not a valid UID. But the new item is created.")
                % {'UID': di['UID']})
        if di[DELETE_HEADER] in ("Y", "y"):
            delete = True
    except (ResourceNotFound, KeyError):
        # Missing UID/DELETE columns or a stale UID: treat the row as new.
        old_data_item = new_data_item
    return old_data_item, delete, errors
def setupFixtureData(cls):
    """Create the daily-schedule lookup table plus one item per schedule."""
    cls.fixture_data = {
        'schedule1': '7',
        'schedule2': '14',
        'schedule3': '21',
    }
    cls.data_type = FixtureDataType(
        domain=cls.domain,
        tag=DAILY_SCHEDULE_FIXTURE_NAME,
        name=DAILY_SCHEDULE_FIXTURE_NAME,
        fields=[
            FixtureTypeField(field_name=SCHEDULE_ID_FIXTURE, properties=[]),
            FixtureTypeField(field_name="doses_per_week", properties=[]),
        ],
        item_attributes=[],
    )
    cls.data_type.save()
    cls.data_items = []
    # NOTE: .iteritems() keeps this Python-2 only, matching the rest of the file.
    for schedule_id, doses in cls.fixture_data.iteritems():
        item = FixtureDataItem(
            domain=cls.domain,
            data_type_id=cls.data_type.get_id,
            fields={
                SCHEDULE_ID_FIXTURE: FieldList(
                    field_list=[FixtureItemField(field_value=schedule_id)]),
                "doses_per_week": FieldList(
                    field_list=[FixtureItemField(field_value=doses)]),
            },
            item_attributes={},
        )
        item.save()
        cls.data_items.append(item)
def setUp(self):
    """Create a 'contact' table, one data item, and a user who owns it."""
    self.domain = 'qwerty'

    data_type = FixtureDataType(
        domain=self.domain,
        tag="contact",
        name="Contact",
        fields=['name', 'number'],
    )
    data_type.save()
    self.data_type = data_type

    data_item = FixtureDataItem(
        domain=self.domain,
        data_type_id=data_type.get_id,
        fields={'name': 'John', 'number': '+15555555555'},
    )
    data_item.save()
    self.data_item = data_item

    self.user = CommCareUser.create(self.domain, 'rudolph', '***')
    ownership = FixtureOwnership(
        domain=self.domain,
        owner_id=self.user.get_id,
        owner_type='user',
        data_item_id=data_item.get_id,
    )
    ownership.save()
    self.fixture_ownership = ownership
def _create_data_item(self, cleanup=True):
    """Save and return a 'state_name' item with English and Chinese values.

    Registers deletion at test cleanup unless ``cleanup`` is False.
    """
    translations = [
        {"field_value": "Tennessee", "properties": {"lang": "en"}},
        {"field_value": "田納西", "properties": {"lang": "zh"}},
    ]
    data_item = FixtureDataItem(
        domain=self.domain.name,
        data_type_id=self.data_type._id,
        fields={"state_name": FieldList.wrap({"field_list": translations})},
        item_attributes={},
        sort_key=1,
    )
    data_item.save()
    if cleanup:
        self.addCleanup(data_item.delete)
    return data_item
def setupFixtureData(cls):
    """Create the daily-schedule lookup table plus one item per schedule row."""
    cls.fixture_data = [
        {
            SCHEDULE_ID_FIXTURE: 'schedule1',
            'doses_per_week': '7',
            'dose_count_ip_new_patient': '56',
            'dose_count_ip_recurring_patient': '84',
            'dose_count_cp_new_patient': '112',
            'dose_count_cp_recurring_patient': '140',
            'dose_count_outcome_due_new_patient': '168',
            'dose_count_outcome_due_recurring_patient': '168',
        },
        {
            SCHEDULE_ID_FIXTURE: 'schedule2',
            'doses_per_week': '14',
            'dose_count_ip_new_patient': '24',
            'dose_count_ip_recurring_patient': '36',
            'dose_count_cp_new_patient': '54',
            'dose_count_cp_recurring_patient': '66',
            'dose_count_outcome_due_new_patient': '78',
            'dose_count_outcome_due_recurring_patient': '78',
        },
        {
            SCHEDULE_ID_FIXTURE: 'schedule3',
            'doses_per_week': '21',
            'dose_count_ip_new_patient': '24',
            'dose_count_ip_recurring_patient': '36',
            'dose_count_cp_new_patient': '54',
            'dose_count_cp_recurring_patient': '66',
            'dose_count_outcome_due_new_patient': '78',
            'dose_count_outcome_due_recurring_patient': '78',
        },
    ]
    cls.data_type = FixtureDataType(
        domain=cls.domain,
        tag=DAILY_SCHEDULE_FIXTURE_NAME,
        name=DAILY_SCHEDULE_FIXTURE_NAME,
        fields=[
            FixtureTypeField(field_name=SCHEDULE_ID_FIXTURE, properties=[]),
            FixtureTypeField(field_name="doses_per_week", properties=[]),
        ],
        item_attributes=[],
    )
    cls.data_type.save()

    def _as_field_list(value):
        return FieldList(field_list=[FixtureItemField(field_value=value)])

    cls.data_items = []
    for row in cls.fixture_data:
        item = FixtureDataItem(
            domain=cls.domain,
            data_type_id=cls.data_type.get_id,
            fields={
                column_name: _as_field_list(value)
                for column_name, value in six.iteritems(row)
            },
            item_attributes={},
        )
        item.save()
        cls.data_items.append(item)
def data_items(request, domain, data_type_id, data_item_id):
    """Dispatch CRUD operations on fixture data items by HTTP method.

    With no ``data_item_id``: POST creates an item, GET lists the table's
    items sorted by sort_key.  With a ``data_item_id``: GET fetches, PUT
    updates the 'fields' attribute only, DELETE recursively deletes.
    Anything else gets a 400 response.
    """
    def prepare_item(item):
        # Serialize an item; ?groups=true / ?users=true expand ownership.
        ret = strip_json(item, disallow=['data_type_id'])
        if request.GET.get('groups') == 'true':
            ret['groups'] = []
            for group in item.get_groups():
                ret['groups'].append(strip_json(group))
        if request.GET.get('users') == 'true':
            ret['users'] = []
            for user in item.get_users():
                ret['users'].append(prepare_user(user))
        return ret
    if request.method == 'POST' and data_item_id is None:
        o = FixtureDataItem(domain=domain, data_type_id=data_type_id, **_to_kwargs(request))
        o.save()
        return json_response(strip_json(o, disallow=['data_type_id']))
    elif request.method == 'GET' and data_item_id is None:
        return json_response([
            prepare_item(x)
            for x in sorted(FixtureDataItem.by_data_type(domain, data_type_id),
                            key=lambda x: x.sort_key)
        ])
    elif request.method == 'GET' and data_item_id:
        try:
            o = FixtureDataItem.get(data_item_id)
        except ResourceNotFound:
            raise Http404()
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # permission/ownership check would be safer — confirm intent.
        assert(o.domain == domain and o.data_type.get_id == data_type_id)
        return json_response(prepare_item(o))
    elif request.method == 'PUT' and data_item_id:
        original = FixtureDataItem.get(data_item_id)
        new = FixtureDataItem(domain=domain, **_to_kwargs(request))
        # Only 'fields' is copied onto the stored doc; other attrs are kept.
        for attr in 'fields',:
            setattr(original, attr, getattr(new, attr))
        original.save()
        return json_response(strip_json(original, disallow=['data_type_id']))
    elif request.method == 'DELETE' and data_item_id:
        o = FixtureDataItem.get(data_item_id)
        assert(o.domain == domain and o.data_type.get_id == data_type_id)
        with CouchTransaction() as transaction:
            o.recursive_delete(transaction)
        return json_response({})
    else:
        return HttpResponseBadRequest()
def setUp(self):
    # Reset table content for each test: one row per Jovian moon.
    for moon in ('Io', 'Europa', 'Callisto'):
        FixtureDataItem(
            domain=self.domain,
            data_type_id=self.table._id,
            fields={
                'name': FieldList(field_list=[
                    FixtureItemField(field_value=moon)
                ]),
                'planet': FieldList(field_list=[
                    FixtureItemField(field_value='Jupiter')
                ]),
            },
        ).save()
def setUp(self):
    """Create the Districts table, a multi-language item, and its owner."""
    super(FixtureDataTest, self).setUp()
    self.domain = 'qwerty'
    self.tag = "district"
    delete_all_users()
    delete_all_fixture_data_types()

    self.data_type = FixtureDataType(
        domain=self.domain,
        tag=self.tag,
        name="Districts",
        fields=[
            FixtureTypeField(field_name="state_name", properties=[]),
            FixtureTypeField(field_name="district_name", properties=["lang"]),
            FixtureTypeField(field_name="district_id", properties=[]),
        ],
        item_attributes=[],
    )
    self.data_type.save()

    def _value(text, properties=None):
        return FixtureItemField(field_value=text, properties=properties or {})

    self.data_item = FixtureDataItem(
        domain=self.domain,
        data_type_id=self.data_type.get_id,
        fields={
            "state_name": FieldList(field_list=[_value("Delhi_state")]),
            "district_name": FieldList(field_list=[
                _value("Delhi_in_HIN", {"lang": "hin"}),
                _value("Delhi_in_ENG", {"lang": "eng"}),
            ]),
            "district_id": FieldList(field_list=[_value("Delhi_id")]),
        },
        item_attributes={},
    )
    self.data_item.save()

    self.user = CommCareUser.create(self.domain, 'to_delete', '***', None, None)
    self.fixture_ownership = FixtureOwnership(
        domain=self.domain,
        owner_id=self.user.get_id,
        owner_type='user',
        data_item_id=self.data_item.get_id,
    )
    self.fixture_ownership.save()
    get_fixture_data_types.clear(self.domain)
def setUp(self):
    """Build a table whose field names contain XML-hostile characters."""
    self.domain = 'dirty-fields'
    # (field name, item value) pairs; 'xmlbad' below is deliberately NOT
    # part of the type definition, matching the original fixture.
    dirty_pairs = [
        ("will/crash", "yep"),
        ("space cadet", "major tom"),
        ("yes\\no", "no, duh"),
        ("<with>", "so fail"),
        ("<crazy / combo><d", "just why"),
    ]
    self.data_type = FixtureDataType(
        domain=self.domain,
        tag='dirty_fields',
        name="Dirty Fields",
        fields=[
            FixtureTypeField(field_name=pair[0], properties=[])
            for pair in dirty_pairs
        ],
        item_attributes=[],
    )
    self.data_type.save()

    item_pairs = dirty_pairs + [("xmlbad", "badxml")]
    self.data_item = FixtureDataItem(
        domain=self.domain,
        data_type_id=self.data_type.get_id,
        fields={
            name: FieldList(field_list=[
                FixtureItemField(field_value=value, properties={})
            ])
            for name, value in item_pairs
        },
        item_attributes={},
    )
    self.data_item.save()
def setUp(self):
    """Create a legacy Districts table (no item_attributes), one item, one owner."""
    self.domain = 'qwerty'
    self.tag = "district"
    self.data_type = FixtureDataType(
        domain=self.domain,
        tag=self.tag,
        name="Districts",
        fields=[
            FixtureTypeField(field_name="state_name", properties=[]),
            FixtureTypeField(field_name="district_name", properties=["lang"]),
            FixtureTypeField(field_name="district_id", properties=[]),
        ])
    self.data_type.save()

    def _plain(value):
        # A single untranslated value.
        return FieldList(field_list=[FixtureItemField(field_value=value, properties={})])

    self.data_item = FixtureDataItem(
        domain=self.domain,
        data_type_id=self.data_type.get_id,
        fields={
            "state_name": _plain("Delhi_state"),
            "district_name": FieldList(field_list=[
                FixtureItemField(field_value="Delhi_in_HIN", properties={"lang": "hin"}),
                FixtureItemField(field_value="Delhi_in_ENG", properties={"lang": "eng"}),
            ]),
            "district_id": _plain("Delhi_id"),
        })
    self.data_item.save()

    self.user = CommCareUser.create(self.domain, 'to_delete', '***')
    self.fixture_ownership = FixtureOwnership(
        domain=self.domain,
        owner_id=self.user.get_id,
        owner_type='user',
        data_item_id=self.data_item.get_id)
    self.fixture_ownership.save()
def _make_data_item(cls, sort_key, name, seat, sigil):
    """Save and return a data item with name/seat/sigil fields at *sort_key*."""
    values = {"name": name, "seat": seat, "sigil": sigil}
    item = FixtureDataItem(
        domain=cls.domain,
        data_type_id=cls.data_type._id,
        fields={
            key: FieldList(field_list=[
                FixtureItemField(field_value=value, properties={})
            ])
            for key, value in values.items()
        },
        item_attributes={},
        sort_key=sort_key,
    )
    item.save()
    return item
def make_data_item(self, data_type, cost):
    """Save and return an item with a single 'cost' field.

    The item is deleted automatically at test cleanup.
    """
    cost_field = FieldList(
        field_list=[FixtureItemField(field_value=cost, properties={})])
    item = FixtureDataItem(
        domain=self.domain,
        data_type_id=data_type._id,
        fields={"cost": cost_field},
        item_attributes={},
    )
    item.save()
    self.addCleanup(item.delete)
    return item
def _run_upload(self, request, workbook):
    """Import fixture types and items from an uploaded workbook.

    Reads the 'types' worksheet to (re)create each FixtureDataType, then
    each type's own worksheet to create its FixtureDataItems with group
    and user assignments.  All saves go through one CouchTransaction;
    pre-existing types with the same tag are deleted inside the same
    transaction so the upload replaces them.
    """
    group_memoizer = GroupMemoizer(self.domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr, message):
        # Required-column accessor: missing keys abort the upload.
        try:
            return container[attr]
        except KeyError:
            raise Exception(message.format(attr=attr))

    with CouchTransaction() as transaction:
        for dt in data_types:
            err_msg = "Workbook 'types' has no column '{attr}'"
            data_type = FixtureDataType(
                domain=self.domain,
                name=_get_or_raise(dt, 'name', err_msg),
                tag=_get_or_raise(dt, 'tag', err_msg),
                fields=_get_or_raise(dt, 'field', err_msg),
            )
            transaction.save(data_type)
            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Row order in the sheet becomes the item sort order.
                data_item = FixtureDataItem(
                    domain=self.domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                transaction.save(data_item)
                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        data_item.add_group(group, transaction=transaction)
                    else:
                        # Unknown names are reported but don't fail the upload.
                        messages.error(request, "Unknown group: %s" % group_name)
                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, self.domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)
        # Delete any previously-stored types sharing a tag with the ones
        # queued for save, so the new definitions fully replace them.
        for data_type in transaction.preview_save(cls=FixtureDataType):
            for duplicate in FixtureDataType.by_domain_tag(domain=self.domain,
                                                           tag=data_type.tag):
                duplicate.recursive_delete(transaction)
def obj_create(self, bundle, request=None, **kwargs):
    """Create a fixture data item through the API, appended after existing rows."""
    data_type_id = bundle.data.get('data_type_id', None)
    if not data_type_id:
        raise BadRequest("data_type_id must be specified")

    # The referenced lookup table must already exist.
    try:
        FixtureDataType.get(data_type_id)
    except ResourceNotFound:
        raise NotFound('Lookup table not found')

    existing_items = FixtureDataItem.by_data_type(kwargs['domain'], data_type_id)
    number_items = len(existing_items)

    bundle.obj = FixtureDataItem(bundle.data)
    bundle.obj.domain = kwargs['domain']
    # Place the new item after every existing row.
    bundle.obj.sort_key = number_items + 1
    bundle.obj.save()
    return bundle
def test_empty_fixture_data_item_to_dict():
    """Fields with empty field lists map to None in the dict form."""
    def _empty_field_list():
        return FieldList(doc_type='FieldList', field_list=[])

    item = FixtureDataItem(
        domain='test-domain',
        data_type_id='123456',
        fields={
            'id': _empty_field_list(),
            'name': _empty_field_list(),
        },
    )
    result = fixture_data_item_to_dict(item)
    assert_equal(result, {'id': None, 'name': None})
def _setup_data_item(self, risk='risk1', sequence='1', message='message1'):
    """Save a risk_profile/sequence/message item; removed at test cleanup."""
    field_values = {
        "risk_profile": risk,
        "sequence": sequence,
        "message": message,
    }
    data_item = FixtureDataItem(
        domain=self.domain_name,
        data_type_id=self.data_type.get_id,
        fields={
            name: FieldList(field_list=[
                FixtureItemField(field_value=value, properties={})
            ])
            for name, value in field_values.items()
        },
        item_attributes={},
    )
    data_item.save()
    self.addCleanup(data_item.delete)
def make_item_lists(tag, item_name):
    """Create a global 'Provinces' table under *tag* with one named item.

    Returns the (data_type, data_item) pair, both already saved.
    """
    data_type = FixtureDataType(
        domain=DOMAIN,
        tag=tag,
        name="Provinces",
        fields=[FixtureTypeField(field_name="name", properties=[])],
        item_attributes=[],
        is_global=True,
    )
    data_type.save()

    name_field = FieldList(
        field_list=[FixtureItemField(field_value=item_name, properties={})])
    data_item = FixtureDataItem(
        domain=DOMAIN,
        data_type_id=data_type.get_id,
        fields={"name": name_field},
        item_attributes={},
    )
    data_item.save()
    return data_type, data_item
def run_upload(domain, workbook, replace=False, task=None):
    """Upload fixture (lookup-table) types and rows from an excel workbook.

    Creates or updates one FixtureDataType per type sheet and one
    FixtureDataItem per row, reconciling group/user/location ownership.
    With ``replace=True`` existing tables are deleted and rebuilt.
    Progress is reported through ``task`` when given.  Returns a
    FixtureUploadResult; non-fatal problems accumulate in its ``errors``.
    """
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location(domain)

    def diff_lists(list_a, list_b):
        # Returns (items only in b, items only in a).
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUGFIX: was set_a.difference(set_a), which is always empty and
        # silently disabled every "missing column/property" check below.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10. * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID resolved to a type belonging to another domain.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)

                # Check that type definitions in 'types' sheet vs
                # corresponding columns in the item-sheet MATCH.
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # Check that this item has all the properties listed in
                # its 'types' definition.
                item_attributes_list = di['property'].keys() if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # Check that properties in 'types' sheet vs item-sheet MATCH.
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_no_property"]
                            ).format(tag=tag, property=not_in_sheet[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_extra_property"]
                            ).format(tag=tag, property=not_in_types[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        # Check that fields with properties are numbered.
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(
                                FAILURE_MESSAGES["invalid_field_with_property"]
                            ).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(
                                    FAILURE_MESSAGES["invalid_property"]
                                ).format(field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES[
                                    "wrong_field_property_combos"]).format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line.
                # Can make assumptions about data now.
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if (old_data_item.domain != domain
                            or not old_data_item.data_type_id == data_type.get_id):
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership before re-applying the sheet's assignments.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)
    return return_val
def run_upload(request, domain, workbook, replace=False):
    """Upload fixture types and rows from a workbook (legacy request-based flow).

    Validates the 'types' sheet against each item sheet, then creates or
    updates FixtureDataType and FixtureDataItem docs inside one
    CouchTransaction.  Non-fatal problems are reported via Django messages;
    malformed sheets raise ExcelMalformatException / DuplicateFixtureTagException.
    Returns a summary dict.  Python 2 only (uses ``has_key`` and ``unicode``).
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column": "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column": "Excel-sheet '{tag}' does not contain the column '{field}' "
                               "as specified in its 'types' definition",
        "has_extra_column": "Excel-sheet '{tag}' has an extra column" +
                            "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax": "Properties should be specified as 'field 1: property 1'. In 'types' sheet, " +
                                 "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property": "Excel-sheet '{tag}' does not contain property " +
                                 "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property": "Excel-sheet '{tag}'' has an extra property " +
                                    "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property": "Fields with attributes should be numbered as 'field: {field} integer",
        "invalid_property": "Attribute should be written as '{field}: {prop} interger'",
        "wrong_field_property_combos": "Number of values for field '{field}' and attribute '{prop}' should be same",
        "replace_with_UID": "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }

    group_memoizer = GroupMemoizer(domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(
                _(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        # Returns (items only in b, items only in a).
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUGFIX: was set_a.difference(set_a), which is always empty and
        # silently disabled every "missing column/property" check below.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        # First pass: collect type rows and reject duplicate table_ids.
        fixtures_tags = []
        type_sheets = []
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(
                    _(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)

        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                # Old workbooks use 'tag' instead of 'table_id'.
                messages.info(
                    request,
                    _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."))
                tag = _get_or_raise(dt, 'tag')

            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                # Properties for field N live under the 'field N' column.
                prop_key = "field " + str(count + 1)
                if dt.has_key(prop_key):
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages[
                            "wrong_property_syntax"].format(prop_key=prop_key,
                                                            field=field)
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(field_name=field,
                                                   properties=property_list)
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID resolved to a type from another domain.
                    data_type = new_data_type
                    messages.error(
                        request,
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs
                # corresponding columns in the item-sheet MATCH.
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages[
                        "has_no_field_column"].format(tag=tag,
                                                      field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages[
                        "has_extra_column"].format(tag=tag,
                                                   field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # Check that properties in 'types' sheet vs item-sheet MATCH.
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages[
                                "sheet_has_no_property"].format(
                                    tag=tag,
                                    property=not_in_sheet[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages[
                                "sheet_has_extra_property"].format(
                                    tag=tag,
                                    property=not_in_types[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        # Check that fields with properties are numbered.
                        if type(di['field'][field.field_name]) != list:
                            error_message = failure_messages[
                                "invalid_field_with_property"].format(
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = failure_messages[
                                    "invalid_property"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages[
                                    "wrong_field_property_combos"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))

                # excel format check should have been covered by this line.
                # Can make assumptions about data now.
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                new_data_item = FixtureDataItem(domain=domain,
                                                data_type_id=data_type.get_id,
                                                fields=item_fields,
                                                sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if (old_data_item.domain != domain
                            or not old_data_item.data_type_id == data_type.get_id):
                        old_data_item = new_data_item
                        messages.error(
                            request,
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset group/user ownership before re-applying the sheet's.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(
                            request,
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        messages.error(
                            request,
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(
                            request,
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Import fixture tables (FixtureDataType) and rows (FixtureDataItem)
    from an uploaded excel workbook into ``domain``.

    :param domain: domain name the fixtures belong to
    :param workbook: parsed upload exposing ``get_all_type_sheets()`` and
        ``get_data_sheet(tag)``
    :param replace: when True, matching existing tables/rows are deleted and
        recreated instead of updated in place
    :param task: optional async task; when given, progress is reported through
        ``DownloadBase.set_progress``
    :returns: a ``FixtureUploadResult`` with ``number_of_fixtures`` set to the
        number of type sheets and any per-row problems appended to ``errors``
    """
    # Imported here (not at module level) — presumably to avoid a circular
    # import with the users app; TODO confirm.
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []
    # All saves/deletes are queued on the transaction and committed together
    # when the CouchTransaction context exits.
    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table accounts for 10 progress units; rows advance the
            # fraction within the current table.
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            # Candidate replacement document, used whenever no valid existing
            # table can be found (or when replacing/erroring below).
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    # Drop the old table (and, recursively, its rows) and
                    # start from the freshly built document.
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                # Overwrite the table definition with whatever the sheet says.
                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's table: fall back to a
                    # new table but record the problem for the user.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid})
                if table_def.delete:
                    # Sheet marked for deletion: remove and skip its rows.
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                # Stale/unknown UID or malformed sheet entry: treat as new.
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                # Build the row's fields according to the table's field
                # definitions.
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        # Field has properties: the sheet supplies parallel
                        # lists of values and per-property values, matched by
                        # position.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(
                                    field_prop_combos[x]),
                                properties={
                                    prop: six.text_type(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        # Update the existing row identified by UID.
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass  # NOTE(review): dead statement, kept verbatim
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to another domain or table: create new
                        # instead, and tell the user.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        # Row flagged for deletion: remove it and move on.
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership, then re-add owners listed on the row.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        # NOTE(review): message literal contains '******'
                        # rather than a %(name)s placeholder, so the %-format
                        # is a no-op; kept byte-identical.
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        # NOTE(review): same '******' placeholder oddity as
                        # above; kept byte-identical.
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    # Invalidate cached fixture data now that the transaction has committed.
    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
def test_update_fixture(self):
    """Syncing a fixture table copies it (and its rows) to the linked
    domain, leaves the master domain untouched, and a re-sync after row
    edits replaces the linked rows with the current master rows."""

    def _all_field_values(rows):
        # Every field value of every row, flattened and sorted.
        return sorted([
            field.field_list[0].field_value
            for row in rows
            for field in row.fields.values()
        ])

    self.assertEqual([], get_fixture_data_types(self.linked_domain))

    # Update linked domain
    update_fixture(self.domain_link, self.table.tag)

    # Linked domain should now have master domain's table and rows
    synced_tables = get_fixture_data_types(self.linked_domain)
    self.assertEqual({'moons'}, {table.tag for table in synced_tables})
    self.assertEqual({self.linked_domain},
                     {table.domain for table in synced_tables})
    synced_rows = get_fixture_items_for_data_type(self.linked_domain,
                                                  synced_tables[0]._id)
    self.assertEqual({self.linked_domain}, {row.domain for row in synced_rows})
    self.assertEqual({synced_tables[0]._id},
                     {row.data_type_id for row in synced_rows})
    self.assertEqual(
        ['Callisto', 'Europa', 'Io', 'Jupiter', 'Jupiter', 'Jupiter'],
        _all_field_values(synced_rows))

    # Master domain's table and rows should be untouched
    source_tables = get_fixture_data_types(self.domain)
    self.assertEqual({'moons'}, {table.tag for table in source_tables})
    self.assertEqual({self.domain}, {table.domain for table in source_tables})
    source_rows = get_fixture_items_for_data_type(self.domain,
                                                  source_tables[0]._id)
    self.assertEqual(
        ['Callisto', 'Europa', 'Io', 'Jupiter', 'Jupiter', 'Jupiter'],
        _all_field_values(source_rows))

    # Update rows in master table and re-update linked domain
    source_rows[-1].delete()  # Callisto
    for moon_name in ('Thalassa', 'Naiad'):
        FixtureDataItem(
            domain=self.domain,
            data_type_id=self.table._id,
            fields={
                'name': FieldList(
                    field_list=[FixtureItemField(field_value=moon_name)]),
                'planet': FieldList(
                    field_list=[FixtureItemField(field_value='Neptune')]),
            },
        ).save()
    clear_fixture_quickcache(self.domain, get_fixture_data_types(self.domain))
    clear_fixture_cache(self.domain)
    update_fixture(self.domain_link, self.table.tag)

    # Linked domain should still have one table, with the new rows
    synced_tables = get_fixture_data_types(self.linked_domain)
    self.assertEqual(1, len(synced_tables))
    self.assertEqual('moons', synced_tables[0].tag)
    synced_rows = get_fixture_items_for_data_type(self.linked_domain,
                                                  synced_tables[0]._id)
    self.assertEqual(4, len(synced_rows))
    self.assertEqual(
        ['Europa', 'Io', 'Jupiter', 'Jupiter',
         'Naiad', 'Neptune', 'Neptune', 'Thalassa'],
        _all_field_values(synced_rows))