def test_custom_data_fields(self):
    """Deleting a domain should remove only that domain's field definitions."""
    # Give both domains a 'UserFields' definition
    for domain_name in (self.domain.name, self.domain2.name):
        SQLCustomDataFieldsDefinition.get_or_create(domain_name, 'UserFields')

    self.domain.delete()

    # Deleted domain's definition is gone; the other domain's survives
    self._assert_custom_data_fields_counts(self.domain.name, 0)
    self._assert_custom_data_fields_counts(self.domain2.name, 1)
def setUpClass(cls):
    """One-time setup: define location fields, populate Boston, run the export."""
    super(TestLocationsExport, cls).setUpClass()

    cls.loc_fields = SQLCustomDataFieldsDefinition.get_or_create(
        cls.domain, LocationFieldsView.field_type)
    cls.loc_fields.set_fields([SQLField(slug=slug) for slug in cls.custom_fields])
    cls.loc_fields.save()

    boston = cls.locations['Boston']
    # Includes one extra key ('不知道') that is not a declared field,
    # to exercise handling of unknown metadata in the export
    boston.metadata = {
        field: '{}-试验'.format(field)
        for field in cls.custom_fields + ['不知道']
    }
    boston.external_id = 'external_id'
    boston.latitude = Decimal('42.36')
    boston.longitude = Decimal('71.06')
    boston.save()
    cls.boston = boston

    exporter = LocationExporter(cls.domain)
    writer = MockExportWriter()
    exporter.write_data(writer)

    cls.headers = dict(exporter.get_headers())
    cls.city_headers = cls.headers['city'][0]
    # The exported row for Boston (first column is the location id)
    cls.boston_data = [
        row for row in writer.data['city']
        if row[0] == cls.boston.location_id
    ][0]
def get_location_data_model(domain):
    """Return the location custom-data definition for *domain*, creating it if missing."""
    # Imported locally to avoid circular imports at module load time
    from .views import LocationFieldsView
    from corehq.apps.custom_data_fields.models import SQLCustomDataFieldsDefinition

    return SQLCustomDataFieldsDefinition.get_or_create(domain, LocationFieldsView.field_type)
def _get_usercase_default_properties(domain):
    """Return the slugs of every custom user-data field defined for *domain*."""
    # Imported locally to avoid circular imports at module load time
    from corehq.apps.custom_data_fields.models import SQLCustomDataFieldsDefinition
    from corehq.apps.users.views.mobile.custom_data_fields import CUSTOM_USER_DATA_FIELD_TYPE

    definition = SQLCustomDataFieldsDefinition.get_or_create(
        domain, CUSTOM_USER_DATA_FIELD_TYPE)
    return [field.slug for field in definition.get_fields()]
def _get_location_data_fields(domain):
    """Return the custom location fields for *domain*, or an empty list if undefined."""
    from corehq.apps.locations.views import LocationFieldsView

    definition = SQLCustomDataFieldsDefinition.get(domain, LocationFieldsView.field_type)
    return definition.get_fields() if definition else []
def _parse_custom_properties(product):
    """Split ``product.product_data`` into recognized and unrecognized fields.

    Returns a ``(model_data, uncategorized_data)`` pair of dicts: keys that
    match a declared custom product field are prefixed with ``'data: '``,
    everything else with ``'uncategorized_data: '``. Values pass through
    ``encode_if_needed``.
    """
    # Bug fix: the original referenced a bare `domain`, which is undefined in
    # this scope (NameError at runtime) — the product carries its own domain.
    product_data_model = SQLCustomDataFieldsDefinition.get_or_create(
        product.domain, ProductFieldsView.field_type)
    # Set for O(1) membership checks in the loop below
    product_data_fields = {f.slug for f in product_data_model.get_fields()}

    model_data = {}
    uncategorized_data = {}
    for prop, val in product.product_data.items():
        if prop in product_data_fields:
            model_data['data: ' + prop] = encode_if_needed(val)
        else:
            uncategorized_data['uncategorized_data: ' + prop] = encode_if_needed(val)

    return model_data, uncategorized_data
def test_download_reupload_no_changes(self):
    """Round-tripping an unmodified export through the importer is a no-op."""
    # Make sure there's a bunch of data
    loc_fields = SQLCustomDataFieldsDefinition.get_or_create(
        self.domain, 'LocationFields')
    loc_fields.set_fields([
        SQLField(slug='favorite_color'),
        SQLField(slug='language'),
    ])
    loc_fields.save()

    city = self.locations['City111']
    county = self.locations['County11']
    city.latitude = Decimal('42.36')
    city.longitude = Decimal('71.06')
    city.external_id = '123'
    county.metadata = {'favorite_color': 'purple', 'language': 'en'}
    city.save()
    county.external_id = '321'
    county.metadata = {'favorite_color': 'blue'}  # overwrites the dict set above
    county.save()

    # Export locations
    exporter = LocationExporter(self.domain)
    writer = MockExportWriter()
    exporter.write_data(writer)

    # Rebuild worksheets from the exported data and feed them back in
    worksheets = []
    for sheet_title, headers in exporter.get_headers():
        rows = [
            ['' if value is None else value for value in row]
            for row in writer.data[sheet_title]
        ]
        sheet = IteratorJSONReader(headers + rows)
        sheet.title = sheet_title
        worksheets.append(sheet)
    mock_importer = Mock()
    mock_importer.worksheets = worksheets

    with patch('corehq.apps.locations.models.SQLLocation.save') as save_location, \
            patch('corehq.apps.locations.models.LocationType.save') as save_type:
        result = new_locations_import(self.domain, mock_importer, self.user)

    # The upload should succeed and not perform any updates
    assert_errors(result, [])
    self.assertFalse(save_location.called)
    self.assertFalse(save_type.called)
def get_custom_data_models(domain, limit_types=None):
    """Serialize the domain's custom data definitions keyed by field type.

    If *limit_types* is given, only those field types are included.
    Field types with no definition for the domain are omitted.
    """
    fields = {}
    for field_view in (LocationFieldsView, ProductFieldsView, UserFieldsView):
        if limit_types and field_view.field_type not in limit_types:
            continue
        model = SQLCustomDataFieldsDefinition.get(domain, field_view.field_type)
        if not model:
            continue
        fields[field_view.field_type] = [
            {
                'slug': field.slug,
                'is_required': field.is_required,
                'label': field.label,
                'choices': field.choices,
                'regex': field.regex,
                'regex_msg': field.regex_msg,
            }
            for field in model.get_fields()
        ]
    return fields
def update_custom_data_models(domain_link, limit_types=None):
    """Copy custom data field definitions from the master domain to the linked one.

    Fetches definitions remotely or locally depending on the link type, then
    overwrites the linked domain's fields for each returned field type.
    """
    if domain_link.is_remote:
        master_results = remote_custom_data_models(domain_link, limit_types)
    else:
        master_results = local_custom_data_models(domain_link.master_domain, limit_types)

    for field_type, field_definitions in master_results.items():
        model = SQLCustomDataFieldsDefinition.get_or_create(
            domain_link.linked_domain, field_type)
        new_fields = []
        for field_def in field_definitions:
            new_fields.append(SQLField(
                slug=field_def['slug'],
                is_required=field_def['is_required'],
                label=field_def['label'],
                choices=field_def['choices'],
                regex=field_def['regex'],
                regex_msg=field_def['regex_msg'],
            ))
        model.set_fields(new_fields)
        model.save()
def product_fixture_generator_json(domain):
    """Describe the product fixture structure for *domain*.

    Returns None when the domain has no products. Custom product fields are
    included under the CUSTOM_DATA_SLUG prefix.
    """
    if not SQLProduct.objects.filter(domain=domain).exists():
        return None

    fields = [field for field in PRODUCT_FIELDS if field != CUSTOM_DATA_SLUG]
    fields.append('@id')

    custom_fields = SQLCustomDataFieldsDefinition.get(domain, 'ProductFields')
    if custom_fields:
        fields.extend(
            CUSTOM_DATA_SLUG + '/' + field.slug
            for field in custom_fields.get_fields()
        )

    return {
        'id': 'products',
        'uri': 'jr://fixture/{}'.format(ProductFixturesProvider.id),
        'path': '/products/product',
        'name': 'Products',
        'structure': {f: {'name': f, 'no_option': True} for f in fields},
    }
def test_sync_to_couch(self):
    """Saving the SQL definition keeps the couch copy in sync, including updates."""
    obj = SQLCustomDataFieldsDefinition(domain='botswana', field_type='UserFields')
    # need to save for set_fields to work, but don't want to sync couch yet
    obj.save(sync_to_couch=False)

    color_field = SQLField(
        slug='color',
        is_required=True,
        label='Color',
        choices=['red', 'orange', 'yellow'],
        regex='',
        regex_msg='',
    )
    size_field = SQLField(
        slug='size',
        is_required=False,
        label='Size',
        choices=[],
        regex='^[0-9]+$',
        regex_msg='Εισαγάγετε',
    )
    obj.set_fields([color_field, size_field])
    obj.save()
    couch_doc = self.db.get(obj.couch_id)
    self.assertIsNone(Command.diff_couch_and_sql(couch_doc, obj))

    # An attribute change followed by save should propagate to couch too
    obj.field_type = 'ProductFields'
    obj.save()
    self.assertEquals('ProductFields', self.db.get(obj.couch_id)['field_type'])
def test_diff(self):
    """Exercise Command.diff_couch_and_sql: returns None when the couch doc and
    SQL model match, otherwise a newline-joined list of human-readable
    difference messages. Each scenario below mutates one aspect and checks
    the exact message text."""
    # Start with identical data
    def_args = {'domain': 'some-domain', 'field_type': 'ProductFields'}
    field1_args = {
        'slug': 'texture',
        'is_required': True,
        'label': 'Texture',
        'choices': ['soft', 'spiky']
    }
    obj = SQLCustomDataFieldsDefinition(**def_args)
    obj.save()
    obj.set_fields([SQLField(**field1_args)])
    doc = CustomDataFieldsDefinition(
        fields=[CustomDataField(**field1_args)],
        **def_args,
    ).to_json()
    self.assertIsNone(Command.diff_couch_and_sql(doc, obj))

    # Difference in top-level attribute
    doc['domain'] = 'other-domain'
    self.assertEqual(
        Command.diff_couch_and_sql(doc, obj),
        "domain: couch value 'other-domain' != sql value 'some-domain'")

    # Difference in number of sub-models
    doc['domain'] = 'some-domain'
    field2_args = {
        'slug': 'temp',
        'is_required': False,
        'label': 'F',
        'choices': ['32', '212']
    }
    doc['fields'] = [
        CustomDataField(**field1_args).to_json(),
        CustomDataField(**field2_args).to_json()
    ]
    self.assertEqual(
        Command.diff_couch_and_sql(doc, obj),
        "fields: 2 in couch != 1 in sql")

    # Difference in sub-model attribute
    field2_args['label'] = 'C'
    obj.set_fields([SQLField(**field1_args), SQLField(**field2_args)])
    self.assertEqual(
        Command.diff_couch_and_sql(doc, obj),
        "label: couch value 'F' != sql value 'C'")

    # Difference in sub-model ordering: fields are compared positionally, so a
    # swapped order reports every attribute of both positions as different
    field2_args['label'] = 'F'
    obj.set_fields([SQLField(**field2_args), SQLField(**field1_args)])
    self.assertEqual(
        Command.diff_couch_and_sql(doc, obj).split("\n"),
        [
            "slug: couch value 'texture' != sql value 'temp'",
            "is_required: couch value 'True' != sql value 'False'",
            "label: couch value 'Texture' != sql value 'F'",
            "choices: couch value '['soft', 'spiky']' != sql value '['32', '212']'",
            "slug: couch value 'temp' != sql value 'texture'",
            "is_required: couch value 'False' != sql value 'True'",
            "label: couch value 'F' != sql value 'Texture'",
            "choices: couch value '['32', '212']' != sql value '['soft', 'spiky']'",
        ])

    # Identical data
    obj.set_fields([SQLField(**field1_args), SQLField(**field2_args)])
    self.assertIsNone(Command.diff_couch_and_sql(doc, obj))
def parse_users(group_memoizer, domain, user_filters, task=None, total_count=None):
    """Build the headers and row iterator for a mobile worker export.

    :param group_memoizer: caches Group lookups by id
    :param domain: domain name the users belong to
    :param user_filters: filters passed to get_commcare_users_by_filters
    :param task: optional async task for progress reporting
    :param total_count: total user count, used for progress reporting
    :returns: (user_headers, row generator) where each row matches the headers
    """
    from corehq.apps.users.views.mobile.custom_data_fields import UserFieldsView
    user_data_model = SQLCustomDataFieldsDefinition.get_or_create(
        domain, UserFieldsView.field_type
    )
    location_cache = LocationIdToSiteCodeCache(domain)

    def _get_group_names(user):
        # Group names in natural alphanumeric order for stable output
        return sorted([
            group_memoizer.get(id).name for id in Group.by_user_id(user.user_id, wrap=False)
        ], key=alphanumeric_sort_key)

    def _get_devices(user):
        """
        Returns a comma-separated list of IMEI numbers of the user's devices,
        sorted with most-recently-used first
        """
        return ', '.join([device.device_id for device in sorted(
            user.devices, key=lambda d: d.last_used, reverse=True
        )])

    def _make_user_dict(user, group_names, location_cache):
        # Flat dict of export values for one user
        model_data, uncategorized_data = (
            user_data_model.get_model_and_uncategorized(user.user_data)
        )
        role = user.get_role(domain)
        activity = user.reporting_metadata
        location_codes = []
        try:
            location_codes.append(location_cache.get(user.location_id))
        except SQLLocation.DoesNotExist:
            pass
        for location_id in user.assigned_location_ids:
            # skip if primary location_id, as it is already added to the start of list above
            if location_id != user.location_id:
                try:
                    location_codes.append(location_cache.get(location_id))
                except SQLLocation.DoesNotExist:
                    pass

        def _format_date(date):
            return date.strftime('%Y-%m-%d %H:%M:%S') if date else ''

        return {
            'data': model_data,
            'uncategorized_data': uncategorized_data,
            'group': group_names,
            'name': user.full_name,
            'password': "******",  # dummy display string for passwords
            'phone-number': user.phone_number,
            'email': user.email,
            'username': user.raw_username,
            'language': user.language,
            'user_id': user._id,
            'is_active': str(user.is_active),
            'User IMEIs (read only)': _get_devices(user),
            'location_code': location_codes,
            'role': role.name if role else '',
            'registered_on (read only)': _format_date(user.created_on),
            'last_submission (read only)':
                _format_date(activity.last_submission_for_user.submission_date),
            # Bug fix: last_sync was exported as a raw datetime object while
            # every other date column is formatted — format it the same way.
            'last_sync (read only)': _format_date(activity.last_sync_for_user.sync_date),
        }

    unrecognized_user_data_keys = set()
    user_groups_length = 0
    max_location_length = 0
    user_dicts = []
    for n, user in enumerate(get_commcare_users_by_filters(domain, user_filters)):
        group_names = _get_group_names(user)
        user_dict = _make_user_dict(user, group_names, location_cache)
        user_dicts.append(user_dict)
        unrecognized_user_data_keys.update(user_dict['uncategorized_data'])
        user_groups_length = max(user_groups_length, len(group_names))
        max_location_length = max(max_location_length, len(user_dict["location_code"]))
        if task:
            DownloadBase.set_progress(task, n, total_count)

    user_headers = [
        'username', 'password', 'name', 'phone-number', 'email',
        'language', 'role', 'user_id', 'is_active', 'User IMEIs (read only)',
        'registered_on (read only)', 'last_submission (read only)',
        'last_sync (read only)']
    user_data_fields = [f.slug for f in user_data_model.get_fields(include_system=False)]
    user_headers.extend(build_data_headers(user_data_fields))
    user_headers.extend(build_data_headers(
        unrecognized_user_data_keys,
        header_prefix='uncategorized_data'
    ))
    user_headers.extend(json_to_headers(
        {'group': list(range(1, user_groups_length + 1))}
    ))
    if domain_has_privilege(domain, privileges.LOCATIONS):
        user_headers.extend(json_to_headers(
            {'location_code': list(range(1, max_location_length + 1))}
        ))

    def _user_rows():
        # Flatten each user dict and emit values in header order; missing or
        # falsy values become empty strings
        for user_dict in user_dicts:
            row = dict(flatten_json(user_dict))
            yield [row.get(header) or '' for header in user_headers]

    return user_headers, _user_rows()