def _consolidate(self):
    """
    Consolidate valid TransitionRow requests into valid Transition objects.

    In case of multiple TransitionRow requests, like in case of merge/split,
    combine them into one Transition.

    Side effects on self: extends ``errors`` with row validation failures,
    appends to ``valid_transitions[location_type_code]``, and records site
    codes / usernames touched by each accepted transition for later
    consolidated validations.
    """
    for location_type_code, rows_for_site_code in self.transition_rows.items():
        for site_code, rows in rows_for_site_code.items():
            # validate every row for this site; any error skips the site entirely
            errors = []
            for row in rows:
                errors.extend(row.validate())
            if errors:
                self.errors.extend(errors)
                continue
            # all rows for a site must agree on a single operation
            operation = self._valid_unique_operation(site_code, rows)
            if not operation:
                continue
            transition = self._consolidated_transition(location_type_code, operation, rows)
            if not transition:
                continue
            if self._is_valid_transition(transition):
                self.site_codes_to_be_deprecated.update(transition.old_site_codes)
                self.valid_transitions[location_type_code].append(transition)
                # collect usernames (old and new) for later user lookups
                for old_username, new_username in transition.user_transitions.items():
                    if old_username:
                        self.usernames.add(normalize_username(old_username, self.domain))
                    if new_username:
                        self.usernames.add(normalize_username(new_username, self.domain))
                # keep note of transition details for consolidated validations
                self.transiting_site_codes.update(transition.old_site_codes)
                self.transiting_site_codes.update(transition.new_site_codes)
                self.new_site_codes_for_location_type[location_type_code].update(transition.new_site_codes)
def update_usercase(domain, old_username, new_username):
    """
    Copy usercase properties from the old mobile user's usercase to the new
    mobile user's usercase, without overwriting values the new usercase
    already has (those were populated via HQ and take precedence).

    :param domain: domain both users belong to
    :param old_username: source user's username (qualified or not)
    :param new_username: destination user's username (qualified or not)
    :raises InvalidUserTransition: if either username does not resolve to a
        CommCare (mobile) user
    """
    # qualify bare usernames against the domain
    if "@" not in old_username:
        old_username = normalize_username(old_username, domain)
    if "@" not in new_username:
        new_username = normalize_username(new_username, domain)
    old_user = CouchUser.get_by_username(old_username)
    new_user = CouchUser.get_by_username(new_username)
    if old_user and new_user and old_user.is_commcare_user() and new_user.is_commcare_user():
        old_user_usercase = old_user.get_usercase()
        new_user_usercase = new_user.get_usercase()
        # pick values that are not already present on the new user's usercase, populated already via HQ
        updates = {}
        for key in set(old_user_usercase.case_json.keys()) - set(new_user_usercase.case_json.keys()):
            updates[key] = old_user_usercase.case_json[key]
        if updates:
            # bug fix: previously this submitted the old usercase's entire
            # case_json, clobbering values already present on the new
            # usercase; only the missing keys collected above are copied
            case_block = CaseBlock(new_user_usercase.case_id,
                                   update=updates,
                                   user_id=SYSTEM_USER_ID).as_text()
            submit_case_blocks([case_block], domain, user_id=SYSTEM_USER_ID)
    else:
        raise InvalidUserTransition(
            "Invalid Transition with old user %s and new user %s" % (old_username, new_username))
def validate_spec(self, spec):
    """Return ``self.error_message`` when the spec's username fails
    normalization; return None for a missing or acceptable username."""
    username = spec.get('username')
    if not username:
        return None
    try:
        normalize_username(str(username), self.domain)
    except TypeError:
        # a value that cannot be normalized for type reasons is tolerated here
        pass
    except ValidationError:
        return self.error_message
    return None
def create_or_update_locations(domain, location_specs, log):
    """Assign or remove SMS locations for users based on uploaded rows.

    Each row provides ``username``, ``location-sms-code`` and an optional
    ``remove`` flag ('y' removes the location, otherwise it is added).
    Problems are accumulated in ``log['errors']``; saving continues for
    remaining users after a failure.

    NOTE(review): Python 2 era code (``unicode``, ``dict.iteritems``) —
    confirm target interpreter before reuse.
    """
    location_cache = LocationCache()
    users = {}
    for row in location_specs:
        username = row.get('username')
        try:
            username = normalize_username(username, domain)
        except ValidationError:
            log['errors'].append(_("Username must be a valid email address: %s") % username)
        else:
            location_code = unicode(row.get('location-sms-code'))
            # one UserLocMapping per user accumulates all add/remove codes
            if username in users:
                user_mapping = users[username]
            else:
                user_mapping = UserLocMapping(username, domain, location_cache)
                users[username] = user_mapping
            if row.get('remove') == 'y':
                user_mapping.to_remove.add(location_code)
            else:
                user_mapping.to_add.add(location_code)
    for username, mapping in users.iteritems():
        try:
            messages = mapping.save()
            log['errors'].extend(messages)
        except UserUploadError as e:
            log['errors'].append(_('Unable to update locations for {user} because {message}'.format(
                user=username, message=e
            )))
def create_or_update_locations(domain, location_specs, log):
    """Assign or remove SMS locations for users based on uploaded rows.

    Same contract as its sibling: rows carry ``username``,
    ``location-sms-code`` and an optional ``remove`` flag; errors go to
    ``log['errors']`` and processing continues.

    NOTE(review): Python 2 era code (``unicode``, ``dict.iteritems``).
    """
    location_cache = LocationCache()
    users = {}
    for row in location_specs:
        username = row.get('username')
        try:
            username = normalize_username(username, domain)
        except ValidationError:
            log['errors'].append(
                _("Username must be a valid email address: %s") % username)
        else:
            location_code = unicode(row.get('location-sms-code'))
            # accumulate all location changes per user before saving
            if username in users:
                user_mapping = users[username]
            else:
                user_mapping = UserLocMapping(username, domain, location_cache)
                users[username] = user_mapping
            if row.get('remove') == 'y':
                user_mapping.to_remove.add(location_code)
            else:
                user_mapping.to_add.add(location_code)
    for username, mapping in users.iteritems():
        try:
            messages = mapping.save()
            log['errors'].extend(messages)
        except UserUploadError as e:
            log['errors'].append(
                _('Unable to update locations for {user} because {message}'.
                  format(user=username, message=e)))
def make_mobile_worker(cls, username, domain=None):
    """Build a CommCareUser with a normalized username and save its raw
    doc into the fake ES user store; returns the user."""
    target_domain = domain or cls.domain
    normalized = normalize_username(username, target_domain)
    worker = CommCareUser(username=normalized, domain=target_domain)
    worker.domain_membership = DomainMembership(domain=target_domain)
    UserESFake.save_doc(worker._doc)
    return worker
def _process_user_ownership(di, old_data_item, transaction):
    """Removes users from data items and add new ones based on the user column.

    Note the removal does not happen within a context of the "transaction".

    :param di: uploaded row dict; its 'user' column lists raw usernames
    :param old_data_item: the fixture data item whose ownership is replaced
    :param transaction: Couch transaction context (unused for removals)
    :return: list of translated error strings for invalid/unknown usernames
    """
    errors = []
    domain = old_data_item.domain
    old_users = old_data_item.users
    # drop all existing owners first; new ones are re-added below
    for user in old_users:
        old_data_item.remove_user(user)
    for raw_username in di.get('user', []):
        try:
            username = normalize_username(str(raw_username), domain)
        except ValidationError:
            # bug fix: the format string had no %(name)s placeholder, so the
            # offending username never appeared in the message
            errors.append(
                _("Invalid username: '%(name)s'. Row is not added") % {'name': raw_username}
            )
            continue
        user = CommCareUser.get_by_username(username)
        if user:
            old_data_item.add_user(user)
        else:
            errors.append(
                _("Unknown user: '%(name)s'. But the row is successfully added") % {'name': raw_username}
            )
    return errors
def get_username(xml):
    """Extract the <username> from a form XML's <meta> block and normalize it.

    :param xml: raw form XML as a string
    :return: normalized username, or None when the meta block has no
        username or normalization fails
    """
    match = re.search(r'<[Mm]eta>.*<username>(.*)</username>.*</[Mm]eta>', xml)
    # handle "no match" explicitly instead of letting .group() raise
    if match is None:
        return None
    try:
        return normalize_username(match.group(1))
    except Exception:
        # previously a bare `except:`, which also swallowed SystemExit /
        # KeyboardInterrupt; Exception keeps the best-effort behavior
        return None
def update_cases(self, domain, case_type, user_id):
    """For every case of ``case_type`` in ``domain``, look up the mobile
    worker named in the case's 'username' property and submit a case block
    (built by ``self.case_block``) linking it; cases with an invalid or
    unknown username are counted and skipped.

    :param domain: domain to operate on
    :param case_type: case type to scan
    :param user_id: user id to attribute the submitted case blocks to
    """
    case_ids = self.find_case_ids_by_type(domain, case_type)
    accessor = CaseAccessors(domain)
    case_blocks = []
    skip_count = 0
    for case in accessor.iter_cases(case_ids):
        username_of_associated_mobile_workers = case.get_case_property('username')
        try:
            normalized_username = normalize_username(
                username_of_associated_mobile_workers, domain)
        except ValidationError:
            # property missing/malformed — skip this case
            skip_count += 1
            continue
        user_id_of_mobile_worker = username_to_user_id(normalized_username)
        if user_id_of_mobile_worker:
            case_blocks.append(self.case_block(case, user_id_of_mobile_worker))
        else:
            # username is valid but no such mobile worker exists
            skip_count += 1
    print(
        f"{len(case_blocks)} to update in {domain}, {skip_count} cases have skipped due to unknown username."
    )
    total = 0
    # submit in batches to keep individual form submissions bounded
    for chunk in chunked(case_blocks, BATCH_SIZE):
        submit_case_blocks(chunk, domain, device_id=DEVICE_ID, user_id=user_id)
        total += len(chunk)
    print("Updated {} cases on domain {}".format(total, domain))
def test_add_hq_user_id_to_case(self):
    """The add_hq_user_id_to_case command should fill hq_user_id on 'checkin'
    cases whose 'username' matches a mobile worker, and leave cases without a
    username or of other case types untouched."""
    username = normalize_username("mobile_worker", self.domain)
    new_mobile_worker = CommCareUser.create(self.domain, username, "123", None, None)
    user_id = new_mobile_worker.user_id
    new_mobile_worker.save()
    # checkin case WITH a username — should be updated by the command
    checkin_case_id = uuid.uuid4().hex
    self.submit_case_block(
        True, checkin_case_id, user_id=user_id, case_type='checkin',
        update={"username": new_mobile_worker.raw_username, "hq_user_id": None}
    )
    # checkin case WITHOUT a username — should be left blank
    checkin_case_no_username_id = uuid.uuid4().hex
    self.submit_case_block(
        True, checkin_case_no_username_id, user_id=user_id, case_type='checkin',
        update={"hq_user_id": None}
    )
    # different case type — command only targets 'checkin', so untouched
    lab_result_case_id = uuid.uuid4().hex
    self.submit_case_block(
        True, lab_result_case_id, user_id=user_id, case_type='lab_result',
        update={"username": new_mobile_worker.raw_username, "hq_user_id": None}
    )
    # precondition: hq_user_id starts empty
    checkin_case = self.case_accessor.get_case(checkin_case_id)
    self.assertEqual('', checkin_case.get_case_property('hq_user_id'))
    self.assertEqual(checkin_case.username, 'mobile_worker')
    call_command('add_hq_user_id_to_case', self.domain, 'checkin')
    checkin_case = self.case_accessor.get_case(checkin_case_id)
    checkin_case_no_username = self.case_accessor.get_case(checkin_case_no_username_id)
    lab_result_case = self.case_accessor.get_case(lab_result_case_id)
    self.assertEqual(checkin_case.get_case_property('hq_user_id'), user_id)
    self.assertEqual(checkin_case_no_username.hq_user_id, '')
    self.assertEqual(lab_result_case.hq_user_id, '')
def setUp(self):
    """Create a clean domain with usercases enabled, a fresh case-search ES
    index, one mobile worker with a synced usercase, and one indexed
    'checkin' case owned by that worker."""
    super().setUp()
    delete_all_users()
    self.domain_obj = create_domain(self.domain)
    enable_usercase(self.domain)
    # ES setup may fail if the cluster is unreachable; surface that clearly
    with trap_extra_setup(ConnectionError):
        self.es = get_es_new()
        initialize_index_and_mapping(self.es, CASE_SEARCH_INDEX_INFO)
    username = normalize_username("mobile_worker_1", self.domain)
    self.mobile_worker = CommCareUser.create(self.domain, username, "123", None, None)
    sync_user_cases(self.mobile_worker)
    self.checkin_case = CaseFactory(self.domain).create_case(
        case_type="checkin",
        owner_id=self.mobile_worker.get_id,
        update={"username": self.mobile_worker.raw_username},
    )
    send_to_elasticsearch(
        "case_search",
        transform_case_for_elasticsearch(self.checkin_case.to_json()))
    # make the indexed case immediately searchable
    self.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
    self.case_accessor = CaseAccessors(self.domain)
def validate_spec(self, spec):
    """Return ``self.error_message`` when the spec's normalized username is
    already taken; invalid usernames are ignored (another validator's job)."""
    try:
        normalized = normalize_username(spec.get('username'), self.domain)
    except ValidationError:
        return None
    return self.error_message if normalized in self.existing_usernames else None
def make_mobile_worker(cls, username, domain=None, metadata=None):
    """Create and return a mobile worker with a normalized username in the
    given domain (defaults to the class's test domain)."""
    worker_domain = domain or cls.domain
    return CommCareUser.create(
        domain=worker_domain,
        username=normalize_username(username),
        password="******",
        created_by=None,
        created_via=None,
        metadata=metadata,
    )
def make_mobile_worker(cls, username, domain=None):
    """Build a CommCareUser and index its doc in the fake ES store,
    mirroring fields the real ES mapping derives at index time."""
    target_domain = domain or cls.domain
    worker = CommCareUser(
        username=normalize_username(username, target_domain),
        domain=target_domain,
    )
    worker.domain_membership = DomainMembership(domain=target_domain)
    worker_doc = worker._doc
    # ES-only derived fields, normally produced by the index mapping
    worker_doc['username.exact'] = worker_doc['username']
    worker_doc['base_username'] = username
    UserESFake.save_doc(worker_doc)
    return worker
def create_or_update_safe(username, password, uuid, date, registering_phone_id, domain, user_data, **kwargs):
    """Create a mobile worker, or update the existing one that already owns
    this uuid, resolving username conflicts by suffixing a counter.

    :return: tuple ``(user, created)`` where ``created`` is False when an
        existing uuid-conflicting user was updated instead.
    """
    # check for uuid conflicts, if one exists, respond with the already-created user
    conflicting_user = CommCareUser.get_by_user_id(uuid)
    # we need to check for username conflicts, other issues
    # and make sure we send the appropriate conflict response to the phone
    try:
        username = normalize_username(username, domain)
    except ValidationError:
        raise Exception("Username (%s) is invalid: valid characters include [a-z], "
                        "[0-9], period, underscore, and single quote" % username)
    if conflicting_user:
        # try to update. If there are username conflicts, we have to resolve them
        if conflicting_user.domain != domain:
            raise Exception("Found a conflicting user in another domain. This is not allowed!")
        saved = False
        to_append = 2
        prefix, suffix = username.split("@")
        while not saved and to_append < MAX_DUPLICATE_USERS:
            try:
                conflicting_user.change_username(username)
                conflicting_user.password = password
                conflicting_user.date = date
                conflicting_user.device_id = registering_phone_id
                conflicting_user.user_data = user_data
                conflicting_user.save()
                saved = True
            except CouchUser.Inconsistent:
                # NOTE(review): this format string contains no %(pref)s /
                # %(count)s / %(suff)s placeholders, so the %-substitution is
                # a no-op and username never actually changes between retries
                # — looks like a redacted/broken literal; confirm intended
                # template before relying on this retry loop.
                username = "******" % {
                    "pref": prefix, "count": to_append, "suff": suffix}
                to_append = to_append + 1
        if not saved:
            raise Exception("There are over 1,000,000 users with that base name in your domain. REALLY?!? REALLY?!?!")
        return (conflicting_user, False)
    try:
        User.objects.get(username=username)
    except User.DoesNotExist:
        # Desired outcome
        pass
    else:
        # Come up with a suitable username
        prefix, suffix = username.split("@")
        username = get_unique_value(User.objects, "username", prefix, sep="", suffix="@%s" % suffix)
    couch_user = cls.create(domain, username, password,
                            uuid=uuid,
                            device_id=registering_phone_id,
                            date=date,
                            user_data=user_data)
    return (couch_user, True)
def post(self, request):
    """View's dispatch method automatically calls this.

    Parses an uploaded .xlsx workbook: the 'types' sheet defines fixture
    data types; each type's own sheet provides its data items, which are
    then linked to groups and users named in the rows.

    NOTE(review): Python 2 code (statement-form ``print``); the prints are
    debug leftovers that dump every row to stdout.
    """
    try:
        workbook = WorkbookJSONReader(request.file)
    except AttributeError:
        return HttpResponseBadRequest("Error processing your Excel (.xlsx) file")
    try:
        data_types = workbook.get_worksheet(title='types')
    except KeyError:
        return HttpResponseBadRequest("Workbook does not have a sheet called 'types'")
    # debug pass: dump everything before saving anything
    for dt in data_types:
        print "DataType"
        print dt
        for di in workbook.get_worksheet(title=dt['tag']):
            print "DataItem"
            print di
    for dt in data_types:
        data_type = FixtureDataType(
            domain=self.domain,
            name=dt['name'],
            tag=dt['tag'],
            fields=dt['field'],
        )
        data_type.save()
        data_items = workbook.get_worksheet(data_type.tag)
        for di in data_items:
            print di
            data_item = FixtureDataItem(
                domain=self.domain,
                data_type_id=data_type.get_id,
                fields=di['field']
            )
            data_item.save()
            # link ownership; unknown names are reported but don't abort
            for group_name in di.get('group', []):
                group = Group.by_name(self.domain, group_name)
                if group:
                    data_item.add_group(group)
                else:
                    messages.error(request, "Unknown group: %s" % group_name)
            for raw_username in di.get('user', []):
                username = normalize_username(raw_username, self.domain)
                user = CommCareUser.get_by_username(username)
                if user:
                    data_item.add_user(user)
                else:
                    messages.error(request, "Unknown user: %s" % raw_username)
    return HttpResponseRedirect(reverse('fixture_view', args=[self.domain]))
def setUp(self):
    """Create a clean domain with usercases enabled, a fresh case-search ES
    index, and one mobile worker whose usercases are synced."""
    super().setUp()
    delete_all_users()
    self.domain_obj = create_domain(self.domain)
    enable_usercase(self.domain)
    # ES setup may fail if the cluster is unreachable; surface that clearly
    with trap_extra_setup(ConnectionError):
        self.es = get_es_new()
        initialize_index_and_mapping(self.es, CASE_SEARCH_INDEX_INFO)
    username = normalize_username("mobile_worker_1", self.domain)
    self.mobile_worker = CommCareUser.create(self.domain, username, "123", None, None)
    sync_usercases(self.mobile_worker, self.domain)
def set_up_auth_test(self):
    """Prepare the auth test: domain, a 'danny' mobile worker (tolerating a
    pre-existing one), a saved app, and the secure_post submission URL."""
    self._set_up_domain()
    danny = normalize_username("danny", self.domain)
    try:
        self.user = CommCareUser.create(
            self.domain,
            username=danny,
            password="******"
        )
    except CommCareUser.Inconsistent:
        # user already exists from a previous run; reuse it
        pass
    self.app = Application.new_app(self.domain, "My Crazy App")
    self.app.save()
    self.url = reverse(secure_post, args=[self.domain, self.app.get_id])
def get_id(type, origin, name):
    """Resolve an id by kind and origin, e.g. get_id('users', 'remote', 'barbara')."""
    if origin == 'remote':
        # remote ids come straight from the precomputed mapping
        return remote_mapping[type][name]
    if origin == 'local':
        if type == 'users':
            username = normalize_username(name, domain)
            local_user = CommCareUser.get_by_username(username)
            if local_user:
                return local_user.user_id
            raise Exception('Unknown local user "%s"' % username)
        if type == 'groups':
            return Group.by_name(domain, name).get_id
def setUp(self):
    """Set up the domain, a 'danny' mobile worker (tolerating a pre-existing
    one), a saved v2.0 app, and the secure_post submission URL."""
    self._set_up_domain()
    try:
        self.user = CommCareUser.create(
            self.domain,
            username=normalize_username('danny', self.domain),
            password='******',
        )
    except CommCareUser.Inconsistent:
        # user already exists from a previous run; reuse it
        pass
    self.app = Application.new_app(self.domain, 'My Crazy App', '2.0')
    self.app.save()
    self.url = reverse(secure_post, args=[self.domain, self.app.get_id])
def _set_up_auth_test(self):
    """Set up the domain, a 'danny' mobile worker (tolerating a pre-existing
    one), a saved app, and the secure_post submission URL."""
    self._set_up_domain()
    try:
        self.user = CommCareUser.create(
            self.domain,
            username=normalize_username('danny', self.domain),
            password='******',
        )
    except CommCareUser.Inconsistent:
        # user already exists from a previous run; reuse it
        pass
    self.app = Application.new_app(self.domain, 'My Crazy App')
    self.app.save()
    self.url = reverse(secure_post, args=[self.domain, self.app.get_id])
def _run_upload(self, request, workbook):
    """Import fixture data types and items from an uploaded workbook inside a
    single Couch transaction, linking items to groups/users and deleting any
    pre-existing data types with the same tag."""
    group_memoizer = GroupMemoizer(self.domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr, message):
        # fetch a required column, raising a readable error when missing
        try:
            return container[attr]
        except KeyError:
            raise Exception(message.format(attr=attr))

    with CouchTransaction() as transaction:
        for dt in data_types:
            err_msg = "Workbook 'types' has no column '{attr}'"
            data_type = FixtureDataType(
                domain=self.domain,
                name=_get_or_raise(dt, 'name', err_msg),
                tag=_get_or_raise(dt, 'tag', err_msg),
                fields=_get_or_raise(dt, 'field', err_msg),
            )
            transaction.save(data_type)
            data_items = workbook.get_worksheet(data_type.tag)
            # sort_key preserves spreadsheet row order
            for sort_key, di in enumerate(data_items):
                data_item = FixtureDataItem(
                    domain=self.domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                transaction.save(data_item)
                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)
                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, self.domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)
        # replace-by-tag: remove older data types that share a tag with the
        # ones about to be saved
        for data_type in transaction.preview_save(cls=FixtureDataType):
            for duplicate in FixtureDataType.by_domain_tag(domain=self.domain, tag=data_type.tag):
                duplicate.recursive_delete(transaction)
def get_exising_users(self):
    """Return the subset of spec usernames (rows lacking a user_id) that
    already exist, checked in chunks of 500."""
    # collect normalized usernames for rows that would create a new user
    candidates = set()
    for spec in self.all_specs:
        raw = spec.get('username')
        if spec.get('user_id') or not raw:
            continue
        try:
            candidates.add(normalize_username(raw, self.domain))
        except ValidationError:
            # invalid usernames are reported elsewhere; skip here
            continue
    found = set()
    for batch in chunked(candidates, 500):
        found.update(get_existing_usernames(batch))
    return found
def _get_latest_enabled_build(domain, username, app_id, profile_id, location_flag_enabled):
    """
    :return: enabled build for the app for a location or profile on basis of
        feature flag enabled with location flag taking precedence
    """
    latest_enabled_build = None
    if location_flag_enabled:
        user = CommCareUser.get_by_username(normalize_username(username, domain))
        # robustness fix: get_by_username returns None for an unknown
        # username, which previously crashed on user.location_id; fall
        # through to the profile-based lookup instead
        user_location_id = user.location_id if user else None
        if user_location_id:
            parent_app_id = get_app(domain, app_id).copy_of
            latest_enabled_build = get_latest_app_release_by_location(domain, user_location_id, parent_app_id)
    if not latest_enabled_build:
        # Fall back to the old logic to support migration
        # ToDo: Remove this block once migration is complete
        if profile_id and toggles.RELEASE_BUILDS_PER_PROFILE.enabled(domain):
            latest_enabled_build = get_latest_enabled_build_for_profile(domain, profile_id)
    return latest_enabled_build
def setUpClass(cls):
    """Build the shared fixtures for mobile UCR report-filter tests: a user,
    an app with one report module/config, a generated suite, and the report
    fixture XML produced from mocked report data."""
    cls.report_id = '7b97e8b53d00d43ca126b10093215a9d'
    cls.report_config_uuid = 'a98c812873986df34fd1b4ceb45e6164ae9cc664'
    cls.domain = 'report-filter-test-domain'
    cls.user = CommCareUser(
        username=normalize_username('ralph', cls.domain),
        domain=cls.domain,
        language='en',
    )
    # MOBILE_UCR toggle must be on for report fixtures to be generated
    update_toggle_cache(MOBILE_UCR.slug, cls.domain, True, NAMESPACE_DOMAIN)
    report_configuration = cls.make_report_config(cls.domain, cls.report_id)
    cls.report_configs_by_id = {
        cls.report_id: report_configuration
    }
    cls.app = Application.new_app(cls.domain, "Report Filter Test App", APP_V2)
    module = cls.app.add_module(ReportModule.new_module("Report Module", 'en'))
    module.report_configs.append(
        ReportAppConfig(
            report_id=cls.report_id,
            header={},
            description="",
            graph_configs={
                '7451243209119342931': ReportGraphConfig(
                    series_configs={'count': {}}
                )
            },
            filters={'computed_owner_name_40cc88a0_1': MobileSelectFilter()},
            uuid=cls.report_config_uuid,
        )
    )
    # suite generation reads report configurations, hence the mock
    with mock_report_configurations(cls.report_configs_by_id):
        cls.suite = cls.app.create_suite()
    cls.data = [
        {'color_94ec39e6': 'red', 'count': 2, 'computed_owner_name_40cc88a0': 'cory'},
        {'color_94ec39e6': 'black', 'count': 1, 'computed_owner_name_40cc88a0': 'ctsims'},
        {'color_94ec39e6': 'red', 'count': 3, 'computed_owner_name_40cc88a0': 'daniel'},
    ]
    # generate the report fixture against the mocked data/config/app lookup
    with mock_report_data(cls.data):
        with mock_report_configuration_get(cls.report_configs_by_id):
            with mock.patch('corehq.apps.app_manager.fixtures.mobile_ucr.get_apps_in_domain',
                            lambda domain, include_remote: [cls.app]):
                fixture, = report_fixture_generator(cls.user, '2.0')
    cls.fixture = ElementTree.tostring(fixture)
def create_or_update_locations(domain, location_specs, log):
    """Assign or remove SMS locations for users based on uploaded rows.

    Unlike the sibling variants, this one does not catch normalization or
    save errors — a bad username or failing save propagates to the caller
    (``log`` is accepted but unused here).

    NOTE(review): Python 2 era code (``dict.iteritems``).
    """
    location_cache = LocationCache()
    users = {}
    for row in location_specs:
        username = normalize_username(row.get('username'), domain)
        location_code = row.get('location-sms-code')
        # accumulate all location changes per user before saving
        if username in users:
            user_mapping = users[username]
        else:
            user_mapping = UserLocMapping(username, domain, location_cache)
            users[username] = user_mapping
        if row.get('remove') == 'y':
            user_mapping.to_remove.add(location_code)
        else:
            user_mapping.to_add.add(location_code)
    for username, mapping in users.iteritems():
        mapping.save()
def setUp(self):
    """Create the test domain, a 'danny' mobile worker (tolerating a
    pre-existing one), a saved v2.0 app, the bare_form.xml fixture path, and
    the secure_post submission URL."""
    self.domain = 'my-crazy-domain'
    create_domain(self.domain)
    try:
        self.user = CommCareUser.create(
            self.domain,
            username=normalize_username('danny', self.domain),
            password='******',
        )
    except CommCareUser.Inconsistent:
        # user already exists from a previous run; reuse it
        pass
    self.app = Application.new_app(self.domain, 'My Crazy App', '2.0')
    self.app.save()
    self.file_path = os.path.join(
        os.path.dirname(__file__), "data", 'bare_form.xml'
    )
    self.url = reverse(secure_post, args=[self.domain, self.app.get_id])
def _run_upload(self, request, workbook):
    """Import fixture data types and items from an uploaded workbook inside a
    single Couch transaction, linking items to groups/users and deleting any
    pre-existing data types sharing a tag with the new ones."""
    group_memoizer = GroupMemoizer(self.domain)
    data_types = workbook.get_worksheet(title='types')
    with CouchTransaction() as transaction:
        for dt in data_types:
            data_type = FixtureDataType(
                domain=self.domain,
                name=dt['name'],
                tag=dt['tag'],
                fields=dt['field'],
            )
            transaction.save(data_type)
            data_items = workbook.get_worksheet(data_type.tag)
            for di in data_items:
                data_item = FixtureDataItem(
                    domain=self.domain,
                    data_type_id=data_type.get_id,
                    fields=di['field']
                )
                transaction.save(data_item)
                # link ownership; unknown names are reported but don't abort
                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)
                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, self.domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)
        # replace-by-tag: remove older data types with the same tag
        for data_type in transaction.preview_save(cls=FixtureDataType):
            for duplicate in FixtureDataType.by_domain_tag(domain=self.domain, tag=data_type.tag):
                duplicate.recursive_delete(transaction)
def create_or_update_locations(domain, location_specs, log):
    """Assign or remove SMS locations for users based on uploaded rows.

    Invalid usernames are recorded in ``log['errors']`` and skipped; save
    errors are not caught here and propagate to the caller.

    NOTE(review): Python 2 era code (``unicode``, ``dict.iteritems``).
    """
    location_cache = LocationCache()
    users = {}
    for row in location_specs:
        username = row.get('username')
        try:
            username = normalize_username(username, domain)
        except ValidationError:
            log['errors'].append("Username must be a valid email address: %s" % username)
        else:
            location_code = unicode(row.get('location-sms-code'))
            # accumulate all location changes per user before saving
            if username in users:
                user_mapping = users[username]
            else:
                user_mapping = UserLocMapping(username, domain, location_cache)
                users[username] = user_mapping
            if row.get('remove') == 'y':
                user_mapping.to_remove.add(location_code)
            else:
                user_mapping.to_add.add(location_code)
    for username, mapping in users.iteritems():
        mapping.save()
def create_or_update_users_and_groups(domain, user_specs, group_specs):
    """Bulk create/update mobile workers and their group memberships from
    uploaded rows.

    :param domain: target domain
    :param user_specs: row dicts keyed by the sorted ``allowed_headers``
    :param group_specs: group rows, processed first via create_or_update_groups
    :return: dict with 'errors' and per-row 'rows' status entries whose
        'flag' is one of created/updated/repeat/missing-data or an error text

    NOTE(review): Python 2 era code (``unicode``).
    """
    ret = {"errors": [], "rows": []}
    group_memoizer = create_or_update_groups(domain, group_specs, log=ret)
    usernames = set()
    user_ids = set()
    allowed_groups = set(group_memoizer.groups)
    allowed_group_names = [group.name for group in allowed_groups]
    try:
        for row in user_specs:
            # unpack columns in the fixed alphabetical header order
            data, group_names, language, name, password, phone_number, user_id, username = (
                row.get(k) for k in sorted(allowed_headers))
            password = unicode(password)
            group_names = group_names or []
            try:
                username = normalize_username(username, domain)
            except TypeError:
                # username column empty/non-string; may still update by user_id
                username = None
            except ValidationError:
                ret['rows'].append({
                    'username': username,
                    'row': row,
                    'flag': _('username cannot contain spaces or symbols'),
                })
                continue
            status_row = {
                'username': raw_username(username) if username else None,
                'row': row,
            }
            if username in usernames or user_id in user_ids:
                # duplicate row within this upload
                status_row['flag'] = 'repeat'
            elif not username and not user_id:
                status_row['flag'] = 'missing-data'
            else:
                try:
                    if username:
                        usernames.add(username)
                    if user_id:
                        user_ids.add(user_id)
                    # prefer lookup by id; fall back to username
                    if user_id:
                        user = CommCareUser.get_by_user_id(user_id, domain)
                    else:
                        user = CommCareUser.get_by_username(username)
                    if user:
                        if user.domain != domain:
                            raise UserUploadError(
                                _('User with username %(username)r is somehow in domain %(domain)r'
                                  ) % {
                                    'username': user.username,
                                    'domain': user.domain
                                })
                        if username and user.username != username:
                            user.change_username(username)
                        if password:
                            user.set_password(password)
                        status_row['flag'] = 'updated'
                    else:
                        if not password:
                            raise UserUploadError(
                                _("Cannot create a new user with a blank password"
                                  ))
                        user = CommCareUser.create(domain, username, password, uuid=user_id or '')
                        status_row['flag'] = 'created'
                    if phone_number:
                        user.add_phone_number(_fmt_phone(phone_number), default=True)
                    if name:
                        user.set_full_name(name)
                    if data:
                        user.user_data.update(data)
                    if language:
                        user.language = language
                    user.save()
                    if password:
                        # Without this line, digest auth doesn't work.
                        # With this line, digest auth works.
                        # Other than that, I'm not sure what's going on
                        user.get_django_user().check_password(password)
                    # drop memberships not listed in the row…
                    for group_id in Group.by_user(user, wrap=False):
                        group = group_memoizer.get(group_id)
                        if group.name not in group_names:
                            group.remove_user(user, save=False)
                    # …and add the listed ones (must exist in the upload)
                    for group_name in group_names:
                        if group_name not in allowed_group_names:
                            raise UserUploadError(
                                _("Can't add to group '%s' "
                                  "(try adding it to your spreadsheet)") % group_name)
                        group_memoizer.by_name(group_name).add_user(user, save=False)
                except UserUploadError as e:
                    status_row['flag'] = '%s' % e
            ret["rows"].append(status_row)
    finally:
        # persist accumulated group membership changes even on failure
        group_memoizer.save_all()
    return ret
def create_or_update_users_and_groups(upload_domain, user_specs, upload_user, group_memoizer=None, update_progress=None):
    """Bulk create/update mobile workers (and optionally linked web users)
    from uploaded rows, possibly spanning multiple domains.

    :param upload_domain: domain the upload was started from; rows may name
        another domain in their 'domain' column
    :param user_specs: list of row dicts from the upload
    :param upload_user: the user performing the upload (for auditing/ACLs)
    :param group_memoizer: optional pre-built group memoizer
    :param update_progress: optional callback invoked with the row counter
    :return: dict with 'errors' and per-row 'rows' status entries
    """
    domain_info_by_domain = {}
    ret = {"errors": [], "rows": []}
    current = 0
    try:
        for row in user_specs:
            if update_progress:
                update_progress(current)
            current += 1
            log_user_create = False
            log_role_update = False
            username = row.get('username')
            domain = row.get('domain') or upload_domain
            username = normalize_username(str(username), domain) if username else None
            status_row = {
                'username': username,
                'row': row,
            }
            # per-domain caches (groups, roles, profiles, validators, …)
            domain_info = get_domain_info(domain, upload_domain, user_specs, domain_info_by_domain, group_memoizer)
            try:
                for validator in domain_info.validators:
                    validator(row)
            except UserUploadError as e:
                status_row['flag'] = str(e)
                ret['rows'].append(status_row)
                continue
            data = row.get('data', {})
            email = row.get('email')
            group_names = list(map(str, row.get('group') or []))
            language = row.get('language')
            name = row.get('name')
            password = row.get('password')
            phone_number = row.get('phone-number')
            uncategorized_data = row.get('uncategorized_data', {})
            user_id = row.get('user_id')
            # None (column absent) means "leave locations alone"
            location_codes = row.get('location_code', []) if 'location_code' in row else None
            location_codes = format_location_codes(location_codes)
            role = row.get('role', None)
            profile = row.get('user_profile', None)
            web_user = row.get('web_user')
            try:
                password = str(password) if password else None
                is_active = spec_value_to_boolean_or_none(row, 'is_active')
                is_account_confirmed = spec_value_to_boolean_or_none(
                    row, 'is_account_confirmed')
                send_account_confirmation_email = spec_value_to_boolean_or_none(
                    row, 'send_confirmation_email')
                remove_web_user = spec_value_to_boolean_or_none(
                    row, 'remove_web_user')
                if user_id:
                    # update path: the row targets an existing mobile worker
                    user = CommCareUser.get_by_user_id(user_id, domain)
                    if not user:
                        raise UserUploadError(
                            _("User with ID '{user_id}' not found").format(
                                user_id=user_id, domain=domain))
                    check_changing_username(user, username)
                    # note: explicitly not including "None" here because
                    # that's the default value if not set.
                    # False means it was set explicitly to that value
                    if is_account_confirmed is False and not web_user:
                        raise UserUploadError(
                            _("You can only set 'Is Account Confirmed' to 'False' on a new User."
                              ))
                    if is_password(password):
                        user.set_password(password)
                        # overwrite password in results so we do not save it to the db
                        status_row['row']['password'] = '******'
                    status_row['flag'] = 'updated'
                else:
                    # create path: brand-new mobile worker
                    kwargs = {}
                    if is_account_confirmed is not None and not web_user:
                        kwargs['is_account_confirmed'] = is_account_confirmed
                    user = CommCareUser.create(
                        domain, username, password,
                        created_by=upload_user,
                        created_via=USER_CHANGE_VIA_BULK_IMPORTER,
                        commit=False, **kwargs)
                    log_user_create = True
                    status_row['flag'] = 'created'
                if phone_number:
                    user.add_phone_number(_fmt_phone(phone_number), default=True)
                if name:
                    user.set_full_name(str(name))
                # Add in existing data. Don't use metadata - we don't want to add profile-controlled fields.
                for key, value in user.user_data.items():
                    if key not in data:
                        data[key] = value
                if profile:
                    profile_obj = domain_info.profiles_by_name[profile]
                    data[PROFILE_SLUG] = profile_obj.id
                    # profile-controlled fields must not be set directly
                    for key in profile_obj.fields.keys():
                        user.pop_metadata(key)
                try:
                    user.update_metadata(data)
                except ValueError as e:
                    raise UserUploadError(str(e))
                if uncategorized_data:
                    user.update_metadata(uncategorized_data)
                # Clear blank user data so that it can be purged by remove_unused_custom_fields_from_users_task
                for key in dict(data, **uncategorized_data):
                    value = user.metadata[key]
                    if value is None or value == '':
                        user.pop_metadata(key)
                if language:
                    user.language = language
                if email:
                    user.email = email.lower()
                if is_active is not None:
                    user.is_active = is_active
                if domain_info.can_assign_locations and location_codes is not None:
                    # Do this here so that we validate the location code before we
                    # save any other information to the user, this way either all of
                    # the user's information is updated, or none of it
                    # Do not update location info if the column is not included at all
                    location_ids = find_location_id(location_codes, domain_info.location_cache)
                    locations_updated, primary_loc_removed = check_modified_user_loc(
                        location_ids, user.location_id, user.assigned_location_ids)
                    if primary_loc_removed:
                        user.unset_location(commit=False)
                    if locations_updated:
                        user.reset_locations(location_ids, commit=False)
                if role:
                    role_qualified_id = domain_info.roles_by_name[role].get_qualified_id()
                    user_current_role = user.get_role(domain=domain)
                    # only audit-log an actual role change
                    log_role_update = not (
                        user_current_role
                        and user_current_role.get_qualified_id() == role_qualified_id)
                    if log_role_update:
                        user.set_role(domain, role_qualified_id)
                if web_user:
                    user.update_metadata({'login_as_user': web_user})
                user.save()
                if log_user_create:
                    user.log_user_create(upload_user, USER_CHANGE_VIA_BULK_IMPORTER)
                if log_role_update:
                    log_user_role_update(domain, user, upload_user, USER_CHANGE_VIA_BULK_IMPORTER)
                if web_user:
                    # manage the linked web-user account for this row
                    check_can_upload_web_users(upload_user)
                    current_user = CouchUser.get_by_username(web_user)
                    if remove_web_user:
                        remove_web_user_from_domain(domain, current_user, username, upload_user)
                    else:
                        check_user_role(username, role)
                        if not current_user and is_account_confirmed:
                            raise UserUploadError(
                                _("You can only set 'Is Account Confirmed' to 'True' on an existing Web User. {web_user} is a new username."
                                  ).format(web_user=web_user))
                        if current_user and not current_user.is_member_of(
                                domain) and is_account_confirmed:
                            current_user.add_as_web_user(
                                domain, role=role_qualified_id,
                                location_id=user.location_id)
                        elif not current_user or not current_user.is_member_of(
                                domain):
                            create_or_update_web_user_invite(
                                web_user, domain, role_qualified_id, upload_user,
                                user.location_id,
                                send_email=send_account_confirmation_email)
                        elif current_user.is_member_of(domain):
                            # edit existing user in the domain
                            current_user.set_role(domain, role_qualified_id)
                            if location_codes is not None:
                                if user.location_id:
                                    current_user.set_location(
                                        domain, user.location_id)
                                else:
                                    current_user.unset_location(domain)
                            current_user.save()
                if send_account_confirmation_email and not web_user:
                    send_account_confirmation_if_necessary(user)
                if is_password(password):
                    # Without this line, digest auth doesn't work.
                    # With this line, digest auth works.
                    # Other than that, I'm not sure what's going on
                    # Passing use_primary_db=True because of https://dimagi-dev.atlassian.net/browse/ICDS-465
                    user.get_django_user(
                        use_primary_db=True).check_password(password)
                # reconcile group membership with the row's group list
                for group in domain_info.group_memoizer.by_user_id(user.user_id):
                    if group.name not in group_names:
                        group.remove_user(user)
                for group_name in group_names:
                    domain_info.group_memoizer.by_name(group_name).add_user(
                        user, save=False)
            except (UserUploadError, CouchUser.Inconsistent) as e:
                status_row['flag'] = str(e)
            ret["rows"].append(status_row)
    finally:
        # persist group changes for every touched domain, even on failure
        try:
            for domain_info in domain_info_by_domain.values():
                domain_info.group_memoizer.save_all()
        except BulkSaveError as e:
            _error_message = (
                "Oops! We were not able to save some of your group changes. "
                "Please make sure no one else is editing your groups "
                "and try again.")
            logging.exception(('BulkSaveError saving groups. '
                               'User saw error message "%s". Errors: %s')
                              % (_error_message, e.errors))
            ret['errors'].append(_error_message)
    return ret
def run_upload_api(request, domain, workbook):
    """Import fixture data types and data items from an uploaded workbook.

    Reads the 'types' worksheet to create/find FixtureDataType docs, then one
    worksheet per type tag for the FixtureDataItem rows. All writes go through
    a single CouchTransaction so they are committed together at the end of the
    `with` block.

    Returns a dict with "number_of_fixtures" set to the count of processed
    type rows ("unknown_groups"/"unknown_users" are initialized but populated
    only via Django messages, not the return value).
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # Missing columns in the 'types' sheet are a hard failure.
        try:
            return container[attr]
        except KeyError:
            raise Exception("Workbook 'types' has no column '{attr}'".format(attr=attr))

    # -1 so that an empty 'types' sheet reports 0 fixtures below (+1 at end).
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        for number_of_fixtures, dt in enumerate(data_types):
            tag = _get_or_raise(dt, 'tag')
            data_type_results = FixtureDataType.by_domain_tag(domain, tag)
            if len(data_type_results) == 0:
                # No existing type with this tag: create a new one.
                data_type = FixtureDataType(
                    domain=domain,
                    name=_get_or_raise(dt, 'name'),
                    tag=_get_or_raise(dt, 'tag'),
                    fields=_get_or_raise(dt, 'field'),
                )
                transaction.save(data_type)
            else:
                # NOTE(review): if multiple types share the tag, this silently
                # keeps only the last result — confirm that's intended.
                for x in data_type_results:
                    data_type = x

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                try:
                    # Reuse the existing item when the row carries a valid UID
                    # belonging to this domain/type; any lookup or sanity
                    # failure falls through to saving the freshly-built item.
                    old_data_item = FixtureDataItem.get(di['UID'])
                    assert old_data_item.domain == domain
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    assert old_data_item.data_type_id == data_type.get_id
                    if di.get(DELETE_HEADER) in ("Y", "y"):
                        # Row flagged for deletion: remove it and its children.
                        old_data_item.recursive_delete(transaction)
                        continue
                    old_data_item.fields = di['field']
                    transaction.save(old_data_item)
                except (AttributeError, KeyError, ResourceNotFound, AssertionError):
                    old_data_item = new_data_item
                    transaction.save(old_data_item)

                # Reset group/user ownership before re-adding from the sheet.
                old_groups = old_data_item.get_groups()
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.get_users()
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)
                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
def run_upload(domain, workbook, replace=False, task=None):
    """Import fixture tables and rows from a workbook into FixtureUploadResult.

    Validates each item sheet against its 'types' definition, builds
    FixtureDataType/FixtureDataItem docs inside one CouchTransaction, and
    records non-fatal problems (unknown groups/users, bad UIDs) on
    ``return_val.errors``. Structural mismatches raise
    ExcelMalformatException. Progress is reported via ``task`` when given.

    BUG FIX: ``diff_lists`` previously computed
    ``not_in_a = set_a.difference(set_a)`` — always empty — so the
    "missing column/property" validations (has_no_field_column,
    sheet_has_no_property) could never fire. It now correctly returns the
    elements of ``list_b`` that are absent from ``list_a``.
    """
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location(domain)

    def diff_lists(list_a, list_b):
        """Return (elements of b missing from a, elements of a missing from b)."""
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # Fixed: was set_a.difference(set_a), which is always empty.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units; 10. forces float
            # division under Python 2.
            if task:
                processed = table_count * 10 + (10. * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    # Replace mode: wipe the existing type (and its items).
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's doc: fall back to a new type.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)

                # Check that type definitions in 'types' sheet vs corresponding
                # columns in the item-sheet MATCH.
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # Check that this item has all the properties listed in its
                # 'types' definition.
                item_attributes_list = di['property'].keys() if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # Check that properties in 'types' sheet vs item-sheet MATCH.
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_no_property"]
                            ).format(tag=tag, property=not_in_sheet[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_extra_property"]
                            ).format(tag=tag, property=not_in_types[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        # Fields with properties must carry list values
                        # (numbered columns).
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(
                                FAILURE_MESSAGES["invalid_field_with_property"]
                            ).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(
                                    FAILURE_MESSAGES["invalid_property"]
                                ).format(field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES[
                                    "wrong_field_property_combos"]).format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)

                # Excel format check should have been covered by this line.
                # Can make assumptions about data now.
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to another domain/type: treat as new.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership then re-add from the sheet's columns.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    return return_val
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Import fixture tables and rows from a workbook (six-compat version).

    Builds FixtureDataType/FixtureDataItem docs inside one CouchTransaction,
    collecting non-fatal problems on ``return_val.errors``, then clears the
    fixture caches for the domain. Progress is reported via ``task`` when
    given. Unlike the older ``run_upload``, this version performs no
    sheet-vs-types column validation before building items.
    """
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    # Saved types are collected so their quickcache entries can be cleared
    # after the transaction commits.
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units.
            # NOTE(review): under Python 2 `10 * item_count / items_in_table`
            # is integer division (the sibling uploader uses `10.`) — confirm
            # whether fractional progress is intended here.
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    # Replace mode: wipe the existing type (and its items).
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's doc: fall back to new.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid}
                    )

                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        # Fields with properties carry parallel lists of
                        # values and per-property values.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(field_prop_combos[x]),
                                properties={prop: six.text_type(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    # Reuse the existing item when the row has a UID and we
                    # are not replacing wholesale.
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to another domain/type: treat as new.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']}
                        )
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership then re-add from the sheet's columns.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name}
                        )

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username}
                        )
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username}
                        )

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    # Invalidate cached fixtures now that the transaction has committed.
    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
def run_upload(request, domain, workbook, replace=False):
    """Import fixture types and items from a workbook (view-facing version).

    Reads the 'types' worksheet (supporting both the new 'table_id' header and
    the legacy 'tag' header), validates item sheets against the type
    definitions, and writes everything through one CouchTransaction.
    Non-fatal problems are surfaced via Django ``messages``; structural
    problems raise ExcelMalformatException / DuplicateFixtureTagException.

    BUG FIX: ``diff_lists`` previously computed
    ``not_in_a = set_a.difference(set_a)`` — always empty — so the
    "missing column/property" validations (has_no_field_column,
    sheet_has_no_property) could never fire. It now correctly returns the
    elements of ``list_b`` that are absent from ``list_a``.
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column": "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column": "Excel-sheet '{tag}' does not contain the column '{field}' "
                               "as specified in its 'types' definition",
        "has_extra_column": "Excel-sheet '{tag}' has an extra column" +
                            "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax": "Properties should be specified as 'field 1: property 1'. In 'types' sheet, " +
                                 "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property": "Excel-sheet '{tag}' does not contain property " +
                                 "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property": "Excel-sheet '{tag}'' has an extra property " +
                                    "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property": "Fields with attributes should be numbered as 'field: {field} integer",
        "invalid_property": "Attribute should be written as '{field}: {prop} interger'",
        "wrong_field_property_combos": "Number of values for field '{field}' and attribute '{prop}' should be same",
        "replace_with_UID": "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }
    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(_(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        """Return (elements of b missing from a, elements of a missing from b)."""
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # Fixed: was set_a.difference(set_a), which is always empty.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    # -1 so that an empty 'types' sheet reports 0 fixtures below (+1 at end).
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        # First pass: reject duplicate table tags up front.
        fixtures_tags = []
        type_sheets = []
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                # Legacy header support.
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(_(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)

        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                messages.info(request, _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."))
                tag = _get_or_raise(dt, 'tag')

            # Build FixtureTypeField defs, attaching 'field N' property lists.
            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                prop_key = "field " + str(count + 1)
                if dt.has_key(prop_key):
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages["wrong_property_syntax"].format(
                            prop_key=prop_key,
                            field=field
                        )
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(
                    field_name=field,
                    properties=property_list
                )
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                if replace:
                    # Replace mode: wipe the existing type (and its items).
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's doc: fall back to new.
                    data_type = new_data_type
                    messages.error(request, _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding
                # columns in the item-sheet MATCH.
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages["has_no_field_column"].format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages["has_extra_column"].format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # Check that properties in 'types' sheet vs item-sheet MATCH.
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages["sheet_has_no_property"].format(
                                tag=tag,
                                property=not_in_sheet[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages["sheet_has_extra_property"].format(
                                tag=tag,
                                property=not_in_types[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(_(error_message))
                        # Fields with properties must carry list values
                        # (numbered columns).
                        if type(di['field'][field.field_name]) != list:
                            error_message = failure_messages["invalid_field_with_property"].format(field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = failure_messages["invalid_property"].format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages["wrong_field_property_combos"].format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(_(error_message))

                # excel format check should have been covered by this line.
                # Can make assumptions about data now.
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={prop: unicode(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to another domain/type: treat as new.
                        old_data_item = new_data_item
                        messages.error(request, _("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset ownership then re-add from the sheet's columns.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})
                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(raw_username, domain)
                    except ValidationError:
                        messages.error(request, _("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, _("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
def create_or_update_users_and_groups(domain, user_specs, group_specs):
    """Bulk create/update CommCare users and groups from spreadsheet rows.

    Python 2 only (uses ``except Exception, e`` and ``unicode``).

    ``group_specs`` rows carry 'name', 'case-sharing', 'reporting'.
    ``user_specs`` rows are unpacked from ``sorted(allowed_headers)`` (a
    module-level constant). Returns {"errors": [...], "rows": [...]} where
    each row dict gets a 'flag' of 'created'/'updated'/'repeat'/
    'missing-data' or an error string. Groups are saved in the ``finally``
    block even when user processing raises.
    """
    group_memoizer = GroupMemoizer(domain)
    ret = {"errors": [], "rows": []}
    for row in group_specs:
        group_name, case_sharing, reporting = row['name'], row['case-sharing'], row['reporting']
        try:
            group = group_memoizer.get_or_create_group(group_name)
        except MultipleResultsFound:
            ret["errors"].append("Multiple groups named: %s" % group_name)
        else:
            group.case_sharing = case_sharing
            group.reporting = reporting
    # Track usernames/ids already seen to flag duplicate rows.
    usernames = set()
    user_ids = set()
    try:
        for row in user_specs:
            # Unpack in the fixed alphabetical order of allowed_headers.
            data, group_names, name, password, phone_number, user_id, username = (
                row.get(k) for k in sorted(allowed_headers)
            )
            if isinstance(password, float):
                # almost certainly what was intended
                password = unicode(int(password))
            group_names = group_names or []
            try:
                username = normalize_username(username, domain)
            except TypeError:
                # username was None / non-string; treated as missing below.
                username = None
            status_row = {'username': raw_username(username) if username else None}
            status_row['row'] = row
            if username in usernames or user_id in user_ids:
                status_row['flag'] = 'repeat'
            elif not username and not user_id:
                status_row['flag'] = 'missing-data'
            else:
                try:
                    if username:
                        usernames.add(username)
                    if user_id:
                        user_ids.add(user_id)
                    # Prefer lookup by explicit user_id over username.
                    if user_id:
                        user = CommCareUser.get_by_user_id(user_id, domain)
                    else:
                        user = CommCareUser.get_by_username(username)
                    if user:
                        if user.domain != domain:
                            raise Exception(
                                'User with username %r is somehow in domain %r'
                                % (user.username, user.domain))
                        if username and user.username != username:
                            user.change_username(username)
                        if password:
                            user.set_password(password)
                        status_row['flag'] = 'updated'
                    else:
                        if not password:
                            raise Exception("Cannot create a new user with a blank password")
                        user = CommCareUser.create(domain, username, password, uuid=user_id or '')
                        status_row['flag'] = 'created'
                    if phone_number:
                        user.add_phone_number(_fmt_phone(phone_number), default=True)
                    if name:
                        user.set_full_name(name)
                    if data:
                        user.user_data.update(data)
                    user.save()
                    if password:
                        # Without this line, digest auth doesn't work.
                        # With this line, digest auth works.
                        # Other than that, I'm not sure what's going on
                        user.get_django_user().check_password(password)
                    # Sync group membership with the row's group list.
                    for group in Group.by_user(user):
                        if group.name not in group_names:
                            group = group_memoizer.get_or_create_group(group.name)
                            group.remove_user(user)
                    for group_name in group_names:
                        try:
                            group_memoizer.get_group(group_name).add_user(user)
                        except Exception:
                            raise Exception(
                                "Can't add to group '%s' (try adding it to your spreadsheet)"
                                % group_name)
                except Exception, e:
                    # Per-row failure: record it and keep processing rows.
                    if isinstance(e, DatabaseError):
                        transaction.rollback()
                    status_row['flag'] = 'error: %s' % e
            ret["rows"].append(status_row)
    finally:
        # Groups accumulate changes in memory; persist them even on failure.
        group_memoizer.save_all()
    return ret
def get_username(reg):
    """Extract and normalize the <username> element from a registration XML string.

    Raises ValueError when no <username> element is present. (Previously this
    called ``.group(1)`` on the result of ``re.search`` unconditionally, so a
    missing element crashed with an opaque ``AttributeError: 'NoneType'
    object has no attribute 'group'``.)
    """
    match = re.search(r'<username>(.*)</username>', reg)
    if match is None:
        raise ValueError("registration XML contains no <username> element")
    # NOTE(review): other call sites pass a domain to normalize_username;
    # this one-argument call presumably relies on a default — confirm.
    return normalize_username(match.group(1))
def create_or_update_users_and_groups(upload_domain, user_specs, upload_user, group_memoizer=None, update_progress=None): domain_info_by_domain = {} def _get_domain_info(domain): domain_info = domain_info_by_domain.get(domain) if domain_info: return domain_info if domain == upload_domain: domain_group_memoizer = group_memoizer or GroupMemoizer(domain) else: domain_group_memoizer = GroupMemoizer(domain) domain_group_memoizer.load_all() can_assign_locations = domain_has_privilege(domain, privileges.LOCATIONS) location_cache = None if can_assign_locations: location_cache = SiteCodeToLocationCache(domain) domain_obj = Domain.get_by_name(domain) allowed_group_names = [group.name for group in domain_group_memoizer.groups] roles_by_name = {role.name: role for role in UserRole.by_domain(domain)} domain_user_specs = [spec for spec in user_specs if spec.get('domain', upload_domain) == domain] validators = get_user_import_validators( domain_obj, domain_user_specs, allowed_group_names, list(roles_by_name), upload_domain ) domain_info = DomainInfo( validators, can_assign_locations, location_cache, roles_by_name, domain_group_memoizer ) domain_info_by_domain[domain] = domain_info return domain_info ret = {"errors": [], "rows": []} current = 0 try: for row in user_specs: if update_progress: update_progress(current) current += 1 username = row.get('username') domain = row.get('domain') or upload_domain username = normalize_username(str(username), domain) if username else None status_row = { 'username': username, 'row': row, } domain_info = _get_domain_info(domain) try: for validator in domain_info.validators: validator(row) except UserUploadError as e: status_row['flag'] = str(e) ret['rows'].append(status_row) continue data = row.get('data') email = row.get('email') group_names = list(map(str, row.get('group') or [])) language = row.get('language') name = row.get('name') password = row.get('password') phone_number = row.get('phone-number') uncategorized_data = 
row.get('uncategorized_data') user_id = row.get('user_id') location_codes = row.get('location_code') or [] if location_codes and not isinstance(location_codes, list): location_codes = [location_codes] # ignore empty location_codes = [code for code in location_codes if code] role = row.get('role', None) web_user = row.get('web_user') try: password = str(password) if password else None is_active = spec_value_to_boolean_or_none(row, 'is_active') is_account_confirmed = spec_value_to_boolean_or_none(row, 'is_account_confirmed') send_account_confirmation_email = spec_value_to_boolean_or_none(row, 'send_confirmation_email') remove_web_user = spec_value_to_boolean_or_none(row, 'remove_web_user') if user_id: user = CommCareUser.get_by_user_id(user_id, domain) if not user: raise UserUploadError(_( "User with ID '{user_id}' not found" ).format(user_id=user_id, domain=domain)) if username and user.username != username: raise UserUploadError(_( 'Changing usernames is not supported: %(username)r to %(new_username)r' ) % {'username': user.username, 'new_username': username}) # note: explicitly not including "None" here because that's the default value if not set. # False means it was set explicitly to that value if is_account_confirmed is False and not web_user: raise UserUploadError(_( "You can only set 'Is Account Confirmed' to 'False' on a new User." 
)) if is_password(password): user.set_password(password) # overwrite password in results so we do not save it to the db status_row['row']['password'] = '******' status_row['flag'] = 'updated' else: kwargs = {} if is_account_confirmed is not None and not web_user: kwargs['is_account_confirmed'] = is_account_confirmed user = CommCareUser.create(domain, username, password, created_by=upload_user, created_via=USER_CHANGE_VIA_BULK_IMPORTER, commit=False, **kwargs) status_row['flag'] = 'created' if phone_number: user.add_phone_number(_fmt_phone(phone_number), default=True) if name: user.set_full_name(str(name)) if data: user.user_data.update(data) if uncategorized_data: user.user_data.update(uncategorized_data) if language: user.language = language if email: user.email = email.lower() if is_active is not None: user.is_active = is_active if domain_info.can_assign_locations: # Do this here so that we validate the location code before we # save any other information to the user, this way either all of # the user's information is updated, or none of it location_ids = [] for code in location_codes: loc = get_location_from_site_code(code, domain_info.location_cache) location_ids.append(loc.location_id) locations_updated = set(user.assigned_location_ids) != set(location_ids) primary_location_removed = (user.location_id and not location_ids or user.location_id not in location_ids) if primary_location_removed: user.unset_location(commit=False) if locations_updated: user.reset_locations(location_ids, commit=False) if role: role_qualified_id = domain_info.roles_by_name[role].get_qualified_id() user.set_role(domain, role_qualified_id) if web_user: user.user_data.update({'login_as_user': web_user}) user.save() if web_user: if not upload_user.can_edit_web_users(): raise UserUploadError(_( "Only users with the edit web users permission can upload web users" )) current_user = CouchUser.get_by_username(web_user) if remove_web_user: if not current_user or not 
current_user.is_member_of(domain): raise UserUploadError(_( "You cannot remove a web user that is not a member of this project. {web_user} is not a member.").format(web_user=web_user) ) else: current_user.delete_domain_membership(domain) current_user.save() else: if not role: raise UserUploadError(_( "You cannot upload a web user without a role. {web_user} does not have a role").format(web_user=web_user) ) if not current_user and is_account_confirmed: raise UserUploadError(_( "You can only set 'Is Account Confirmed' to 'True' on an existing Web User. {web_user} is a new username.").format(web_user=web_user) ) if current_user and not current_user.is_member_of(domain) and is_account_confirmed: current_user.add_as_web_user(domain, role=role_qualified_id, location_id=user.location_id) elif not current_user or not current_user.is_member_of(domain): invite_data = { 'email': web_user, 'invited_by': upload_user.user_id, 'invited_on': datetime.utcnow(), 'domain': domain, 'role': role_qualified_id, 'supply_point': user.location_id } invite = Invitation(**invite_data) invite.save() if send_account_confirmation_email: invite.send_activation_email() elif current_user.is_member_of(domain): # edit existing user in the domain current_user.set_role(domain, role_qualified_id) if user.location_id: current_user.set_location(domain, user.location_id) else: current_user.unset_location(domain) current_user.save() if send_account_confirmation_email and not web_user: send_account_confirmation_if_necessary(user) if is_password(password): # Without this line, digest auth doesn't work. # With this line, digest auth works. 
# Other than that, I'm not sure what's going on # Passing use_primary_db=True because of https://dimagi-dev.atlassian.net/browse/ICDS-465 user.get_django_user(use_primary_db=True).check_password(password) for group in domain_info.group_memoizer.by_user_id(user.user_id): if group.name not in group_names: group.remove_user(user) for group_name in group_names: domain_info.group_memoizer.by_name(group_name).add_user(user, save=False) except (UserUploadError, CouchUser.Inconsistent) as e: status_row['flag'] = str(e) ret["rows"].append(status_row) finally: try: for domain_info in domain_info_by_domain.values(): domain_info.group_memoizer.save_all() except BulkSaveError as e: _error_message = ( "Oops! We were not able to save some of your group changes. " "Please make sure no one else is editing your groups " "and try again." ) logging.exception(( 'BulkSaveError saving groups. ' 'User saw error message "%s". Errors: %s' ) % (_error_message, e.errors)) ret['errors'].append(_error_message) return ret
def run_upload(request, domain, workbook):
    """Import fixture data types and items for `domain` from an Excel workbook.

    Reads the 'types' worksheet to create/update FixtureDataType docs, then a
    worksheet per type to create/update FixtureDataItem docs. Validation
    failures raise; row-level problems are reported via Django messages.
    All saves/deletes are batched through a single CouchTransaction, so a
    raised exception aborts the whole upload.

    NOTE(review): the usernames in the "Invalid/Unknown user" messages appear
    redacted ('******') and the %-mapping has no placeholder — looks like
    sanitized source; confirm against the original message text.
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    group_memoizer = GroupMemoizer(domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # Required-column accessor: missing columns abort with a clear error.
        try:
            return container[attr]
        except KeyError:
            raise Exception("Workbook 'types' has no column '{attr}'".format(attr=attr))

    # -1 so an empty 'types' sheet reports 0 fixtures at the end.
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        for number_of_fixtures, dt in enumerate(data_types):
            tag = _get_or_raise(dt, 'tag')
            type_definition_fields = _get_or_raise(dt, 'field')
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                name=_get_or_raise(dt, 'name'),
                tag=_get_or_raise(dt, 'tag'),
                fields=type_definition_fields,
            )
            try:
                # Prefer updating an existing doc by UID; fall back to the
                # freshly built one (KeyError/ResourceNotFound land in except).
                if dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                    pass
                data_type.fields = type_definition_fields
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID points at another domain's doc: treat as new.
                    data_type = new_data_type
                    messages.error(request, _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError) as e:
                data_type = new_data_type
            transaction.save(data_type)
            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields = di['field']
                for field in type_definition_fields:
                    if not item_fields.has_key(field):
                        raise Exception(_("Workbook '%(tag)s' does not contain the column " +
                                          "'%(field)s' specified in its 'types' definition") % {'tag': tag, 'field': field})
                item_fields_list = di['field'].keys()
                for field in item_fields_list:
                    if not field in type_definition_fields:
                        raise Exception(_("""Workbook '%(tag)s' has an extra column
                                        '%(field)s' that's not defined in its 'types' definition""") % {'tag': tag, 'field': field})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    sort_key=sort_key
                )
                try:
                    # Same existing-vs-new resolution as for the data type.
                    if di['UID']:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = di['field']
                    if old_data_item.domain != domain:
                        old_data_item = new_data_item
                        messages.error(request, _("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID'] })
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    assert old_data_item.data_type_id == data_type.get_id
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError) as e:
                    old_data_item = new_data_item
                transaction.save(old_data_item)
                # Reset group/user ownership before re-applying the sheet's lists.
                old_groups = old_data_item.get_groups()
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.get_users()
                for user in old_users:
                    old_data_item.remove_user(user)
                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})
                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(raw_username, domain)
                    except ValidationError:
                        messages.error(request, _("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, _("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})
    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
def setUpClass(cls):
    """Build the shared fixture set for scheduling-recipient tests.

    Creates a domain, a country/state/city location tree, several mobile
    workers (with role metadata and a custom-data profile), two web users,
    two groups, a case group, and wires up the messaging-sync pillow.

    FIX: after creating ``mobile_user6`` the original code called
    ``cls.mobile_user5.save()`` (copy-paste from the previous stanza),
    leaving ``mobile_user6`` unsaved; it now saves ``mobile_user6``.
    """
    super(SchedulingRecipientTest, cls).setUpClass()
    cls.domain_obj = create_domain(cls.domain)

    # Location hierarchy: usa > ma > boston.
    cls.location_types = setup_location_types(cls.domain, ['country', 'state', 'city'])
    cls.country_location = make_loc('usa', domain=cls.domain, type='country')
    cls.state_location = make_loc('ma', domain=cls.domain, type='state',
                                  parent=cls.country_location)
    cls.city_location = make_loc('boston', domain=cls.domain, type='city',
                                 parent=cls.state_location)

    cls.mobile_user = CommCareUser.create(cls.domain, 'mobile', 'abc', None, None)
    cls.mobile_user.set_location(cls.city_location)

    cls.mobile_user2 = CommCareUser.create(cls.domain, 'mobile2', 'abc', None, None)
    cls.mobile_user2.set_location(cls.state_location)

    cls.mobile_user3 = CommCareUser.create(cls.domain, 'mobile3', 'abc', None, None, metadata={
        'role': 'pharmacist',
    })
    cls.mobile_user3.save()

    cls.mobile_user4 = CommCareUser.create(cls.domain, 'mobile4', 'abc', None, None, metadata={
        'role': 'nurse',
    })
    cls.mobile_user4.save()

    cls.mobile_user5 = CommCareUser.create(cls.domain, 'mobile5', 'abc', None, None, metadata={
        'role': ['nurse', 'pharmacist'],
    })
    cls.mobile_user5.save()

    full_username = normalize_username('mobile', cls.domain)
    cls.full_mobile_user = CommCareUser.create(cls.domain, full_username, 'abc', None, None)

    # Custom-data definition with a 'role' field, plus a profile using it.
    cls.definition = CustomDataFieldsDefinition(domain=cls.domain,
                                                field_type=UserFieldsView.field_type)
    cls.definition.save()
    cls.definition.set_fields([
        Field(
            slug='role',
            label='Role',
        ),
    ])
    cls.definition.save()
    cls.profile = CustomDataFieldsProfile(
        name='nurse_profile',
        fields={'role': ['nurse']},
        definition=cls.definition,
    )
    cls.profile.save()

    cls.mobile_user6 = CommCareUser.create(cls.domain, 'mobile6', 'abc', None, None, metadata={
        PROFILE_SLUG: cls.profile.id,
    })
    # Was `cls.mobile_user5.save()` — mobile_user6 was never persisted.
    cls.mobile_user6.save()

    cls.web_user = WebUser.create(cls.domain, 'web', 'abc', None, None)

    cls.web_user2 = WebUser.create(cls.domain, 'web2', 'abc', None, None, metadata={
        'role': 'nurse',
    })
    cls.web_user2.save()

    cls.group = Group(domain=cls.domain, users=[cls.mobile_user.get_id])
    cls.group.save()

    cls.group2 = Group(domain=cls.domain, users=[
        cls.mobile_user.get_id,
        cls.mobile_user3.get_id,
        cls.mobile_user4.get_id,
        cls.mobile_user5.get_id,
        cls.mobile_user6.get_id,
    ])
    cls.group2.save()

    cls.case_group = CommCareCaseGroup(domain=cls.domain)
    cls.case_group.save()

    cls.process_pillow_changes = process_pillow_changes('DefaultChangeFeedPillow')
    cls.process_pillow_changes.add_pillow(get_case_messaging_sync_pillow())
def run_upload(domain, workbook, replace=False, task=None):
    """Import fixture tables (data types) and their rows from `workbook`.

    :param domain: domain name the fixtures belong to
    :param workbook: parsed upload workbook with type sheets and data sheets
    :param replace: when True, drop existing docs and recreate from the sheet
    :param task: optional celery task used for progress reporting
    :returns: FixtureUploadResult with counts and row-level error strings
    :raises ExcelMalformatException: when sheet columns/properties disagree
        with the 'types' definitions

    FIX: ``diff_lists`` computed ``not_in_a = set_a.difference(set_a)``,
    which is always empty, so every "extra column/property" check below was
    dead code. It now correctly computes ``set_b.difference(set_a)``.
    """
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    pre_populate_location_groups(group_memoizer, domain)

    def diff_lists(list_a, list_b):
        # Returns (elements of b missing from a, elements of a missing from b).
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # was: set_a.difference(set_a) -- always empty (bug)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10. * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID belongs to another domain: fall back to a new doc.
                    data_type = new_data_type
                    return_val.errors.append(_("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError) as e:
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(FAILURE_MESSAGES["has_no_field_column"]).format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(FAILURE_MESSAGES["has_extra_column"]).format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that this item has all the properties listed in its 'types' definition
                item_attributes_list = di['property'].keys() if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(FAILURE_MESSAGES["has_no_field_column"]).format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(FAILURE_MESSAGES["has_extra_column"]).format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(FAILURE_MESSAGES["sheet_has_no_property"]).format(
                                tag=tag,
                                property=not_in_sheet[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(FAILURE_MESSAGES["sheet_has_extra_property"]).format(
                                tag=tag,
                                property=not_in_types[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(error_message)
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(FAILURE_MESSAGES["invalid_field_with_property"]).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(FAILURE_MESSAGES["invalid_property"]).format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES["wrong_field_property_combos"]).format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={prop: unicode(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(_("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset memberships before re-applying the sheet's lists.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(_("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(_("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(_("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return return_val
def create_or_update_users_and_groups(domain, user_specs, group_specs):
    """Bulk create/update CommCare users and their group memberships.

    :param domain: target domain name
    :param user_specs: iterable of row dicts keyed by the allowed headers
    :param group_specs: group rows, applied first via create_or_update_groups
    :returns: dict with 'errors' and per-row 'rows' status entries
        (flag in {'created', 'updated', 'repeat', 'missing-data'} or an
        error message)

    FIX: the original did ``password = unicode(password)`` unconditionally,
    so a missing password (None) became the truthy string ``u'None'`` —
    defeating the blank-password guard on user creation and setting the
    literal password "None". The coercion is now applied only when a
    password was actually supplied (matching the later revision of this
    function elsewhere in this file).
    """
    ret = {"errors": [], "rows": []}
    group_memoizer = create_or_update_groups(domain, group_specs, log=ret)
    usernames = set()
    user_ids = set()
    allowed_groups = set(group_memoizer.groups)
    allowed_group_names = [group.name for group in allowed_groups]
    try:
        for row in user_specs:
            data, group_names, language, name, password, phone_number, user_id, username = (
                row.get(k) for k in sorted(allowed_headers)
            )
            # Only coerce real passwords; unicode(None) == u'None' (truthy).
            if password:
                password = unicode(password)
            group_names = group_names or []
            try:
                username = normalize_username(username, domain)
            except TypeError:
                # username was None / non-string; fall through to missing-data.
                username = None
            except ValidationError:
                ret['rows'].append({
                    'username': username,
                    'row': row,
                    'flag': _('username cannot contain spaces or symbols'),
                })
                continue
            status_row = {
                'username': raw_username(username) if username else None,
                'row': row,
            }
            if username in usernames or user_id in user_ids:
                status_row['flag'] = 'repeat'
            elif not username and not user_id:
                status_row['flag'] = 'missing-data'
            else:
                try:
                    if username:
                        usernames.add(username)
                    if user_id:
                        user_ids.add(user_id)
                    if user_id:
                        user = CommCareUser.get_by_user_id(user_id, domain)
                    else:
                        user = CommCareUser.get_by_username(username)
                    if user:
                        if user.domain != domain:
                            raise UserUploadError(_('User with username %(username)r is somehow in domain %(domain)r') % {'username': user.username, 'domain': user.domain})
                        if username and user.username != username:
                            user.change_username(username)
                        if password:
                            user.set_password(password)
                        status_row['flag'] = 'updated'
                    else:
                        if not password:
                            raise UserUploadError(_("Cannot create a new user with a blank password"))
                        user = CommCareUser.create(domain, username, password, uuid=user_id or '')
                        status_row['flag'] = 'created'
                    if phone_number:
                        user.add_phone_number(_fmt_phone(phone_number), default=True)
                    if name:
                        user.set_full_name(name)
                    if data:
                        user.user_data.update(data)
                    if language:
                        user.language = language
                    user.save()
                    if password:
                        # Without this line, digest auth doesn't work.
                        # With this line, digest auth works.
                        # Other than that, I'm not sure what's going on
                        user.get_django_user().check_password(password)
                    for group_id in Group.by_user(user, wrap=False):
                        group = group_memoizer.get(group_id)
                        if group.name not in group_names:
                            group.remove_user(user, save=False)
                    for group_name in group_names:
                        if group_name not in allowed_group_names:
                            raise UserUploadError(_(
                                "Can't add to group '%s' "
                                "(try adding it to your spreadsheet)"
                            ) % group_name)
                        group_memoizer.by_name(group_name).add_user(user, save=False)
                except UserUploadError as e:
                    status_row['flag'] = '%s' % e
            ret["rows"].append(status_row)
    finally:
        # Groups are saved even if a row blows up mid-loop.
        group_memoizer.save_all()
    return ret
def test_username_doesnt_have_domain(self): user = create_restore_user( username=normalize_username('withdomain', domain='thedomain')) restore_payload = xml.get_registration_xml(user) self.assertTrue('thedomain' not in restore_payload)
def create_or_update_users_and_groups(domain, user_specs, group_specs, location_specs, task=None):
    """Bulk create/update users, groups, and locations with progress reporting.

    Processes group rows first, then each user row (create or update with
    password/phone/name/data/language/email and group memberships), and
    finally location rows. Progress is pushed to `task` via DownloadBase.

    :param task: optional async task handle; None disables progress updates.
    :returns: dict with 'errors' and per-row 'rows' status entries.
    """
    ret = {"errors": [], "rows": []}
    total = len(user_specs) + len(group_specs) + len(location_specs)

    def _set_progress(progress):
        # No-op when running synchronously (task is None).
        if task is not None:
            DownloadBase.set_progress(task, progress, total)

    group_memoizer = create_or_update_groups(domain, group_specs, log=ret)
    # Groups were already processed above; start the counter past them.
    current = len(group_specs)

    usernames = set()
    user_ids = set()
    allowed_groups = set(group_memoizer.groups)
    allowed_group_names = [group.name for group in allowed_groups]
    try:
        for row in user_specs:
            _set_progress(current)
            current += 1
            data, email, group_names, language, name, password, phone_number, user_id, username = (
                row.get(k) for k in sorted(allowed_headers))
            if password:
                password = unicode(password)
            group_names = group_names or []
            try:
                username = normalize_username(str(username), domain)
            except TypeError:
                # username missing/non-string: handled as 'missing-data' below.
                username = None
            except ValidationError:
                ret['rows'].append({
                    'username': username,
                    'row': row,
                    'flag': _('username cannot contain spaces or symbols'),
                })
                continue
            status_row = {
                'username': raw_username(username) if username else None,
                'row': row,
            }
            if username in usernames or user_id in user_ids:
                status_row['flag'] = 'repeat'
            elif not username and not user_id:
                status_row['flag'] = 'missing-data'
            else:
                try:
                    if username:
                        usernames.add(username)
                    if user_id:
                        user_ids.add(user_id)
                    if user_id:
                        user = CommCareUser.get_by_user_id(user_id, domain)
                    else:
                        user = CommCareUser.get_by_username(username)

                    def is_password(password):
                        # A run of '*' is the export's masked placeholder,
                        # not a real password.
                        if not password:
                            return False
                        for c in password:
                            if c != "*":
                                return True
                        return False

                    if user:
                        if user.domain != domain:
                            raise UserUploadError(
                                _('User with username %(username)r is '
                                  'somehow in domain %(domain)r') % {
                                      'username': user.username,
                                      'domain': user.domain
                                  })
                        if username and user.username != username:
                            user.change_username(username)
                        if is_password(password):
                            user.set_password(password)
                        status_row['flag'] = 'updated'
                    else:
                        if len(raw_username(username)) > CommCareAccountForm.max_len_username:
                            ret['rows'].append({
                                'username': username,
                                'row': row,
                                'flag': _("username cannot contain greater than %d characters" %
                                          CommCareAccountForm.max_len_username)
                            })
                            continue
                        if not is_password(password):
                            raise UserUploadError(
                                _("Cannot create a new user with a blank password"))
                        user = CommCareUser.create(domain, username, password, uuid=user_id or '', commit=False)
                        status_row['flag'] = 'created'
                    if phone_number:
                        user.add_phone_number(_fmt_phone(phone_number), default=True)
                    if name:
                        user.set_full_name(name)
                    if data:
                        user.user_data.update(data)
                    if language:
                        user.language = language
                    if email:
                        user.email = email
                    user.save()
                    if is_password(password):
                        # Without this line, digest auth doesn't work.
                        # With this line, digest auth works.
                        # Other than that, I'm not sure what's going on
                        user.get_django_user().check_password(password)
                    for group_id in Group.by_user(user, wrap=False):
                        group = group_memoizer.get(group_id)
                        if group.name not in group_names:
                            group.remove_user(user, save=False)
                    for group_name in group_names:
                        if group_name not in allowed_group_names:
                            raise UserUploadError(
                                _("Can't add to group '%s' "
                                  "(try adding it to your spreadsheet)") % group_name)
                        group_memoizer.by_name(group_name).add_user(user, save=False)
                except (UserUploadError, CouchUser.Inconsistent) as e:
                    status_row['flag'] = unicode(e)
            ret["rows"].append(status_row)
    finally:
        try:
            # Flush deferred group membership changes even on failure.
            group_memoizer.save_all()
        except BulkSaveError as e:
            _error_message = (
                "Oops! We were not able to save some of your group changes. "
                "Please make sure no one else is editing your groups "
                "and try again.")
            logging.exception(('BulkSaveError saving groups. '
                               'User saw error message "%s". Errors: %s') % (_error_message, e.errors))
            ret['errors'].append(_error_message)
    create_or_update_locations(domain, location_specs, log=ret)
    _set_progress(total)
    return ret
def run_upload(request, domain, workbook, replace=False):
    """Import fixture tables and rows from `workbook` (view-driven variant).

    Validates the 'types' sheet (unique table_ids, field/property syntax),
    then creates/updates FixtureDataType and FixtureDataItem docs inside one
    CouchTransaction. Row-level problems are surfaced via Django messages;
    structural problems raise ExcelMalformatException /
    DuplicateFixtureTagException.

    :param replace: when True, existing docs for a tag are deleted and
        rebuilt from the sheet.
    :returns: dict with 'unknown_groups', 'unknown_users', and
        'number_of_fixtures'.

    FIX: ``diff_lists`` computed ``not_in_a = set_a.difference(set_a)``
    (always empty), so the "extra column/property" checks could never fire.
    It now computes ``set_b.difference(set_a)``.
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column": "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column": "Excel-sheet '{tag}' does not contain the column '{field}' "
                               "as specified in its 'types' definition",
        "has_extra_column": "Excel-sheet '{tag}' has an extra column" +
                            "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax": "Properties should be specified as 'field 1: property 1'. In 'types' sheet, " +
                                 "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property": "Excel-sheet '{tag}' does not contain property " +
                                 "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property": "Excel-sheet '{tag}'' has an extra property " +
                                    "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property": "Fields with attributes should be numbered as 'field: {field} integer",
        "invalid_property": "Attribute should be written as '{field}: {prop} interger'",
        "wrong_field_property_combos": "Number of values for field '{field}' and attribute '{prop}' should be same",
        "replace_with_UID": "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }
    group_memoizer = GroupMemoizer(domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(
                _(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        # Returns (elements of b missing from a, elements of a missing from b).
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # was: set_a.difference(set_a) -- always empty (bug)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        # First pass: reject duplicate table_ids before touching the DB.
        fixtures_tags = []
        type_sheets = []
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(
                    _(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)

        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                # Legacy header name: accept it but nudge the user.
                messages.info(
                    request,
                    _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."))
                tag = _get_or_raise(dt, 'tag')

            type_definition_fields = _get_or_raise(dt, 'field')
            # Attach per-field property lists declared as 'field N' columns.
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                prop_key = "field " + str(count + 1)
                if dt.has_key(prop_key):
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages["wrong_property_syntax"].format(prop_key=prop_key, field=field)
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(field_name=field, properties=property_list)
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                    pass
                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    messages.error(
                        request,
                        _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError) as e:
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages["has_no_field_column"].format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages["has_extra_column"].format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages["sheet_has_no_property"].format(
                                tag=tag, property=not_in_sheet[0], field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages["sheet_has_extra_property"].format(
                                tag=tag, property=not_in_types[0], field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = failure_messages["invalid_field_with_property"].format(
                                field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = failure_messages["invalid_property"].format(
                                    field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages["wrong_field_property_combos"].format(
                                    field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                new_data_item = FixtureDataItem(domain=domain,
                                                data_type_id=data_type.get_id,
                                                fields=item_fields,
                                                sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        messages.error(
                            request,
                            _("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Reset memberships before re-applying the sheet's lists.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(
                            request,
                            _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        messages.error(
                            request,
                            _("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(
                            request,
                            _("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
def create_or_update_users_and_groups(domain, user_specs, group_memoizer=None, update_progress=None):
    """Bulk create/update CommCare users from upload rows (validator-based).

    Each row is run through the import validators first; valid rows then
    create or update a user (password, contact info, custom data, locations,
    role) and reconcile group memberships through `group_memoizer`.

    :param group_memoizer: preloaded GroupMemoizer, or None to build one here
    :param update_progress: optional callable invoked with the running row count
    :returns: dict with 'errors' and per-row 'rows' status entries
    """
    ret = {"errors": [], "rows": []}
    group_memoizer = group_memoizer or GroupMemoizer(domain)
    group_memoizer.load_all()
    current = 0
    can_assign_locations = domain_has_privilege(domain, privileges.LOCATIONS)
    if can_assign_locations:
        location_cache = SiteCodeToLocationCache(domain)
    domain_obj = Domain.get_by_name(domain)
    allowed_group_names = [group.name for group in group_memoizer.groups]
    roles_by_name = {role.name: role for role in UserRole.by_domain(domain)}
    validators = get_user_import_validators(domain_obj, user_specs, allowed_group_names, list(roles_by_name))
    try:
        for row in user_specs:
            if update_progress:
                update_progress(current)
            current += 1
            username = row.get('username')
            status_row = {
                'username': username,
                'row': row,
            }
            try:
                # All validators run against the raw row before any writes.
                for validator in validators:
                    validator(row)
            except UserUploadError as e:
                status_row['flag'] = str(e)
                ret['rows'].append(status_row)
                continue
            data = row.get('data')
            email = row.get('email')
            group_names = list(map(str, row.get('group') or []))
            language = row.get('language')
            name = row.get('name')
            password = row.get('password')
            phone_number = row.get('phone-number')
            uncategorized_data = row.get('uncategorized_data')
            user_id = row.get('user_id')
            location_codes = row.get('location_code') or []
            if location_codes and not isinstance(location_codes, list):
                # Single site code in the sheet: normalize to a list.
                location_codes = [location_codes]
            # ignore empty
            location_codes = [code for code in location_codes if code]
            role = row.get('role', '')
            try:
                username = normalize_username(str(username), domain) if username else None
                password = str(password) if password else None
                is_active = spec_value_to_boolean_or_none(row, 'is_active')
                is_account_confirmed = spec_value_to_boolean_or_none(row, 'is_account_confirmed')
                if user_id:
                    user = CommCareUser.get_by_user_id(user_id, domain)
                    if not user:
                        raise UserUploadError(_("User with ID '{user_id}' not found").format(user_id=user_id, domain=domain))
                    if username and user.username != username:
                        raise UserUploadError(_('Changing usernames is not supported: %(username)r to %(new_username)r') % {'username': user.username, 'new_username': username})
                    # note: explicitly not including "None" here because that's the default value if not set.
                    # False means it was set explicitly to that value
                    if is_account_confirmed is False:
                        raise UserUploadError(_(f"You can only set 'Is Account Confirmed' to 'False' on a new User."))
                    if is_password(password):
                        user.set_password(password)
                    status_row['flag'] = 'updated'
                else:
                    kwargs = {}
                    if is_account_confirmed is not None:
                        kwargs['is_account_confirmed'] = is_account_confirmed
                    user = CommCareUser.create(domain, username, password, commit=False, **kwargs)
                    status_row['flag'] = 'created'
                if phone_number:
                    user.add_phone_number(_fmt_phone(phone_number), default=True)
                if name:
                    user.set_full_name(str(name))
                if data:
                    user.user_data.update(data)
                if uncategorized_data:
                    user.user_data.update(uncategorized_data)
                if language:
                    user.language = language
                if email:
                    user.email = email.lower()
                if is_active is not None:
                    user.is_active = is_active
                if can_assign_locations:
                    # Do this here so that we validate the location code before we
                    # save any other information to the user, this way either all of
                    # the user's information is updated, or none of it
                    location_ids = []
                    for code in location_codes:
                        loc = get_location_from_site_code(code, location_cache)
                        location_ids.append(loc.location_id)
                    locations_updated = set(user.assigned_location_ids) != set(location_ids)
                    primary_location_removed = (user.location_id and not location_ids or
                                                user.location_id not in location_ids)
                    if primary_location_removed:
                        user.unset_location(commit=False)
                    if locations_updated:
                        user.reset_locations(location_ids, commit=False)
                if role:
                    user.set_role(domain, roles_by_name[role].get_qualified_id())
                user.save()
                if is_password(password):
                    # Without this line, digest auth doesn't work.
                    # With this line, digest auth works.
                    # Other than that, I'm not sure what's going on
                    # Passing use_primary_db=True because of https://dimagi-dev.atlassian.net/browse/ICDS-465
                    user.get_django_user(use_primary_db=True).check_password(password)
                for group in group_memoizer.by_user_id(user.user_id):
                    if group.name not in group_names:
                        group.remove_user(user)
                for group_name in group_names:
                    group_memoizer.by_name(group_name).add_user(user, save=False)
            except (UserUploadError, CouchUser.Inconsistent) as e:
                status_row['flag'] = str(e)
            ret["rows"].append(status_row)
    finally:
        try:
            # Flush deferred group membership changes even on failure.
            group_memoizer.save_all()
        except BulkSaveError as e:
            _error_message = (
                "Oops! We were not able to save some of your group changes. "
                "Please make sure no one else is editing your groups "
                "and try again.")
            logging.exception(('BulkSaveError saving groups. '
                               'User saw error message "%s". Errors: %s') % (_error_message, e.errors))
            ret['errors'].append(_error_message)
    return ret
def test_username_doesnt_have_domain(self):
    """The registration XML must not leak the domain half of the username."""
    qualified_username = normalize_username('withdomain', domain='thedomain')
    restore_user = create_restore_user(username=qualified_username)
    payload = get_registration_xml(restore_user)
    self.assertTrue('thedomain' not in payload)
def get_username(xml):
    """Extract and normalize the username from a form's ``<meta>`` block.

    :param xml: the raw form XML as a string.
    :returns: the normalized username, or ``None`` when the payload has no
        ``<username>`` element or normalization fails (best-effort contract).
    """
    try:
        # re.search returns None when there is no match; .group() then
        # raises AttributeError, which is handled below.
        match = re.search(r'<[Mm]eta>.*<username>(.*)</username>.*</[Mm]eta>', xml)
        return normalize_username(match.group(1))
    except Exception:
        # Deliberately broad (but no longer a bare `except:`, which would
        # also swallow KeyboardInterrupt/SystemExit): any parse or
        # normalization failure means "no username available".
        return None
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Import fixture data types and items from an uploaded workbook.

    :param domain: the domain the fixtures belong to.
    :param workbook: parsed upload workbook providing type sheets and data sheets.
    :param replace: when True, delete and recreate existing types/items instead
        of updating them in place.
    :param task: optional celery task used for progress reporting.
    :returns: a FixtureUploadResult collecting counts and per-row errors.
    """
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table counts for 10 progress units; items interpolate within.
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global

                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's type; fall back to a new one.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(field_prop_combos[x]),
                                properties={
                                    prop: six.text_type(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if (old_data_item.domain != domain
                            or not old_data_item.data_type_id == data_type.get_id):
                        # UID pointed at another domain/type; fall back to a new item.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Clear existing associations; they are rebuilt from the row below.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        # bug fix: the message previously had no %(name)s placeholder,
                        # so the offending username never appeared in the error
                        return_val.errors.append(
                            _("Invalid username: '%(name)s'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '%(name)s'. But the row is successfully added")
                            % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
def create_or_update_users_and_groups(domain, user_specs, group_specs, location_specs, task=None):
    """Bulk create/update CommCare users, groups, and locations for a domain.

    :param domain: target domain name.
    :param user_specs: list of per-user row dicts from the upload spreadsheet.
    :param group_specs: group rows handed to create_or_update_groups.
    :param location_specs: location rows handed to create_or_update_locations.
    :param task: optional celery task for progress reporting.
    :returns: dict with 'rows' (per-row status) and 'errors' (global messages).
    """
    ret = {"errors": [], "rows": []}
    total = len(user_specs) + len(group_specs) + len(location_specs)

    def _set_progress(progress):
        if task is not None:
            DownloadBase.set_progress(task, progress, total)

    # Hoisted out of the row loop (it was redefined every iteration).
    # A password consisting only of '*' is the redacted placeholder that the
    # bulk download writes back out, so it means "leave the password unchanged".
    def is_password(password):
        if not password:
            return False
        for c in password:
            if c != "*":
                return True
        return False

    group_memoizer = create_or_update_groups(domain, group_specs, log=ret)
    current = len(group_specs)

    usernames = set()
    user_ids = set()
    allowed_groups = set(group_memoizer.groups)
    allowed_group_names = [group.name for group in allowed_groups]
    try:
        for row in user_specs:
            _set_progress(current)
            current += 1
            data, email, group_names, language, name, password, phone_number, user_id, username = (
                row.get(k) for k in sorted(allowed_headers)
            )
            if password:
                # bug fix: was `unicode(password)`, a NameError on Python 3
                password = str(password)
            group_names = group_names or []
            try:
                username = normalize_username(str(username), domain)
            except TypeError:
                username = None
            except ValidationError:
                ret['rows'].append({
                    'username': username,
                    'row': row,
                    'flag': _('username cannot contain spaces or symbols'),
                })
                continue
            status_row = {
                'username': raw_username(username) if username else None,
                'row': row,
            }
            if username in usernames or user_id in user_ids:
                status_row['flag'] = 'repeat'
            elif not username and not user_id:
                status_row['flag'] = 'missing-data'
            else:
                try:
                    if username:
                        usernames.add(username)
                    if user_id:
                        user_ids.add(user_id)
                    if user_id:
                        user = CommCareUser.get_by_user_id(user_id, domain)
                    else:
                        user = CommCareUser.get_by_username(username)
                    if user:
                        if user.domain != domain:
                            raise UserUploadError(_(
                                'User with username %(username)r is '
                                'somehow in domain %(domain)r'
                            ) % {'username': user.username, 'domain': user.domain})
                        if username and user.username != username:
                            user.change_username(username)
                        if is_password(password):
                            user.set_password(password)
                        status_row['flag'] = 'updated'
                    else:
                        if len(raw_username(username)) > CommCareAccountForm.max_len_username:
                            ret['rows'].append({
                                'username': username,
                                'row': row,
                                'flag': _("username cannot contain greater than %d characters" %
                                          CommCareAccountForm.max_len_username)
                            })
                            continue
                        if not is_password(password):
                            raise UserUploadError(_("Cannot create a new user with a blank password"))
                        user = CommCareUser.create(domain, username, password,
                                                   uuid=user_id or '', commit=False)
                        status_row['flag'] = 'created'
                    if phone_number:
                        user.add_phone_number(_fmt_phone(phone_number), default=True)
                    if name:
                        user.set_full_name(name)
                    if data:
                        user.user_data.update(data)
                    if language:
                        user.language = language
                    if email:
                        user.email = email
                    user.save()
                    if is_password(password):
                        # Without this line, digest auth doesn't work.
                        # With this line, digest auth works.
                        # Other than that, I'm not sure what's going on
                        user.get_django_user().check_password(password)
                    # Remove the user from groups no longer listed in the row,
                    # then add them to each listed (and allowed) group.
                    for group_id in Group.by_user(user, wrap=False):
                        group = group_memoizer.get(group_id)
                        if group.name not in group_names:
                            group.remove_user(user, save=False)
                    for group_name in group_names:
                        if group_name not in allowed_group_names:
                            raise UserUploadError(_(
                                "Can't add to group '%s' "
                                "(try adding it to your spreadsheet)"
                            ) % group_name)
                        group_memoizer.by_name(group_name).add_user(user, save=False)
                except (UserUploadError, CouchUser.Inconsistent) as e:
                    status_row['flag'] = '%s' % e
            ret["rows"].append(status_row)
    finally:
        # Always flush group membership changes, even when a row blew up.
        try:
            group_memoizer.save_all()
        except BulkSaveError as e:
            _error_message = (
                "Oops! We were not able to save some of your group changes. "
                "Please make sure no one else is editing your groups "
                "and try again."
            )
            logging.exception((
                'BulkSaveError saving groups. '
                'User saw error message "%s". Errors: %s'
            ) % (_error_message, e.errors))
            ret['errors'].append(_error_message)

    create_or_update_locations(domain, location_specs, log=ret)
    _set_progress(total)
    return ret
def create_or_update_commcare_users_and_groups(upload_domain, user_specs, upload_user, upload_record_id,
                                               group_memoizer=None, update_progress=None):
    """
    Creates and Updates CommCare Users
    For the associated web user username passed, for each CommCareUser
        if corresponding web user is present
            if web user has confirmed account but not a member of domain
                adds them to the domain with same role and primary location as the CommCareUser
            if already a member of domain
                update their role and primary location to be same as that of the CommCareUser
        else creates or updates user invitation
            sets Invitation with the CommCare user's role and primary location
    All changes to users only, are tracked using UserChangeLogger, as an audit trail.
    """
    from corehq.apps.user_importer.helpers import CommCareUserImporter, WebUserImporter

    domain_info_by_domain = {}

    ret = {"errors": [], "rows": []}
    current = 0
    update_deactivate_after_date = EnterpriseMobileWorkerSettings.is_domain_using_custom_deactivation(
        upload_domain)
    for row in user_specs:
        if update_progress:
            update_progress(current)
        current += 1

        username = row.get('username')
        domain = row.get('domain') or upload_domain
        username = normalize_username(str(username), domain) if username else None
        status_row = {
            'username': username,
            'row': row,
        }
        try:
            domain_info = get_domain_info(domain, upload_domain, user_specs,
                                          domain_info_by_domain, group_memoizer)
            for validator in domain_info.validators:
                validator(row)
        except UserUploadError as e:
            status_row['flag'] = str(e)
            ret['rows'].append(status_row)
            continue

        data = row.get('data', {})
        email = row.get('email')
        group_names = list(map(str, row.get('group') or []))
        language = row.get('language')
        name = row.get('name')
        password = row.get('password')
        uncategorized_data = row.get('uncategorized_data', {})
        user_id = row.get('user_id')
        # None (column absent) means "don't touch locations"; [] means "clear them"
        location_codes = row.get('location_code', []) if 'location_code' in row else None
        location_codes = format_location_codes(location_codes)
        role = row.get('role', None)
        profile = row.get('user_profile', None)
        web_user_username = row.get('web_user')
        phone_numbers = row.get('phone-number', []) if 'phone-number' in row else None

        deactivate_after = row.get('deactivate_after', None) if update_deactivate_after_date else None
        if isinstance(deactivate_after, datetime):
            deactivate_after = deactivate_after.strftime("%m-%Y")
        row['deactivate_after'] = deactivate_after
        try:
            password = str(password) if password else None
            is_active = spec_value_to_boolean_or_none(row, 'is_active')
            is_account_confirmed = spec_value_to_boolean_or_none(row, 'is_account_confirmed')
            send_account_confirmation_email = spec_value_to_boolean_or_none(
                row, 'send_confirmation_email')
            remove_web_user = spec_value_to_boolean_or_none(row, 'remove_web_user')

            user = _get_or_create_commcare_user(domain, user_id, username, is_account_confirmed,
                                                web_user_username, password, upload_user)
            commcare_user_importer = CommCareUserImporter(
                upload_domain, domain, user, upload_user,
                is_new_user=not bool(user_id),
                via=USER_CHANGE_VIA_BULK_IMPORTER,
                upload_record_id=upload_record_id)

            if user_id:
                if is_password(password):
                    commcare_user_importer.update_password(password)
                    # overwrite password in results so we do not save it to the db
                    status_row['row']['password'] = '******'
                status_row['flag'] = 'updated'
            else:
                status_row['flag'] = 'created'

            if phone_numbers is not None:
                phone_numbers = clean_phone_numbers(phone_numbers)
                commcare_user_importer.update_phone_numbers(phone_numbers)

            if name:
                commcare_user_importer.update_name(name)

            commcare_user_importer.update_user_data(data, uncategorized_data, profile, domain_info)

            if update_deactivate_after_date:
                commcare_user_importer.update_deactivate_after(deactivate_after)

            if language:
                commcare_user_importer.update_language(language)
            if email:
                commcare_user_importer.update_email(email)
            if is_active is not None:
                commcare_user_importer.update_status(is_active)

            # Do this here so that we validate the location code before we
            # save any other information to the user, this way either all of
            # the user's information is updated, or none of it
            # Do not update location info if the column is not included at all
            if domain_info.can_assign_locations and location_codes is not None:
                commcare_user_importer.update_locations(location_codes, domain_info)

            if role:
                role_qualified_id = domain_info.roles_by_name[role]
                commcare_user_importer.update_role(role_qualified_id)
            elif not commcare_user_importer.logger.is_new_user and 'role' in row:
                commcare_user_importer.update_role('none')

            if web_user_username:
                user.update_metadata({'login_as_user': web_user_username})

            user.save()
            log = commcare_user_importer.save_log()

            if web_user_username:
                check_can_upload_web_users(domain, upload_user)
                web_user = CouchUser.get_by_username(web_user_username)
                if web_user:
                    web_user_importer = WebUserImporter(
                        upload_domain, domain, web_user, upload_user,
                        is_new_user=False,
                        via=USER_CHANGE_VIA_BULK_IMPORTER,
                        upload_record_id=upload_record_id)
                    user_change_logger = web_user_importer.logger
                else:
                    web_user_importer = None
                    user_change_logger = None
                if remove_web_user:
                    remove_web_user_from_domain(domain, web_user, username, upload_user,
                                                user_change_logger)
                else:
                    check_user_role(username, role)
                    if not web_user and is_account_confirmed:
                        # bug fix: this was an f-string inside _() with a dead
                        # trailing .format(); a literal {placeholder} keeps the
                        # msgid translatable and makes the .format() effective,
                        # producing an identical runtime message
                        raise UserUploadError(_(
                            "You can only set 'Is Account Confirmed' to 'True' on an existing Web User. "
                            "{web_user_username} is a new username."
                        ).format(web_user_username=web_user_username))
                    if web_user and not web_user.is_member_of(domain) and is_account_confirmed:
                        # add confirmed account to domain
                        # role_qualified_id would be present here as confirmed in check_user_role
                        web_user_importer.add_to_domain(role_qualified_id, user.location_id)
                    elif not web_user or not web_user.is_member_of(domain):
                        create_or_update_web_user_invite(
                            web_user_username, domain, role_qualified_id, upload_user,
                            user.location_id, user_change_logger,
                            send_email=send_account_confirmation_email)
                    elif web_user.is_member_of(domain):
                        # edit existing user in the domain
                        web_user_importer.update_role(role_qualified_id)
                        if location_codes is not None:
                            web_user_importer.update_primary_location(user.location_id)
                        web_user.save()
                if web_user_importer:
                    web_user_importer.save_log()
            if send_account_confirmation_email and not web_user_username:
                send_account_confirmation_if_necessary(user)

            if is_password(password):
                # Without this line, digest auth doesn't work.
                # With this line, digest auth works.
                # Other than that, I'm not sure what's going on
                # Passing use_primary_db=True because of https://dimagi-dev.atlassian.net/browse/ICDS-465
                user.get_django_user(use_primary_db=True).check_password(password)

            group_change_message = commcare_user_importer.update_user_groups(domain_info, group_names)

            try:
                domain_info.group_memoizer.save_updated()
            except BulkSaveError as e:
                _error_message = (
                    "Oops! We were not able to save some of your group changes. "
                    "Please make sure no one else is editing your groups "
                    "and try again.")
                logging.exception(('BulkSaveError saving groups. '
                                   'User saw error message "%s". Errors: %s')
                                  % (_error_message, e.errors))
                ret['errors'].append(_error_message)

            if log and group_change_message:
                log.change_messages.update(group_change_message)
                log.save()
            elif group_change_message:
                log = commcare_user_importer.logger.save_only_group_changes(group_change_message)

        except ValidationError as e:
            status_row['flag'] = e.message
        except (UserUploadError, CouchUser.Inconsistent) as e:
            status_row['flag'] = str(e)

        ret["rows"].append(status_row)
    return ret