def test_delete(self):
    """Delete a notification through the controller and verify it is gone
    from the current user's notification list afterwards."""
    self.log_user()
    cur_user = self._get_logged_user()
    u1 = UserModel().create_or_update(username='******', password='******',
                                      email='*****@*****.**',
                                      firstname='u1', lastname='u1')
    u2 = UserModel().create_or_update(username='******', password='******',
                                      email='*****@*****.**',
                                      firstname='u2', lastname='u2')

    # make notifications
    notification = NotificationModel().create(created_by=cur_user,
                                              subject=u'test',
                                              body=u'hi there',
                                              recipients=[cur_user, u1, u2])
    Session().commit()
    # re-fetch so the recipients are attached to the current session
    u1 = User.get(u1.user_id)
    u2 = User.get(u2.user_id)

    # check DB - every recipient must see the notification
    get_notif = lambda un: [x.notification for x in un]
    self.assertEqual(get_notif(cur_user.notifications), [notification])
    self.assertEqual(get_notif(u1.notifications), [notification])
    self.assertEqual(get_notif(u2.notifications), [notification])
    cur_usr_id = cur_user.user_id

    response = self.app.delete(url('notification',
                                   notification_id=notification.notification_id))
    self.assertEqual(response.body, 'ok')

    # after deletion the logged-in user must have no notifications left
    cur_user = User.get(cur_usr_id)
    self.assertEqual(cur_user.notifications, [])
def test_add_ips(self):
    """Adding and removing an IP restriction is reflected on the
    admin permissions IP page."""
    self.log_user()
    default_user_id = User.get_default_user().user_id
    self.app.put(url('edit_user_ips', id=default_user_id),
                 params={'new_ip': '127.0.0.0/24',
                         '_authentication_token': self.authentication_token()})

    response = self.app.get(url('admin_permissions_ips'))
    response.mustcontain('127.0.0.0/24')
    response.mustcontain('127.0.0.0 - 127.0.0.255')

    ## delete
    default_user_id = User.get_default_user().user_id
    ip_map_entry = UserIpMap.query() \
        .filter(UserIpMap.user_id == default_user_id).first()
    self.app.post(url('edit_user_ips', id=default_user_id),
                  params={'_method': 'delete',
                          'del_ip_id': ip_map_entry.ip_id,
                          '_authentication_token': self.authentication_token()})

    response = self.app.get(url('admin_permissions_ips'))
    response.mustcontain('All IP addresses are allowed')
    response.mustcontain(no=['127.0.0.0/24'])
    response.mustcontain(no=['127.0.0.0 - 127.0.0.255'])
def test_extra_email_map(self):
    """Extra email mappings resolve to the owning user via get_by_email,
    while unknown addresses resolve to None."""
    usr = UserModel().create_or_update(username=u'test_user',
                                      password=u'qweqwe',
                                      email=u'*****@*****.**',
                                      firstname=u'u1', lastname=u'u1')
    Session().commit()

    mapping = UserEmailMap()
    mapping.email = u'*****@*****.**'
    mapping.user = usr
    Session().add(mapping)
    Session().commit()

    found = User.get_by_email(email='*****@*****.**')
    self.assertEqual(usr.user_id, found.user_id)
    self.assertEqual(usr.username, found.username)

    found = User.get_by_email(email='*****@*****.**')
    self.assertEqual(usr.user_id, found.user_id)
    self.assertEqual(usr.username, found.username)

    found = User.get_by_email(email='*****@*****.**')
    self.assertEqual(None, found)

    UserModel().delete(usr.user_id)
    Session().commit()
def test_enforce_groups(self, pre_existing, regular_should_be,
                        external_should_be, groups, expected):
    """Parameterized check of UserGroupModel().enforce_groups():
    given pre-existing groups, regular memberships and plugin-managed
    ('container') memberships, enforcing ``groups`` must leave the user in
    exactly ``expected`` groups."""
    # delete all groups
    for gr in UserGroupModel.get_all():
        fixture.destroy_user_group(gr)
    Session().commit()

    user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
    # groups that exist but the user is not a member of
    for gr in pre_existing:
        gr = fixture.create_user_group(gr)
    Session().commit()

    # make sure use is just in those groups
    for gr in regular_should_be:
        gr = fixture.create_user_group(gr)
        Session().commit()
        UserGroupModel().add_user_to_group(gr, user)
        Session().commit()

    # now special external groups created by auth plugins
    for gr in external_should_be:
        gr = fixture.create_user_group(gr,
                user_group_data={'extern_type': 'container'})
        Session().commit()
        UserGroupModel().add_user_to_group(gr, user)
        Session().commit()

    UserGroupModel().enforce_groups(user, groups, 'container')
    Session().commit()

    # re-fetch and compare actual membership names with the expectation
    user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
    in_groups = user.group_member
    self.assertEqual(expected,
                     [x.users_group.users_group_name for x in in_groups])
def log_pull_action(ui, repo, **kwargs):
    """
    Logs user last pull action

    :param ui: mercurial ui object (unused; part of the hook signature)
    :param repo: mercurial repo object (unused; part of the hook signature)
    :returns: 0 so mercurial treats the hook as successful
    """
    ex = _extract_extras()

    user = User.get_by_username(ex.username)
    action = 'pull'
    action_logger(user, action, ex.repository, ex.ip, commit=True)
    # extension hook call
    from kallithea import EXTENSIONS
    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
    if callable(callback):
        kw = {}
        kw.update(ex)
        callback(**kw)

    # optionally take the repo lock on behalf of the pulling user
    if ex.make_lock is not None and ex.make_lock:
        Repository.lock(Repository.get_by_repo_name(ex.repository),
                        user.user_id)
        #msg = 'Made lock on repo `%s`' % repository
        #sys.stdout.write(msg)

    # if someone holds the lock, report it to the client on stdout
    if ex.locked_by[0]:
        locked_by = User.get(ex.locked_by[0]).username
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
        if str(_http_ret.code).startswith('2'):
            #2xx Codes don't raise exceptions
            sys.stdout.write(_http_ret.title)
    return 0
def test_add_ips(self):
    """Add/remove an IP restriction like the plain variant, but exercise
    the user-cache expiry (sleeps) and spoof REMOTE_ADDR so the test client
    does not lock itself out of the admin pages."""
    self.log_user()
    default_user_id = User.get_default_user().user_id
    response = self.app.put(url('edit_user_ips', id=default_user_id),
                            params=dict(new_ip='127.0.0.0/24',
                                        _authentication_token=self.authentication_token()))

    # sleep more than beaker.cache.sql_cache_short.expire to expire user cache
    time.sleep(1.5)

    # without a matching REMOTE_ADDR we are redirected away
    self.app.get(url('admin_permissions_ips'), status=302)

    # REMOTE_ADDR must match 127.0.0.0/24
    response = self.app.get(url('admin_permissions_ips'),
                            extra_environ={'REMOTE_ADDR': '127.0.0.1'})
    response.mustcontain('127.0.0.0/24')
    response.mustcontain('127.0.0.0 - 127.0.0.255')

    ## delete
    default_user_id = User.get_default_user().user_id
    del_ip_id = UserIpMap.query() \
        .filter(UserIpMap.user_id == default_user_id).first().ip_id

    response = self.app.post(url('edit_user_ips', id=default_user_id),
                             params=dict(_method='delete',
                                         del_ip_id=del_ip_id,
                                         _authentication_token=self.authentication_token()),
                             extra_environ={'REMOTE_ADDR': '127.0.0.1'})

    # sleep more than beaker.cache.sql_cache_short.expire to expire user cache
    time.sleep(1.5)

    response = self.app.get(url('admin_permissions_ips'))
    response.mustcontain('All IP addresses are allowed')
    response.mustcontain(no=['127.0.0.0/24'])
    response.mustcontain(no=['127.0.0.0 - 127.0.0.255'])
def enforce_groups(self, user, groups, extern_type=None):
    """Make ``user``'s plugin-managed group memberships match ``groups``.

    Removes the user from externally created groups that are not in
    ``groups``, then adds the user to every group in ``groups`` (creating
    missing groups, owned by the first admin, marked with ``extern_type``).
    Groups without ``extern_type`` in their group_data are never modified.

    :param user: user object / id / username (resolved via guess_instance)
    :param groups: iterable of user group names the user should be in
    :param extern_type: auth plugin type stamped on auto-created groups
    """
    user = User.guess_instance(user)
    log.debug('Enforcing groups %s on user %s', user, groups)
    current_groups = user.group_member
    # find the external created groups
    externals = [x.users_group for x in current_groups
                 if 'extern_type' in x.users_group.group_data]

    # calculate from what groups user should be removed:
    # externals that are not in groups
    for gr in externals:
        if gr.users_group_name not in groups:
            log.debug('Removing user %s from user group %s', user, gr)
            self.remove_user_from_group(gr, user)

    # now we calculate in which groups user should be == groups params
    owner = User.get_first_admin().username
    for gr in set(groups):
        existing_group = UserGroup.get_by_group_name(gr)
        if not existing_group:
            desc = u'Automatically created from plugin:%s' % extern_type
            # we use first admin account to set the owner of the group
            existing_group = UserGroupModel().create(
                gr, desc, owner, group_data={'extern_type': extern_type})

        # we can only add users to special groups created via plugins
        managed = 'extern_type' in existing_group.group_data
        if managed:
            log.debug('Adding user %s to user group %s', user, gr)
            UserGroupModel().add_user_to_group(existing_group, user)
        else:
            # fix: lazy logging args instead of eager %-formatting,
            # consistent with every other log call in this method
            log.debug('Skipping addition to group %s since it is '
                      'not managed by auth plugins', gr)
def create(self, form_data, cur_user=None):
    """Create a new User from validated ``form_data``.

    :param form_data: dict with username/password/email/firstname/lastname/active
    :param cur_user: username performing the creation; defaults to the
        currently authenticated user's username (may be None)
    :returns: the new User instance (added to the session, not committed)
    :raises UserCreationError: via check_allowed_create_user when the
        creation is not permitted for ``cur_user``
    """
    if not cur_user:
        cur_user = getattr(get_current_authuser(), 'username', None)

    from kallithea.lib.hooks import log_create_user, check_allowed_create_user
    _fd = form_data
    user_data = {
        'username': _fd['username'],
        'password': _fd['password'],
        'email': _fd['email'],
        'firstname': _fd['firstname'],
        'lastname': _fd['lastname'],
        'active': _fd['active'],
        'admin': False
    }
    # raises UserCreationError if it's not allowed
    check_allowed_create_user(user_data, cur_user)
    from kallithea.lib.auth import get_crypt_password

    new_user = User()
    for k, v in form_data.items():
        if k == 'password':
            # never store the plaintext password
            v = get_crypt_password(v)
        if k == 'firstname':
            # DB column for the first name is called 'name'
            k = 'name'
        setattr(new_user, k, v)

    new_user.api_key = generate_api_key(form_data['username'])
    self.sa.add(new_user)

    log_create_user(new_user.get_dict(), cur_user)
    return new_user
def test_create_notification(self):
    """Creating a notification links it to exactly the requested users."""
    self.assertEqual([], Notification.query().all())
    self.assertEqual([], UserNotification.query().all())

    recipients = [self.u1, self.u2]
    notification = NotificationModel().create(created_by=self.u1,
                                              subject=u'subj',
                                              body=u'hi there',
                                              recipients=recipients)
    Session().commit()

    u1 = User.get(self.u1)
    u2 = User.get(self.u2)
    u3 = User.get(self.u3)

    all_notifications = Notification.query().all()
    self.assertEqual(len(all_notifications), 1)
    self.assertEqual(all_notifications[0].recipients, [u1, u2])
    self.assertEqual(notification.notification_id,
                     all_notifications[0].notification_id)

    linked = UserNotification.query() \
        .filter(UserNotification.notification == notification).all()
    self.assertEqual(len(linked), len(recipients))
    self.assertEqual(set(x.user.user_id for x in linked), set(recipients))
def test_init_user_attributes_from_ldap(monkeypatch, arrange_ldap_auth):
    """Authenticate unknown user with mocked LDAP, verify user is created. """
    # Arrange an unknown test user.
    uniqifier = uuid.uuid4()
    username = '******'.format(uniqifier)
    assert User.get_by_username(username) is None

    # Arrange LDAP auth.
    monkeypatch.setattr(auth_ldap, 'AuthLdap', _AuthLdapMock)

    # Authenticate with LDAP.
    user_data = authenticate(username, 'password')

    # Authentication must succeed with the attributes served by the mock.
    assert user_data is not None
    expected = {
        'firstname': u'spam ldap first name',
        'lastname': u'spam ldap last name',
        'email': 'spam ldap email',
    }
    for attr, value in expected.items():
        assert user_data.get(attr) == value

    # A new user must now exist, carrying the same LDAP attributes.
    new_user = User.get_by_username(username)
    assert new_user is not None
    assert new_user.firstname == expected['firstname']
    assert new_user.lastname == expected['lastname']
    assert new_user.email == expected['email']
def test_create_and_remove(self):
    """Deleting a user must also remove their user group memberships."""
    usr = UserModel().create_or_update(username=u'test_user',
                                      password=u'qweqwe',
                                      email=u'*****@*****.**',
                                      firstname=u'u1', lastname=u'u1')
    Session().commit()
    assert User.get_by_username(u'test_user') == usr
    assert User.get_by_username(u'test_USER', case_insensitive=True) == usr
    # User.get_by_username without explicit request for case insensitivty
    # will use database case sensitivity. The following will thus return
    # None on for example PostgreSQL but find test_user on MySQL - we are
    # fine with leaving that as undefined as long as it doesn't crash.
    User.get_by_username(u'test_USER', case_insensitive=False)

    # make user group
    user_group = fixture.create_user_group(u'some_example_group')
    Session().commit()

    UserGroupModel().add_user_to_group(user_group, usr)
    Session().commit()

    assert UserGroup.get(user_group.users_group_id) == user_group
    assert UserGroupMember.query().count() == 1
    UserModel().delete(usr.user_id)
    Session().commit()

    # membership rows must be gone together with the user
    assert UserGroupMember.query().all() == []
def set_anonymous_access(enable=True): user = User.get_default_user() user.active = enable Session().commit() print '\tanonymous access is now:', enable if enable != User.get_default_user().active: raise Exception('Cannot set anonymous access')
def test_create_notification(self):
    """Creating a notification sends a correctly-shaped email to the
    other recipient; the celery send_email task is replaced by an
    asserting stub for the duration of the create call."""
    with test_context(self.app):
        usrs = [self.u1, self.u2]

        # stub that validates the email the notification model produces
        def send_email(recipients, subject, body='', html_body='',
                       headers=None, author=None):
            assert recipients == ['*****@*****.**']
            assert subject == 'Test Message'
            assert body == u"hi there"
            assert '>hi there<' in html_body
            assert author.username == 'u1'

        with mock.patch.object(kallithea.lib.celerylib.tasks,
                               'send_email', send_email):
            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'subj',
                                                      body=u'hi there',
                                                      recipients=usrs)
            Session().commit()

        u1 = User.get(self.u1)
        u2 = User.get(self.u2)
        u3 = User.get(self.u3)

        notifications = Notification.query().all()
        assert len(notifications) == 1
        assert notifications[0].recipients == [u1, u2]
        assert notification.notification_id == notifications[0].notification_id

        unotification = UserNotification.query() \
            .filter(UserNotification.notification == notification).all()
        assert len(unotification) == len(usrs)
        assert set([x.user_id for x in unotification]) == set(usrs)
def set_anonymous_access(enable=True): user = User.get_by_username(User.DEFAULT_USER) user.active = enable Session().add(user) Session().commit() print '\tanonymous access is now:', enable if enable != User.get_by_username(User.DEFAULT_USER).active: raise Exception('Cannot set anonymous access')
def mention_from_description(self, pr, old_description=''):
    """Notify users newly @mentioned in the pull request description.

    Mentions present in ``old_description`` are subtracted so only newly
    added mentions trigger a notification.

    :param pr: pull request whose current description is scanned
    :param old_description: previous description text to diff against
    """
    mention_recipients = set(
        User.get_by_username(username, case_insensitive=True)
        for username in extract_mentioned_users(pr.description))
    mention_recipients.difference_update(
        User.get_by_username(username, case_insensitive=True)
        for username in extract_mentioned_users(old_description))
    # fix: get_by_username returns None for unknown usernames - drop those
    # instead of passing None through to __add_reviewers
    mention_recipients.discard(None)
    # fix: lazy logging args instead of eager %-formatting
    log.debug("Mentioning %s", mention_recipients)
    self.__add_reviewers(pr, [], mention_recipients)
def validate_python(self, value, state):
    """Validate that ``value`` names exactly one active user."""
    active_users = User.query() \
        .filter(User.active == True) \
        .filter(User.username == value)
    try:
        active_users.one()
    except sqlalchemy.exc.InvalidRequestError:
        # covers both NoResultFound and MultipleResultsFound
        msg = M(self, 'invalid_username', state, username=value)
        raise formencode.Invalid(msg, value, state,
                                 error_dict={'username': msg})
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional IP address from what the action was made
    :param sa: optional sqlalchemy session
    :param commit: when True, commit the session after adding the log row
    :raises Exception: if ``user`` is neither a username nor a user object
    """
    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')

    # resolve user argument: object with user_id, or plain username string
    if getattr(user, 'user_id', None):
        user_obj = User.get(user.user_id)
    elif isinstance(user, basestring):
        user_obj = User.get_by_username(user)
    else:
        raise Exception('You have to provide a user object or a username')

    # resolve repo argument: object with repo_id, repo name string, or none
    if getattr(repo, 'repo_id', None):
        repo_obj = Repository.get(repo.repo_id)
        repo_name = repo_obj.repo_name
    elif isinstance(repo, basestring):
        repo_name = repo.lstrip('/')
        repo_obj = Repository.get_by_repo_name(repo_name)
    else:
        repo_obj = None
        repo_name = u''

    user_log = UserLog()
    user_log.user_id = user_obj.user_id
    user_log.username = user_obj.username
    user_log.action = safe_unicode(action)

    user_log.repository = repo_obj
    user_log.repository_name = repo_name

    user_log.action_date = datetime.datetime.now()
    user_log.user_ip = ipaddr
    sa.add(user_log)

    log.info('Logging action:%s on %s by user:%s ip:%s',
             action, safe_unicode(repo), user_obj, ipaddr)
    if commit:
        sa.commit()
def create_user(self, name, **kwargs):
    """Return the user ``name``, creating it via UserModel if missing.

    :param name: username to look up or create
    :param kwargs: extra creation parameters; ``skip_if_exists`` is
        accepted (and discarded) for caller convenience
    :returns: the existing or newly created User
    """
    # fix: idiomatic pop with default instead of membership test + del
    kwargs.pop('skip_if_exists', None)
    user = User.get_by_username(name)
    if user:
        return user
    form_data = self._get_user_create_params(name, **kwargs)
    user = UserModel().create(form_data)
    Session().commit()
    # re-fetch so the returned instance is attached to the current session
    user = User.get_by_username(user.username)
    return user
def post(self, repo_name, pull_request_id):
    """Handle a pull request update: permission check, reviewer diffing,
    optional new iteration, then title/description/owner/reviewer updates.
    Always ends with a redirect back to the pull request page."""
    pull_request = PullRequest.get_or_404(pull_request_id)
    if pull_request.is_closed():
        raise HTTPForbidden()
    assert pull_request.other_repo.repo_name == repo_name
    #only owner or admin can update it
    owner = pull_request.owner_id == request.authuser.user_id
    repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
    if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner):
        raise HTTPForbidden()

    _form = PullRequestPostForm()().to_python(request.POST)

    cur_reviewers = set(pull_request.get_reviewer_users())
    new_reviewers = set(_get_reviewer(s) for s in _form['review_members'])
    old_reviewers = set(_get_reviewer(s) for s in _form['org_review_members'])

    # reviewers changed concurrently by someone else since the form was
    # rendered - warn, don't clobber
    other_added = cur_reviewers - old_reviewers
    other_removed = old_reviewers - cur_reviewers

    if other_added:
        h.flash(_('Meanwhile, the following reviewers have been added: %s') %
                (', '.join(u.username for u in other_added)),
                category='warning')
    if other_removed:
        h.flash(_('Meanwhile, the following reviewers have been removed: %s') %
                (', '.join(u.username for u in other_removed)),
                category='warning')

    if _form['updaterev']:
        # updating the source revision creates a whole new PR iteration
        return self.create_new_iteration(pull_request,
                                         _form['updaterev'],
                                         _form['pullrequest_title'],
                                         _form['pullrequest_desc'],
                                         new_reviewers)

    added_reviewers = new_reviewers - old_reviewers - cur_reviewers
    removed_reviewers = (old_reviewers - new_reviewers) & cur_reviewers

    old_description = pull_request.description
    pull_request.title = _form['pullrequest_title']
    pull_request.description = _form['pullrequest_desc'].strip() or \
        _('No description')
    pull_request.owner = User.get_by_username(_form['owner'])
    user = User.get(request.authuser.user_id)

    PullRequestModel().mention_from_description(user, pull_request,
                                                old_description)
    PullRequestModel().add_reviewers(user, pull_request, added_reviewers)
    PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers)

    Session().commit()
    h.flash(_('Pull request updated'), category='success')

    raise HTTPFound(location=pull_request.url())
def test_get_user_or_raise_if_default(self, monkeypatch, test_context_fixture):
    """_get_user_or_raise_if_default returns regular users and raises
    HTTPNotFound for the default user."""
    # flash complains about an non-existing session
    def flash_mock(*args, **kwargs):
        pass
    monkeypatch.setattr(h, 'flash', flash_mock)

    controller = UsersController()

    # a regular user should work correctly
    regular = User.get_by_username(TEST_USER_REGULAR_LOGIN)
    assert controller._get_user_or_raise_if_default(regular.user_id) == regular

    # the default user should raise
    with pytest.raises(HTTPNotFound):
        controller._get_user_or_raise_if_default(
            User.get_default_user().user_id)
def create_pullrequest(self, testcontroller, repo_name, pr_src_rev,
                       pr_dst_rev, title=u'title'):
    """Create a stable->default pull request in ``repo_name`` as the admin
    user, with the regular user as reviewer; returns the new PR id."""
    org_ref = 'branch:stable:%s' % pr_src_rev
    other_ref = 'branch:default:%s' % pr_dst_rev
    with test_context(testcontroller.app): # needed to be able to mock request user
        org_repo = other_repo = Repository.get_by_repo_name(repo_name)
        owner_user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
        reviewers = [User.get_by_username(TEST_USER_REGULAR_LOGIN)]
        request.authuser = request.user = AuthUser(dbuser=owner_user)
        # creating a PR sends a message with an absolute URL - without
        # routing that requires mocking
        with mock.patch.object(helpers, 'url',
                (lambda arg, qualified=False, **kwargs:
                    ('https://localhost' if qualified else '') + '/fake/' + arg)):
            cmd = CreatePullRequestAction(org_repo, other_repo, org_ref,
                                          other_ref, title, u'No description',
                                          owner_user, reviewers)
            pull_request = cmd.execute()
        Session().commit()
    return pull_request.pull_request_id
def create_test_user(force=True): print 'creating test user' sa = get_session() user = sa.query(User).filter(User.username == USER).scalar() if force and user is not None: print 'removing current user' for repo in sa.query(Repository).filter(Repository.user == user).all(): sa.delete(repo) sa.delete(user) sa.commit() if user is None or force: print 'creating new one' new_usr = User() new_usr.username = USER new_usr.password = get_crypt_password(PASS) new_usr.email = '*****@*****.**' new_usr.name = 'test' new_usr.lastname = 'lasttestname' new_usr.active = True new_usr.admin = True sa.add(new_usr) sa.commit() print 'done'
def test_my_account_add_api_keys(self, desc, lifetime):
    """Posting a new API key makes it appear on the followed page."""
    logged = self.log_user('test_regular2', 'test12')
    user = User.get(logged['user_id'])
    response = self.app.post(url('my_account_api_keys'),
                             {'description': desc, 'lifetime': lifetime})
    self.checkSessionFlash(response, 'Api key successfully created')
    try:
        response = response.follow()
        user = User.get(logged['user_id'])
        for api_key in user.api_keys:
            response.mustcontain(api_key)
    finally:
        # clean up all keys so later tests start from a blank slate
        for api_key in UserApiKeys.query().all():
            Session().delete(api_key)
        Session().commit()
def test_my_account_add_api_keys(self, desc, lifetime):
    """Posting a new API key (with CSRF token) makes it appear on the page."""
    logged = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
    user = User.get(logged['user_id'])
    response = self.app.post(url('my_account_api_keys'),
                             {'description': desc,
                              'lifetime': lifetime,
                              '_authentication_token': self.authentication_token()})
    self.checkSessionFlash(response, 'API key successfully created')
    try:
        response = response.follow()
        user = User.get(logged['user_id'])
        for api_key in user.api_keys:
            response.mustcontain(api_key)
    finally:
        # clean up all keys so later tests start from a blank slate
        for api_key in UserApiKeys.query().all():
            Session().delete(api_key)
        Session().commit()
def _get_permission_for_user(user, repo):
    """Return all UserRepoToPerm rows linking ``user`` to ``repo``."""
    repo_obj = Repository.get_by_repo_name(repo)
    user_obj = User.get_by_username(user)
    return UserRepoToPerm.query() \
        .filter(UserRepoToPerm.repository == repo_obj) \
        .filter(UserRepoToPerm.user == user_obj) \
        .all()
def permissions_setup_func(group_name='g0', perm='group.read',
                           recursive='all', user_id=None):
    """
    Resets all permissions to perm attribute
    """
    if not user_id:
        user_id = test_u1_id
        # recurse once to also reset the default user's permissions for
        # this group (the recursive call passes an explicit user_id, so
        # it does not recurse further)
        permissions_setup_func(group_name, perm, recursive,
                               user_id=User.get_default_user().user_id)
    repo_group = RepoGroup.get_by_group_name(group_name=group_name)
    if not repo_group:
        raise Exception('Cannot get group %s' % group_name)
    # Start with a baseline that current group can read recursive
    perms_updates = [[user_id, 'group.read', 'user']]
    RepoGroupModel()._update_permissions(repo_group,
                                         perms_updates=perms_updates,
                                         recursive='all', check_perms=False)
    perms_updates = [[user_id, perm, 'user']]
    RepoGroupModel()._update_permissions(repo_group,
                                         perms_updates=perms_updates,
                                         recursive=recursive,
                                         check_perms=False)
    Session().commit()
def validate_python(self, value, state):
    """Fail validation unless ``value`` is the email of an existing user."""
    if User.get_by_email(value) is None:
        msg = M(self, 'non_existing_email', state, email=value)
        raise formencode.Invalid(msg, value, state,
                                 error_dict={'email': msg})
def test_user_permissions_on_group_with_recursive_group_mode_for_default_user():
    # set permission to g0/g0_1 with recursive groups only mode, all children including
    # other groups should have this permission now set. repositories should
    # remain intact as we use groups only mode !
    recursive = 'groups'
    group = 'g0/g0_1'
    default_user_id = User.get_default_user().user_id
    permissions_setup_func(group, 'group.write', recursive=recursive,
                           user_id=default_user_id)

    # change default to get perms for default user
    _get_repo_perms = functools.partial(_get_perms, key='repositories',
                                        test_u1_id=default_user_id)
    _get_group_perms = functools.partial(_get_perms, key='repositories_groups',
                                         test_u1_id=default_user_id)

    repo_items = [x for x in _get_repo_perms(group, recursive)]
    items = [x for x in _get_group_perms(group, recursive)]
    _check_expected_count(items, repo_items, expected_count(group, True))

    # nose-style generator test: one yielded check per affected name;
    # repositories keep their read perm, groups get the new write perm
    for name, perm in repo_items:
        yield check_tree_perms, name, perm, group, 'repository.read'

    for name, perm in items:
        yield check_tree_perms, name, perm, group, 'group.write'
def test_create_on_top_level_without_permissions(self): usr = self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) # revoke user_model = UserModel() # disable fork and create on default user user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') # disable on regular user user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') Session().commit() user = User.get(usr['user_id']) repo_name = self.NEW_REPO+'no_perms' description = 'description for newly created repo' response = self.app.post(url('repos'), fixture._get_repo_create_params(repo_private=False, repo_name=repo_name, repo_type=self.REPO_TYPE, repo_description=description, _authentication_token=self.authentication_token())) response.mustcontain('no permission to create repository in root location') RepoModel().delete(repo_name) Session().commit()
def test_user_permissions_on_group_with_recursive_repo_mode_for_default_user():
    # set permission to g0/g0_1 recursive repos only mode, all children including
    # other repos should have this permission now set, inner groups are excluded!
    recursive = 'repos'
    group = 'g0/g0_1'
    perm = 'group.none'
    default_user_id = User.get_default_user().user_id

    permissions_setup_func(group, perm, recursive=recursive,
                           user_id=default_user_id)

    # change default to get perms for default user
    _get_repo_perms = functools.partial(_get_perms, key='repositories',
                                        test_u1_id=default_user_id)
    _get_group_perms = functools.partial(_get_perms, key='repositories_groups',
                                         test_u1_id=default_user_id)

    repo_items = [x for x in _get_repo_perms(group, recursive)]
    items = [x for x in _get_group_perms(group, recursive)]
    _check_expected_count(items, repo_items, expected_count(group, True))

    # nose-style generator test: repos get the 'none' perm
    for name, perm in repo_items:
        yield check_tree_perms, name, perm, group, 'repository.none'

    for name, perm in items:
        # permission is set with repos only mode, but we also change the permission
        # on the group we trigger the apply to children from, thus we need
        # to change its permission check
        old_perm = 'group.read'
        if name == group:
            old_perm = perm
        yield check_tree_perms, name, perm, group, old_perm
def test_edit(self):
    """Smoke test: the user edit page renders for the admin account."""
    self.log_user()
    admin_user = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
    response = self.app.get(base.url('edit_user', id=admin_user.user_id))
def __load_data(self):
    """Load the current user into ``c``; refuse to edit the default user."""
    c.user = User.get(request.authuser.user_id)
    if not c.user.is_default_user:
        return
    # the anonymous/default account must never be edited through this UI
    h.flash(_("You can't edit this user since it's"
              " crucial for entire application"), category='warning')
    raise HTTPFound(location=url('users'))
def __enter__(self):
    """Remember the anonymous user's state and switch it to ``status``."""
    default_user = User.get_default_user()
    self._before = default_user.active
    # NOTE: `status` comes from the enclosing scope, not from this method
    default_user.active = status
    Session().commit()
    invalidate_all_caches()
def test_fork_create_and_permissions(self):
    """Create a fork via the controller, verify flash/DB/listing, then
    check the fork disappears from the forks page once read permission is
    revoked for both the forking user and the default user."""
    self.log_user()
    fork_name = self.REPO_FORK
    description = 'fork of vcs test'
    repo_name = self.REPO
    org_repo = Repository.get_by_repo_name(repo_name)
    creation_args = {
        'repo_name': fork_name,
        'repo_group': '-1',
        'fork_parent_id': org_repo.repo_id,
        'repo_type': self.REPO_TYPE,
        'description': description,
        'private': 'False',
        'landing_rev': 'rev:tip',
        '_session_csrf_secret_token': self.session_csrf_secret_token()}
    self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
    repo = Repository.get_by_repo_name(self.REPO_FORK)
    assert repo.fork.repo_name == self.REPO

    ## run the check page that triggers the flash message
    response = self.app.get(base.url('repo_check_home',
                                     repo_name=fork_name))
    # test if we have a message that fork is ok
    self.checkSessionFlash(response,
            'Forked repository %s as <a href="/%s">%s</a>'
            % (repo_name, fork_name, fork_name))

    # test if the fork was created in the database
    fork_repo = Session().query(Repository) \
        .filter(Repository.repo_name == fork_name).one()

    assert fork_repo.repo_name == fork_name
    assert fork_repo.fork.repo_name == repo_name

    # test if the repository is visible in the list ?
    response = self.app.get(base.url('summary_home', repo_name=fork_name))
    response.mustcontain(fork_name)
    response.mustcontain(self.REPO_TYPE)
    response.mustcontain('Fork of "<a href="/%s">%s</a>"'
                         % (repo_name, repo_name))

    usr = self.log_user(self.username, self.password)['user_id']

    forks = Repository.query() \
        .filter(Repository.repo_type == self.REPO_TYPE) \
        .filter(Repository.fork_id != None).all()
    assert 1 == len(forks)

    # set read permissions for this
    RepoModel().grant_user_permission(repo=forks[0], user=usr,
                                      perm='repository.read')
    Session().commit()

    response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=repo_name))
    response.mustcontain('<div>fork of vcs test</div>')

    # remove permissions
    default_user = User.get_default_user()
    try:
        RepoModel().grant_user_permission(repo=forks[0], user=usr,
                                          perm='repository.none')
        RepoModel().grant_user_permission(repo=forks[0], user=default_user,
                                          perm='repository.none')
        Session().commit()

        # fork shouldn't be visible
        response = self.app.get(base.url(controller='forks', action='forks',
                                         repo_name=repo_name))
        response.mustcontain('There are no forks yet')
    finally:
        # restore permissions and remove the fork regardless of the outcome
        RepoModel().grant_user_permission(repo=forks[0], user=usr,
                                          perm='repository.read')
        RepoModel().grant_user_permission(repo=forks[0], user=default_user,
                                          perm='repository.read')
        RepoModel().delete(repo=forks[0])
def create(self, created_by, subject, body, recipients=None,
           type_=Notification.TYPE_MESSAGE, with_email=True,
           email_kwargs=None, repo_name=None):
    """
    Creates notification of given type

    :param created_by: int, str or User instance. User who created this
        notification
    :param subject:
    :param body:
    :param recipients: list of int, str or User objects, when None
        is given send to all admins
    :param type_: type of notification
    :param with_email: send email with this notification
    :param email_kwargs: additional dict to pass as args to email template
    :param repo_name: repo name used when rendering mentions in the email body
    :returns: the created Notification instance
    :raises Exception: if ``recipients`` is neither None nor iterable
    """
    from kallithea.lib.celerylib import tasks
    email_kwargs = email_kwargs or {}
    if recipients and not getattr(recipients, '__iter__', False):
        raise Exception('recipients must be a list or iterable')

    created_by_obj = User.guess_instance(created_by)

    recipients_objs = []
    if recipients:
        for u in recipients:
            obj = User.guess_instance(u)
            if obj is not None:
                recipients_objs.append(obj)
            else:
                # TODO: inform user that requested operation couldn't be completed
                log.error('cannot email unknown user %r', u)
        recipients_objs = set(recipients_objs)
        log.debug('sending notifications %s to %s',
                  type_, recipients_objs)
    elif recipients is None:
        # empty recipients means to all admins
        recipients_objs = User.query().filter(User.admin == True).all()
        log.debug('sending notifications %s to admins: %s',
                  type_, recipients_objs)
    #else: silently skip notification mails?

    # TODO: inform user who are notified
    notif = Notification.create(created_by=created_by_obj, subject=subject,
                                body=body, recipients=recipients_objs,
                                type_=type_)

    if not with_email:
        return notif

    #don't send email to person who created this comment
    rec_objs = set(recipients_objs).difference(set([created_by_obj]))

    headers = {}
    headers['X-Kallithea-Notification-Type'] = type_
    if 'threading' in email_kwargs:
        # build an RFC 2822 References header from the thread ids
        headers['References'] = ' '.join('<%s>' % x
                                         for x in email_kwargs['threading'])

    # send email with notification to all other participants
    for rec in rec_objs:
        ## this is passed into template
        html_kwargs = {
            'subject': subject,
            'body': h.render_w_mentions(body, repo_name),
            'when': h.fmt_date(notif.created_on),
            'user': notif.created_by_user.username,
        }

        txt_kwargs = {
            'subject': subject,
            'body': body,
            'when': h.fmt_date(notif.created_on),
            'user': notif.created_by_user.username,
        }

        html_kwargs.update(email_kwargs)
        txt_kwargs.update(email_kwargs)
        email_subject = EmailNotificationModel() \
            .get_email_description(type_, **txt_kwargs)
        email_txt_body = EmailNotificationModel() \
            .get_email_tmpl(type_, 'txt', **txt_kwargs)
        email_html_body = EmailNotificationModel() \
            .get_email_tmpl(type_, 'html', **html_kwargs)

        tasks.send_email([rec.email], email_subject, email_txt_body,
                         email_html_body, headers, author=created_by_obj)

    return notif
def create(self, text, repo, author, revision=None, pull_request=None,
           f_path=None, line_no=None, status_change=None, closing_pr=False,
           send_email=True):
    """
    Creates a new comment for either a changeset or a pull request.
    status_change and closing_pr is only for the optional email.

    Returns the created comment.

    :raises Exception: if neither ``revision`` nor ``pull_request`` is given
    """
    if not status_change and not text:
        log.warning('Missing text for comment, skipping...')
        return None

    repo = Repository.guess_instance(repo)
    author = User.guess_instance(author)
    comment = ChangesetComment()
    comment.repo = repo
    comment.author = author
    comment.text = text
    comment.f_path = f_path
    comment.line_no = line_no

    # attach the comment to either a changeset or a pull request
    if revision is not None:
        comment.revision = revision
    elif pull_request is not None:
        pull_request = PullRequest.guess_instance(pull_request)
        comment.pull_request = pull_request
    else:
        raise Exception('Please specify revision or pull_request_id')

    Session().add(comment)
    Session().flush()

    if send_email:
        (subj, body, recipients, notification_type,
         email_kwargs) = self._get_notification_data(
                            repo, comment, author,
                            comment_text=text,
                            line_no=line_no,
                            revision=revision,
                            pull_request=pull_request,
                            status_change=status_change,
                            closing_pr=closing_pr)
        email_kwargs['is_mention'] = False
        # create notification objects, and emails
        NotificationModel().create(
            created_by=author, subject=subj, body=body,
            recipients=recipients, type_=notification_type,
            email_kwargs=email_kwargs,
        )

        # separately notify users @mentioned in the comment body who are
        # not already direct recipients
        mention_recipients = extract_mentioned_users(body).difference(
                                recipients)
        if mention_recipients:
            email_kwargs['is_mention'] = True
            subj = _('[Mention]') + ' ' + subj
            # FIXME: this subject is wrong and unused!
            NotificationModel().create(created_by=author, subject=subj,
                                       body=body,
                                       recipients=mention_recipients,
                                       type_=notification_type,
                                       email_kwargs=email_kwargs)

    return comment
def send_email(recipients, subject, body='', html_body='', headers=None,
               from_name=None):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, if this is None, the defined email
        address from field 'email_to' and all admins is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    :param headers: dictionary of prepopulated e-mail headers
    :param from_name: full name to be used as sender of this mail - often a
        .full_name_or_username value
    :returns: True on success, False on any failure (logged, not raised)
    """
    assert isinstance(recipients, list), recipients
    if headers is None:
        headers = {}
    else:
        # do not modify the original headers object passed by the caller
        headers = headers.copy()

    email_config = config
    email_prefix = email_config.get('email_prefix', '')
    if email_prefix:
        subject = "%s %s" % (email_prefix, subject)

    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        recipients = [u.email for u in User.query()
                      .filter(User.admin == True).all()]
        if email_config.get('email_to') is not None:
            recipients += email_config.get('email_to').split(',')

        # If there are still no recipients, there are no admins and no
        # address configured in email_to, so return.
        if not recipients:
            log.error("No recipients specified and no fallback available.")
            return False

        log.warning("No recipients specified for '%s' - sending to admins %s",
                    subject, ' '.join(recipients))

    # SMTP sender
    envelope_from = email_config.get('app_email_from', 'Kallithea')
    # 'From' header
    if from_name is not None:
        # set From header based on from_name but with a generic e-mail address
        # In case app_email_from is in "Some Name <e-mail>" format, we first
        # extract the e-mail address.
        envelope_addr = author_email(envelope_from)
        headers['From'] = '"%s" <%s>' % (
            email.utils.quote('%s (no-reply)' % from_name), envelope_addr)

    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(email_config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    logmsg = ("Mail details:\n"
              "recipients: %s\n"
              "headers: %s\n"
              "subject: %s\n"
              "body:\n%s\n"
              "html:\n%s\n"
              % (' '.join(recipients), headers, subject, body, html_body))

    if mail_server:
        log.debug("Sending e-mail. " + logmsg)
    else:
        log.error("SMTP mail server not configured - cannot send e-mail.")
        log.warning(logmsg)
        return False

    try:
        m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body, headers=headers)
    # FIX: was a bare 'except:' which also swallowed SystemExit and
    # KeyboardInterrupt; only operational errors should be caught here.
    except Exception:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True
def user_activation_state(self):
    # auto-activation is on when the default (anonymous) user's global
    # permission set contains the 'hg.extern_activate.auto' permission
    global_perms = User.get_default_user().AuthUser.permissions['global']
    return 'hg.extern_activate.auto' in global_perms
def create_repo(form_data, cur_user):
    """Celery task: create a new repository in the DB and on the filesystem.

    :param form_data: validated repo-creation form fields
    :param cur_user: user (or user_id) creating the repo; becomes owner
    :returns: True on success; on failure the partially-created repo is
        rolled back (DB row and filesystem repo deleted) and the exception
        re-raised.
    """
    from kallithea.model.repo import RepoModel
    from kallithea.model.db import Setting
    DBS = celerylib.get_session()

    cur_user = User.guess_instance(cur_user)

    owner = cur_user
    repo_name = form_data['repo_name']
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['repo_description']
    private = form_data['repo_private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['repo_landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    copy_group_permissions = form_data.get('repo_copy_permissions')
    fork_of = form_data.get('fork_parent_id')
    state = form_data.get('repo_state', Repository.STATE_PENDING)

    # repo creation defaults, private and repo_type are filled in form
    defs = Setting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_downloads = defs.get('repo_enable_downloads')

    try:
        repo = RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions,
            copy_group_permissions=copy_group_permissions,
            enable_statistics=enable_statistics,
            enable_downloads=enable_downloads,
            state=state
        )

        action_logger(cur_user, 'user_created_repo',
                      form_data['repo_name_full'], '')

        DBS.commit()
        # now create this repo on Filesystem
        RepoModel()._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoGroup.guess_instance(repo_group),
            clone_uri=clone_uri,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(repo.get_dict(), created_by=owner.username)

        # update repo changeset caches initially
        repo.update_changeset_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        # FIX: message said "forking repository" (copy-pasted from
        # create_repo_fork) although this task creates a repo; also use
        # lazy logging args instead of eager %-formatting.
        log.warning('Exception %s occurred when creating repository, '
                    'doing cleanup...', e)
        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel()._delete_filesystem_repo(repo)
        raise

    return True
def user_activation_state(self):
    """Return True if global permissions allow automatic activation of
    externally-authenticated users ('hg.extern_activate.auto')."""
    def_user_perms = User.get_default_user().AuthUser.permissions['global']
    return 'hg.extern_activate.auto' in def_user_perms
def _authorize(self, environ, action, repo_name, ip_addr): """Authenticate and authorize user. Since we're dealing with a VCS client and not a browser, we only support HTTP basic authentication, either directly via raw header inspection, or by using container authentication to delegate the authentication to the web server. Returns (user, None) on successful authentication and authorization. Returns (None, wsgi_app) to send the wsgi_app response to the client. """ # Use anonymous access if allowed for action on repo. default_user = User.get_default_user() default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr) if default_authuser is None: log.debug( 'No anonymous access at all') # move on to proper user auth else: if self._check_permission(action, default_authuser, repo_name): return default_authuser, None log.debug( 'Not authorized to access this repository as anonymous user') username = None #============================================================== # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS #============================================================== # try to auth based on environ, container auth methods log.debug('Running PRE-AUTH for container based authentication') pre_auth = auth_modules.authenticate('', '', environ) if pre_auth is not None and pre_auth.get('username'): username = pre_auth['username'] log.debug('PRE-AUTH got %s as username', username) # If not authenticated by the container, running basic auth if not username: self.authenticate.realm = self.config['realm'] result = self.authenticate(environ) if isinstance(result, str): paste.httpheaders.AUTH_TYPE.update(environ, 'basic') paste.httpheaders.REMOTE_USER.update(environ, result) username = result else: return None, result.wsgi_application #============================================================== # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME 
#============================================================== try: user = User.get_by_username_or_email(username) except Exception: log.error(traceback.format_exc()) return None, webob.exc.HTTPInternalServerError() authuser = AuthUser.make(dbuser=user, ip_addr=ip_addr) if authuser is None: return None, webob.exc.HTTPForbidden() if not self._check_permission(action, authuser, repo_name): return None, webob.exc.HTTPForbidden() return user, None
def to_python(self, value, state):
    """Normalize submitted permission form fields.

    Splits the flat form dict into 'perms_new' (perm_new_member_* fields,
    grouped by their positional suffix) and 'perms_updates' (u_perm_* /
    g_perm_* fields), then verifies every new member names an existing
    active user or user group.

    :raises formencode.Invalid: when a new member cannot be resolved
    """
    perms_update = OrderedSet()
    perms_new = OrderedSet()
    # build a list of permission to update and new permission to create

    # CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using
    new_perms_group = defaultdict(dict)
    for k, v in value.copy().iteritems():
        if k.startswith('perm_new_member'):
            del value[k]
            _type, part = k.split('perm_new_member_')
            args = part.split('_')
            if len(args) == 1:
                new_perms_group[args[0]]['perm'] = v
            elif len(args) == 2:
                _key, pos = args
                new_perms_group[pos][_key] = v

    # fill new permissions in order of how they were added
    for k in sorted(map(int, new_perms_group.keys())):
        perm_dict = new_perms_group[str(k)]
        new_member = perm_dict.get('name')
        new_perm = perm_dict.get('perm')
        new_type = perm_dict.get('type')
        if new_member and new_perm and new_type:
            perms_new.add((new_member, new_perm, new_type))

    for k, v in value.iteritems():
        if k.startswith('u_perm_') or k.startswith('g_perm_'):
            member = k[7:]
            t = {'u': 'user', 'g': 'users_group'}[k[0]]
            if member == User.DEFAULT_USER:
                if str2bool(value.get('repo_private')):
                    # set none for default when updating to
                    # private repo protects against form manipulation
                    v = EMPTY_PERM
            perms_update.add((member, v, t))

    value['perms_updates'] = list(perms_update)
    value['perms_new'] = list(perms_new)

    # update permissions
    for k, v, t in perms_new:
        try:
            # FIX: was `t is 'user'` / `t is 'users_group'` - identity
            # comparison with string literals only worked by CPython
            # interning accident; use equality.
            if t == 'user':
                self.user_db = User.query() \
                    .filter(User.active == True) \
                    .filter(User.username == k).one()
            if t == 'users_group':
                self.user_db = UserGroup.query() \
                    .filter(UserGroup.users_group_active == True) \
                    .filter(UserGroup.users_group_name == k).one()
        except Exception:
            log.exception('Updated permission failed')
            msg = M(self, 'perm_new_member_type', state)
            raise formencode.Invalid(
                msg, value, state,
                error_dict=dict(perm_new_member_name=msg)
            )
    return value
def __exit__(self, exc_type, exc_val, exc_tb):
    # restore the anonymous (default) user's 'active' flag that was
    # saved when the context was entered
    default_user = User.get_default_user()
    default_user.active = self._before
    Session().commit()
def create_nodes(self, user, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Commits specified nodes to repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; may be None/empty, then this is
        an initial commit
    :param author: author of commit, can be different from the committer
        (only for git)
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        # read content with the caller's key before sanitizing the path
        content = nodes[f_path]['content']
        f_path = self._sanitize_path(f_path)
        f_path = safe_str(f_path)
        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        if isinstance(content, (basestring,)):
            content = safe_str(content)
        elif isinstance(content, (file, cStringIO.OutputType,)):
            content = content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(content)))
        processed_nodes.append((f_path, content))

    message = safe_unicode(message)
    committer = user.full_contact
    author = safe_unicode(author) if author else committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for path, content in processed_nodes:
        imc.add(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    return tip
def create(self, description, owner, ip_addr, gist_mapping,
           gist_type=Gist.GIST_PUBLIC, lifetime=-1):
    """Create a gist: a DB record plus a backing one-commit hg repository.

    :param description: description of the gist
    :param owner: user who created this gist
    :param ip_addr: IP address of the creator (passed to commit creation)
    :param gist_mapping: mapping {filename:{'content':content},...}
    :param gist_type: type of gist private/public
    :param lifetime: in minutes, -1 == forever
    :returns: the new Gist DB object
    """
    owner = User.guess_instance(owner)
    gist_access_id = make_gist_access_id()
    lifetime = safe_int(lifetime, -1)
    # -1 means "never expires"; otherwise store an absolute unix timestamp
    gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
    log.debug('set GIST expiration date to: %s',
              time_to_datetime(gist_expires)
              if gist_expires != -1 else 'forever')
    # create the Database version
    gist = Gist()
    gist.gist_description = description
    gist.gist_access_id = gist_access_id
    gist.owner_id = owner.user_id
    gist.gist_expires = gist_expires
    gist.gist_type = gist_type
    Session().add(gist)
    Session().flush()  # make database assign gist.gist_id
    if gist_type == Gist.GIST_PUBLIC:
        # use DB ID for easy to use GIST ID
        gist.gist_access_id = str(gist.gist_id)

    log.debug('Creating new %s GIST repo %s', gist_type, gist.gist_access_id)
    repo = RepoModel()._create_filesystem_repo(
        repo_name=gist.gist_access_id, repo_type='hg',
        repo_group=GIST_STORE_LOC)

    processed_mapping = {}
    for filename in gist_mapping:
        # gists are flat: reject any path component
        if filename != os.path.basename(filename):
            raise Exception('Filename cannot be inside a directory')

        content = gist_mapping[filename]['content']
        # TODO: expand support for setting explicit lexers
        # if lexer is None:
        #     try:
        #         guess_lexer = pygments.lexers.guess_lexer_for_filename
        #         lexer = guess_lexer(filename,content)
        #     except pygments.util.ClassNotFound:
        #         lexer = 'text'
        processed_mapping[filename] = {'content': content}

    # now create single multifile commit
    message = 'added file'
    message += 's: ' if len(processed_mapping) > 1 else ': '
    message += ', '.join([x for x in processed_mapping])

    # fake Kallithea Repository object
    fake_repo = AttributeDict(dict(
        repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
        scm_instance_no_cache=lambda: repo,
    ))

    # NOTE(review): create_nodes is called with an ip_addr kwarg that the
    # ScmModel.create_nodes signature elsewhere in this codebase does not
    # declare - verify against the ScmModel version actually deployed.
    ScmModel().create_nodes(
        user=owner.user_id, ip_addr=ip_addr,
        repo=fake_repo,
        message=message,
        nodes=processed_mapping,
        trigger_push_hook=False
    )

    self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
                         owner.user_id, gist.gist_type, gist.gist_expires)

    return gist
def update_nodes(self, user, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Applies add/modify/delete operations to specified nodes as one commit.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {old_filename: {'op': 'add'|'del'|'mod',
        'filename': new_filename, 'content': content}, ...}
    :param parent_cs: parent changeset; may be None/empty, then this is
        an initial commit
    :param author: author of commit, can be different from the committer
        (only for git)
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    message = safe_unicode(message)
    committer = user.full_contact
    author = safe_unicode(author) if author else committer

    imc_class = self._get_IMC_module(scm_instance.alias)
    imc = imc_class(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for _filename, data in nodes.items():
        # new filename, can be renamed from the old one
        filename = self._sanitize_path(data['filename'])
        old_filename = self._sanitize_path(_filename)
        content = data['content']

        filenode = FileNode(old_filename, content=content)
        op = data['op']
        if op == 'add':
            imc.add(filenode)
        elif op == 'del':
            imc.remove(filenode)
        elif op == 'mod':
            if filename != old_filename:
                # TODO: handle renames, needs vcs lib changes
                imc.remove(filenode)
                imc.add(FileNode(filename, content=content))
            else:
                imc.change(filenode)

    # commit changes
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    # FIX: previously returned None; return the new tip for consistency
    # with create_nodes/delete_nodes (backward compatible for callers
    # that ignored the return value).
    return tip
def test_edit_ip_default_user(self):
    """Editing IP restrictions of the built-in default user is not allowed."""
    self.log_user()
    default_user = User.get_default_user()
    response = self.app.get(
        base.url('edit_user_ips', id=default_user.user_id),
        status=404)
def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Deletes specified nodes from repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; may be None/empty, then this is
        an initial commit
    :param author: author of commit, can be different from the committer
        (only for git)
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset after deletion
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        # content can be empty but for compatibility it allows same dicts
        # structure as add_nodes
        # FIX: read the content with the caller-supplied key BEFORE
        # sanitizing it - the old code did nodes[f_path] after
        # f_path = self._sanitize_path(f_path), raising KeyError whenever
        # sanitization changed the key (consistent with create_nodes now).
        content = nodes[f_path].get('content')
        f_path = self._sanitize_path(f_path)
        processed_nodes.append((f_path, content))

    message = safe_unicode(message)
    committer = user.full_contact
    author = safe_unicode(author) if author else committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for path, content in processed_nodes:
        imc.remove(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    return tip
def teardown_class(cls):
    # clean up the test user if it is still present in the database
    leftover = User.get_by_username(cls.test_user_1)
    if leftover:
        UserModel().delete(cls.test_user_1)
        Session().commit()
def _dispatch(self, state, remainder=None):
    """
    Parse the request body as JSON, look up the method on the
    controller and if it exists, dispatch to it.
    """
    # Since we are here we should respond as JSON
    response.content_type = 'application/json'

    environ = state.request.environ
    start = time.time()
    ip_addr = self._get_ip_addr(environ)
    self._req_id = None

    if 'CONTENT_LENGTH' not in environ:
        log.debug("No Content-Length")
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message="No Content-Length in request")
    else:
        length = environ['CONTENT_LENGTH'] or 0
        # NOTE(review): the 'or 0' fallback above is immediately discarded -
        # int(environ['CONTENT_LENGTH']) raises ValueError if the header is
        # an empty string; this probably should be int(length).
        length = int(environ['CONTENT_LENGTH'])
        log.debug('Content-Length: %s', length)

        if length == 0:
            raise JSONRPCErrorResponse(retid=self._req_id,
                                       message="Content-Length is 0")

    raw_body = environ['wsgi.input'].read(length)

    try:
        json_body = ext_json.loads(raw_body)
    except ValueError as e:
        # catch JSON errors Here
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message="JSON parse error ERR:%s RAW:%r" % (e, raw_body))

    # check AUTH based on API key
    try:
        self._req_api_key = json_body['api_key']
        self._req_id = json_body['id']
        self._req_method = json_body['method']
        self._request_params = json_body['args']
        if not isinstance(self._request_params, dict):
            self._request_params = {}

        log.debug('method: %s, params: %s',
                  self._req_method, self._request_params)
    except KeyError as e:
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message='Incorrect JSON query missing %s' % e)

    # check if we can find this session using api_key
    try:
        u = User.get_by_api_key(self._req_api_key)
        auth_user = AuthUser.make(dbuser=u, ip_addr=ip_addr)
        if auth_user is None:
            raise JSONRPCErrorResponse(retid=self._req_id,
                                       message='Invalid API key')
    except Exception as e:
        # deliberately opaque error: do not leak why the key was rejected
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message='Invalid API key')

    request.authuser = auth_user
    request.ip_addr = ip_addr

    self._error = None
    try:
        self._func = self._find_method()
    except AttributeError as e:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message=str(e))

    # now that we have a method, add self._req_params to
    # self.kargs and dispatch control to WGIController
    argspec = inspect.getfullargspec(self._func)
    arglist = argspec.args[1:]
    argtypes = [type(arg) for arg in argspec.defaults or []]
    default_empty = type(NotImplemented)

    # kw arguments required by this method
    func_kwargs = dict(itertools.zip_longest(reversed(arglist),
                                             reversed(argtypes),
                                             fillvalue=default_empty))

    # This attribute will need to be first param of a method that uses
    # api_key, which is translated to instance of user at that name
    USER_SESSION_ATTR = 'apiuser'

    # get our arglist and check if we provided them as args
    for arg, default in func_kwargs.items():
        if arg == USER_SESSION_ATTR:
            # USER_SESSION_ATTR is something translated from API key and
            # this is checked before so we don't need validate it
            continue

        # skip the required param check if it's default value is
        # NotImplementedType (default_empty)
        if default == default_empty and arg not in self._request_params:
            raise JSONRPCErrorResponse(
                retid=self._req_id,
                message='Missing non optional `%s` arg in JSON DATA' % arg,
            )

    # reject unexpected parameters as well
    extra = set(self._request_params).difference(func_kwargs)
    if extra:
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message='Unknown %s arg in JSON DATA' %
                    ', '.join('`%s`' % arg for arg in extra),
        )

    self._rpc_args = {}
    self._rpc_args.update(self._request_params)
    self._rpc_args['action'] = self._req_method
    self._rpc_args['environ'] = environ

    log.info('IP: %s Request to %s time: %.3fs' % (
        self._get_ip_addr(environ),
        get_path_info(environ), time.time() - start)
    )

    state.set_action(self._rpc_call, [])
    state.set_params(self._rpc_args)
    return state
def test_ips(self):
    """A regular user with no IP restrictions shows the 'all allowed' text."""
    self.log_user()
    regular_user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
    ips_page = self.app.get(base.url('edit_user_ips',
                                     id=regular_user.user_id))
    ips_page.mustcontain('All IP addresses are allowed')
def __get_user(self, username):
    """Return the User database row for *username*."""
    user_row = User.get_by_username(username)
    return user_row
def _get_notification_data(self, repo, comment, author, comment_text,
                           line_no=None, revision=None, pull_request=None,
                           status_change=None, closing_pr=False):
    """Build the notification payload for a changeset or pull-request comment.

    Exactly one of `revision` / `pull_request` is expected to be set,
    selecting which branch below fills in the data.

    :returns: tuple (subj,body,recipients,notification_type,email_kwargs)
    """
    # make notification
    body = comment_text  # text of the comment
    line = ''
    if line_no:
        line = _('on line %s') % line_no

    # changeset
    if revision:
        notification_type = NotificationModel.TYPE_CHANGESET_COMMENT
        cs = repo.scm_instance.get_changeset(revision)
        desc = cs.short_id

        # message-ids used for e-mail threading (References header)
        threading = ['%s-rev-%s@%s' % (repo.repo_name, revision,
                                       h.canonical_hostname())]
        if line_no:  # TODO: url to file _and_ line number
            threading.append('%s-rev-%s-line-%s@%s' % (
                repo.repo_name, revision, line_no,
                h.canonical_hostname()))
        comment_url = h.canonical_url('changeset_home',
            repo_name=repo.repo_name,
            revision=revision,
            anchor='comment-%s' % comment.comment_id)
        subj = h.link_to(
            'Re changeset: %(desc)s %(line)s' % {'desc': desc,
                                                 'line': line},
            comment_url)
        # get the current participants of this changeset
        recipients = _list_changeset_commenters(revision)
        # add changeset author if it's known locally
        cs_author = User.get_from_cs_author(cs.author)
        if not cs_author:
            # use repo owner if we cannot extract the author correctly
            # FIXME: just use committer name even if not a user
            cs_author = repo.owner
        recipients.append(cs_author)

        email_kwargs = {
            'status_change': status_change,
            'cs_comment_user': author.full_name_and_username,
            'cs_target_repo': h.canonical_url('summary_home',
                                              repo_name=repo.repo_name),
            'cs_comment_url': comment_url,
            'cs_url': h.canonical_url('changeset_home',
                                      repo_name=repo.repo_name,
                                      revision=revision),
            'raw_id': revision,
            'message': cs.message,
            'message_short': h.shorter(cs.message, 50, firstline=True),
            'cs_author': cs_author,
            'repo_name': repo.repo_name,
            'short_id': h.short_id(revision),
            'branch': cs.branch,
            'comment_username': author.username,
            'threading': threading,
        }
    # pull request
    elif pull_request:
        notification_type = NotificationModel.TYPE_PULL_REQUEST_COMMENT
        desc = comment.pull_request.title
        # refs are stored as "type:name:rev" strings
        _org_ref_type, org_ref_name, _org_rev = \
            comment.pull_request.org_ref.split(':')
        _other_ref_type, other_ref_name, _other_rev = \
            comment.pull_request.other_ref.split(':')
        threading = ['%s-pr-%s@%s' % (pull_request.other_repo.repo_name,
                                      pull_request.pull_request_id,
                                      h.canonical_hostname())]
        if line_no:  # TODO: url to file _and_ line number
            threading.append('%s-pr-%s-line-%s@%s' % (
                pull_request.other_repo.repo_name,
                pull_request.pull_request_id, line_no,
                h.canonical_hostname()))
        comment_url = pull_request.url(canonical=True,
            anchor='comment-%s' % comment.comment_id)
        subj = h.link_to(
            'Re pull request %(pr_nice_id)s: %(desc)s %(line)s' %
            {'desc': desc,
             'pr_nice_id': comment.pull_request.nice_id(),
             'line': line},
            comment_url)
        # get the current participants of this pull request
        recipients = _list_pull_request_commenters(pull_request)
        recipients.append(pull_request.owner)
        recipients += pull_request.get_reviewer_users()

        # set some variables for email notification
        email_kwargs = {
            'pr_title': pull_request.title,
            'pr_title_short': h.shorter(pull_request.title, 50),
            'pr_nice_id': pull_request.nice_id(),
            'status_change': status_change,
            'closing_pr': closing_pr,
            'pr_comment_url': comment_url,
            'pr_url': pull_request.url(canonical=True),
            'pr_comment_user': author.full_name_and_username,
            'pr_target_repo': h.canonical_url('summary_home',
                repo_name=pull_request.other_repo.repo_name),
            'pr_target_branch': other_ref_name,
            'pr_source_repo': h.canonical_url('summary_home',
                repo_name=pull_request.org_repo.repo_name),
            'pr_source_branch': org_ref_name,
            'pr_owner': pull_request.owner,
            'pr_owner_username': pull_request.owner.username,
            'repo_name': pull_request.other_repo.repo_name,
            'comment_username': author.username,
            'threading': threading,
        }

    return subj, body, recipients, notification_type, email_kwargs
def _get_logged_user(self):
    """Look up the DB row of the user that log_user() signed in."""
    username = self._logged_username
    return User.get_by_username(username)
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hooks=False, user=None,
                   overwrite_git_hooks=False):
    """
    Maps all repos given in initial_repo_list; non-existing repositories
    are created. If remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hooks: if this is True, also check and install git hook
        for a repo if missing
    :param user: owner for newly created repos; defaults to the first admin
    :param overwrite_git_hooks: if this is True, overwrite any existing git
        hooks that may be encountered (even if user-deployed)
    :returns: tuple (added, removed) of repository names
    """
    from kallithea.model.repo import RepoModel
    from kallithea.model.scm import ScmModel
    sa = meta.Session()
    repo_model = RepoModel()
    if user is None:
        user = User.get_first_admin()
    added = []

    ##creation defaults
    defs = Setting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in Kallithea database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure it has githook
            # installed, and updated server info
            if new_repo.repo_type == 'git':
                git_repo = new_repo.scm_instance
                ScmModel().install_git_hooks(git_repo)
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()
            new_repo.update_changeset_cache()
        elif install_git_hooks:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hooks(
                    db_repo.scm_instance,
                    force_create=overwrite_git_hooks)

    removed = []
    # remove from database those repositories that are not in the filesystem
    unicode_initial_repo_list = set(safe_unicode(name)
                                    for name in initial_repo_list)
    for repo in sa.query(Repository).all():
        if repo.repo_name not in unicode_initial_repo_list:
            if remove_obsolete:
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel().delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                except Exception:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
                removed.append(repo.repo_name)
    return added, removed
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods.

    :param form_data: fork form fields (repo_name, repo_name_full,
        fork_parent_id, ...)
    :param cur_user: user (or user_id) performing the fork; becomes owner
    :returns: True on success; on failure the partially-created fork is
        rolled back and the exception re-raised.
    """
    from kallithea.model.repo import RepoModel
    DBS = celerylib.get_session()

    base_path = kallithea.CONFIG['base_path']
    cur_user = User.guess_instance(cur_user)

    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')

    try:
        fork_of = Repository.guess_instance(form_data.get('fork_parent_id'))

        RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=form_data['description'],
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions
        )
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
                      fork_of.repo_name, '')
        DBS.commit()

        # the fork is cloned locally from the parent's working copy
        source_repo_path = os.path.join(base_path, fork_of.repo_name)

        # now create this repo on Filesystem
        RepoModel()._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoGroup.guess_instance(repo_group),
            clone_uri=source_repo_path,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(repo.get_dict(), created_by=owner.username)

        # update repo changeset caches initially
        repo.update_changeset_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when forking repository, '
                    'doing cleanup...' % e)
        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel()._delete_filesystem_repo(repo)
        raise

    return True
def test_dump_html_mails(self):
    # Exercise all notification types and dump them to one big html file
    l = []  # collects the rendered mail fragments in send order

    def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
        # replacement for tasks.send_email: instead of mailing, append an
        # html rendering of the mail to l
        l.append('<hr/>\n')
        l.append('<h1>%s</h1>\n' % desc)  # desc is from outer scope
        l.append('<pre>\n')
        l.append('From: %s <*****@*****.**>\n' % from_name)
        l.append('To: %s\n' % ' '.join(recipients))
        l.append('Subject: %s\n' % subject)
        l.append('</pre>\n')
        l.append('<hr/>\n')
        l.append('<pre>%s</pre>\n' % body)
        l.append('<hr/>\n')
        l.append(html_body)
        l.append('<hr/>\n')

    with test_context(self.app):
        with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
            # kwargs shared by the two pull-request notification types
            pr_kwargs = dict(
                pr_nice_id='#7',
                pr_title='The Title',
                pr_title_short='The Title',
                pr_url='http://pr.org/7',
                pr_target_repo='http://mainline.com/repo',
                pr_target_branch='trunk',
                pr_source_repo='https://dev.org/repo',
                pr_source_branch='devbranch',
                pr_owner=User.get(self.u2),
                pr_owner_username='******')

            for type_, body, kwargs in [
                    (NotificationModel.TYPE_CHANGESET_COMMENT,
                     'This is the new \'comment\'.\n\n - and here it ends indented.',
                     dict(
                        short_id='cafe1234',
                        raw_id='cafe1234c0ffeecafe',
                        branch='brunch',
                        cs_comment_user='******',
                        cs_comment_url='http://comment.org',
                        is_mention=[False, True],
                        message='This changeset did something clever which is hard to explain',
                        message_short='This changeset did something cl...',
                        status_change=[None, 'Approved'],
                        cs_target_repo='http://example.com/repo_target',
                        cs_url='http://changeset.com',
                        cs_author=User.get(self.u2))),
                    (NotificationModel.TYPE_MESSAGE,
                     'This is the \'body\' of the "test" message\n - nothing interesting here except indentation.',
                     dict()),
                    #(NotificationModel.TYPE_MENTION, '$body', None), # not used
                    (NotificationModel.TYPE_REGISTRATION,
                     'Registration body',
                     dict(
                        new_username='******',
                        registered_user_url='http://newbie.org',
                        new_email='*****@*****.**',
                        new_full_name='New Full Name')),
                    (NotificationModel.TYPE_PULL_REQUEST,
                     'This PR is \'awesome\' because it does <stuff>\n - please approve indented!',
                     dict(
                        pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ...
                        is_mention=[False, True],
                        pr_revisions=[('123abc' * 7, "Introduce one and two\n\nand that's it"),
                                      ('567fed' * 7, 'Make one plus two equal tree')],
                        org_repo_name='repo_org',
                        **pr_kwargs)),
                    (NotificationModel.TYPE_PULL_REQUEST_COMMENT,
                     'Me too!\n\n - and indented on second line',
                     dict(
                        closing_pr=[False, True],
                        is_mention=[False, True],
                        pr_comment_user='******',
                        pr_comment_url='http://pr.org/comment',
                        status_change=[None, 'Under Review'],
                        **pr_kwargs)),
                    ]:
                kwargs['repo_name'] = 'repo/name'
                # start with one param set; list-valued kwargs below fan it
                # out into one variant per listed value
                params = [(type_, type_, body, kwargs)]
                for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general
                    if not isinstance(kwargs.get(param_name), list):
                        continue
                    new_params = []
                    for v in kwargs[param_name]:
                        for desc, type_, body, kwargs in params:
                            kwargs = dict(kwargs)
                            kwargs[param_name] = v
                            new_params.append(('%s, %s=%r' % (desc, param_name, v),
                                               type_, body, kwargs))
                    params = new_params

                for desc, type_, body, kwargs in params: # desc is used as "global" variable
                    NotificationModel().create(created_by=self.u1,
                                               subject='unused', body=body,
                                               email_kwargs=kwargs,
                                               recipients=[self.u2],
                                               type_=type_)

            # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly:
            desc = 'TYPE_PASSWORD_RESET'
            kwargs = dict(user='******',
                          reset_token='decbf64715098db5b0bd23eab44bd792670ab746',
                          reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746')
            kallithea.lib.celerylib.tasks.send_email(
                ['*****@*****.**'],
                "Password reset link",
                EmailNotificationModel().get_email_tmpl(
                    EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs),
                EmailNotificationModel().get_email_tmpl(
                    EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs),
                from_name=User.get(self.u1).full_name_or_username)

    # comment out structural html tags from the mail bodies so the dump
    # itself stays a single valid html document
    out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \
        re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l))

    outfn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.out.html')
    reffn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.ref.html')
    with open(outfn, 'w') as f:
        f.write(out)
    with open(reffn) as f:
        ref = f.read()
    assert ref == out # copy test_dump_html_mails.out.html to test_dump_html_mails.ref.html to update expectations
    os.unlink(outfn)
def get_unread_for_user(self, user):
    """Return the Notification objects the given user has not read yet."""
    user = User.guess_instance(user)
    unread_q = UserNotification.query() \
        .filter(UserNotification.user == user) \
        .filter(UserNotification.read == False)
    return [user_notification.notification
            for user_notification in unread_q.all()]
def get_unread_cnt_for_user(self, user):
    """Return how many unread notifications the given user has."""
    user = User.guess_instance(user)
    unread_q = UserNotification.query() \
        .filter(UserNotification.user == user) \
        .filter(UserNotification.read == False)
    return unread_q.count()
def _cached_perms_data(user_id, user_is_admin, user_inherit_default_permissions,
                       explicit, algo):
    """
    Compute the effective permission structure for one user.

    Returns a dict with keys 'repositories', 'repositories_groups' and
    'user_groups' (each mapping a name to a permission string) plus
    'global' (a set of global permission names).

    Layering order: defaults from the 'default' user, then user-group
    permissions, then the user's own explicit permissions. Conflicts from
    multiple sources are merged by ``_choose_perm`` according to ``algo``.

    :param user_id: id of the user to compute permissions for
    :param user_is_admin: admins short-circuit to admin on everything
    :param user_inherit_default_permissions: if False, the configurable
        global defaults are dropped and replaced by the user's explicit ones
    :param explicit: if True, the user's explicit repo/group permissions are
        taken as-is instead of merged with earlier layers
    :param algo: 'higherwin' or 'lowerwin' - which permission wins a conflict
    """
    RK = 'repositories'
    GK = 'repositories_groups'
    UK = 'user_groups'
    GLOBAL = 'global'
    PERM_WEIGHTS = Permission.PERM_WEIGHTS
    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}

    def _choose_perm(new_perm, cur_perm):
        # merge two conflicting permissions by weight, per `algo`
        # NOTE(review): falls through to None for any other algo value -
        # presumably callers only ever pass 'higherwin'/'lowerwin'; verify
        new_perm_val = PERM_WEIGHTS[new_perm]
        cur_perm_val = PERM_WEIGHTS[cur_perm]
        if algo == 'higherwin':
            if new_perm_val > cur_perm_val:
                return new_perm
            return cur_perm
        elif algo == 'lowerwin':
            if new_perm_val < cur_perm_val:
                return new_perm
            return cur_perm

    #======================================================================
    # fetch default permissions
    #======================================================================
    default_user = User.get_by_username('default', cache=True)
    default_user_id = default_user.user_id

    default_repo_perms = Permission.get_default_perms(default_user_id)
    default_repo_groups_perms = Permission.get_default_group_perms(
        default_user_id)
    default_user_group_perms = Permission.get_default_user_group_perms(
        default_user_id)

    if user_is_admin:
        #==================================================================
        # admin users have all rights;
        # based on default permissions, just set everything to admin
        #==================================================================
        permissions[GLOBAL].add('hg.admin')
        permissions[GLOBAL].add('hg.create.write_on_repogroup.true')

        # repositories
        for perm in default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            p = 'repository.admin'
            permissions[RK][r_k] = p

        # repository groups
        for perm in default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            p = 'group.admin'
            permissions[GK][rg_k] = p

        # user groups
        for perm in default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            p = 'usergroup.admin'
            permissions[UK][u_k] = p
        return permissions

    #==================================================================
    # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS
    #==================================================================

    # default global permissions taken from the default user
    default_global_perms = UserToPerm.query() \
        .filter(UserToPerm.user_id == default_user_id) \
        .options(joinedload(UserToPerm.permission))

    for perm in default_global_perms:
        permissions[GLOBAL].add(perm.permission.permission_name)

    # defaults for repositories, taken from default user
    for perm in default_repo_perms:
        r_k = perm.UserRepoToPerm.repository.repo_name
        if perm.Repository.private and not (perm.Repository.owner_id == user_id):
            # disable defaults for private repos,
            p = 'repository.none'
        elif perm.Repository.owner_id == user_id:
            # set admin if owner
            p = 'repository.admin'
        else:
            p = perm.Permission.permission_name
        permissions[RK][r_k] = p

    # defaults for repository groups taken from default user permission
    # on given group
    for perm in default_repo_groups_perms:
        rg_k = perm.UserRepoGroupToPerm.group.group_name
        p = perm.Permission.permission_name
        permissions[GK][rg_k] = p

    # defaults for user groups taken from default user permission
    # on given user group
    for perm in default_user_group_perms:
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
        p = perm.Permission.permission_name
        permissions[UK][u_k] = p

    #======================================================================
    # !! OVERRIDE GLOBALS !! with user permissions if any found
    #======================================================================
    # those can be configured from groups or users explicitly
    _configurable = set([
        'hg.fork.none', 'hg.fork.repository',
        'hg.create.none', 'hg.create.repository',
        'hg.usergroup.create.false', 'hg.usergroup.create.true'
    ])

    # USER GROUPS comes first
    # user group global permissions
    user_perms_from_users_groups = Session().query(UserGroupToPerm) \
        .options(joinedload(UserGroupToPerm.permission)) \
        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
               UserGroupMember.users_group_id)) \
        .filter(UserGroupMember.user_id == user_id) \
        .join((UserGroup, UserGroupMember.users_group_id ==
               UserGroup.users_group_id)) \
        .filter(UserGroup.users_group_active == True) \
        .order_by(UserGroupToPerm.users_group_id) \
        .all()

    # need to group here by groups since user can be in more than
    # one group
    _grouped = [[x, list(y)] for x, y in itertools.groupby(
        user_perms_from_users_groups, lambda x: x.users_group)]
    for gr, perms in _grouped:
        # since user can be in multiple groups iterate over them and
        # select the lowest permissions first (more explicit)
        ##TODO: do this^^
        if not gr.inherit_default_permissions:
            # NEED TO IGNORE all configurable permissions and
            # replace them with explicitly set
            permissions[GLOBAL] = permissions[GLOBAL] \
                .difference(_configurable)
        for perm in perms:
            permissions[GLOBAL].add(perm.permission.permission_name)

    # user specific global permissions
    user_perms = Session().query(UserToPerm) \
        .options(joinedload(UserToPerm.permission)) \
        .filter(UserToPerm.user_id == user_id).all()

    if not user_inherit_default_permissions:
        # NEED TO IGNORE all configurable permissions and
        # replace them with explicitly set
        permissions[GLOBAL] = permissions[GLOBAL] \
            .difference(_configurable)
    for perm in user_perms:
        permissions[GLOBAL].add(perm.permission.permission_name)
    ## END GLOBAL PERMISSIONS

    #======================================================================
    # !! PERMISSIONS FOR REPOSITORIES !!
    #======================================================================
    #======================================================================
    # check if user is part of user groups for this repository and
    # fill in his permission from it. _choose_perm decides of which
    # permission should be selected based on selected method
    #======================================================================

    # user group for repositories permissions
    user_repo_perms_from_users_groups = \
        Session().query(UserGroupRepoToPerm, Permission, Repository,) \
        .join((Repository, UserGroupRepoToPerm.repository_id ==
               Repository.repo_id)) \
        .join((Permission, UserGroupRepoToPerm.permission_id ==
               Permission.permission_id)) \
        .join((UserGroup, UserGroupRepoToPerm.users_group_id ==
               UserGroup.users_group_id)) \
        .filter(UserGroup.users_group_active == True) \
        .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
               UserGroupMember.users_group_id)) \
        .filter(UserGroupMember.user_id == user_id) \
        .all()

    # multiple_counter tracks how many groups granted a perm on the same
    # repo; only from the second hit on is _choose_perm merging needed
    multiple_counter = collections.defaultdict(int)
    for perm in user_repo_perms_from_users_groups:
        r_k = perm.UserGroupRepoToPerm.repository.repo_name
        multiple_counter[r_k] += 1
        p = perm.Permission.permission_name
        cur_perm = permissions[RK][r_k]

        if perm.Repository.owner_id == user_id:
            # set admin if owner
            p = 'repository.admin'
        else:
            if multiple_counter[r_k] > 1:
                p = _choose_perm(p, cur_perm)
        permissions[RK][r_k] = p

    # user explicit permissions for repositories, overrides any specified
    # by the group permission
    user_repo_perms = Permission.get_default_perms(user_id)
    for perm in user_repo_perms:
        r_k = perm.UserRepoToPerm.repository.repo_name
        cur_perm = permissions[RK][r_k]
        # set admin if owner
        if perm.Repository.owner_id == user_id:
            p = 'repository.admin'
        else:
            p = perm.Permission.permission_name
            if not explicit:
                p = _choose_perm(p, cur_perm)
        permissions[RK][r_k] = p

    #======================================================================
    # !! PERMISSIONS FOR REPOSITORY GROUPS !!
    #======================================================================
    #======================================================================
    # check if user is part of user groups for this repository groups and
    # fill in his permission from it. _choose_perm decides of which
    # permission should be selected based on selected method
    #======================================================================
    # user group for repo groups permissions
    user_repo_group_perms_from_users_groups = \
        Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup) \
        .join((RepoGroup, UserGroupRepoGroupToPerm.group_id ==
               RepoGroup.group_id)) \
        .join((Permission, UserGroupRepoGroupToPerm.permission_id ==
               Permission.permission_id)) \
        .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
               UserGroup.users_group_id)) \
        .filter(UserGroup.users_group_active == True) \
        .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id ==
               UserGroupMember.users_group_id)) \
        .filter(UserGroupMember.user_id == user_id) \
        .all()

    multiple_counter = collections.defaultdict(int)
    for perm in user_repo_group_perms_from_users_groups:
        g_k = perm.UserGroupRepoGroupToPerm.group.group_name
        multiple_counter[g_k] += 1
        p = perm.Permission.permission_name
        cur_perm = permissions[GK][g_k]
        if multiple_counter[g_k] > 1:
            p = _choose_perm(p, cur_perm)
        permissions[GK][g_k] = p

    # user explicit permissions for repository groups
    user_repo_groups_perms = Permission.get_default_group_perms(user_id)
    for perm in user_repo_groups_perms:
        rg_k = perm.UserRepoGroupToPerm.group.group_name
        p = perm.Permission.permission_name
        cur_perm = permissions[GK][rg_k]
        if not explicit:
            p = _choose_perm(p, cur_perm)
        permissions[GK][rg_k] = p

    #======================================================================
    # !! PERMISSIONS FOR USER GROUPS !!
    #======================================================================
    # user group for user group permissions
    user_group_user_groups_perms = \
        Session().query(UserGroupUserGroupToPerm, Permission, UserGroup) \
        .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id ==
               UserGroup.users_group_id)) \
        .join((Permission, UserGroupUserGroupToPerm.permission_id ==
               Permission.permission_id)) \
        .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id ==
               UserGroupMember.users_group_id)) \
        .filter(UserGroupMember.user_id == user_id) \
        .join((UserGroup, UserGroupMember.users_group_id ==
               UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
        .filter(UserGroup.users_group_active == True) \
        .all()

    multiple_counter = collections.defaultdict(int)
    for perm in user_group_user_groups_perms:
        g_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
        multiple_counter[g_k] += 1
        p = perm.Permission.permission_name
        cur_perm = permissions[UK][g_k]
        if multiple_counter[g_k] > 1:
            p = _choose_perm(p, cur_perm)
        permissions[UK][g_k] = p

    #user explicit permission for user groups
    user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
    for perm in user_user_groups_perms:
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
        p = perm.Permission.permission_name
        cur_perm = permissions[UK][u_k]
        if not explicit:
            p = _choose_perm(p, cur_perm)
        permissions[UK][u_k] = p

    return permissions