def test_update_limit(self, dt_mock):
    """update_limit() must stamp last_sent with the current UTC time."""
    # Use plain `2`, not the legacy `02` literal: leading-zero int
    # literals are octal in py2 (deprecated) and a SyntaxError in py3.
    _now = dt.datetime(2015, 2, 2, 13, 39)
    dt_mock.datetime.utcnow.return_value = _now
    assert_equal(self.wh.last_sent, None)
    self.wh.update_limit()
    # expunge so the re-query below hits the DB, not the identity map
    session(self.wh).expunge(self.wh)
    assert_equal(M.Webhook.query.get(_id=self.wh._id).last_sent, _now)
def create(self, **kwargs):
    """Create a new model entry from the JSON request body.

    Aborts with HTTP 400 when the payload fails model validation.
    """
    try:
        entry = self.model(**request.json)
    except Invalid as exc:
        abort(400, exc.message)
    session(entry).flush()
    return entry
def test_project_is_deleted(self):
    """Running the purge script removes the target project."""
    project = M.Project.query.get(shortname=self.p_shortname)
    assert project is not None, 'Can not find project to delete'
    self.run_script(['p/{}'.format(project.shortname)])
    # drop the cached object so the lookup below goes back to the DB
    session(project).expunge(project)
    project = M.Project.query.get(shortname=project.shortname)
    assert project is None, 'Project is not deleted'
def test_send_no_configured_webhooks(self, send_webhook):
    """send() must post nothing when no webhooks are configured."""
    self.wh.delete()
    session(self.wh).flush(self.wh)
    sender = RepoPushWebhookSender()
    with h.push_config(c, app=self.git):
        sender.send(dict(arg1=1, arg2=2))
    assert_equal(send_webhook.post.call_count, 0)
def test_edit_validation(self):
    """Editing a webhook validates both the webhook id and the form."""
    invalid = M.Webhook(
        type='invalid type',
        app_config_id=None,
        hook_url='http://httpbin.org/post',
        secret='secret')
    session(invalid).flush(invalid)
    self.app.get(self.url + '/repo-push/%s' % invalid._id, status=404)

    data = {'url': u'http://httpbin.org/post', 'secret': u'secret'}
    self.create_webhook(data).follow()
    wh = M.Webhook.query.get(hook_url=data['url'], type='repo-push')

    # invalid id in hidden field, just in case
    r = self.app.get(self.url + '/repo-push/%s' % wh._id)
    data = {k: v[0].value for (k, v) in r.forms[0].fields.items()}
    data['webhook'] = unicode(invalid._id)
    self.app.post(self.url + '/repo-push/edit', data, status=404)

    # empty values
    data = {'url': '', 'secret': '', 'webhook': str(wh._id)}
    r = self.app.post(self.url + '/repo-push/edit', data)
    self.find_error(r, 'url', 'Please enter a value', 'edit')

    data = {'url': 'qwe', 'secret': 'qwe', 'webhook': str(wh._id)}
    r = self.app.post(self.url + '/repo-push/edit', data)
    self.find_error(
        r, 'url', 'You must provide a full domain name (like qwe.com)', 'edit')
def execute(cls, options):
    """Create placeholder "deleted" parents for orphaned discussion posts.

    Finds every post whose parent_id points at a missing post and creates
    a fake deleted parent in its place; on dry runs the fake parent is
    expunged instead of flushed.
    """
    models = [M.Post, ForumPost]
    app_config_id = cls.get_tool_id(options.tool)
    # Find all posts that have parent_id, but does not have actual parent
    # and create fake parent for them
    for model in models:
        q = {'parent_id': {'$ne': None}, 'app_config_id': app_config_id}
        for chunk in chunked_find(model, q):
            for post in chunk:
                if post.parent:
                    continue
                log.info('Creating deleted parent for %s %s',
                         model.__mongometa__.name, post._id)
                c.project = post.app_config.project
                slug = post.slug.rsplit('/', 1)[0]
                full_slug = post.full_slug.rsplit('/', 1)[0]
                author = c.project.admins()[0]
                deleted_post = model(
                    _id=post.parent_id,
                    deleted=True,
                    text="Automatically created in place of deleted post",
                    app_id=post.app_id,
                    app_config_id=post.app_config_id,
                    discussion_id=post.discussion_id,
                    thread_id=post.thread_id,
                    author_id=author._id,
                    slug=slug,
                    full_slug=full_slug,
                )
                if options.dry_run:
                    session(deleted_post).expunge(deleted_post)
                else:
                    session(deleted_post).flush(deleted_post)
def skip_last_updated(model_cls):
    """Context manager that suppresses 'last_updated' bumps on flush.

    Saves the session's current ``skip_last_updated`` flag, forces it on
    for the duration of the ``with`` block, then restores the old value.

    :param model_cls: The model *class* being updated.
    """
    previous = getattr(session(model_cls)._get(), 'skip_last_updated', False)
    session(model_cls)._get().skip_last_updated = True
    try:
        yield
    finally:
        session(model_cls)._get().skip_last_updated = previous
def add_webhooks(suffix, n):
    """Create ``n`` repo-push webhooks with URLs derived from ``suffix``."""
    for idx in range(n):
        hook = M.Webhook(
            type='repo-push',
            app_config_id=self.git.config._id,
            hook_url='http://httpbin.org/{}/{}'.format(suffix, idx),
            secret='secret')
        session(hook).flush(hook)
def test_subproject_is_deleted(self):
    """Purging a subproject removes it but keeps the parent project."""
    sub = M.Project.query.get(shortname='test/sub1')
    assert sub is not None, 'Can not find subproject to delete'
    self.run_script(['p/test/sub1'])
    # force re-query from the DB rather than the identity map
    session(sub).expunge(sub)
    sub = M.Project.query.get(shortname='test/sub1')
    assert sub is None, 'Project is not deleted'
    parent = M.Project.query.get(shortname='test')
    assert parent is not None, 'Parent project should not be deleted'
def merge(merge_request_id):
    """Merge the given merge request and record the status change.

    Performs the repo merge, posts a meta comment for the status
    transition, marks the request merged, and emits an activity event.
    """
    from allura import model as M
    mr = M.MergeRequest.query.get(_id=merge_request_id)
    mr.app.repo.merge(mr)
    mr.add_meta_post(changes={'Status': [mr.status, 'merged']})
    mr.status = 'merged'
    g.director.create_activity(
        c.user, 'merged', mr,
        related_nodes=[c.project], tags=['merge-request'])
    session(mr).flush(mr)
def verify_and_remove_code(self, user, code):
    """Consume a recovery code for ``user``.

    Returns True and deletes the code when it matches; raises
    InvalidRecoveryCode otherwise. Rate limiting is enforced first.
    """
    self.enforce_rate_limit(user)
    recovery = RecoveryCode.query.get(user_id=user._id, code=code)
    if not recovery:
        raise InvalidRecoveryCode
    recovery.query.delete()
    session(recovery).flush(recovery)
    return True
def test_delete_with_reason(self, log, post_event):
    """Purging with -r logs the reason and fires project_deleted."""
    project = M.Project.query.get(shortname=self.p_shortname)
    pid = project._id
    assert project is not None, 'Can not find project to delete'
    self.run_script(['-r', 'The Reason', 'p/{}'.format(project.shortname)])
    session(project).expunge(project)
    project = M.Project.query.get(shortname=project.shortname)
    assert project is None, 'Project is not deleted'
    log.info.assert_called_once_with(
        'Purging %s Reason: %s', '/p/test-delete/', 'The Reason')
    post_event.assert_called_once_with(
        'project_deleted', project_id=pid, reason='The Reason')
def installable_tools_for(project):
    """Return installable tool entry points for ``project``.

    Instantiates each registered tool with a throwaway AppConfig to ask
    whether it is installable, then sorts by status and ordinal and
    filters by the project's allowed tool statuses.
    """
    tools = []
    for name, App in g.entry_points['tool'].iteritems():
        cfg = M.AppConfig(project_id=project._id, tool_name=name)
        app = App(project, cfg)
        if app.installable:
            tools.append(dict(name=name, app=App))
        session(cfg).expunge(cfg)  # prevent from saving temporary config to db
    tools.sort(key=lambda t: (t['app'].status_int(), t['app'].ordinal))
    return [t for t in tools if t['app'].status in project.allowed_tool_status]
def subscribe(self, **kw):
    """Toggle thread subscriptions from submitted form data.

    Skips mod_date/last_updated bumps so a subscription change does not
    look like new thread activity, then redirects back to the referer.
    """
    threads = kw.pop('threads', [])
    for t in threads:
        thread = self.M.Thread.query.get(_id=t['_id'])
        if t.get('subscription'):
            thread.subscribe()
        else:
            thread.unsubscribe()
        session(self.M.Thread)._get().skip_mod_date = True
        session(self.M.Thread)._get().skip_last_updated = True
    redirect(request.referer)
def test_deleted_post(self):
    """A deleted post's URL must return 404 instead of 200."""
    r = self._make_post('This is a post')
    reply_form = r.html.find(
        'div', {'class': 'edit_post_form reply'}).find('form')
    post_link = str(reply_form['action']).rstrip('/')
    _, slug = post_link.rsplit('/', 1)
    r = self.app.get(post_link, status=200)
    post = M.Post.query.get(slug=slug)
    post.deleted = True
    session(post).flush(post)
    r = self.app.get(post_link, status=404)
def update_webhook(self, wh, url, secret=None):
    """Update a webhook's URL and secret, generating a secret if absent.

    Raises Invalid (tagged ``_the_form``) when a webhook with the same
    URL already exists for this app (duplicate key on flush).
    """
    if not secret:
        secret = self.gen_secret()
    wh.hook_url = url
    wh.secret = secret
    try:
        session(wh).flush(wh)
    except DuplicateKeyError:
        # roll the unsaved duplicate out of the session before reporting
        session(wh).expunge(wh)
        msg = u'_the_form: "{}" webhook already exists for {} {}'.format(
            wh.type, self.app.config.options.mount_label, url)
        raise Invalid(msg, None, None)
def setUp(self):
    """Set up a basic test env, patches, a git tool, and one webhook."""
    setup_basic_test()
    self.patches = self.monkey_patch()
    for patch in self.patches:
        patch.start()
    self.setup_with_tools()
    self.project = M.Project.query.get(shortname=test_project_with_repo)
    self.git = self.project.app_instance('src')
    self.wh = M.Webhook(
        type='repo-push',
        app_config_id=self.git.config._id,
        hook_url='http://httpbin.org/post',
        secret='secret')
    session(self.wh).flush(self.wh)
def cached_convert(self, artifact, field_name):
    """Convert ``artifact.field_name`` markdown source to html, caching
    the result if the render time is greater than the defined
    threshold.
    """
    source_text = getattr(artifact, field_name)
    # Check if contents macro and never cache
    if "[[" in source_text:
        return self.convert(source_text)
    cache_field_name = field_name + '_cache'
    cache = getattr(artifact, cache_field_name, None)
    if not cache:
        log.warn(
            'Skipping Markdown caching - Missing cache field "%s" on class %s',
            field_name, artifact.__class__.__name__)
        return self.convert(source_text)

    bugfix_rev = 4  # increment this if we need all caches to invalidated (e.g. xss in markdown rendering fixed)
    md5 = None
    # If a cached version exists and it is valid, return it.
    if cache.md5 is not None:
        md5 = hashlib.md5(source_text.encode('utf-8')).hexdigest()
        if cache.md5 == md5 and getattr(cache, 'fix7528', False) == bugfix_rev:
            return h.html.literal(cache.html)

    # Convert the markdown and time the result.
    start = time.time()
    html = self.convert(source_text, render_limit=False)
    render_time = time.time() - start

    threshold = config.get('markdown_cache_threshold')
    try:
        threshold = float(threshold) if threshold else None
    except ValueError:
        threshold = None
        log.warn('Skipping Markdown caching - The value for config param '
                 '"markdown_cache_threshold" must be a float.')

    if threshold is not None and render_time > threshold:
        # Save the cache
        if md5 is None:
            md5 = hashlib.md5(source_text.encode('utf-8')).hexdigest()
        cache.md5, cache.html, cache.render_time = md5, html, render_time
        # flag to indicate good caches created after [#7528] and other
        # critical bugs were fixed.
        cache.fix7528 = bugfix_rev

        # Guard the session lookup (matches the sibling implementation):
        # a non-artifact object may have no session, which raises
        # AttributeError inside session(); don't lose the rendered html.
        try:
            sess = session(artifact)
        except AttributeError:
            # this can happen if a non-artifact object is used
            log.exception('Could not get session for %s', artifact)
        else:
            with utils.skip_mod_date(artifact.__class__), \
                    utils.skip_last_updated(artifact.__class__):
                sess.flush(artifact)
    return html
def test_delete_validation(self):
    """Deleting with a missing or wrong-typed webhook id returns 404."""
    invalid = M.Webhook(
        type='invalid type',
        app_config_id=None,
        hook_url='http://httpbin.org/post',
        secret='secret')
    session(invalid).flush(invalid)
    assert_equal(M.Webhook.query.find().count(), 1)

    data = {'webhook': ''}
    self.app.post(self.url + '/repo-push/delete', data, status=404)

    data = {'webhook': unicode(invalid._id)}
    self.app.post(self.url + '/repo-push/delete', data, status=404)
    assert_equal(M.Webhook.query.find().count(), 1)
def __call__(self):
    '''Call the task function with its arguments.

    Records start/stop timestamps and flushes state transitions so
    monitors can observe the task while it runs; errors store the
    traceback in ``result`` and re-raise.
    '''
    self.time_start = datetime.utcnow()
    session(self).flush(self)
    try:
        func = self.function
        self.result = func(*self.task.args, **self.task.kwargs)
    except Exception:
        self.state = 'error'
        self.result = traceback.format_exc()
        raise
    else:
        self.state = 'complete'
        return self.result
    finally:
        self.time_stop = datetime.utcnow()
        session(self).flush(self)
def disable_users(cls, usernames):
    """Disable each named user via the auth provider; skip missing/disabled."""
    auth_provider = AuthenticationProvider.get(request=None)
    # would be nice to use the BatchIndexer extension around this but
    # that only works for artifacts not users
    for username in usernames:
        user = M.User.query.get(username=username)
        if not user:
            log.info('Could not find user: %s', username)
        elif user.disabled:
            log.info('User is already disabled: %s', username)
            session(user).expunge(user)
        else:
            log.info('Disabling user: %s', username)
            auth_provider.disable_user(user)
            session(user).flush(user)
def setUp(self):
    """Set up the REST controller tests with three repo-push webhooks."""
    super(TestWebhookRestController, self).setUp()
    self.patches = self.monkey_patch()
    for patch in self.patches:
        patch.start()
    self.setup_with_tools()
    self.project = M.Project.query.get(shortname=test_project_with_repo)
    self.git = self.project.app_instance('src')
    self.url = str('/rest' + self.git.admin_url + 'webhooks')
    self.webhooks = []
    for i in range(3):
        hook = M.Webhook(
            type='repo-push',
            app_config_id=self.git.config._id,
            hook_url='http://httpbin.org/post/{}'.format(i),
            secret='secret-{}'.format(i))
        session(hook).flush(hook)
        self.webhooks.append(hook)
def test_deleted_post_attachment(self):
    """Attachments of a deleted post must 404 (full image and thumb)."""
    path = os.path.join(os.path.dirname(__file__), '..', 'data', 'user.png')
    with open(path) as fp:
        pic = fp.read()
    self.app.post(self.post_link + 'attach',
                  upload_files=[('file_info', 'user.png', pic)])
    alink = self.attach_link()
    thumblink = alink + '/thumb'
    self.app.get(alink, status=200)
    self.app.get(thumblink, status=200)
    _, slug = self.post_link.rstrip('/').rsplit('/', 1)
    post = M.Post.query.get(slug=slug)
    assert post, 'Could not find post for {} {}'.format(
        slug, self.post_link)
    post.deleted = True
    session(post).flush(post)
    self.app.get(alink, status=404)
    self.app.get(thumblink, status=404)
def test_deleted_post_attachment(self):
    """Attachments of a deleted post must 404 (full image and thumb)."""
    path = os.path.join(os.path.dirname(__file__), '..', 'data', 'user.png')
    with open(path) as fp:
        pic = fp.read()
    self.app.post(
        self.post_link + 'attach',
        upload_files=[('file_info', 'user.png', pic)])
    alink = self.attach_link()
    thumblink = alink + '/thumb'
    self.app.get(alink, status=200)
    self.app.get(thumblink, status=200)
    _, slug = self.post_link.rstrip('/').rsplit('/', 1)
    post = M.Post.query.get(slug=slug)
    assert post, 'Could not find post for {} {}'.format(slug, self.post_link)
    post.deleted = True
    session(post).flush(post)
    self.app.get(alink, status=404)
    self.app.get(thumblink, status=404)
def main():
    """Rewrite old Mailbox artifact titles for tickets and merge requests.

    Pass ``diff`` as the last CLI argument to preview without saving.
    """
    task = sys.argv[-1]
    c.project = None
    # Fix ticket artifcat titles
    title = re.compile('^Ticket [0-9]')
    subs_tickets = M.Mailbox.query.find(dict(artifact_title=title)).all()
    log.info('Found total %d old artifact titles (tickets).',
             len(subs_tickets))
    for sub in subs_tickets:
        if not sub.artifact_index_id:
            log.info('No artifact_index_id on %s', sub)
            continue
        ticket = TM.Ticket.query.get(
            _id=ObjectId(sub.artifact_index_id.split('#')[1]))
        if not ticket:
            log.info('Could not find ticket for %s', sub)
            continue
        new_title = 'Ticket #%d: %s' % (ticket.ticket_num, ticket.summary)
        log.info('"%s" --> "%s"', sub.artifact_title, new_title)
        if task != 'diff':
            sub.artifact_title = new_title
            session(sub).flush(sub)

    # Fix merge request artifact titles
    title = re.compile('^Merge request: ')
    subs_mrs = M.Mailbox.query.find(dict(artifact_title=title)).all()
    # BUG FIX: was len(subs_tickets) — reported the ticket count here
    log.info('Found total %d old artifact titles (merge_requests).',
             len(subs_mrs))
    for sub in subs_mrs:
        if not sub.artifact_index_id:
            log.info('No artifact_index_id on %s', sub)
            continue
        merge_request = M.MergeRequest.query.get(
            _id=ObjectId(sub.artifact_index_id.split('#')[1]))
        if not merge_request:
            log.info('Could not find merge request for %s', sub)
            continue
        new_title = 'Merge Request #%d: %s' % (
            merge_request.request_number, merge_request.summary)
        log.info('"%s" --> "%s"', sub.artifact_title, new_title)
        if task != 'diff':
            sub.artifact_title = new_title
            session(sub).flush(sub)
def index(self, **kw):
    """REST endpoint: create a webhook for this app.

    Returns the created webhook JSON (201), or an error payload (400)
    on validation failure or when the per-app webhook limit is hit.
    """
    response.content_type = 'application/json'
    try:
        params = {'secret': kw.pop('secret', ''),
                  'url': kw.pop('url', None)}
        valid = self.create_form().to_python(params)
    except Exception as e:
        response.status_int = 400
        return {'result': 'error', 'error': self._error(e)}
    if not self.sender.enforce_limit(self.app):
        limits = {
            'max': M.Webhook.max_hooks(
                self.sender.type, self.app.config.tool_name),
            'used': M.Webhook.query.find({
                'type': self.sender.type,
                'app_config_id': self.app.config._id,
            }).count(),
        }
        response.status_int = 400
        return {
            'result': 'error',
            'error': 'You have exceeded the maximum number of webhooks '
                     'you are allowed to create for this project/app',
            'limits': limits,
        }
    webhook = M.Webhook(
        type=self.sender.type,
        app_config_id=self.app.config._id)
    try:
        self.update_webhook(webhook, valid['url'], valid['secret'])
    except Invalid as e:
        response.status_int = 400
        return {'result': 'error', 'error': self._error(e)}
    M.AuditLog.log('add webhook %s %s %s',
                   webhook.type, webhook.hook_url,
                   webhook.app_config.url())
    response.status_int = 201
    # refetch updated values (e.g. mod_date)
    session(webhook).expunge(webhook)
    webhook = M.Webhook.query.get(_id=webhook._id)
    return webhook.__json__()
def cached_convert(self, artifact, field_name):
    """Convert ``artifact.field_name`` markdown source to html, caching
    the result if the render time is greater than the defined
    threshold.
    """
    source_text = getattr(artifact, field_name)
    # Check if contents macro and never cache
    if "[[" in source_text:
        return self.convert(source_text)
    cache_field_name = field_name + '_cache'
    cache = getattr(artifact, cache_field_name, None)
    if not cache:
        log.warn(
            'Skipping Markdown caching - Missing cache field "%s" on class %s',
            field_name, artifact.__class__.__name__)
        return self.convert(source_text)

    bugfix_rev = 4  # increment this if we need all caches to invalidated (e.g. xss in markdown rendering fixed)
    md5 = None
    # If a cached version exists and it is valid, return it.
    if cache.md5 is not None:
        md5 = hashlib.md5(source_text.encode('utf-8')).hexdigest()
        if cache.md5 == md5 and getattr(cache, 'fix7528', False) == bugfix_rev:
            return h.html.literal(cache.html)

    # Convert the markdown and time the result.
    start = time.time()
    html = self.convert(source_text, render_limit=False)
    render_time = time.time() - start

    threshold = config.get('markdown_cache_threshold')
    try:
        threshold = float(threshold) if threshold else None
    except ValueError:
        threshold = None
        log.warn('Skipping Markdown caching - The value for config param '
                 '"markdown_cache_threshold" must be a float.')

    if threshold is not None and render_time > threshold:
        # Save the cache
        if md5 is None:
            md5 = hashlib.md5(source_text.encode('utf-8')).hexdigest()
        cache.md5, cache.html, cache.render_time = md5, html, render_time
        # flag to indicate good caches created after [#7528] and other
        # critical bugs were fixed.
        cache.fix7528 = bugfix_rev

        try:
            sess = session(artifact)
        except AttributeError:
            # this can happen if a non-artifact object is used
            log.exception('Could not get session for %s', artifact)
        else:
            with utils.skip_mod_date(artifact.__class__), \
                    utils.skip_last_updated(artifact.__class__):
                sess.flush(artifact)
    return html
def test_webhook_validator(self):
    """WebhookValidator rejects bad ids/types/apps and accepts valid ones."""
    sender = Mock(type='repo-push')
    app = self.git
    invalid_app = self.project.app_instance('src2')
    v = WebhookValidator(sender=sender, app=app, not_empty=True)

    with assert_raises(Invalid) as cm:
        v.to_python(None)
    assert_equal(cm.exception.msg, u'Please enter a value')
    with assert_raises(Invalid) as cm:
        v.to_python('invalid id')
    assert_equal(cm.exception.msg, u'Invalid webhook')

    wh = M.Webhook(type='invalid type',
                   app_config_id=invalid_app.config._id,
                   hook_url='http://hooks.slack.com',
                   secret='secret')
    session(wh).flush(wh)
    # invalid type
    with assert_raises(Invalid) as cm:
        v.to_python(wh._id)
    assert_equal(cm.exception.msg, u'Invalid webhook')

    wh.type = 'repo-push'
    session(wh).flush(wh)
    # invalid app
    with assert_raises(Invalid) as cm:
        v.to_python(wh._id)
    assert_equal(cm.exception.msg, u'Invalid webhook')

    wh.app_config_id = app.config._id
    session(wh).flush(wh)
    assert_equal(v.to_python(wh._id), wh)
    assert_equal(v.to_python(unicode(wh._id)), wh)
def main():
    """Rewrite old Mailbox artifact titles for tickets and merge requests.

    Pass ``diff`` as the last CLI argument to preview without saving.
    """
    task = sys.argv[-1]
    c.project = None
    # Fix ticket artifcat titles
    title = re.compile('^Ticket [0-9]')
    subs_tickets = M.Mailbox.query.find(dict(artifact_title=title)).all()
    log.info('Found total %d old artifact titles (tickets).',
             len(subs_tickets))
    for sub in subs_tickets:
        if not sub.artifact_index_id:
            log.info('No artifact_index_id on %s', sub)
            continue
        ticket = TM.Ticket.query.get(
            _id=ObjectId(sub.artifact_index_id.split('#')[1]))
        if not ticket:
            log.info('Could not find ticket for %s', sub)
            continue
        new_title = 'Ticket #%d: %s' % (ticket.ticket_num, ticket.summary)
        log.info('"%s" --> "%s"', sub.artifact_title, new_title)
        if task != 'diff':
            sub.artifact_title = new_title
            session(sub).flush(sub)

    # Fix merge request artifact titles
    title = re.compile('^Merge request: ')
    subs_mrs = M.Mailbox.query.find(dict(artifact_title=title)).all()
    # BUG FIX: was len(subs_tickets) — reported the ticket count here
    log.info('Found total %d old artifact titles (merge_requests).',
             len(subs_mrs))
    for sub in subs_mrs:
        if not sub.artifact_index_id:
            log.info('No artifact_index_id on %s', sub)
            continue
        merge_request = M.MergeRequest.query.get(
            _id=ObjectId(sub.artifact_index_id.split('#')[1]))
        if not merge_request:
            log.info('Could not find merge request for %s', sub)
            continue
        new_title = 'Merge Request #%d: %s' % (
            merge_request.request_number, merge_request.summary)
        log.info('"%s" --> "%s"', sub.artifact_title, new_title)
        if task != 'diff':
            sub.artifact_title = new_title
            session(sub).flush(sub)
def test_user_track_active():
    """track_active records session info with whole-day granularity."""
    # without this session flushing inside track_active raises Exception
    setup_functional_test()
    c.user = M.User.by_username('test-admin')
    assert_equal(c.user.last_access['session_date'], None)
    assert_equal(c.user.last_access['session_ip'], None)
    assert_equal(c.user.last_access['session_ua'], None)

    req = Mock(headers={'User-Agent': 'browser'}, remote_addr='addr')
    c.user.track_active(req)
    c.user = M.User.by_username(c.user.username)
    assert_not_equal(c.user.last_access['session_date'], None)
    assert_equal(c.user.last_access['session_ip'], 'addr')
    assert_equal(c.user.last_access['session_ua'], 'browser')

    # ensure that session activity tracked with a whole-day granularity
    prev_date = c.user.last_access['session_date']
    c.user.track_active(req)
    c.user = M.User.by_username(c.user.username)
    assert_equal(c.user.last_access['session_date'], prev_date)
    yesterday = datetime.utcnow() - timedelta(1)
    c.user.last_access['session_date'] = yesterday
    session(c.user).flush(c.user)
    c.user.track_active(req)
    c.user = M.User.by_username(c.user.username)
    assert_true(c.user.last_access['session_date'] > yesterday)

    # ...or if IP or User Agent has changed
    req.remote_addr = 'new addr'
    c.user.track_active(req)
    c.user = M.User.by_username(c.user.username)
    assert_equal(c.user.last_access['session_ip'], 'new addr')
    assert_equal(c.user.last_access['session_ua'], 'browser')

    req.headers['User-Agent'] = 'new browser'
    c.user.track_active(req)
    c.user = M.User.by_username(c.user.username)
    assert_equal(c.user.last_access['session_ip'], 'new addr')
    assert_equal(c.user.last_access['session_ua'], 'new browser')
def skip_mod_date(model_cls):
    """ Avoids updating 'mod_date'

    Useful for saving cache on a model and things like that.

    .. note:: This only works when the changes made to the model are
    flushed.

    :Example:

    from allura import model as M
    key = self.can_merge_cache_key()
    with utils.skip_mod_date(M.MergeRequest):
        self.can_merge_cache[key] = val
        session(self).flush(self)

    :param model_cls: The model *class* being updated.
    """
    previous = getattr(session(model_cls)._get(), 'skip_mod_date', False)
    session(model_cls)._get().skip_mod_date = True
    try:
        yield
    finally:
        session(model_cls)._get().skip_mod_date = previous
def post(cls, function, args=None, kwargs=None, result_type='forget',
         priority=10):
    '''Create a new task object.

    Stores the fully-qualified function name plus its call arguments,
    flushes the new record, and returns it.
    '''
    args = () if args is None else args
    kwargs = {} if kwargs is None else kwargs
    task_name = '%s.%s' % (function.__module__, function.__name__)
    obj = cls(
        state='ready',
        priority=priority,
        result_type=result_type,
        task=dict(name=task_name, args=args, kwargs=kwargs),
        result=None)
    session(obj).flush(obj)
    return obj
def test_unmoderated_post_attachments(self):
    """Pending-post attachments are moderator-only (403 for others)."""
    ordinary_user = {'username': '******'}
    moderator = {'username': '******'}
    # set up attachment
    path = os.path.join(os.path.dirname(__file__), '..', 'data', 'user.png')
    with open(path) as fp:
        pic = fp.read()
    self.app.post(
        self.post_link + 'attach',
        upload_files=[('file_info', 'user.png', pic)])
    # ... make sure ordinary user can see it
    r = self.app.get(self.thread_link, extra_environ=ordinary_user)
    assert '<div class="attachment_thumb">' in r
    alink = self.attach_link()
    thumblink = alink + '/thumb'
    # ... and access it
    self.app.get(alink, status=200, extra_environ=ordinary_user)
    self.app.get(thumblink, status=200, extra_environ=ordinary_user)
    # make post unmoderated
    _, slug = self.post_link.rstrip('/').rsplit('/', 1)
    post = M.Post.query.get(slug=slug)
    assert post, 'Could not find post for {} {}'.format(slug, self.post_link)
    post.status = 'pending'
    session(post).flush(post)
    # ... make sure attachment is not visible to ordinary user
    r = self.app.get(self.thread_link, extra_environ=ordinary_user)
    assert '<div class="attachment_thumb">' not in r, \
        'Attachment is visible on unmoderated post'
    # ... but visible to moderator
    r = self.app.get(self.thread_link, extra_environ=moderator)
    assert '<div class="attachment_thumb">' in r
    # ... and ordinary user can't access it
    self.app.get(alink, status=403, extra_environ=ordinary_user)
    self.app.get(thumblink, status=403, extra_environ=ordinary_user)
    # ... but moderator can
    self.app.get(alink, status=200, extra_environ=moderator)
    self.app.get(thumblink, status=200, extra_environ=moderator)
def test_unmoderated_post_attachments(self):
    """Pending-post attachments are moderator-only (403 for others)."""
    ordinary_user = {'username': '******'}
    moderator = {'username': '******'}
    # set up attachment
    path = os.path.join(os.path.dirname(__file__), '..', 'data', 'user.png')
    with open(path) as fp:
        pic = fp.read()
    self.app.post(self.post_link + 'attach',
                  upload_files=[('file_info', 'user.png', pic)])
    # ... make sure ordinary user can see it
    r = self.app.get(self.thread_link, extra_environ=ordinary_user)
    assert '<div class="attachment_thumb">' in r
    alink = self.attach_link()
    thumblink = alink + '/thumb'
    # ... and access it
    self.app.get(alink, status=200, extra_environ=ordinary_user)
    self.app.get(thumblink, status=200, extra_environ=ordinary_user)
    # make post unmoderated
    _, slug = self.post_link.rstrip('/').rsplit('/', 1)
    post = M.Post.query.get(slug=slug)
    assert post, 'Could not find post for {} {}'.format(
        slug, self.post_link)
    post.status = 'pending'
    session(post).flush(post)
    # ... make sure attachment is not visible to ordinary user
    r = self.app.get(self.thread_link, extra_environ=ordinary_user)
    assert '<div class="attachment_thumb">' not in r, \
        'Attachment is visible on unmoderated post'
    # ... but visible to moderator
    r = self.app.get(self.thread_link, extra_environ=moderator)
    assert '<div class="attachment_thumb">' in r
    # ... and ordinary user can't access it
    self.app.get(alink, status=403, extra_environ=ordinary_user)
    self.app.get(thumblink, status=403, extra_environ=ordinary_user)
    # ... but moderator can
    self.app.get(alink, status=200, extra_environ=moderator)
    self.app.get(thumblink, status=200, extra_environ=moderator)
def disable_users(cls, usernames, message):
    """Disable each named user and optionally audit-log ``message``."""
    auth_provider = AuthenticationProvider.get(request=None)
    # would be nice to use the BatchIndexer extension around this but
    # that only works for artifacts not users
    for username in usernames:
        user = M.User.query.get(username=username)
        if not user:
            log.info('Could not find user: %s', username)
        elif user.disabled:
            log.info('User is already disabled: %s', username)
            session(user).expunge(user)
        else:
            log.info('Disabling user: %s', username)
            auth_provider.disable_user(user)
            session(user).flush(user)
            if message:
                log_entry = h.auditlog_user(message, user=user)
                session(log_entry).flush(log_entry)
def test_skip_mod_date():
    """skip_mod_date sets the session flag inside the block and restores it."""
    with utils.skip_mod_date(M.Artifact):
        assert getattr(session(M.Artifact)._get(),
                       'skip_mod_date', None) is True
    assert getattr(session(M.Artifact)._get(),
                   'skip_mod_date', None) is False
def replace_codes(self, user, codes):
    """Replace all of ``user``'s recovery codes with ``codes``."""
    RecoveryCode.query.remove({'user_id': user._id})
    for code in codes:
        rec = RecoveryCode(user_id=user._id, code=code)
        session(rec).flush(rec)
def merge(merge_request_id):
    """Merge the given merge request and mark it as merged."""
    from allura import model as M
    mr = M.MergeRequest.query.get(_id=merge_request_id)
    mr.app.repo.merge(mr)
    mr.status = 'merged'
    session(mr).flush(mr)
def update(self, **kwargs):
    """Apply the JSON request body as a $set update to an entry.

    NOTE(review): ``new=True`` looks like a find_and_modify option, not
    a standard ``update`` kwarg — confirm the driver accepts it here.
    """
    entry = self._get_entry(**kwargs)
    entry.query.update({'$set': request.json}, safe=True, new=True)
    session(entry).flush()
    return entry
def delete(self, **kwargs):
    """Delete the entry identified by ``kwargs`` and flush the session."""
    entry = self._get_entry(**kwargs)
    entry.delete()
    session(entry).flush()
def test_orphan_object(self):
    """session() returns None for an object expunged by session.clear()."""
    obj = self.Basic()
    assert session(obj) is self.session
    self.session.clear()
    assert session(obj) is None
def update_limit(self):
    """Record the current UTC time as this webhook's last_sent and flush."""
    self.last_sent = dt.datetime.utcnow()
    session(self).flush(self)