def test_attachment_methods():
    """Exercise Post.attach() at post/thread/discussion scope and the
    attachment notice embedded in a notification email."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    # Attach at three levels of specificity: post, thread, discussion.
    p_att = p.attach('foo.text', StringIO('Hello, world!'),
                     discussion_id=d._id,
                     thread_id=t._id,
                     post_id=p._id)
    t_att = p.attach('foo2.text', StringIO('Hello, thread!'),
                     discussion_id=d._id,
                     thread_id=t._id)
    d_att = p.attach('foo3.text', StringIO('Hello, discussion!'),
                     discussion_id=d._id)
    ThreadLocalORMSession.flush_all()
    assert p_att.post == p
    assert p_att.thread == t
    assert p_att.discussion == d
    # All three attachments resolve to URLs under the discussion tool.
    for att in (p_att, t_att, d_att):
        assert 'wiki/_discuss' in att.url()
        assert 'attachment/' in att.url()
    # Test notification in mail
    t = M.Thread.new(discussion_id=d._id, subject='Test comment notification')
    fs = FieldStorage()
    fs.name = 'file_info'
    fs.filename = 'fake.txt'
    fs.type = 'text/plain'
    fs.file = StringIO('this is the content of the fake file\n')
    p = t.post(text=u'test message', forum=None, subject='', file_info=fs)
    ThreadLocalORMSession.flush_all()
    n = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    # 37 Bytes == len('this is the content of the fake file\n')
    assert '\nAttachment: fake.txt (37 Bytes; text/plain)' in n.text
def add_page_with_attachmetns(self):
    """Create a labelled wiki page with one attachment and flush the ORM.

    NOTE(review): the name has a typo ("attachmetns") but is kept as-is
    because callers may reference it by this exact name.
    """
    self.page = WM.Page.upsert('ZTest_title')
    self.page.text = 'test_text'
    # Fixed date so tests depending on ordering/age are deterministic.
    self.page.mod_date = datetime.datetime(2013, 7, 5)
    self.page.labels = ['test_label1', 'test_label2']
    self.page.attach('some/path/test_file', StringIO('test string'))
    ThreadLocalORMSession.flush_all()
def register_neighborhood_project(self, neighborhood, users, allow_register=False):
    """Create and configure the '--init--' home project for *neighborhood*.

    :param neighborhood: Neighborhood the home project belongs to
    :param users: users handed to ``configure_project``
    :param allow_register: if True, grant authenticated users the
        'register' permission on the new project
    :return: the newly created home project
    """
    from allura import model as M
    shortname = '--init--'
    name = 'Home Project for %s' % neighborhood.name
    p = M.Project(neighborhood_id=neighborhood._id,
                  shortname=shortname,
                  name=name,
                  short_description='',
                  description=(
                      'You can edit this description in the admin page'),
                  homepage_title='# ' + name,
                  last_updated=datetime.utcnow(),
                  is_nbhd_project=True,
                  is_root=True)
    try:
        p.configure_project(
            users=users,
            is_user_project=False,
            apps=[
                ('Wiki', 'wiki', 'Wiki'),
                ('admin', 'admin', 'Admin')])
    except:
        # Discard the half-created project state, then propagate.
        ThreadLocalORMSession.close_all()
        log.exception('Error registering project %s' % p)
        raise
    if allow_register:
        role_auth = M.ProjectRole.authenticated(p)
        security.simple_grant(p.acl, role_auth._id, 'register')
    # Mark the project dirty so the session flushes it.
    state(p).soil()
    return p
def test_refresh(self):
    """Refresh a fully-mocked repo holding 100 new commits and verify the
    resulting commit notification and feed entries."""
    ci = mock.Mock()
    ci.count_revisions = mock.Mock(return_value=100)
    ci.authored.name = 'Test Committer'
    ci.author_url = '/u/test-committer/'
    self.repo._impl.commit = mock.Mock(return_value=ci)
    self.repo._impl.new_commits = mock.Mock(
        return_value=['foo%d' % i for i in range(100)])
    self.repo._impl.all_commit_ids = mock.Mock(
        return_value=['foo%d' % i for i in range(100)])
    self.repo.symbolics_for_commit = mock.Mock(
        return_value=[['master', 'branch'], []])

    def refresh_commit_info(oid, seen, lazy=False):
        # Fake impl: persist a minimal CommitDoc for each oid.
        M.repo.CommitDoc(dict(
            authored=dict(
                name='Test Committer',
                email='*****@*****.**'),
            _id=oid)).m.insert()

    def set_heads():
        self.repo.heads = [
            ming.base.Object(name='head', object_id='foo0', count=100)]

    self.repo._impl.refresh_commit_info = refresh_commit_info
    self.repo._impl.refresh_heads = mock.Mock(side_effect=set_heads)
    self.repo.shorthand_for_commit = lambda oid: '[' + str(oid) + ']'
    self.repo.url_for_commit = lambda oid: '/ci/' + str(oid) + '/'
    self.repo.refresh()
    ThreadLocalORMSession.flush_all()
    # One of the generated notifications must summarize the new commits.
    notifications = M.Notification.query.find().all()
    for n in notifications:
        if '100 new commits' in n.subject:
            assert "master,branch: by Test Committer http://localhost/#" in n.text
            break
    else:
        assert False, 'Did not find notification'
    assert M.Feed.query.find(dict(
        title='New commit', author_name='Test Committer')).count()
def main(options):
    """Scrub email addresses out of User and Forum documents.

    Iterates all projects per neighborhood in chunks, scrubbing each,
    then removes EmailAddress docs and blanks user/forum addresses.
    Honors ``options.dry_run`` (report only).  Returns 0 (exit status).
    """
    log.addHandler(logging.StreamHandler(sys.stdout))
    log.setLevel(getattr(logging, options.log_level.upper()))
    g.solr = mock.Mock()  # suppress search indexing while scrubbing
    preamble = options.dry_run and "Would delete" or "Deleting"
    options.preamble = preamble
    for nbhd in M.Neighborhood.query.find():
        q = {'neighborhood_id': nbhd._id}
        for projects in utils.chunked_find(M.Project, q):
            for p in projects:
                scrub_project(p, options)
            # Flush/close per chunk to bound ORM session memory.
            # NOTE(review): indentation reconstructed; confirm these sit
            # at chunk level rather than per project.
            ThreadLocalORMSession.flush_all()
            ThreadLocalORMSession.close_all()
    log.info('%s %s EmailAddress documents' %
             (preamble, M.EmailAddress.query.find().count()))
    log.info('%s email addresses from %s User documents' %
             (preamble, M.User.query.find().count()))
    log.info('%s monitoring_email addresses from %s Forum documents' %
             (preamble,
              DM.Forum.query.find({"monitoring_email": {"$nin": [None, ""]}}).count()))
    if not options.dry_run:
        M.EmailAddress.query.remove()
        M.User.query.update({}, {"$set": {"email_addresses": []}}, multi=True)
        DM.Forum.query.update({"monitoring_email": {"$nin": [None, ""]}},
                              {"$set": {"monitoring_email": None}}, multi=True)
    return 0
def test_members(self):
    """_members page lists users with Admins first, and a user holding
    several roles (test-user-3) appears only once."""
    nbhd = M.Neighborhood.query.get(name='Projects')
    self.app.post('/admin/groups/create', params={'name': 'B_role'})
    test_project = M.Project.query.get(
        shortname='test', neighborhood_id=nbhd._id)
    test_project.add_user(M.User.by_username('test-user-1'), ['B_role'])
    test_project.add_user(M.User.by_username('test-user'), ['Developer'])
    test_project.add_user(M.User.by_username('test-user-0'), ['Member'])
    test_project.add_user(M.User.by_username('test-user-2'), ['Member'])
    test_project.add_user(M.User.by_username('test-user-3'), ['Member'])
    # Second role for the same user: must not duplicate the row.
    test_project.add_user(M.User.by_username('test-user-3'), ['Developer'])
    test_project.add_user(M.User.by_username('test-user-4'), ['Admin'])
    ThreadLocalORMSession.flush_all()
    r = self.app.get('/p/test/_members/')
    assert '<td>Test Admin</td>' in r
    assert '<td><a href="/u/test-admin/">test-admin</a></td>' in r
    assert '<td>Admin</td>' in r
    # Row order: admins first, then the remaining users.
    tr = r.html.findAll('tr')
    assert "<td>Test Admin</td>" in str(tr[1])
    assert "<td>Test User 4</td>" in str(tr[2])
    assert "<td>Test User</td>" in str(tr[3])
    assert "<td>Test User 3</td>" in str(tr[4])
    assert "<td>Test User 0</td>" in str(tr[5])
    assert "<td>Test User 1</td>" in str(tr[6])
    assert "<td>Test User 2</td>" in str(tr[7])
def main():
    """Install the 'activity' tool on every project, chunk by chunk."""
    for chunk in utils.chunked_find(M.Project):
        for p in chunk:
            p.install_app('activity')
        # Flush/close per chunk to bound ORM session memory.
        # NOTE(review): indentation reconstructed; confirm these belong
        # at chunk level, not after the whole loop.
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
def test_export_with_attachments(self):
    """Bulk-export the discussion tool and verify the first post's
    attachment is both listed in the exported JSON and written to disk
    under the export directory."""
    project = M.Project.query.get(shortname='test')
    discussion = project.app_instance('discussion')
    post = Forum.query.get(shortname='general').sorted_threads[0].first_post
    test_file1 = FieldStorage()
    test_file1.name = 'file_info'
    test_file1.filename = 'test_file'
    test_file1.file = StringIO('test file1\n')
    post.add_attachment(test_file1)
    ThreadLocalORMSession.flush_all()
    f = tempfile.TemporaryFile()
    temp_dir = tempfile.mkdtemp()
    discussion.bulk_export(f, temp_dir, True)
    f.seek(0)
    discussion = json.loads(f.read())
    forums = sorted(discussion['forums'], key=lambda x: x['name'])
    threads = sorted(forums[0]['threads'], key=lambda x: x['subject'])
    file_path = os.path.join(
        'discussion',
        str(post.discussion_id),
        str(post.thread_id),
        post.slug,
        'test_file'
    )
    assert_equal(threads[0]['posts'][0]['attachments'][0]['path'], file_path)
    # BUG FIX: the original bare ``os.path.exists(file_path)`` discarded
    # its result (asserted nothing) and tested a path relative to the CWD
    # instead of the export directory.
    assert os.path.exists(os.path.join(temp_dir, file_path))
def test_post_delete():
    """Deleting a post that carries an attachment must succeed cleanly."""
    discussion = M.Discussion(shortname="test", name="test")
    thread = M.Thread.new(discussion_id=discussion._id, subject="Test Thread")
    post = thread.post("This is a post")
    post.attach("foo.text", StringIO(""),
                discussion_id=discussion._id,
                thread_id=thread._id,
                post_id=post._id)
    ThreadLocalORMSession.flush_all()
    post.delete()
def test_attachment_methods():
    """Exercise Post.attach() at post/thread/discussion scope and the
    attachment notice embedded in a notification email.

    NOTE(review): functionally identical to another definition of the
    same name in this file; if both live in one module the later one
    shadows the earlier — confirm they come from different files.
    """
    d = M.Discussion(shortname="test", name="test")
    t = M.Thread.new(discussion_id=d._id, subject="Test Thread")
    p = t.post("This is a post")
    # Attach at three levels of specificity: post, thread, discussion.
    p_att = p.attach("foo.text", StringIO("Hello, world!"),
                     discussion_id=d._id,
                     thread_id=t._id,
                     post_id=p._id)
    t_att = p.attach("foo2.text", StringIO("Hello, thread!"),
                     discussion_id=d._id,
                     thread_id=t._id)
    d_att = p.attach("foo3.text", StringIO("Hello, discussion!"),
                     discussion_id=d._id)
    ThreadLocalORMSession.flush_all()
    assert p_att.post == p
    assert p_att.thread == t
    assert p_att.discussion == d
    for att in (p_att, t_att, d_att):
        assert "wiki/_discuss" in att.url()
        assert "attachment/" in att.url()
    # Test notification in mail
    t = M.Thread.new(discussion_id=d._id, subject="Test comment notification")
    fs = FieldStorage()
    fs.name = "file_info"
    fs.filename = "fake.txt"
    fs.type = "text/plain"
    fs.file = StringIO("this is the content of the fake file\n")
    p = t.post(text=u"test message", forum=None, subject="", file_info=fs)
    ThreadLocalORMSession.flush_all()
    n = M.Notification.query.get(subject=u"[test:wiki] Test comment notification")
    # 37 Bytes == len of the fake file content above
    assert "\nAttachment: fake.txt (37 Bytes; text/plain)" in n.text
def fix_for_project(self, project):
    """Re-bind ticket discussion threads to their tracker's Discussion.

    Repairs two kinds of damage caused by moving tickets between
    trackers: a tracker-wide Discussion moved away with the tickets, and
    individual ticket threads still pointing at the old tracker's
    Discussion.
    """
    c.project = project
    base.log.info(
        'Checking discussion instances for each tracker in project %s'
        % project.shortname)
    trackers = [ac for ac in project.app_configs
                if ac.tool_name.lower() == 'tickets']
    for tracker in trackers:
        base.log.info('Found tracker %s' % tracker)
        for ticket in Ticket.query.find({'app_config_id': tracker._id}):
            base.log.info('Processing ticket %s [#%s] %s'
                          % (ticket._id, ticket.ticket_num, ticket.summary))
            if ticket.discussion_thread.discussion.app_config_id != tracker._id:
                # Some tickets were moved from this tracker,
                # and Discussion instance for entire tracker was moved too.
                # Should move it back.
                base.log.info("Some tickets were moved from this tracker. "
                              "Moving tracker's discussion instance back.")
                ticket.discussion_thread.discussion.app_config_id = tracker._id
            if ticket.discussion_thread.discussion_id != tracker.discussion_id:
                # Ticket was moved from another tracker.
                # Should bind his comment thread to tracker's Discussion
                base.log.info("Ticket was moved from another tracker. "
                              "Bind ticket's comment thread to tracker's Discussion instance.")
                ticket.discussion_thread.discussion_id = tracker.discussion_id
                # Posts carry their own discussion_id; fix them too.
                for post in ticket.discussion_thread.posts:
                    post.discussion_id = tracker.discussion_id
    ThreadLocalORMSession.flush_all()
def test_refresh(self):
    """Refresh with 100 mocked commits; the notification must credit the
    committer and link the newest commit; the feed gets 100 entries."""
    committer_name = 'Test Committer'
    committer_email = '*****@*****.**'
    ci = mock.Mock()
    ci.authored.name = committer_name
    ci.committed.name = committer_name
    ci.committed.email = committer_email
    ci.author_url = '/u/test-committer/'
    self.repo._impl.commit = mock.Mock(return_value=ci)
    self.repo._impl.new_commits = mock.Mock(
        return_value=['foo%d' % i for i in range(100)])
    self.repo._impl.all_commit_ids = mock.Mock(
        return_value=['foo%d' % i for i in range(100)])
    self.repo.symbolics_for_commit = mock.Mock(
        return_value=[['master', 'branch'], []])

    def refresh_commit_info(oid, seen, lazy=False):
        # Fake impl: persist a minimal CommitDoc for each oid.
        M.repo.CommitDoc(dict(
            authored=dict(
                name=committer_name,
                email=committer_email),
            _id=oid)).m.insert()

    self.repo._impl.refresh_commit_info = refresh_commit_info
    # Accept either a Commit-like object (has ._id) or a raw oid string.
    _id = lambda oid: getattr(oid, '_id', str(oid))
    self.repo.shorthand_for_commit = lambda oid: '[' + _id(oid) + ']'
    self.repo.url_for_commit = lambda oid: '/ci/' + _id(oid) + '/'
    self.repo.refresh()
    ThreadLocalORMSession.flush_all()
    notifications = M.Notification.query.find().all()
    for n in notifications:
        if '100 new commits' in n.subject:
            assert "master,branch: by %s http://localhost/ci/foo99" % committer_name in n.text
            break
    else:
        assert False, 'Did not find notification'
    assert M.Feed.query.find(dict(
        author_name=committer_name)).count() == 100
def test_make_app_admin_only():
    """make_app_admin_only() strips read/create/visibility from everyone
    except project Admins."""
    h.set_context('test', 'wiki', neighborhood='Projects')
    anon = M.User.anonymous()
    dev = M.User.query.get(username='******')
    admin = M.User.query.get(username='******')
    c.project.add_user(dev, ['Developer'])
    ThreadLocalORMSession.flush_all()
    Credentials.get().clear()  # drop cached ACLs before re-checking
    # Baseline: everyone reads, dev+admin create, all see the app.
    assert has_access(c.app, 'read', user=anon)()
    assert has_access(c.app, 'read', user=dev)()
    assert has_access(c.app, 'read', user=admin)()
    assert not has_access(c.app, 'create', user=anon)()
    assert has_access(c.app, 'create', user=dev)()
    assert has_access(c.app, 'create', user=admin)()
    assert c.app.is_visible_to(anon)
    assert c.app.is_visible_to(dev)
    assert c.app.is_visible_to(admin)
    h.make_app_admin_only(c.app)
    ThreadLocalORMSession.flush_all()
    Credentials.get().clear()  # ACLs changed; clear the cache again
    # After locking down: only admin retains any access/visibility.
    assert not has_access(c.app, 'read', user=anon)()
    assert not has_access(c.app, 'read', user=dev)()
    assert has_access(c.app, 'read', user=admin)()
    assert not has_access(c.app, 'create', user=anon)()
    assert not has_access(c.app, 'create', user=dev)()
    assert has_access(c.app, 'create', user=admin)()
    assert not c.app.is_visible_to(anon)
    assert not c.app.is_visible_to(dev)
    assert c.app.is_visible_to(admin)
def test_post_methods():
    """Exercise Post accessors, history, JSON view and the moderation
    lifecycle (approve/spam/undo/delete) with the thread reply count."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    p2 = t.post('This is another post')
    assert p.discussion_class() == M.Discussion
    assert p.thread_class() == M.Thread
    assert p.attachment_class() == M.DiscussionAttachment
    p.commit()
    assert p.parent is None
    assert p.subject == 'Test Thread'
    assert_equals(p.attachments, [])
    assert 'wiki/_discuss' in p.url()
    assert p.reply_subject() == 'Re: Test Thread'
    assert p.link_text() == p.subject
    ss = p.history().first()
    assert 'version' in h.get_first(ss.index(), 'title')
    assert '#' in ss.shorthand_id()
    jsn = p.__json__()
    assert jsn["thread_id"] == t._id
    # BUG FIX: this was ``(p.approve() for p in (p, p2))`` — a generator
    # expression that was never consumed, so approve() never ran (and it
    # shadowed ``p``).  Use a real loop.
    for post in (p, p2):
        post.approve()
    ThreadLocalORMSession.flush_all()
    assert t.num_replies == 2
    p.spam()
    assert t.num_replies == 1
    p.undo('ok')
    assert t.num_replies == 2
    p.delete()
    assert t.num_replies == 1
def test_export_with_attachments(self):
    """Bulk-export the blog tool and verify a comment attachment is both
    listed in the exported JSON and written under the export directory."""
    project = M.Project.query.get(shortname='test')
    blog = project.app_instance('blog')
    with h.push_context('test', 'blog', neighborhood='Projects'):
        post = BM.BlogPost.new(
            title='Test title',
            text='test post',
            labels=['the firstlabel', 'the second label'],
            delete=None
        )
        ThreadLocalORMSession.flush_all()
        test_file1 = FieldStorage()
        test_file1.name = 'file_info'
        test_file1.filename = 'test_file'
        test_file1.file = StringIO('test file1\n')
        p = post.discussion_thread.add_post(text='test comment')
        p.add_multiple_attachments(test_file1)
        ThreadLocalORMSession.flush_all()
    f = tempfile.TemporaryFile()
    temp_dir = tempfile.mkdtemp()
    blog.bulk_export(f, temp_dir, True)
    f.seek(0)
    blog = json.loads(f.read())
    # Sort so the freshly created post is first regardless of others.
    blog['posts'] = sorted(
        blog['posts'], key=lambda x: x['title'], reverse=True)
    file_path = 'blog/{}/{}/{}/test_file'.format(
        post._id,
        post.discussion_thread._id,
        list(post.discussion_thread.post_class().query.find())[0].slug
    )
    assert_equal(blog['posts'][0]['discussion_thread']['posts'][0]
                 ['attachments'][0]['path'], file_path)
    assert os.path.exists(os.path.join(temp_dir, file_path))
def test_email(self): self._subscribe() # as current user: test-admin user2 = M.User.query.get(username='******') self._subscribe(user=user2) self._post_notification() ThreadLocalORMSession.flush_all() assert_equal(M.Notification.query.get()['from_address'], '"Test Admin" <*****@*****.**>') assert_equal(M.Mailbox.query.find().count(), 2) M.MonQTask.run_ready() # sends the notification out into "mailboxes", and from mailboxes into email tasks mboxes = M.Mailbox.query.find().all() assert_equal(len(mboxes), 2) assert_equal(len(mboxes[0].queue), 1) assert not mboxes[0].queue_empty assert_equal(len(mboxes[1].queue), 1) assert not mboxes[1].queue_empty email_tasks = M.MonQTask.query.find({'state': 'ready'}).all() assert_equal(len(email_tasks), 2) # make sure both subscribers will get an email first_destinations = [e.kwargs['destinations'][0] for e in email_tasks] assert_in(str(c.user._id), first_destinations) assert_in(str(user2._id), first_destinations) assert_equal(email_tasks[0].kwargs['fromaddr'], '"Test Admin" <*****@*****.**>') assert_equal(email_tasks[1].kwargs['fromaddr'], '"Test Admin" <*****@*****.**>') assert_equal(email_tasks[0].kwargs['sender'], '*****@*****.**') assert_equal(email_tasks[1].kwargs['sender'], '*****@*****.**') assert email_tasks[0].kwargs['text'].startswith('Home modified by Test Admin') assert 'you indicated interest in ' in email_tasks[0].kwargs['text']
def test_macro_include_extra_br():
    """Consecutive [[include]] macros must not introduce extra markup
    beyond one wrapper div per included page."""
    p_nbhd = M.Neighborhood.query.get(name="Projects")
    p_test = M.Project.query.get(shortname="test", neighborhood_id=p_nbhd._id)
    wiki = p_test.app_instance("wiki")
    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        p = WM.Page.upsert(title="Include_1")
        p.text = "included page 1"
        p.commit()
        p = WM.Page.upsert(title="Include_2")
        p.text = "included page 2"
        p.commit()
        p = WM.Page.upsert(title="Include_3")
        p.text = "included page 3"
        p.commit()
        ThreadLocalORMSession.flush_all()
        # Conversion happens inside the context so the refs resolve.
        md = "[[include ref=Include_1]]\n[[include ref=Include_2]]\n[[include ref=Include_3]]"
        html = g.markdown_wiki.convert(md)
    # Expected markup is laid out one tag per line for readability;
    # newlines are stripped on both sides before comparing.
    expected_html = """
<div class="markdown_content">
<p>
<div><div class="markdown_content"><p>included page 1</p></div></div>
<div><div class="markdown_content"><p>included page 2</p></div></div>
<div><div class="markdown_content"><p>included page 3</p></div></div>
</p>
</div>
""".strip().replace(
        "\n", ""
    )
    assert html.strip().replace("\n", "") == expected_html, html
def setup_with_tools(self):
    """Initialize global test objects with clean notification state."""
    setup_global_objects()
    _clear_subscriptions()
    _clear_notifications()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    M.notification.MAILBOX_QUIESCENT = None  # disable message combining
def main():
    """Touch every attachment type per project database so the ORM
    rewrites them polymorphically; logs each one as it goes.

    One representative project per distinct database_uri is enough,
    since the migration is per-database.
    """
    dbs = dict((p.database_uri, p) for p in M.Project.query.find())
    for db, p in sorted(dbs.items()):
        log.info('=== Making attachments in %s polymorphic ===', db)
        c.project = p
        log.info('Fixing %d discussions', M.Discussion.query.find().count())
        for d in M.Discussion.query.find():
            for a in M.DiscussionAttachment.query.find(dict(
                    discussion_id=d._id)):
                log.info('%s: %s', d.url(), a.filename)
        log.info('Fixing %d forums', DM.Forum.query.find().count())
        for d in DM.Forum.query.find():
            for a in DM.ForumAttachment.query.find(dict(
                    discussion_id=d._id)):
                log.info('%s: %s', d.url(), a.filename)
        log.info('Fixing %d tickets', TM.Ticket.query.find().count())
        for t in TM.Ticket.query.find():
            for a in TM.TicketAttachment.query.find(dict(
                    artifact_id=t._id)):
                log.info('%s: %s', t.url(), a.filename)
        log.info('Fixing %d wikis', WM.Page.query.find().count())
        for p in WM.Page.query.find():
            for a in WM.WikiAttachment.query.find(dict(
                    artifact_id=p._id)):
                log.info('%s: %s', p.url(), a.filename)
        # Persist any lazy migrations triggered by the loads above.
        ThreadLocalORMSession.flush_all()
def test_macro_neighborhood_feeds():
    """[[neighborhood_feeds]] must hide feed items from projects the
    viewing user cannot read; [[neighborhood_blog_posts]] shows posts."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    with h.push_context('--init--', 'wiki', neighborhood='Projects'):
        r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
        assert 'Home modified by' in r, r
        orig_len = len(r)
        # Make project private & verify we don't see its new feed items
        anon = M.User.anonymous()
        p_test.acl.insert(0, M.ACE.deny(
            M.ProjectRole.anonymous(p_test)._id, 'read'))
        ThreadLocalORMSession.flush_all()
        pg = WM.Page.query.get(title='Home', app_config_id=c.app.config._id)
        pg.text = 'Change'
        with h.push_config(c, user=M.User.by_username('test-admin')):
            pg.commit()
        r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
        new_len = len(r)
        # Output unchanged: the private project's edit is invisible.
        assert new_len == orig_len
        p = BM.BlogPost(title='test me',
                        neighborhood_id=p_test.neighborhood_id)
        p.text = 'test content'
        p.state = 'published'
        p.make_slug()
        with h.push_config(c, user=M.User.by_username('test-admin')):
            p.commit()
        ThreadLocalORMSession.flush_all()
        with h.push_config(c, user=anon):
            r = g.markdown_wiki.convert('[[neighborhood_blog_posts]]')
            assert 'test content' in r
def test_sendsimplemail_with_disabled_user(self):
    """Mail from a disabled user must fall back to the no-reply From
    header instead of exposing the user's address."""
    c.user = M.User.by_username('test-admin')
    with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
        mail_tasks.sendsimplemail(
            fromaddr=str(c.user._id),
            toaddr='*****@*****.**',
            text=u'This is a test',
            reply_to=g.noreply,
            subject=u'Test subject',
            message_id=h.gen_message_id())
        assert_equal(_client.sendmail.call_count, 1)
        return_path, rcpts, body = _client.sendmail.call_args[0]
        body = body.split('\n')
        # Enabled user: real display name/address in From.
        assert_in('From: "Test Admin" <*****@*****.**>', body)
        c.user.disabled = True
        ThreadLocalORMSession.flush_all()
        mail_tasks.sendsimplemail(
            fromaddr=str(c.user._id),
            toaddr='*****@*****.**',
            text=u'This is a test',
            reply_to=g.noreply,
            subject=u'Test subject',
            message_id=h.gen_message_id())
        assert_equal(_client.sendmail.call_count, 2)
        return_path, rcpts, body = _client.sendmail.call_args[0]
        body = body.split('\n')
        # Disabled user: From is replaced by the no-reply address.
        assert_in('From: %s' % g.noreply, body)
def test_permissions(self):
    """No mail task is created when the subscriber lacks read access.

    Notification should only be delivered if user has read perms on the
    artifact.  The perm check happens just before the mail task is
    posted, so we force it to fail and assert no task appears.
    """
    u = M.User.query.get(username='******')
    self._subscribe(user=u)

    # Simulate a permission check failure.
    def patched_has_access(*args, **kw):
        def predicate(*args, **kw):
            return False
        return predicate
    from allura.model.notification import security
    orig = security.has_access
    security.has_access = patched_has_access
    try:
        # this will create a notification task
        self._post_notification()
        ThreadLocalORMSession.flush_all()
        # running the notification task will create a mail task if the
        # permission check passes...
        M.MonQTask.run_ready()
        ThreadLocalORMSession.flush_all()
        # ...but in this case it doesn't create a mail task since we
        # forced the perm check to fail
        # (idiom fix: identity comparison with None, not ``== None``)
        assert M.MonQTask.get() is None
    finally:
        # Always restore the real permission check for later tests.
        security.has_access = orig
def test_hideawards_macro():
    """[[projects]] shows a granted award banner by default and hides it
    when show_awards_banner=False."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    app_config_id = ObjectId()
    award = M.Award(app_config_id=app_config_id)
    award.short = u'Award short'
    award.full = u'Award full'
    award.created_by_neighborhood_id = p_nbhd._id
    project = M.Project.query.get(
        neighborhood_id=p_nbhd._id, shortname=u'test')
    M.AwardGrant(
        award=award,
        award_url='http://award.org',
        comment='Winner!',
        granted_by_neighborhood=p_nbhd,
        granted_to_project=project)
    ThreadLocalORMSession.flush_all()
    with h.push_context(p_nbhd.neighborhood_project._id):
        r = g.markdown_wiki.convert('[[projects]]')
        assert_in('<div class="feature">'
                  ' <a href="http://award.org" rel="nofollow" title="Winner!">'
                  'Award short</a> </div>',
                  squish_spaces(r))
        r = g.markdown_wiki.convert('[[projects show_awards_banner=False]]')
        assert_not_in('Award short', r)
def test_direct_sub(self):
    """Two notifications to a direct subscriber both deliver when the
    mailbox fires (no combining)."""
    self._subscribe()
    self._post_notification(text='A')
    self._post_notification(text='B')
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    M.Mailbox.fire_ready()
def test_macro_include_no_extra_br():
    """Consecutive [[include]] macros produce one wrapper div per page
    and no spurious <br> tags (whitespace-insensitive comparison)."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    wiki = p_test.app_instance('wiki')
    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        p = WM.Page.upsert(title='Include_1')
        p.text = 'included page 1'
        p.commit()
        p = WM.Page.upsert(title='Include_2')
        p.text = 'included page 2'
        p.commit()
        p = WM.Page.upsert(title='Include_3')
        p.text = 'included page 3'
        p.commit()
        ThreadLocalORMSession.flush_all()
        # Conversion happens inside the context so the refs resolve.
        md = '[[include ref=Include_1]]\n[[include ref=Include_2]]\n[[include ref=Include_3]]'
        html = g.markdown_wiki.convert(md)
    # squish_spaces normalizes runs of whitespace on both sides.
    expected_html = '''<div class="markdown_content"><p></p><div>
<div class="markdown_content"><p>included page 1</p></div>
</div>
<div>
<div class="markdown_content"><p>included page 2</p></div>
</div>
<div>
<div class="markdown_content"><p>included page 3</p></div>
</div>
<p></p></div>'''
    assert_equal(squish_spaces(html), squish_spaces(expected_html))
def test_macro_include_permissions():
    """[[include]] must honor read permission of the included page: an
    anonymous user sees readable pages, not pages from a locked wiki."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    wiki = p_test.app_instance('wiki')
    wiki2 = p_test.app_instance('wiki2')
    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        p = WM.Page.upsert(title='CanRead')
        p.text = 'Can see this!'
        p.commit()
        ThreadLocalORMSession.flush_all()
    with h.push_context(p_test._id, app_config_id=wiki2.config._id):
        # Revoke anonymous read on wiki2, then create a page there.
        role = M.ProjectRole.by_name('*anonymous')._id
        read_perm = M.ACE.allow(role, 'read')
        acl = c.app.config.acl
        if read_perm in acl:
            acl.remove(read_perm)
        p = WM.Page.upsert(title='CanNotRead')
        p.text = 'Can not see this!'
        p.commit()
        ThreadLocalORMSession.flush_all()
    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        c.user = M.User.anonymous()
        md = '[[include ref=CanRead]]\n[[include ref=wiki2:CanNotRead]]'
        html = g.markdown_wiki.convert(md)
        assert_in('Can see this!', html)
        assert_not_in('Can not see this!', html)
        assert_in("[[include: you don't have a read permission for wiki2:CanNotRead]]", html)
def test_paged_diffs_with_detect_copies(self):
    """paged_diffs() reports copies and renames (with similarity ratio)
    for commits in the weird-chars fixture repo."""
    # setup
    h.set_context('test', 'src-weird', neighborhood='Projects')
    repo_dir = pkg_resources.resource_filename(
        'forgegit', 'tests/data')
    repo = GM.Repository(
        name='weird-chars.git',
        fs_path=repo_dir,
        url_path='/src-weird/',
        tool='git',
        status='creating')
    repo.refresh()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    diffs = repo.paged_diffs('346c52c1dddc729e2c2711f809336401f0ff925e')  # Test copy
    expected = {
        'added': [],
        'removed': [],
        'copied': [{'new': u'README.copy', 'old': u'README', 'ratio': 1.0}],
        'renamed': [],
        'changed': [u'README'],
        'total': 2,
    }
    assert_equals(diffs, expected)
    diffs = repo.paged_diffs('3cb2bbcd7997f89060a14fe8b1a363f01883087f')  # Test rename
    expected = {
        'added': [],
        'removed': [],
        'copied': [],
        'renamed': [{'new': u'README', 'old': u'README-copy.md', 'ratio': 1.0}],
        'changed': [],
        'total': 1,
    }
    assert_equals(diffs, expected)
def test_user_search_for_disabled_user(self):
    """Disabled users must be excluded from user-search results."""
    admin = M.User.by_username('test-admin')
    admin.disabled = True
    ThreadLocalORMSession.flush_all()
    response = self.app.get('/p/test/user_search?term=test', status=200)
    payload = json.loads(response.body)
    assert payload == {'users': []}
def fork(self, project_id=None, mount_point=None, mount_label=None):
    """Show the fork form (GET) or fork this repo into another project
    (POST).  Requires authentication and a forkable repo.

    :param project_id: target project ObjectId string
    :param mount_point: mount point for the forked repo in the target
    :param mount_label: display label for the forked repo
    """
    # this shows the form and handles the submission
    security.require_authenticated()
    if not c.app.forkable:
        raise exc.HTTPNotFound
    from_repo = c.app.repo
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    from_project = c.project
    to_project = M.Project.query.get(_id=ObjectId(project_id))
    mount_label = mount_label or '%s - %s' % (c.project.name,
                                              c.app.config.options.mount_label)
    mount_point = (mount_point or from_project.shortname)
    if request.method != 'POST' or not mount_point:
        # GET (or missing mount point): render the fork form.
        return dict(from_repo=from_repo,
                    user_project=c.user.private_project(),
                    mount_point=mount_point,
                    mount_label=mount_label)
    else:
        with h.push_config(c, project=to_project):
            if not to_project.database_configured:
                to_project.configure_project(is_user_project=True)
            security.require(security.has_access(to_project, 'admin'))
            try:
                to_project.install_app(
                    ep_name=from_repo.tool_name,
                    mount_point=mount_point,
                    mount_label=mount_label,
                    cloned_from_project_id=from_project._id,
                    cloned_from_repo_id=from_repo._id)
                redirect(to_project.url() + mount_point + '/')
            except exc.HTTPRedirection:
                # redirect() raises; let it pass through untouched.
                raise
            except Exception, ex:
                flash(str(ex), 'error')
                redirect(request.referer)
def notice(self, out, message):
    """Send an IRC NOTICE to *out* and persist it as a ChatMessage."""
    wire_line = 'NOTICE %s :%s' % (out, message)
    self.say(wire_line)
    CM.ChatMessage(
        sender=self.nick,
        channel=out,
        text=message)
    ThreadLocalORMSession.flush_all()
def fork(self, project_id=None, mount_point=None, mount_label=None, **kw):
    """Show the fork form (GET) or fork this repo into another project
    (POST).  Requires authentication and a forkable repo.

    :param project_id: target project ObjectId string
    :param mount_point: mount point for the forked repo in the target
    :param mount_label: display label for the forked repo
    :param kw: extra request params, ignored
    """
    # this shows the form and handles the submission
    require_authenticated()
    if not c.app.forkable:
        raise exc.HTTPNotFound
    from_repo = c.app.repo
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    from_project = c.project
    to_project = M.Project.query.get(_id=ObjectId(project_id))
    mount_label = mount_label or '%s - %s' % (
        c.project.name, c.app.config.options.mount_label)
    mount_point = (mount_point or from_project.shortname)
    if request.method != 'POST' or not mount_point:
        # GET (or missing mount point): render the fork form.
        return dict(from_repo=from_repo,
                    user_project=c.user.private_project(),
                    mount_point=mount_point,
                    mount_label=mount_label)
    else:
        with h.push_config(c, project=to_project):
            if not to_project.database_configured:
                to_project.configure_project(is_user_project=True)
            require_access(to_project, 'admin')
            try:
                to_project.install_app(
                    ep_name=from_repo.tool_name,
                    mount_point=mount_point,
                    mount_label=mount_label,
                    cloned_from_project_id=from_project._id,
                    cloned_from_repo_id=from_repo._id)
                redirect(to_project.url() + mount_point + '/')
            except exc.HTTPRedirection:
                # redirect() raises; let it pass through untouched.
                raise
            except Exception, ex:
                flash(str(ex), 'error')
                redirect(request.referer)
def test_email(self): self._subscribe() # as current user: test-admin user2 = M.User.query.get(username='******') self._subscribe(user=user2) self._post_notification() ThreadLocalORMSession.flush_all() assert_equal(M.Notification.query.get()['from_address'], '"Test Admin" <*****@*****.**>') assert_equal(M.Mailbox.query.find().count(), 2) M.MonQTask.run_ready( ) # sends the notification out into "mailboxes", and from mailboxes into email tasks mboxes = M.Mailbox.query.find().all() assert_equal(len(mboxes), 2) assert_equal(len(mboxes[0].queue), 1) assert not mboxes[0].queue_empty assert_equal(len(mboxes[1].queue), 1) assert not mboxes[1].queue_empty email_tasks = M.MonQTask.query.find({'state': 'ready'}).all() assert_equal(len(email_tasks), 2) # make sure both subscribers will get an email first_destinations = [e.kwargs['destinations'][0] for e in email_tasks] assert_in(str(c.user._id), first_destinations) assert_in(str(user2._id), first_destinations) assert_equal(email_tasks[0].kwargs['fromaddr'], '"Test Admin" <*****@*****.**>') assert_equal(email_tasks[1].kwargs['fromaddr'], '"Test Admin" <*****@*****.**>') assert_equal(email_tasks[0].kwargs['sender'], '*****@*****.**') assert_equal(email_tasks[1].kwargs['sender'], '*****@*****.**') assert email_tasks[0].kwargs['text'].startswith( 'Home modified by Test Admin') assert 'you indicated interest in ' in email_tasks[0].kwargs['text']
def test_discussion_methods():
    """Smoke-test Discussion accessors, stats, URL, index, JSON view and
    deletion."""
    d = M.Discussion(shortname='test', name='test')
    assert d.thread_class() == M.Thread
    assert d.post_class() == M.Post
    assert d.attachment_class() == M.DiscussionAttachment
    ThreadLocalORMSession.flush_all()
    d.update_stats()
    ThreadLocalORMSession.flush_all()
    # Idiom fix: identity comparison with None instead of ``== None``.
    assert d.last_post is None
    assert d.url().endswith('wiki/_discuss/')
    assert d.index()['name_s'] == 'test'
    assert d.find_posts().count() == 0
    jsn = d.__json__()
    assert jsn['name'] == d.name
    d.delete()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def refresh_repo_lcds(cls, commit_ids, options): tree_cache = {} timings = [] model_cache = M.repository.ModelCache( max_instances={M.repository.LastCommit: 4000}, max_queries={M.repository.LastCommit: 4000}, ) lcid_cache = {} timings = [] print 'Processing last commits' for i, commit_id in enumerate(commit_ids): commit = M.repository.Commit.query.get(_id=commit_id) if commit is None: print "Commit missing, skipping: %s" % commit_id continue commit.set_context(c.app.repo) with time(timings): M.repo_refresh.compute_lcds(commit, model_cache, lcid_cache) ThreadLocalORMSession.flush_all() if i % 100 == 0: cls._print_stats(i, timings, 100) if options.limit and i >= options.limit: break ThreadLocalORMSession.flush_all()
def setup_with_tools(self):
    """Point two SVN tools at fixture repositories and refresh them.

    Leaves the context set to the plain 'src' tool for the tests.
    """
    setup_global_objects()
    repo_dir = pkg_resources.resource_filename('forgesvn', 'tests/data/')
    with h.push_context('test', 'src', neighborhood='Projects'):
        c.app.repo.name = 'testsvn'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        self.repo.refresh()
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
    with h.push_context('test', 'svn-tags', neighborhood='Projects'):
        c.app.repo.name = 'testsvn-trunk-tags-branches'
        c.app.repo.fs_path = repo_dir
        self.svn_tags = c.app.repo
        self.svn_tags.refresh()
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
    h.set_context('test', 'src', neighborhood='Projects')
def test_thread_methods():
    """Exercise Thread accessors, post pagination styles, subscription
    toggling, JSON view and the spam/delete lifecycle."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    assert t.discussion_class() == M.Discussion
    assert t.post_class() == M.Post
    assert t.attachment_class() == M.DiscussionAttachment
    p0 = t.post('This is a post')
    p1 = t.post('This is another post')
    time.sleep(0.25)  # ensure distinct timestamps for ordering tests
    t.post('This is a reply', parent_id=p0._id)
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    d = M.Discussion.query.get(shortname='test')
    t = d.threads[0]
    assert d.last_post is not None
    assert t.last_post is not None
    t.create_post_threads(t.posts)
    posts0 = t.find_posts(page=0, limit=10, style='threaded')
    posts1 = t.find_posts(page=0, limit=10, style='timestamp')
    assert posts0 != posts1
    # Mongo stores timestamps at millisecond resolution; round to match.
    ts = p0.timestamp.replace(
        microsecond=int(p0.timestamp.microsecond // 1000) * 1000)
    posts2 = t.find_posts(page=0, limit=10, style='threaded', timestamp=ts)
    assert len(posts2) > 0
    assert 'wiki/_discuss/' in t.url()
    assert t.index()['views_i'] == 0
    assert not t.subscription
    t.subscription = True
    assert t.subscription
    t.subscription = False
    assert not t.subscription
    assert t.post_count == 3
    jsn = t.__json__()
    assert '_id' in jsn
    assert_equals(len(jsn['posts']), 3)
    # BUG FIX: this was ``(p.approve() for p in (p0, p1))`` — a generator
    # expression that was never consumed, so approve() never ran.
    for post in (p0, p1):
        post.approve()
    ThreadLocalORMSession.flush_all()
    assert t.num_replies == 3
    t.spam()
    assert t.num_replies == 0
    ThreadLocalORMSession.flush_all()
    assert len(t.find_posts()) == 0
    t.delete()
def setup_with_tools(self):
    """Select the wiki app, reset subscriptions/notifications, drain tasks."""
    setup_global_objects()
    g.set_app('wiki')
    _clear_subscriptions()
    _clear_notifications()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    self.pg = WM.Page.query.get(app_config_id=c.app.config._id)
    # disable message combining so each notification stands alone
    M.notification.MAILBOX_QUIESCENT = None
    while M.MonQTask.run_ready('setup'):
        ThreadLocalORMSession.flush_all()
def test_subscribe_unsubscribe(self):
    """Subscribing creates exactly one direct mailbox; unsubscribing removes it."""
    mbox_query = dict(project_id=c.project._id,
                      app_config_id=c.app.config._id,
                      user_id=c.user._id)

    M.Mailbox.subscribe(type='direct')
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    subscriptions = M.Mailbox.query.find(mbox_query).all()
    assert len(subscriptions) == 1
    assert subscriptions[0].type == 'direct'
    assert M.Mailbox.query.find().count() == 1

    M.Mailbox.unsubscribe()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    subscriptions = M.Mailbox.query.find(mbox_query).all()
    assert len(subscriptions) == 0
    assert M.Mailbox.query.find().count() == 0
def _test_message(self):
    """Posting to the subscribed page's thread queues a sendmail task."""
    self._subscribe()
    thread = M.Thread.query.get(ref_id=self.pg.index_id())
    thread.post('This is a very cool message')
    M.MonQTask.run_ready()
    ThreadLocalORMSession.flush_all()
    M.Mailbox.fire_ready()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    msg = M.MonQTask.query.get(
        task_name='allura.tasks.mail_tasks.sendmail',
        state='ready')
    assert msg is not None
    assert '[email protected]' in msg.kwargs['reply_to']
    author = M.User.by_username('test-admin')
    # the From address encodes the sending user's id
    assert str(author._id) in msg.kwargs['fromaddr'], msg.kwargs['fromaddr']
# Dispatch a single parsed IRC command.  Code kept byte-identical (the
# branch nesting of the trailing flush/check_configure calls is ambiguous
# in this whitespace-mangled excerpt; rewriting could silently move them).
# Visible behavior: NOTICE is ignored up front; '433' (nick in use) picks a
# new nick, clears channel state and reconfigures; PING is answered with
# PONG; PRIVMSG resolves the recipient into a context, strips a leading
# ':', logs the message and expands shortlinks, then flushes/closes ORM
# sessions.
# NOTE(review): the `elif cmd in ('NOTICE', 'PRIVMSG')` arm can never see
# NOTICE -- the first `if cmd == 'NOTICE': pass` already consumes it -- so
# the inner `if cmd == 'NOTICE': return` is dead code; confirm before
# removing.
def handle_command(self, sender, cmd, rest): if cmd == 'NOTICE': pass elif cmd == '433': self.set_nick() self.channels = {} self.configure() elif cmd == 'PING': self.say('PONG ' + rest) elif cmd in ('NOTICE', 'PRIVMSG'): rcpt, msg = rest.split(' ', 1) if not self.set_context(rcpt): return if msg.startswith(':'): msg = msg[1:] self.log_channel(sender, cmd, rcpt, msg) if cmd == 'NOTICE': return for lnk in search.find_shortlinks(msg): self.handle_shortlink(lnk, sender, rcpt) ThreadLocalORMSession.flush_all() ThreadLocalORMSession.close_all() self.check_configure() ThreadLocalORMSession.close_all()
# Sitemap-generation entry point, kept byte-identical.  NOTE(review): this
# excerpt appears truncated -- another copy of this script elsewhere in
# SOURCE continues with a `while locs:` drain and sitemap-index writing, so
# a rewrite here could orphan that remainder.  Visible behavior: fakes a
# pylons request (sidebar_menu() implementations expect one), refuses to
# overwrite an existing output directory, collects project sitemap URLs
# (excluding git/hg/svn tools; neighborhoods matching options.neighborhood
# url prefixes are excluded via the $nin filter), and flushes batches of at
# most options.urls_per_file URLs into numbered sitemap-<n>.xml files.
def execute(cls, options): # This script will indirectly call app.sidebar_menu() for every app in # every project. Some of the sidebar_menu methods expect the # pylons.request threadlocal object to be present. So, we're faking it. # # The fact that this isn't a 'real' request doesn't matter for the # purposes of the sitemap. pylons.request._push_object(webob.Request.blank('/')) output_path = options.output_dir if os.path.exists(output_path): raise Exception('%s directory already exists.' % output_path) os.mkdir(output_path) now = datetime.utcnow().date() sitemap_content_template = Template(SITEMAP_TEMPLATE) def write_sitemap(urls, file_no): sitemap_content = sitemap_content_template.render( dict(now=now, locs=urls)) with open(os.path.join(output_path, 'sitemap-%d.xml' % file_no), 'w') as f: f.write(sitemap_content) creds = security.Credentials.get() locs = [] file_count = 0 nbhd_id = [] if options.neighborhood: prefix = ['/%s/' % n for n in options.neighborhood] nbhd_id = [ nbhd._id for nbhd in M.Neighborhood.query.find( {'url_prefix': { '$in': prefix }}) ] # write sitemap files, MAX_SITEMAP_URLS per file for chunk in utils.chunked_find(M.Project, { 'deleted': False, 'neighborhood_id': { '$nin': nbhd_id } }): for p in chunk: c.project = p try: for s in p.sitemap(excluded_tools=['git', 'hg', 'svn']): url = config['base_url'] + s.url if s.url[ 0] == '/' else s.url locs.append({ 'url': url, 'date': p.last_updated.strftime("%Y-%m-%d") }) except Exception, e: print "Error creating sitemap for project '%s': %s" %\ (p.shortname, e) creds.clear() if len(locs) >= options.urls_per_file: write_sitemap(locs[:options.urls_per_file], file_count) del locs[:options.urls_per_file] file_count += 1 M.main_orm_session.clear() ThreadLocalORMSession.close_all()
def import_tool(self, project, user, mount_point=None, mount_label=None,
                **kw):
    """Import an exported-Allura-JSON tracker into *project*.

    Installs a fresh tickets app at *mount_point* and recreates every
    exported ticket with its comments and attachments.  On any failure the
    app is locked down to admins before the exception propagates.
    """
    import_id_converter = ImportIdConverter.get()
    tracker_json = self._load_json(project)
    # These options come from the new install, not from the export.
    options = tracker_json['tracker_config']['options']
    for stale_opt in ('ordinal', 'mount_point', 'mount_label', 'import_id'):
        options.pop(stale_opt, None)
    app = project.install_app(
        'tickets', mount_point, mount_label,
        import_id={
            'source': self.source,
            'app_config_id': tracker_json['tracker_config']['_id'],
        },
        open_status_names=tracker_json['open_status_names'],
        closed_status_names=tracker_json['closed_status_names'],
        **options)
    ThreadLocalORMSession.flush_all()
    try:
        # keep the exported mod dates instead of stamping "now"
        M.session.artifact_orm_session._get().skip_mod_date = True
        for ticket_json in tracker_json['tickets']:
            reporter = self.get_user(ticket_json['reported_by'])
            owner = self.get_user(ticket_json['assigned_to'])
            with h.push_config(c, user=reporter, app=app):
                self.max_ticket_num = max(ticket_json['ticket_num'],
                                          self.max_ticket_num)
                ticket = TM.Ticket(
                    app_config_id=app.config._id,
                    import_id=import_id_converter.expand(
                        ticket_json['ticket_num'], app),
                    description=self.annotate(
                        self.annotate(ticket_json['description'],
                                      owner, ticket_json['assigned_to'],
                                      label=' owned'),
                        reporter, ticket_json['reported_by'],
                        label=' created'),
                    created_date=dateutil.parser.parse(
                        ticket_json['created_date']),
                    mod_date=dateutil.parser.parse(
                        ticket_json['mod_date']),
                    ticket_num=ticket_json['ticket_num'],
                    summary=ticket_json['summary'],
                    custom_fields=ticket_json['custom_fields'],
                    status=ticket_json['status'],
                    labels=ticket_json['labels'],
                    votes_down=ticket_json['votes_down'],
                    votes_up=ticket_json['votes_up'],
                    votes=ticket_json['votes_up'] -
                          ticket_json['votes_down'],
                    assigned_to_id=owner._id,
                )
                # add an attachment to the ticket; this also triggers the
                # private property below
                ticket.add_multiple_attachments(
                    [File(a['url']) for a in ticket_json['attachments']])
                ticket.private = ticket_json['private']
                self.process_comments(
                    ticket, ticket_json['discussion_thread']['posts'])
                session(ticket).flush(ticket)
                session(ticket).expunge(ticket)
        app.globals.custom_fields = tracker_json['custom_fields']
        self.process_bins(app, tracker_json['saved_bins'])
        app.globals.last_ticket_num = self.max_ticket_num
        M.AuditLog.log(
            'import tool %s from exported Allura JSON' % (
                app.config.options.mount_point, ),
            project=project,
            user=user,
            url=app.url,
        )
        g.post_event('project_updated')
        app.globals.invalidate_bin_counts()
        ThreadLocalORMSession.flush_all()
        return app
    except Exception:
        # don't leave a half-imported tracker visible to everyone
        h.make_app_admin_only(app)
        raise
    finally:
        M.session.artifact_orm_session._get().skip_mod_date = False
def new_fn(*args, **kwargs):
    # Run the wrapped callable, then flush ORM sessions so its DB
    # changes are persisted before the result is returned.
    outcome = fn(*args, **kwargs)
    ThreadLocalORMSession.flush_all()
    return outcome
def test_macro_projects():
    """The [[projects]] macro respects label filters, privacy, and icons."""
    file_name = 'neo-icon-set-454545-256x350.png'
    file_path = os.path.join(allura.__path__[0], 'nf', 'allura', 'images',
                             file_name)

    def _set_icon(project):
        # Give *project* a 48x48 icon so the macro renders its logo <img>.
        c.project = project
        with open(file_path) as icon_file:
            M.ProjectFile.save_image(
                file_name, icon_file, content_type='image/png',
                square=True, thumbnail_size=(48, 48),
                thumbnail_meta=dict(project_id=c.project._id,
                                    category='icon'))

    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test',
                                 neighborhood_id=p_nbhd._id)
    _set_icon(p_test)
    p_test2 = M.Project.query.get(shortname='test2',
                                  neighborhood_id=p_nbhd._id)
    _set_icon(p_test2)
    p_sub1 = M.Project.query.get(shortname='test/sub1',
                                 neighborhood_id=p_nbhd._id)
    _set_icon(p_sub1)
    p_test.labels = ['test', 'root']
    p_sub1.labels = ['test', 'sub1']
    # Make one project private
    p_test.private = False
    p_sub1.private = False
    p_test2.private = True
    ThreadLocalORMSession.flush_all()

    with h.push_config(c, project=p_nbhd.neighborhood_project,
                       user=M.User.by_username('test-admin')):
        r = g.markdown_wiki.convert('[[projects]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=root]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=sub1]]')
        assert 'alt="Test Project Logo"' not in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,sub1]]')
        assert 'alt="Test Project Logo"' not in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=root|sub1]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root|root,sub1]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root|test,sub1]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects show_total=True sort=random]]')
        assert '<p class="macro_projects_total">3 Projects' in r, r
        r = g.markdown_wiki.convert(
            '[[projects show_total=True private=True sort=random]]')
        assert '<p class="macro_projects_total">1 Projects' in r, r
        assert 'alt="Test 2 Logo"' in r, r
        assert 'alt="Test Project Logo"' not in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects show_proj_icon=True]]')
        assert 'alt="Test Project Logo"' in r
        r = g.markdown_wiki.convert('[[projects show_proj_icon=False]]')
        assert 'alt="Test Project Logo"' not in r
# NOTE(review): kept byte-identical.  This line begins mid-function -- the
# tail of a create_project() helper whose `def` is outside this excerpt
# (copies tool_data, re-grants awards, saves the icon if any, re-enables
# notifications, flushes inside a pushed project/user config, fires
# 'project_updated' and returns 0) -- and also contains two complete
# definitions: create_projects(), which exits the process with the first
# non-zero return code from create_project(), and main(), which attaches a
# stdout handler and sets the log level from options.log_level.
project.tool_data.update(p.tool_data) for a in p.awards: M.AwardGrant(app_config_id=bson.ObjectId(), award_id=a._id, granted_to_project_id=project._id, granted_by_neighborhood_id=nbhd._id) if p.icon: with open(p.icon) as icon_file: project.save_icon(p.icon, icon_file) project.notifications_disabled = False with h.push_config(c, project=project, user=p.admin): ThreadLocalORMSession.flush_all() g.post_event('project_updated') session(project).clear() return 0 def create_projects(projects, nbhd, options): for p in projects: r = create_project(Object(p), nbhd, options) if r != 0: sys.exit(r) def main(options): log.addHandler(logging.StreamHandler(sys.stdout)) log.setLevel(getattr(logging, options.log_level.upper()))
def setUp(self):
    """Bootstrap the test env and record the configured SCM repo root."""
    setup_basic_test()
    setup_global_objects()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    self.prefix = tg.config.get('scm.repos.root', '/')
def setup_with_tools(self):
    """Point the context at the 'src-git' tool with clean ORM sessions."""
    setup_global_objects()
    h.set_context('test', 'src-git', neighborhood='Projects')
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def _subscribe(self, type='direct', topic=None):
    # Subscribe the current user to self.pg and persist the mailbox.
    self.pg.subscribe(type=type, topic=topic)
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def moderate_post(self, **kwargs):
    """Run the moderation controller on the test post (approve/spam/...)."""
    checked_post = dict(checked=True, _id=self.get_post()._id)
    self.controller.save_moderation(post=[checked_post], **kwargs)
    ThreadLocalORMSession.flush_all()
def execute(cls, options):
    """Refresh repository data for every project/tool matching *options*.

    Optionally deletes existing commit metadata first (--clean /
    --clean-after), then re-runs each repo's refresh.
    """
    q_project = {}
    if options.nbhd:
        nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
        if not nbhd:
            return "Invalid neighborhood url prefix."
        q_project['neighborhood_id'] = nbhd._id
    if options.project:
        q_project['shortname'] = options.project
    elif options.project_regex:
        q_project['shortname'] = {'$regex': options.project_regex}
    log.info('Refreshing repositories')
    for chunk in chunked_find(M.Project, q_project):
        for p in chunk:
            log.info("Refreshing repos for project '%s'." % p.shortname)
            if options.dry_run:
                continue
            c.project = p
            if options.mount_point:
                mount_points = [options.mount_point]
            else:
                mount_points = [ac.options.mount_point
                                for ac in M.AppConfig.query.find(
                                    dict(project_id=p._id))]
            for app in (p.app_instance(mp) for mp in mount_points):
                c.app = app
                if not hasattr(app, 'repo'):
                    continue
                if c.app.repo.tool.lower() not in options.repo_types:
                    log.info("Skipping %r: wrong type (%s)", c.app.repo,
                             c.app.repo.tool.lower())
                    continue
                ci_ids = []
                if options.clean:
                    ci_ids = list(c.app.repo.all_commit_ids())
                elif options.clean_after:
                    for ci in M.repository.CommitDoc.m.find(
                            {'repo_ids': c.app.repo._id,
                             'committed.date': {'$gt': options.clean_after}}):
                        ci_ids.append(ci._id)
                if ci_ids:
                    log.info("Deleting mongo data for %i commits...",
                             len(ci_ids))
                    # delete these in chunks, otherwise the query doc can
                    # exceed the max BSON size limit (16MB at the moment)
                    for ci_ids_chunk in chunked_list(ci_ids, 3000):
                        i = M.repository.CommitDoc.m.find(
                            {"_id": {"$in": ci_ids_chunk}}).count()
                        if i:
                            log.info("Deleting %i CommitDoc docs...", i)
                            M.repository.CommitDoc.m.remove(
                                {"_id": {"$in": ci_ids_chunk}})
                    # we used to have a TreesDoc (plural) collection to
                    # provide a mapping of commit_id to tree_id so that we
                    # could clear the relevant TreeDoc records
                    # its ok though, since they are created in
                    # refresh_tree_info() and overwrite existing records
                    for ci_ids_chunk in chunked_list(ci_ids, 3000):
                        # delete LastCommitDocs
                        i = M.repository.LastCommitDoc.m.find(
                            dict(commit_id={'$in': ci_ids_chunk})).count()
                        if i:
                            log.info(
                                "Deleting %i LastCommitDoc docs...", i)
                            M.repository.LastCommitDoc.m.remove(
                                dict(commit_id={'$in': ci_ids_chunk}))
                    del ci_ids
                try:
                    if options.all:
                        log.info('Refreshing ALL commits in %r', c.app.repo)
                    else:
                        log.info('Refreshing NEW commits in %r', c.app.repo)
                    if options.profile:
                        import cProfile
                        cProfile.runctx(
                            'c.app.repo.refresh(options.all, notify=options.notify, '
                            ' commits_are_new=options.commits_are_new)',
                            globals(), locals(), 'refresh.profile')
                    else:
                        c.app.repo.refresh(
                            options.all, notify=options.notify,
                            commits_are_new=options.commits_are_new)
                except Exception:
                    log.exception('Error refreshing %r', c.app.repo)
            ThreadLocalORMSession.flush_all()
def _clear_subscriptions(self):
    # Remove every Mailbox so each test starts fully unsubscribed.
    M.Mailbox.query.remove({})
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def _subscribe(self, **kw):
    # Direct-subscribe the current user to self.pg and persist the mailbox.
    self.pg.subscribe(type='direct', **kw)
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def test_post_notification(self):
    """Posting queues a task whose second arg is the page's index id."""
    self._post_notification()
    ThreadLocalORMSession.flush_all()
    M.MonQTask.list()
    task = M.MonQTask.get()
    assert task.args[1] == self.pg.index_id()
def setUp(self):
    """Bootstrap the test environment and start with clean ORM sessions."""
    setup_basic_test()
    setup_global_objects()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def test_paged_diffs(self):
    """paged_diffs classifies adds/changes/copies/renames and paginates."""
    # setup
    h.set_context('test', 'src-weird', neighborhood='Projects')
    repo_dir = pkg_resources.resource_filename('forgegit', 'tests/data')
    repo = GM.Repository(
        name='weird-chars.git',
        fs_path=repo_dir,
        url_path='/src-weird/',
        tool='git',
        status='creating')
    repo.refresh()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()

    def expected_diff(total, **kinds):
        # Build a full diff dict: every category empty unless overridden.
        d = {'added': [], 'removed': [], 'changed': [], 'copied': [],
             'renamed': [], 'total': total}
        d.update(kinds)
        return d

    # spaces and unicode filenames
    diffs = repo.paged_diffs('407950e8fba4dbc108ffbce0128ed1085c52cfd7')
    assert_equals(diffs, expected_diff(
        2, added=['with space.txt', 'привіт.txt']))
    diffs = repo.paged_diffs('f3de6a0e7601cdde326054a1cc708afdc1dbe70b')
    assert_equals(diffs, expected_diff(1, changed=['привіт.txt']))
    # initial commit is special, but must work too
    diffs = repo.paged_diffs('afaa6d93eb5661fb04f8e10e9ba1039b7441a6c7')
    assert_equals(diffs, expected_diff(1, added=['README.md']))
    # pagination: two total changes, but each page shows only one
    diffs = repo.paged_diffs('407950e8fba4dbc108ffbce0128ed1085c52cfd7',
                             start=0, end=1)
    assert_equals(diffs, expected_diff(2, added=['with space.txt']))
    diffs = repo.paged_diffs('407950e8fba4dbc108ffbce0128ed1085c52cfd7',
                             start=1, end=2)
    assert_equals(diffs, expected_diff(2, added=['привіт.txt']))
    # Test copy
    diffs = repo.paged_diffs('346c52c1dddc729e2c2711f809336401f0ff925e')
    assert_equals(diffs, expected_diff(
        2, added=['README.copy'], changed=['README']))
    # Test rename
    diffs = repo.paged_diffs('3cb2bbcd7997f89060a14fe8b1a363f01883087f')
    assert_equals(diffs, expected_diff(
        2, added=['README'], removed=['README-copy.md']))
    # Test type change
    diffs = repo.paged_diffs('616d24f8dd4e95cadd8e93df5061f09855d1a066')
    assert_equals(diffs, expected_diff(1, changed=['README.copy']))
c.project = p try: locs += [ BASE_URL + s.url if s.url[0] == '/' else s.url for s in p.sitemap(excluded_tools=['git', 'hg', 'svn']) ] except Exception, e: print "Error creating sitemap for project '%s': %s" %\ (p.shortname, e) creds.clear() if len(locs) >= options.urls_per_file: write_sitemap(locs[:options.urls_per_file], file_count) del locs[:options.urls_per_file] file_count += 1 M.main_orm_session.clear() ThreadLocalORMSession.close_all() while locs: write_sitemap(locs[:options.urls_per_file], file_count) del locs[:options.urls_per_file] file_count += 1 # write sitemap index file if file_count: sitemap_index_vars = dict(now=now, sitemaps=[ '%s/allura_sitemap/sitemap-%d.xml' % (BASE_URL, n) for n in range(file_count) ]) sitemap_index_content = Template(INDEX_TEMPLATE).render( sitemap_index_vars) with open(os.path.join(output_path, 'sitemap.xml'), 'w') as f: f.write(sitemap_index_content)
def tearDown():
    # Drop any ORM state left over by the tests.
    ThreadLocalORMSession.close_all()
def notice(self, out, message):
    """Send an IRC NOTICE to *out* and persist it as a ChatMessage."""
    self.say('NOTICE %s :%s' % (out, message))
    CM.ChatMessage(sender=self.nick, channel=out, text=message)
    ThreadLocalORMSession.flush_all()
def setUp(self):
    """Create the shared 'foo' commit used by the repo tests."""
    super(_TestWithRepoAndCommit, self).setUp()
    self.ci, _isnew = self._make_commit('foo')
    ThreadLocalORMSession.flush_all()
def test_macros():
    """Exercise the projects, download_button, and neighborhood macros."""
    file_name = 'neo-icon-set-454545-256x350.png'
    file_path = os.path.join(allura.__path__[0], 'nf', 'allura', 'images',
                             file_name)
    curr_project = c.project

    def _set_icon(project):
        # Give *project* a 48x48 icon so the macro renders its logo <img>.
        c.project = project
        with open(file_path) as icon_file:
            M.ProjectFile.save_image(
                file_name, icon_file, content_type='image/png',
                square=True, thumbnail_size=(48, 48),
                thumbnail_meta=dict(project_id=c.project._id,
                                    category='icon'))

    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test',
                                 neighborhood_id=p_nbhd._id)
    _set_icon(p_test)
    p_test2 = M.Project.query.get(shortname='test2',
                                  neighborhood_id=p_nbhd._id)
    _set_icon(p_test2)
    p_sub1 = M.Project.query.get(shortname='test/sub1',
                                 neighborhood_id=p_nbhd._id)
    _set_icon(p_sub1)
    p_test.labels = ['test', 'root']
    p_sub1.labels = ['test', 'sub1']
    # Make one project private
    p_test.private = False
    p_sub1.private = False
    p_test2.private = True
    ThreadLocalORMSession.flush_all()

    with h.push_context(p_nbhd.neighborhood_project._id):
        r = g.markdown_wiki.convert('[[projects]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=root]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=sub1]]')
        assert '<img alt="test Logo"' not in r, r
        assert '<img alt="sub1 Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,sub1]]')
        assert '<img alt="test Logo"' not in r, r
        assert '<img alt="sub1 Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=root|sub1]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root|root,sub1]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root|test,sub1]]')
        assert '<img alt="test Logo"' in r, r
        assert '<img alt="sub1 Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects show_total=True sort=random]]')
        assert '<p class="macro_projects_total">3 Projects</p>' in r, r
        r = g.markdown_wiki.convert(
            '[[projects show_total=True private=True sort=random]]')
        assert '<p class="macro_projects_total">1 Projects</p>' in r, r
        assert '<img alt="test2 Logo"' in r, r
        assert '<img alt="test Logo"' not in r, r
        assert '<img alt="sub1 Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects show_proj_icon=True]]')
        assert '<img alt="test Logo"' in r
        r = g.markdown_wiki.convert('[[projects show_proj_icon=False]]')
        assert '<img alt="test Logo"' not in r

    c.project = curr_project
    r = g.markdown_wiki.convert('[[download_button]]')
    assert_equal(
        r,
        '<div class="markdown_content"><p><span class="download-button-%s" style="margin-bottom: 1em; display: block;"></span></p>\n</div>' % p_test._id)

    h.set_context('--init--', 'wiki', neighborhood='Projects')
    r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
    assert 'Home modified by' in r, r
    orig_len = len(r)
    # Make project private & verify we don't see its new feed items
    proj = M.Project.query.get(shortname='test',
                               neighborhood_id=p_nbhd._id)
    c.user = M.User.anonymous()
    proj.acl.insert(0, M.ACE.deny(c.user.project_role(proj)._id, 'read'))
    ThreadLocalORMSession.flush_all()
    pg = WM.Page.query.get(title='Home', app_config_id=c.app.config._id)
    pg.text = 'Change'
    pg.commit()
    r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
    new_len = len(r)
    assert new_len == orig_len

    p = BM.BlogPost(title='test me',
                    neighborhood_id=p_test.neighborhood_id)
    p.text = 'test content'
    p.state = 'published'
    p.make_slug()
    p.commit()
    ThreadLocalORMSession.flush_all()
    r = g.markdown_wiki.convert('[[neighborhood_blog_posts]]')
    assert 'test content' in r