Example #1
    def test_ipy_script_file_attribute(self):
        """Test that `__file__` is set when running `ipython file.ipy`"""
        src = "print(__file__)\n"
        self.mktmp(src, ext='.ipy')

        if dec.module_not_available('sqlite3'):
            err = 'WARNING: IPython History requires SQLite, your history will not be saved\n'
        else:
            err = None
        tt.ipexec_validate(self.fname, self.fname, err)
Example #2
    def test_ipy_script_file_attribute(self):
        """Test that `__file__` is set when running `ipython file.ipy`"""
        src = "print(__file__)\n"
        self.mktmp(src, ext='.ipy')

        if dec.module_not_available('sqlite3'):
            err = 'WARNING: IPython History requires SQLite, your history will not be saved\n'
        else:
            err = None
        tt.ipexec_validate(self.fname, self.fname, err)
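The two examples above both hinge on dec.module_not_available('sqlite3'), which reports whether an optional module can be imported so the test can adjust its expected stderr warning. A minimal sketch of that helper's behavior, assuming only the standard library (the real implementation lives in IPython.testing.decorators and may differ in detail):

import importlib

def module_not_available(module):
    """Return True if `module` cannot be imported (illustrative sketch only)."""
    try:
        importlib.import_module(module)
        return False
    except ImportError:
        return True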
Example #3
 def test_obj_del(self):
     """Test that object's __del__ methods are called on exit."""
     if sys.platform == "win32":
         try:
             import win32api
         except ImportError:
             raise SkipTest("Test requires pywin32")
     src = "class A(object):\n" "    def __del__(self):\n" "        print 'object A deleted'\n" "a = A()\n"
     self.mktmp(py3compat.doctest_refactor_print(src))
     if dec.module_not_available("sqlite3"):
         err = "WARNING: IPython History requires SQLite, your history will not be saved\n"
     else:
         err = None
     tt.ipexec_validate(self.fname, "object A deleted", err)
Example #4
 def test_obj_del(self):
     """Test that object's __del__ methods are called on exit."""
     if sys.platform == 'win32':
         try:
             import win32api
         except ImportError:
             raise SkipTest("Test requires pywin32")
     src = ("class A(object):\n"
            "    def __del__(self):\n"
            "        print 'object A deleted'\n"
            "a = A()\n")
     self.mktmp(py3compat.doctest_refactor_print(src))
     if dec.module_not_available('sqlite3'):
         err = 'WARNING: IPython History requires SQLite, your history will not be saved\n'
     else:
         err = None
     tt.ipexec_validate(self.fname, 'object A deleted', err)
Example #5
 def test_obj_del(self):
     """Test that object's __del__ methods are called on exit."""
     if sys.platform == "win32":
         try:
             import win32api
         except ImportError:
             raise SkipTest("Test requires pywin32")
     src = ("class A(object):\n"
            "    def __del__(self):\n"
            "        print('object A deleted')\n"
            "a = A()\n")
     self.mktmp(src)
     if dec.module_not_available("sqlite3"):
         err = "WARNING: IPython History requires SQLite, your history will not be saved\n"
     else:
         err = None
     tt.ipexec_validate(self.fname, "object A deleted", err)
Example #6
 def test_obj_del(self):
     """Test that object's __del__ methods are called on exit."""
     if sys.platform == 'win32':
         try:
             import win32api
         except ImportError:
             raise SkipTest("Test requires pywin32")
     src = ("class A(object):\n"
            "    def __del__(self):\n"
            "        print('object A deleted')\n"
            "a = A()\n")
     self.mktmp(src)
     if dec.module_not_available('sqlite3'):
         err = 'WARNING: IPython History requires SQLite, your history will not be saved\n'
     else:
         err = None
     tt.ipexec_validate(self.fname, 'object A deleted', err)
Example #7
    def test_tclass(self):
        mydir = os.path.dirname(__file__)
        tc = os.path.join(mydir, "tclass")
        src = ("%%run '%s' C-first\n" "%%run '%s' C-second\n" "%%run '%s' C-third\n") % (tc, tc, tc)
        self.mktmp(src, ".ipy")
        out = """\
ARGV 1-: ['C-first']
ARGV 1-: ['C-second']
tclass.py: deleting object: C-first
ARGV 1-: ['C-third']
tclass.py: deleting object: C-second
tclass.py: deleting object: C-third
"""
        if dec.module_not_available("sqlite3"):
            err = "WARNING: IPython History requires SQLite, your history will not be saved\n"
        else:
            err = None
        tt.ipexec_validate(self.fname, out, err)
Example #8
class Test_as_markdown(object):

    # this is covered by functional tests (using test-issue.html)
    # but adding some unit tests for easier verification of hosted domain link rewriting

    def test_link_within_proj(self):
        html = BeautifulSoup('''<pre>Foo: <a href="/p/myproj/issues/detail?id=1">issue 1</a></pre>''')
        assert_equal(
            _as_markdown(html.first(), 'myproj'),
            'Foo: [issue 1](#1)'
        )

    @skipif(module_not_available('html2text'))
    def test_link_other_proj_has_html2text(self):
        html = BeautifulSoup('''<pre>Foo: <a href="/p/other-project/issues/detail?id=1">issue other-project:1</a></pre>''')
        assert_equal(
            _as_markdown(html.first(), 'myproj'),
            'Foo: [issue other-project:1](https://code.google.com/p/other-project/issues/detail?id=1)'
        )

    @td.without_module('html2text')
    def test_link_other_proj_no_html2text(self):
        # without html2text, the dash in other-project doesn't get escaped right
        html = BeautifulSoup('''<pre>Foo: <a href="/p/other-project/issues/detail?id=1">issue other-project:1</a></pre>''')
        assert_equal(
            _as_markdown(html.first(), 'myproj'),
            'Foo: [issue other\\-project:1](https://code.google.com/p/other-project/issues/detail?id=1)'
        )

    def test_link_hosted_domain_within_proj(self):
        html = BeautifulSoup('''<pre>Foo: <a href="/a/eclipselabs.org/p/myproj/issues/detail?id=1">issue 1</a></pre>''')
        assert_equal(
            _as_markdown(html.first(), 'a/eclipselabs.org/p/myproj'),
            'Foo: [issue 1](#1)'
        )

    def test_link_hosted_domain_other_proj(self):
        html = BeautifulSoup('''<pre>Foo: <a href="/a/eclipselabs.org/p/other-proj/issues/detail?id=1">issue 1</a></pre>''')
        assert_equal(
            _as_markdown(html.first(), 'a/eclipselabs.org/p/myproj'),
            'Foo: [issue 1](https://code.google.com/a/eclipselabs.org/p/other-proj/issues/detail?id=1)'
        )
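Example #8 exercises both sides of an optional dependency: @skipif(module_not_available('html2text')) runs a test only when html2text is importable, while @td.without_module('html2text') forces the ImportError fallback path even when it is installed. A rough sketch of how a without_module-style decorator can be written (an illustration based on patching sys.modules, not Allura's actual helper):

import sys
from functools import wraps
from unittest import mock


def without_module(*module_names):
    """Make `import <name>` raise ImportError inside the decorated test (sketch)."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Mapping a module name to None in sys.modules makes importing it fail,
            # and patch.dict restores the original entries when the test returns.
            with mock.patch.dict(sys.modules, {name: None for name in module_names}):
                return func(*args, **kwargs)
        return wrapper
    return decorator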
Example #9
    def test_tclass(self):
        mydir = os.path.dirname(__file__)
        tc = os.path.join(mydir, 'tclass')
        src = ("%%run '%s' C-first\n"
               "%%run '%s' C-second\n"
               "%%run '%s' C-third\n") % (tc, tc, tc)
        self.mktmp(src, '.ipy')
        out = """\
ARGV 1-: ['C-first']
ARGV 1-: ['C-second']
tclass.py: deleting object: C-first
ARGV 1-: ['C-third']
tclass.py: deleting object: C-second
tclass.py: deleting object: C-third
"""
        if dec.module_not_available('sqlite3'):
            err = 'WARNING: IPython History requires SQLite, your history will not be saved\n'
        else:
            err = None
        tt.ipexec_validate(self.fname, out, err)
Example #10
class TestTracImportSupportFunctional(TestRestApiBase, TestCase):
    @with_tracker
    def test_links(self):
        doc_text = open(os.path.dirname(__file__) +
                        '/data/trac-export.json').read()

        TracImportSupport().perform_import(
            doc_text,
            '{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}'
        )

        r = self.app.get('/p/test/bugs/204/')
        ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
                                     ticket_num=204)
        slug = ticket.discussion_thread.post_class().query.find(
            dict(discussion_id=ticket.discussion_thread.discussion_id,
                 thread_id=ticket.discussion_thread._id,
                 status={'$in': ['ok',
                                 'pending']})).sort('timestamp').all()[0].slug

        assert '[test comment](204/#%s)' % slug in r
        assert 'test link [\[2496\]](http://testlink.com)' in r
        assert 'test ticket ([#201](201))' in r

    @with_tracker
    def test_slug(self):
        doc_text = open(os.path.dirname(__file__) +
                        '/data/trac-export.json').read()

        TracImportSupport().perform_import(
            doc_text,
            '{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}'
        )

        ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
                                     ticket_num=204)
        comments = ticket.discussion_thread.post_class().query.find(
            dict(discussion_id=ticket.discussion_thread.discussion_id,
                 thread_id=ticket.discussion_thread._id,
                 status={'$in': ['ok', 'pending']})).sort('timestamp').all()

        import_support = TracImportSupport()
        self.assertEqual(import_support.get_slug_by_id('204', '1'),
                         comments[0].slug)
        self.assertEqual(import_support.get_slug_by_id('204', '2'),
                         comments[1].slug)

    @with_tracker
    @skipif(module_not_available('html2text'))
    def test_list(self):
        from allura.scripts.trac_export import TracExport, DateJSONEncoder
        csv_fp = open(os.path.dirname(__file__) + '/data/test-list.csv')
        html_fp = open(os.path.dirname(__file__) + '/data/test-list.html')
        with patch.object(TracExport,
                          'next_ticket_ids',
                          return_value=[(390, {})]):
            te = TracExport('url', do_attachments=False)
            te.exhausted = True
            te.csvopen = lambda s: csv_fp
        with patch('allura.scripts.trac_export.urlopen', return_value=html_fp):
            json_data = {
                'class': 'PROJECT',
                'trackers': {
                    'default': {
                        'artifacts': list(te)
                    }
                },
            }
        TracImportSupport().perform_import(
            json.dumps(json_data, cls=DateJSONEncoder), '{"user_map": {}}')
        ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
                                     ticket_num=390)
        self.assertIn('To reproduce:  \n\\- open an mzML file',
                      ticket.description)
        self.assertIn('duplicate of:  \n\\- [#316](316)',
                      ticket.discussion_thread.find_posts()[0].text)
        self.assertIn('will crash TOPPView.', ticket.description)
Example #11
        nt.assert_raises(ImportError, _ip.magic, "load_ext daft_extension")
        url = os.path.join(os.path.dirname(__file__), "daft_extension.py")
        _ip.magic("install_ext %s" % url)
        _ip.user_ns.pop("arq", None)
        invalidate_caches()  # Clear import caches
        _ip.magic("load_ext daft_extension")
        nt.assert_equal(_ip.user_ns["arq"], 185)
        _ip.magic("unload_ext daft_extension")
        assert "arq" not in _ip.user_ns
    finally:
        _ip.ipython_dir = orig_ipython_dir
        tmpdir.cleanup()


# The nose skip decorator doesn't work on classes, so this uses unittest's skipIf
@skipIf(dec.module_not_available("IPython.nbformat.current"), "nbformat not importable")
class NotebookExportMagicTests(TestCase):
    def test_notebook_export_json(self):
        with TemporaryDirectory() as td:
            outfile = os.path.join(td, "nb.ipynb")
            _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
            _ip.magic("notebook -e %s" % outfile)

    def test_notebook_export_py(self):
        with TemporaryDirectory() as td:
            outfile = os.path.join(td, "nb.py")
            _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
            _ip.magic("notebook -e %s" % outfile)

    def test_notebook_reformat_py(self):
        from IPython.nbformat.v3.tests.nbexamples import nb0
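The comment in the snippet above notes that the nose skip decorator does not work on classes, which is why unittest's skipIf is used there. A small hedged illustration of that class-level usage (the class and test names here are invented for the example):

import unittest

from IPython.testing import decorators as dec


@unittest.skipIf(dec.module_not_available('sqlite3'), 'sqlite3 not importable')
class SqliteDependentTests(unittest.TestCase):
    # unittest.skipIf applied to the class skips every test method in it.
    def test_placeholder(self):
        self.assertTrue(True)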
Example #12
# -----------------------------------------------------------------------------
#  Copyright (C) 2012  The IPython Development Team
#
#  Distributed under the terms of the BSD License.  The full license is in
#  the file COPYING, distributed as part of this software.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
import unittest

from IPython.testing import decorators as dec
from IPython.testing import tools as tt

sqlite_err_maybe = dec.module_not_available("sqlite3")
SQLITE_NOT_AVAILABLE_ERROR = ("WARNING: IPython History requires SQLite,"
                              " your history will not be saved\n")


class TestFileToRun(tt.TempFileMixin, unittest.TestCase):
    """Test the behavior of the file_to_run parameter."""
    def test_py_script_file_attribute(self):
        """Test that `__file__` is set when running `ipython file.py`"""
        src = "print(__file__)\n"
        self.mktmp(src)

        err = SQLITE_NOT_AVAILABLE_ERROR if sqlite_err_maybe else None
        tt.ipexec_validate(self.fname, self.fname, err)

    def test_ipy_script_file_attribute(self):
Example #13
class TestGCTrackerImporter(TestCase):
    def _make_extractor(self, html):
        with mock.patch.object(base.h, 'urlopen') as urlopen:
            urlopen.return_value = ''
            extractor = google.GoogleCodeProjectExtractor(
                'my-project', 'project_info')
        extractor.page = BeautifulSoup(html)
        extractor.url = "http://test/issue/?id=1"
        return extractor

    def _make_ticket(self, issue, issue_id=1):
        self.assertIsNone(self.project.app_instance('test-issue'))
        with mock.patch.object(base.h, 'urlopen') as urlopen,\
             mock.patch.object(google.tracker, 'GoogleCodeProjectExtractor') as GPE,\
             mock.patch.object(google.tracker.M, 'AuditLog') as AL,\
             mock.patch('forgetracker.tasks.update_bin_counts') as ubc:
            urlopen.side_effect = lambda req, **kw: mock.Mock(
                read=req.get_full_url,
                info=lambda: {'content-type': 'text/plain'})
            GPE.iter_issues.return_value = [(issue_id, issue)]
            gti = google.tracker.GoogleCodeTrackerImporter()
            gti.import_tool(self.project,
                            self.user,
                            'test-issue-project',
                            mount_point='test-issue')
        c.app = self.project.app_instance('test-issue')
        query = TM.Ticket.query.find({'app_config_id': c.app.config._id})
        self.assertEqual(query.count(), 1)
        ticket = query.all()[0]
        return ticket

    def setUp(self, *a, **kw):
        super(TestGCTrackerImporter, self).setUp(*a, **kw)
        setup_basic_test()
        self.empty_issue = self._make_extractor(
            open(
                pkg_resources.resource_filename(
                    'forgeimporters',
                    'tests/data/google/empty-issue.html')).read())
        self.test_issue = self._make_extractor(
            open(
                pkg_resources.resource_filename(
                    'forgeimporters',
                    'tests/data/google/test-issue.html')).read())
        c.project = self.project = M.Project.query.get(shortname='test')
        c.user = self.user = M.User.query.get(username='******')

    def test_empty_issue(self):
        ticket = self._make_ticket(self.empty_issue)
        self.assertEqual(ticket.summary, 'Empty Issue')
        self.assertEqual(
            ticket.description,
            '*Originally created by:* [email protected]\n\nEmpty')
        self.assertEqual(ticket.status, '')
        self.assertEqual(ticket.milestone, '')
        self.assertEqual(ticket.custom_fields, {})
        assert c.app.config.options.get('EnableVoting')
        open_bin = TM.Bin.query.get(summary='Open Tickets',
                                    app_config_id=c.app.config._id)
        self.assertItemsEqual(open_bin.terms.split(' && '), [
            '!status:Fixed',
            '!status:Verified',
            '!status:Invalid',
            '!status:Duplicate',
            '!status:WontFix',
            '!status:Done',
        ])
        closed_bin = TM.Bin.query.get(summary='Closed Tickets',
                                      app_config_id=c.app.config._id)
        self.assertItemsEqual(closed_bin.terms.split(' or '), [
            'status:Fixed',
            'status:Verified',
            'status:Invalid',
            'status:Duplicate',
            'status:WontFix',
            'status:Done',
        ])

    @without_module('html2text')
    def test_issue_basic_fields(self):
        anon = M.User.anonymous()
        ticket = self._make_ticket(self.test_issue)
        self.assertEqual(ticket.reported_by, anon)
        self.assertIsNone(ticket.assigned_to_id)
        self.assertEqual(ticket.summary, 'Test "Issue"')
        assert_equal(
            ticket.description,
            '*Originally created by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
            '*Originally owned by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
            '\n'
            'Test \\*Issue\\* for testing\n'
            '\n'
            '&nbsp; 1\\. Test List\n'
            '&nbsp; 2\\. Item\n'
            '\n'
            '\\*\\*Testing\\*\\*\n'
            '\n'
            ' \\* Test list 2\n'
            ' \\* Item\n'
            '\n'
            '\\# Test Section\n'
            '\n'
            '&nbsp;&nbsp;&nbsp; p = source\\.test\\_issue\\.post\\(\\)\n'
            '&nbsp;&nbsp;&nbsp; p\\.count = p\\.count \\*5 \\#\\* 6\n'
            '&nbsp;&nbsp;&nbsp; if p\\.count &gt; 5:\n'
            '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
            '\n'
            'That\'s all')
        self.assertEqual(ticket.status, 'Started')
        self.assertEqual(ticket.created_date, datetime(2013, 8, 8, 15, 33, 52))
        self.assertEqual(ticket.mod_date, datetime(2013, 8, 8, 15, 36, 57))
        self.assertEqual(
            ticket.custom_fields, {
                '_priority': 'Medium',
                '_opsys': 'All, OSX, Windows',
                '_component': 'Logic',
                '_type': 'Defect',
                '_milestone': 'Release1.0'
            })
        self.assertEqual(ticket.labels, ['Performance', 'Security'])
        self.assertEqual(ticket.votes_up, 1)
        self.assertEqual(ticket.votes, 1)

    def test_import_id(self):
        ticket = self._make_ticket(self.test_issue, issue_id=6)
        self.assertEqual(ticket.app.config.options.import_id, {
            'source': 'Google Code',
            'project_name': 'test-issue-project',
        })
        self.assertEqual(ticket.ticket_num, 6)
        self.assertEqual(
            ticket.import_id, {
                'source': 'Google Code',
                'project_name': 'test-issue-project',
                'source_id': 6,
            })

    @skipif(module_not_available('html2text'))
    def test_html2text_escaping(self):
        ticket = self._make_ticket(self.test_issue)
        assert_equal(
            ticket.description,
            '*Originally created by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
            '*Originally owned by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
            '\n'
            'Test \\*Issue\\* for testing\n'
            '\n'
            '&nbsp; 1. Test List\n'
            '&nbsp; 2. Item\n'
            '\n'
            '\\*\\*Testing\\*\\*\n'
            '\n'
            ' \\* Test list 2\n'
            ' \\* Item\n'
            '\n'
            '\\# Test Section\n'
            '\n'
            '&nbsp;&nbsp;&nbsp; p = source.test\\_issue.post\\(\\)\n'
            '&nbsp;&nbsp;&nbsp; p.count = p.count \\*5 \\#\\* 6\n'
            '&nbsp;&nbsp;&nbsp; if p.count &gt; 5:\n'
            '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
            '\n'
            'That\'s all')

    def _assert_attachments(self, actual, *expected):
        self.assertEqual(len(actual), len(expected))
        atts = set(
            (a.filename, a.content_type, a.rfile().read()) for a in actual)
        self.assertEqual(atts, set(expected))

    def test_attachements(self):
        ticket = self._make_ticket(self.test_issue)
        self._assert_attachments(
            ticket.attachments,
            ('at1.txt', 'text/plain',
             'http://allura-google-importer.googlecode.com/issues/attachment?aid=70000000&name=at1.txt&token=3REU1M3JUUMt0rJUg7ldcELt6LA%3A1376059941255'
             ),
        )

    @without_module('html2text')
    def test_comments(self):
        anon = M.User.anonymous()
        ticket = self._make_ticket(self.test_issue)
        actual_comments = ticket.discussion_thread.find_posts()
        expected_comments = [
            {
                'timestamp':
                datetime(2013, 8, 8, 15, 35, 15),
                'text':
                ('*Originally posted by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
                 '\n'
                 'Test \\*comment\\* is a comment\n'
                 '\n'
                 '**Labels:** -OpSys-Linux OpSys-Windows\n'
                 '**Status:** Started'),
                'attachments': [
                    ('at2.txt', 'text/plain',
                     'http://allura-google-importer.googlecode.com/issues/attachment?aid=60001000&name=at2.txt&token=JOSo4duwaN2FCKZrwYOQ-nx9r7U%3A1376001446667'
                     ),
                ],
            },
            {
                'timestamp':
                datetime(2013, 8, 8, 15, 35, 34),
                'text':
                ('*Originally posted by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
                 '\n'
                 'Another comment\n\n'),
            },
            {
                'timestamp':
                datetime(2013, 8, 8, 15, 36, 39),
                'text':
                ('*Originally posted by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
                 '\n'
                 'Last comment\n\n'),
                'attachments': [
                    ('at4.txt', 'text/plain',
                     'http://allura-google-importer.googlecode.com/issues/attachment?aid=60003000&name=at4.txt&token=6Ny2zYHmV6b82dqxyoiH6HUYoC4%3A1376001446667'
                     ),
                    ('at1.txt', 'text/plain',
                     'http://allura-google-importer.googlecode.com/issues/attachment?aid=60003001&name=at1.txt&token=NS8aMvWsKzTAPuY2kniJG5aLzPg%3A1376001446667'
                     ),
                ],
            },
            {
                'timestamp':
                datetime(2013, 8, 8, 15, 36, 57),
                'text':
                ('*Originally posted by:* [[email protected]](http://code.google.com/u/101557263855536553789/)\n'
                 '\n'
                 'Oh, I forgot one\n'
                 '\n'
                 '**Labels:** OpSys-OSX'),
            },
        ]
        self.assertEqual(len(actual_comments), len(expected_comments))
        for actual, expected in zip(actual_comments, expected_comments):
            self.assertEqual(actual.author(), anon)
            self.assertEqual(actual.timestamp, expected['timestamp'])
            self.assertEqual(actual.text, expected['text'])
            if 'attachments' in expected:
                self._assert_attachments(actual.attachments,
                                         *expected['attachments'])

    def test_globals(self):
        globals = self._make_ticket(self.test_issue, issue_id=6).globals
        self.assertEqual(globals.open_status_names, 'New Accepted Started')
        self.assertEqual(globals.closed_status_names,
                         'Fixed Verified Invalid Duplicate WontFix Done')
        self.assertEqual(globals.last_ticket_num, 6)
        self.assertItemsEqual(globals.custom_fields, [
            {
                'label':
                'Milestone',
                'name':
                '_milestone',
                'type':
                'milestone',
                'options':
                '',
                'milestones': [
                    {
                        'name': 'Release1.0',
                        'due_date': None,
                        'complete': False
                    },
                ],
            },
            {
                'label': 'Type',
                'name': '_type',
                'type': 'select',
                'options': 'Defect',
            },
            {
                'label': 'Priority',
                'name': '_priority',
                'type': 'select',
                'options': 'Medium',
            },
            {
                'label': 'OpSys',
                'name': '_opsys',
                'type': 'string',
                'options': '',
            },
            {
                'label': 'Component',
                'name': '_component',
                'type': 'string',
                'options': '',
            },
        ])
Example #14
        nt.assert_raises(ImportError, _ip.magic, "load_ext daft_extension")
        url = os.path.join(os.path.dirname(__file__), "daft_extension.py")
        _ip.magic("install_ext %s" % url)
        _ip.user_ns.pop('arq', None)
        invalidate_caches()   # Clear import caches
        _ip.magic("load_ext daft_extension")
        nt.assert_equal(_ip.user_ns['arq'], 185)
        _ip.magic("unload_ext daft_extension")
        assert 'arq' not in _ip.user_ns
    finally:
        _ip.ipython_dir = orig_ipython_dir
        tmpdir.cleanup()


# The nose skip decorator doesn't work on classes, so this uses unittest's skipIf
@skipIf(dec.module_not_available('IPython.nbformat'), 'nbformat not importable')
class NotebookExportMagicTests(TestCase):
    def test_notebook_export_json(self):
        with TemporaryDirectory() as td:
            outfile = os.path.join(td, "nb.ipynb")
            _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
            _ip.magic("notebook -e %s" % outfile)


class TestEnv(TestCase):

    def test_env(self):
        env = _ip.magic("env")
        self.assertTrue(isinstance(env, dict))

    def test_env_get_set_simple(self):
Example #15
class TestGitHubWikiImporter(TestCase):

    def _make_project(self, gh_proj_name=None):
        project = Mock()
        project.get_tool_data.side_effect = lambda *args: gh_proj_name
        return project

    @patch('forgeimporters.github.wiki.M')
    @patch('forgeimporters.github.wiki.ThreadLocalORMSession')
    @patch('forgeimporters.github.wiki.g')
    @patch('forgeimporters.github.wiki.GitHubProjectExtractor')
    def test_import_tool_happy_path(self, ghpe, g, tlorms, M):
        with patch('forgeimporters.github.wiki.GitHubWikiImporter.import_pages'),\
             patch('forgeimporters.github.wiki.GitHubWikiImporter.has_wiki_repo', return_value=True),\
             patch('forgeimporters.github.wiki.c'):
            ghpe.return_value.has_wiki.return_value = True
            p = self._make_project(gh_proj_name='myproject')
            u = Mock(name='c.user')
            app = p.install_app.return_value
            app.config.options.mount_point = 'wiki'
            app.url = 'foo'
            GitHubWikiImporter().import_tool(
                p, u, project_name='project_name', user_name='testuser')
            p.install_app.assert_called_once_with(
                'Wiki',
                mount_point='wiki',
                mount_label='Wiki',
                import_id={
                    'source': 'GitHub',
                    'project_name': 'testuser/project_name',
                }
            )
            M.AuditLog.log.assert_called_once_with(
                'import tool wiki from testuser/project_name on GitHub',
                project=p, user=u, url='foo')
            g.post_event.assert_called_once_with('project_updated')

    def setUp(self):
        setup_basic_test()
        self.blob1 = Mock()
        self.blob1.name = 'Home.md'
        self.blob1.data_stream.read.return_value = '# test message'

        self.blob2 = Mock()
        self.blob2.name = 'Home2.creole'
        self.blob2.data_stream.read.return_value = '**test message**'

        self.blob3 = Mock()
        self.blob3.name = 'Home3.rest'
        self.blob3.data_stream.read.return_value = 'test message'

        self.commit1 = Mock()
        self.commit1.tree.blobs = [self.blob1]
        self.commit1.committed_date = 1256301446

        self.commit2 = Mock()
        blobs = [self.blob1, self.blob2, self.blob3]
        self.commit2.tree.blobs = blobs
        self.commit2.tree.__contains__ = lambda _, item: item in [
            self.blob1.name, self.blob2.name, self.blob3.name]
        self.commit2.tree.traverse.return_value = blobs
        self.commit2.committed_date = 1256291446

    @patch('forgeimporters.github.wiki.WM.Page.upsert')
    def test_import_id(self, upsert):
        page = Mock()
        upsert.return_value = page
        importer = GitHubWikiImporter()
        importer.app = Mock()
        importer.app.config.options = {
            'import_id': {
                'source': 'GitHub',
                'project_name': 'me/project',
            }
        }
        importer._make_page('text', 'Page.md', self.commit2)
        import_id = {
            'source': 'GitHub',
            'project_name': 'me/project',
            'source_id': 'Page',
        }
        assert_equal(page.import_id, import_id)

    @patch('forgeimporters.github.wiki.WM.Page.upsert')
    @patch('forgeimporters.github.wiki.h.render_any_markup')
    def test_without_history(self, render, upsert):
        self.commit2.tree.blobs = [self.blob2, self.blob3]
        upsert.text = Mock()
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.config.options = {}
        importer.app.url = '/p/test/wiki/'
        importer.rewrite_links = Mock(return_value='')
        importer._without_history(self.commit2)
        assert_equal(upsert.call_args_list, [call('Home2'), call('Home3')])

        assert_equal(render.call_args_list, [
            call('Home2.creole', '**test message**'),
            call('Home3.rest', 'test message')])

    @patch('forgeimporters.github.wiki.git.Repo')
    @patch('forgeimporters.github.wiki.mkdtemp')
    def test_clone_from(self, path, repo):
        with patch('forgeimporters.github.wiki.rmtree'):
            path.return_value = 'temp_path'
            GitHubWikiImporter().import_pages('wiki_url')
            repo.clone_from.assert_called_with(
                'wiki_url', to_path='temp_path', bare=True)

    @patch('forgeimporters.github.wiki.git.Repo._clone')
    @patch('forgeimporters.github.wiki.GitHubWikiImporter._with_history')
    @patch('forgeimporters.github.wiki.GitHubWikiImporter._without_history')
    def test_import_with_history(self, without_history, with_history, clone):
        repo = clone.return_value
        repo.iter_commits.return_value = [self.commit1, self.commit2]
        GitHubWikiImporter().import_pages('wiki_url', history=True)
        assert_equal(with_history.call_count, 2)
        assert_equal(without_history.call_count, 0)

    @patch('forgeimporters.github.wiki.GitHubWikiImporter._with_history')
    @patch('forgeimporters.github.wiki.GitHubWikiImporter._without_history')
    def test_get_commits_without_history(self, without_history, with_history):
        with patch('forgeimporters.github.wiki.git.Repo._clone'):
            GitHubWikiImporter().import_pages('wiki_url')
            assert_equal(with_history.call_count, 0)
            assert_equal(without_history.call_count, 1)

    @patch('forgeimporters.github.wiki.WM.Page.upsert')
    @patch('forgeimporters.github.wiki.h.render_any_markup')
    def test_with_history(self, render, upsert):
        self.commit2.stats.files = {"Home.rst": self.blob1}
        self.commit2.tree = {"Home.rst": self.blob1}
        importer = GitHubWikiImporter()
        importer._set_available_pages = Mock()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.config.options = {}
        importer.app.url = '/p/test/wiki/'
        importer.rewrite_links = Mock(return_value='')
        importer._with_history(self.commit2)
        assert_equal(upsert.call_args_list, [call('Home')])
        assert_equal(render.call_args_list,
                     [call('Home.rst', '# test message')])

    @skipif(module_not_available('html2text'))
    @patch('forgeimporters.github.wiki.WM.Page.upsert')
    @patch('forgeimporters.github.wiki.mediawiki2markdown')
    def test_with_history_mediawiki(self, md2mkm, upsert):
        self.commit2.stats.files = {"Home.mediawiki": self.blob1}
        self.commit2.tree = {"Home.mediawiki": self.blob1}
        md2mkm.return_value = '# test message'
        importer = GitHubWikiImporter()
        importer._set_available_pages = Mock()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.config.options = {}
        importer.app.url = '/p/test/wiki/'
        importer.rewrite_links = Mock(return_value='')
        importer.convert_gollum_tags = Mock(return_value='# test message')
        importer._with_history(self.commit2)
        assert_equal(upsert.call_args_list, [call('Home')])
        assert_equal(md2mkm.call_args_list, [call('# test message')])

    def test_set_available_pages(self):
        importer = GitHubWikiImporter()
        commit = Mock()
        blobs = [Mock() for i in range(3)]
        blobs[0].name = 'Home-42.md'
        blobs[1].name = 'image.png'
        blobs[2].name = 'code & fun.textile'
        commit.tree.traverse.return_value = blobs
        importer._set_available_pages(commit)
        assert_equal(importer.available_pages, ['Home 42', 'code & fun'])

    def test_gollum_page_links_case_insensitive(self):
        i = GitHubWikiImporter()
        i.available_pages = ['Home 42', 'code & fun']
        assert_equal(i.convert_gollum_tags('[[Code & Fun]]'), '[code & fun]')
        assert_equal(i.convert_gollum_tags('[[home-42]]'), '[Home 42]')
        assert_equal(i.convert_gollum_tags('[[Unknown]]'), '[Unknown]')

    def test_convert_page_name(self):
        f = GitHubWikiImporter()._convert_page_name
        assert_equal(f('Page Name'), 'Page Name')
        assert_equal(f('Page-Name'), 'Page Name')
        assert_equal(f('Page / Name'), 'Page   Name')

    def test_convert_gollum_page_links(self):
        f = GitHubWikiImporter().convert_gollum_tags
        assert_equal(f('[[Page]]'), '[Page]')
        assert_equal(f('[[Page Title|Page]]'), '[Page Title](Page)')
        assert_equal(f('[[Pagê Nâme]]'), '[Pagê Nâme]')
        # Github always converts spaces and slashes in links to hyphens,
        # to look up the page in the filesystem. During import we're converting
        # all hyphens in the page name to spaces, but still supporting both link
        # formats.
        assert_equal(f('[[Page With Spaces]]'), '[Page With Spaces]')
        assert_equal(f('[[Page-With-Spaces]]'), '[Page With Spaces]')
        assert_equal(f('[[Page / 1]]'), '[Page   1]')
        assert_equal(f('[[Title|Page With Spaces]]'),
                     '[Title](Page With Spaces)')
        assert_equal(f('[[Title|Page-With-Spaces]]'),
                     '[Title](Page With Spaces)')
        assert_equal(f('[[go here|Page / 1]]'), '[go here](Page   1)')

    def test_convert_gollum_page_links_escaped(self):
        f = GitHubWikiImporter().convert_gollum_tags
        assert_equal(f("'[[Page]]"), '[[Page]]')
        assert_equal(f("'[[Page Title|Page]]"), '[[Page Title|Page]]')
        assert_equal(f("'[[Page With Spaces]]"), '[[Page With Spaces]]')
        assert_equal(f("'[[Page-With-Spaces]]"), '[[Page-With-Spaces]]')
        assert_equal(f("'[[Page / 1]]"), '[[Page / 1]]')
        assert_equal(f("'[[Title|Page With Spaces]]"),
                     '[[Title|Page With Spaces]]')
        assert_equal(f("'[[Title|Page-With-Spaces]]"),
                     '[[Title|Page-With-Spaces]]')
        assert_equal(f("'[[go here|Page / 1]]"), '[[go here|Page / 1]]')

    def test_convert_gollum_external_links(self):
        f = GitHubWikiImporter().convert_gollum_tags
        assert_equal(f('[[http://domain.net]]'), '<http://domain.net>')
        assert_equal(f('[[https://domain.net]]'), '<https://domain.net>')
        assert_equal(f('[[Site|http://domain.net]]'),
                     '[Site](http://domain.net)')

    def test_convert_gollum_external_links_escaped(self):
        f = GitHubWikiImporter().convert_gollum_tags
        assert_equal(f("'[[http://domain.net]]"), '[[http://domain.net]]')
        assert_equal(f("'[[https://domain.net]]"), '[[https://domain.net]]')
        assert_equal(f("'[[Site|http://domain.net]]"),
                     '[[Site|http://domain.net]]')

    def test_convert_gollum_toc(self):
        f = GitHubWikiImporter().convert_gollum_tags
        assert_equal(f('[[_TOC_]]'), '[TOC]')
        assert_equal(f("'[[_TOC_]]"), '[[_TOC_]]')

    def test_convert_gollum_tags(self):
        f = GitHubWikiImporter().convert_gollum_tags
        source = '''Look at [[this page|Some Page]]

More info at: [[MoreInfo]] [[Even More Info]]

Our website is [[http://domain.net]].

'[[Escaped Tag]]'''

        result = '''Look at [this page](Some Page)

More info at: [MoreInfo] [Even More Info]

Our website is <http://domain.net>.

[[Escaped Tag]]'''

        assert_equal(f(source), result)

    @skipif(module_not_available('html2text'))
    def test_convert_markup(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        importer.github_markdown_converter = GitHubMarkdownConverter(
            'user', 'proj')
        f = importer.convert_markup
        source = '''Look at [[this page|Some Page]]

More info at: [[MoreInfo]] [[Even More Info]]

Our website is [[http://domain.net]].

'[[Escaped Tag]]

```python
codeblock
```

ticket #1

#1 header

sha aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'''
        result = '''Look at [this page](Some Page)

More info at: [MoreInfo] [Even More Info]

Our website is <http://domain.net>.

[[Escaped Tag]]

    :::python
    codeblock

ticket [#1]

[#1] header

sha [aaaaaa]'''
        assert_equal(f(source, 'test.md').strip(), result)

        assert_equal(f('h1. Hello', 't.textile').strip(), '# Hello')

    @without_module('html2text')
    def test_convert_markup_without_html2text(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup
        source = '''Look at [[this page|Some Page]]

More info at: [[MoreInfo]] [[Even More Info]]

Our website is [[http://domain.net]].

'[[Escaped Tag]]

[External link to the wiki page](https://github.com/a/b/wiki/Page)

[External link](https://github.com/a/b/issues/1)'''

        result = '''<p>Look at [[this page|Some Page]]</p>
<p>More info at: [[MoreInfo]] [[Even More Info]]</p>
<p>Our website is [[http://domain.net]].</p>
<p>\u2018[[Escaped Tag]]</p>
<p>[External link to the wiki page](https://github.com/a/b/wiki/Page)</p>
<p>[External link](https://github.com/a/b/issues/1)</p>'''

        assert_equal(f(source, 'test.textile').strip(), result)

    def test_rewrite_links(self):
        f = GitHubWikiImporter().rewrite_links
        prefix = 'https://github/a/b/wiki'
        new = '/p/test/wiki/'
        assert_equal(
            f('<a href="https://github/a/b/wiki/Test Page">Test Page</a>',
              prefix, new),
            '<a href="/p/test/wiki/Test Page">Test Page</a>')
        assert_equal(
            f('<a href="https://github/a/b/wiki/Test-Page">Test-Page</a>',
              prefix, new),
            '<a href="/p/test/wiki/Test Page">Test Page</a>')
        assert_equal(
            f('<a href="https://github/a/b/issues/1" class="1"></a>',
              prefix, new),
            '<a class="1" href="https://github/a/b/issues/1"></a>')
        assert_equal(
            f('<a href="https://github/a/b/wiki/Test Page">https://github/a/b/wiki/Test Page</a>',
              prefix, new),
            '<a href="/p/test/wiki/Test Page">/p/test/wiki/Test Page</a>')
        assert_equal(
            f('<a href="https://github/a/b/wiki/Test Page">Test blah blah</a>',
              prefix, new),
            '<a href="/p/test/wiki/Test Page">Test blah blah</a>')
        assert_equal(
            f('<a href="https://github/a/b/wiki/Test Page">Test <b>Page</b></a>',
              prefix, new),
            '<a href="/p/test/wiki/Test Page">Test <b>Page</b></a>')

    @skipif(module_not_available('html2text'))
    def test_convert_markup_with_mediawiki2markdown(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup
        source = '''
''Al'fredas 235 BC''
== See also ==
* [https://github.com/a/b/wiki/AgentSpring-running-instructions-for-d13n-model Test1]
* [https://github.com/a/b/wiki/AgentSpring-conventions Test2]
* [https://github.com/a/b/wiki/AgentSpring-Q&A Test3]
* [https://github.com/a/b/wiki/Extensions Test4]'''

        result = '''_Al'fredas 235 BC_

## See also

  * [Test1](/p/test/wiki/AgentSpring running instructions for d13n model)
  * [Test2](/p/test/wiki/AgentSpring conventions)
  * [Test3](/p/test/wiki/AgentSpring Q&A)
  * [Test4](/p/test/wiki/Extensions)

'''

        assert_equal(f(source, 'test.mediawiki'), result)

    @skipif(module_not_available('html2text'))
    def test_convert_textile_no_leading_tabs(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup
        source = '''h1. Header 1

Some text 1.

h2. Header 2

See [[Page]]'''

        result = '''# Header 1

Some text 1.

## Header 2

See [Page]'''
        assert_equal(f(source, 'test.textile').strip(), result)

    @skipif(module_not_available('html2text'))
    def test_convert_markup_with_amp_in_links(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup
        source = '[[Ticks & Leeches]]'
        result = '[Ticks & Leeches]'
        # markdown should be untouched
        assert_equal(f(source, 'test.rst').strip(), result)

    @skipif(module_not_available('html2text'))
    def test_convert_markup_textile(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup

        # check that converting lists works properly
        source = '''There are good reasons for this:

  # Duplicate libraries regularly break builds
  # Subtle bugs emerge with duplicate libraries, and to a lesser extent, duplicate tools
  # We want you to try harder to make your formula work with what OS X comes with
'''
        result = '''There are good reasons for this:

  1. Duplicate libraries regularly break builds
  2. Subtle bugs emerge with duplicate libraries, and to a lesser extent, duplicate tools
  3. We want you to try harder to make your formula work with what OS X comes with

'''

        assert_equal(f(source, 'test.textile'), result)

        # textile-style links convert normally
        source = '*"Textile":Troubleshooting*'
        result = '**[Textile](Troubleshooting)**\n\n'
        assert_equal(f(source, 'test2.textile'), result)

        # links with formatting now convert normally in textile
        source = '''*[[this checklist|Troubleshooting]]*

some text and *[[Tips n' Tricks]]*

*[[link|http://otherlink.com]]*
'''
        result = '''**[this checklist](Troubleshooting)**

some text and **[Tips n\u2019 Tricks]**

**[link](http://otherlink.com)**

'''
        assert_equal(f(source, 'test3.textile'), result)

    @skipif(module_not_available('html2text'))
    def test_convert_textile_special_tag(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup
        source = '*[[this checklist|Troubleshooting]]*'
        assert_equal(f(source, 't.textile').strip(),
                     '**[this checklist](Troubleshooting)**')

    @without_module('html2text')
    def test_convert_textile_special_tag_without_html2text(self):
        importer = GitHubWikiImporter()
        importer.github_wiki_url = 'https://github.com/a/b/wiki'
        importer.app = Mock()
        importer.app.url = '/p/test/wiki/'
        f = importer.convert_markup
        source = '*[[this checklist|Troubleshooting]]*'
        result = '<p><strong>[[this checklist|Troubleshooting]]</strong></p>'
        assert_equal(f(source, 't.textile').strip(), result)

    @patch('forgeimporters.github.wiki.mkdtemp', autospec=True)
    @patch('forgeimporters.github.wiki.rmtree', autospec=True)
    @patch('forgeimporters.github.wiki.git.Repo', autospec=True)
    def test_has_wiki_repo(self, repo, rmtree, mkdtemp):
        mkdtemp.return_value = 'fake path'
        i = GitHubWikiImporter()
        assert_equal(i.has_wiki_repo('fake url'), True)
        repo.clone_from.assert_called_once_with(
            'fake url', to_path='fake path', bare=True)
        rmtree.assert_called_once_with('fake path')

        def raise_error(*args, **kw):
            raise git.GitCommandError('bam', 'bam', 'bam')
        repo.clone_from.side_effect = raise_error
        assert_equal(i.has_wiki_repo('fake url'), False)
Example #16
#
#       Unless required by applicable law or agreed to in writing,
#       software distributed under the License is distributed on an
#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#       KIND, either express or implied.  See the License for the
#       specific language governing permissions and limitations
#       under the License.

from __future__ import unicode_literals
from __future__ import absolute_import
from IPython.testing.decorators import module_not_available, skipif

from forgewiki import converters


@skipif(module_not_available('mediawiki'))
def test_mediawiki2markdown():
    mediawiki_text = """
'''bold''' ''italics''
== Getting started ==
* [http://www.mediawiki.org/wiki/Manual:Configuration_settings Configuration]
* [http://www.mediawiki.org/wiki/Manual:FAQ MediaWiki FAQ]
<plugin>.plugin()
    """
    mediawiki_output = converters.mediawiki2markdown(mediawiki_text)
    assert "**bold** _italics_" in mediawiki_output
    assert "## Getting started" in mediawiki_output
    assert ("* [MediaWiki FAQ](http://www.mediawiki.org/wiki/Manual:FAQ)"
            in mediawiki_output)
    assert '&lt;plugin&gt;.plugin()' in mediawiki_output
Example #17
        nt.assert_raises(ImportError, _ip.magic, "load_ext daft_extension")
        url = os.path.join(os.path.dirname(__file__), "daft_extension.py")
        _ip.magic("install_ext %s" % url)
        _ip.user_ns.pop('arq', None)
        invalidate_caches()  # Clear import caches
        _ip.magic("load_ext daft_extension")
        nt.assert_equal(_ip.user_ns['arq'], 185)
        _ip.magic("unload_ext daft_extension")
        assert 'arq' not in _ip.user_ns
    finally:
        _ip.ipython_dir = orig_ipython_dir
        tmpdir.cleanup()


# The nose skip decorator doesn't work on classes, so this uses unittest's skipIf
@skipIf(dec.module_not_available('IPython.nbformat.current'),
        'nbformat not importable')
class NotebookExportMagicTests(TestCase):
    def test_notebook_export_json(self):
        with TemporaryDirectory() as td:
            outfile = os.path.join(td, "nb.ipynb")
            _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
            _ip.magic("notebook -e %s" % outfile)

    def test_notebook_export_py(self):
        with TemporaryDirectory() as td:
            outfile = os.path.join(td, "nb.py")
            _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
            _ip.magic("notebook -e %s" % outfile)

    def test_notebook_reformat_py(self):
Example #18
class TestRootController(TestController):

    def setUp(self):
        super(TestRootController, self).setUp()
        n_adobe = M.Neighborhood.query.get(name='Adobe')
        assert n_adobe
        u_admin = M.User.query.get(username='******')
        assert u_admin
        n_adobe.register_project('adobe-2', u_admin)

    def test_index(self):
        response = self.app.get('/', extra_environ=dict(username=str('*anonymous')))
        assert_equal(response.location, 'http://localhost/neighborhood')

        response = self.app.get('/')
        assert_equal(response.location, 'http://localhost/dashboard')

    def test_neighborhood(self):
        response = self.app.get('/neighborhood')
        assert_equal(response.html.find('h2', {'class': 'dark title'}).contents[
                     0].strip(), 'All Neighborhoods')
        nbhds = response.html.findAll('div', {'class': 'nbhd_name'})
        assert nbhds[0].find('a').get('href') == '/adobe/'
        cat_links = response.html.find('div', {'id': 'sidebar'}).findAll('li')
        assert len(cat_links) == 4
        assert cat_links[0].find('a').get('href') == '/browse/clustering'
        assert cat_links[0].find('a').find('span').string == 'Clustering'

    def test_validation(self):
        # this is not configured ON currently, so adding an individual test to get coverage of the validator itself
        with mock.patch.dict(os.environ, ALLURA_VALIDATION='all'):
            self.app.get('/neighborhood')
            self.app.get('/nf/markdown_to_html?markdown=aaa&project=test&app=bugs&neighborhood=%s'
                         % M.Neighborhood.query.get(name='Projects')._id,
                         validate_chunk=True)

    def test_sidebar_escaping(self):
        # use this as a convenient way to get something in the sidebar
        M.ProjectCategory(name='test-xss', label='<script>alert(1)</script>')
        ThreadLocalORMSession.flush_all()

        response = self.app.get('/neighborhood')
        # inject it into the sidebar data
        content = response.html.find('div', {'id': 'content_base'}).prettify()
        assert '<script>' not in content, content
        assert '&lt;script&gt;' in content

    def test_strange_accept_headers(self):
        hdrs = [
            'text/plain;text/html;text/*',
            'text/html,application/xhtml+xml,application/xml;q=0.9;text/plain;q=0.8,image/png,*/*;q=0.5']
        for hdr in hdrs:
            # malformed headers used to return 500, just make sure they don't
            # now
            self.app.get('/', headers=dict(Accept=str(hdr)), validate_skip=True)

    def test_encoded_urls(self):
        if six.PY2:
            # not valid unicode
            self.app.get(b'/foo\xFF', status=400)
        self.app.get('/foo%FF', status=400)
        # encoded
        self.app.get('/foo%C3%A9', status=404)
        self.app.get('/u/foo%C3%A9/profile', status=404)

    def test_project_browse(self):
        com_cat = M.ProjectCategory.query.find(
            dict(label='Communications')).first()
        M.Project.query.find(dict(shortname='adobe-1')
                             ).first().category_id = com_cat._id
        response = self.app.get('/browse')
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
        response = self.app.get('/browse/communications')
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 0
        response = self.app.get('/browse/communications/fax')
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 0
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 0

    def test_neighborhood_home(self):
        setup_trove_categories()
        # Install home app
        nb = M.Neighborhood.query.get(name='Adobe')
        p = nb.neighborhood_project
        with h.push_config(c, user=M.User.query.get(username='******')):
            p.install_app('home', 'home', 'Home', ordinal=0)

        response = self.app.get('/adobe/')
        projects = response.html.findAll('div', {'class': 'list card proj_icon'})
        assert_equal(len(projects), 2)
        cat_links = response.html.find('div', {'id': 'sidebar'}).findAll('ul')[1].findAll('li')
        assert len(cat_links) == 3, cat_links
        assert cat_links[0].find('a').get('href') == '/adobe/browse/clustering'
        assert cat_links[0].find('a').find('span').string == 'Clustering'

    def test_neighborhood_project_browse(self):
        com_cat = M.ProjectCategory.query.find(
            dict(label='Communications')).first()
        fax_cat = M.ProjectCategory.query.find(dict(label='Fax')).first()
        M.Project.query.find(dict(shortname='adobe-1')
                             ).first().category_id = com_cat._id
        M.Project.query.find(dict(shortname='adobe-2')
                             ).first().category_id = fax_cat._id
        response = self.app.get('/adobe/browse')
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
        response = self.app.get('/adobe/browse/communications')
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
        response = self.app.get('/adobe/browse/communications/fax')
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 0
        assert len(
            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1

    @td.with_wiki
    def test_markdown_to_html(self):
        n = M.Neighborhood.query.get(name='Projects')
        r = self.app.get(
            '/nf/markdown_to_html?markdown=*aaa*bb[wiki:Home]&project=test&app=bugs&neighborhood=%s' % n._id, validate_chunk=True)
        assert '<p><em>aaa</em>bb<a class="alink" href="/p/test/wiki/Home/">[wiki:Home]</a></p>' in r, r

        # this happens to trigger an error
        bad_markdown = '<foo {bar}>'
        r = self.app.get('/nf/markdown_to_html?markdown=%s&project=test&app=bugs&neighborhood=%s' %
                         (quote(bad_markdown), n._id))
        r.mustcontain('The markdown supplied could not be parsed correctly.')
        r.mustcontain('<pre>&lt;foo {bar}&gt;</pre>')

    def test_slash_redirect(self):
        self.app.get('/p', status=301)
        self.app.get('/p/', status=302)

    @skipif(module_not_available('newrelic'))
    def test_newrelic_set_transaction_name(self):
        from allura.controllers.project import NeighborhoodController
        with mock.patch('newrelic.agent.callable_name') as callable_name,\
                mock.patch('newrelic.agent.set_transaction_name') as set_transaction_name:
            callable_name.return_value = 'foo'
            self.app.get('/p/')
            arg = callable_name.call_args[0][0]
            assert_equal(arg.__wrapped__,
                         NeighborhoodController.index.__wrapped__)
            set_transaction_name.assert_called_with('foo')

    def test_error_page(self):
        # hard to force a real error (esp. with middleware debugging being different for tests) but we can hit direct:
        r = self.app.get('/error/document')
        r.mustcontain("We're sorry but we weren't able to process")
Example #19
#  Copyright (C) 2012  The IPython Development Team
#
#  Distributed under the terms of the BSD License.  The full license is in
#  the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import unittest

from IPython.testing import decorators as dec
from IPython.testing import tools as tt
from IPython.utils.py3compat import PY3

sqlite_err_maybe = dec.module_not_available('sqlite3')
SQLITE_NOT_AVAILABLE_ERROR = ('WARNING: IPython History requires SQLite,'
                              ' your history will not be saved\n')


class TestFileToRun(unittest.TestCase, tt.TempFileMixin):

    """Test the behavior of the file_to_run parameter."""

    def test_py_script_file_attribute(self):
        """Test that `__file__` is set when running `ipython file.py`"""
        src = "print(__file__)\n"
        self.mktmp(src)

        err = SQLITE_NOT_AVAILABLE_ERROR if sqlite_err_maybe else None
        tt.ipexec_validate(self.fname, self.fname, err)
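
# A minimal sketch of what a module-availability check like
# dec.module_not_available could look like -- a hypothetical stand-in, not
# the actual IPython.testing.decorators implementation: attempt the import
# and report True when it fails, so the result can drive a skip condition
# or, as above, decide whether the SQLite warning is expected on stderr.
def module_not_available_sketch(module_name):
    try:
        __import__(module_name)
        return False        # importable, so the module *is* available
    except ImportError:
        return True         # missing: tests should expect the fallback path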
Beispiel #20
0
class TestGoogleCodeProjectExtractor(TestCase):

    def setUp(self):
        self._p_urlopen = mock.patch.object(base.ProjectExtractor, 'urlopen')
        self._p_soup = mock.patch.object(base, 'BeautifulSoup')
        self.urlopen = self._p_urlopen.start()
        self.soup = self._p_soup.start()
        self.project = mock.Mock(name='project')
        self.project.get_tool_data.return_value = 'my-project'

    def tearDown(self):
        for patcher in ('_p_urlopen', '_p_soup'):
            try:
                getattr(self, patcher).stop()
            except RuntimeError as e:
                if 'unstarted patcher' in str(e):
                    pass  # test case might stop them early
                else:
                    raise

    def test_init(self):
        extractor = google.GoogleCodeProjectExtractor(
            'my-project', 'project_info')

        self.urlopen.assert_called_once_with(
            'http://code.google.com/p/my-project/')
        self.soup.assert_called_once_with(self.urlopen.return_value, convertEntities=self.soup.HTML_ENTITIES)
        self.assertEqual(extractor.page, self.soup.return_value)

    def test_get_page(self):
        extractor = google.GoogleCodeProjectExtractor(
            'my-project', 'project_info')
        self.assertEqual(1, self.urlopen.call_count)
        page = extractor.get_page('project_info')
        self.assertEqual(1, self.urlopen.call_count)
        self.assertEqual(
            page, extractor._page_cache['http://code.google.com/p/my-project/'])
        page = extractor.get_page('project_info')
        self.assertEqual(1, self.urlopen.call_count)
        self.assertEqual(
            page, extractor._page_cache['http://code.google.com/p/my-project/'])
        page = extractor.get_page('source_browse')
        self.assertEqual(2, self.urlopen.call_count)
        self.assertEqual(
            page, extractor._page_cache['http://code.google.com/p/my-project/source/browse/'])
        parser = mock.Mock(return_value='parsed')
        page = extractor.get_page('url', parser=parser)
        self.assertEqual(page, 'parsed')
        self.assertEqual(page, extractor._page_cache['url'])

    def test_get_page_url(self):
        extractor = google.GoogleCodeProjectExtractor('my-project')
        self.assertEqual(extractor.get_page_url('project_info'),
                         'http://code.google.com/p/my-project/')

    def test_get_page_url_hosted(self):
        extractor = google.GoogleCodeProjectExtractor('a/eclipselabs.org/p/restclient-tool')
        self.assertEqual(extractor.get_page_url('project_info'),
                         'http://code.google.com/a/eclipselabs.org/p/restclient-tool/')

    def test_get_short_description(self):
        extractor = google.GoogleCodeProjectExtractor(
            'my-project', 'project_info')
        extractor.page.find.return_value.text = 'My Super Project'

        extractor.get_short_description(self.project)

        extractor.page.find.assert_called_once_with(itemprop='description')
        self.assertEqual(self.project.short_description, 'My Super Project')

    @mock.patch.object(google, 'File')
    @mock.patch.object(google, 'M')
    def test_get_icon(self, M, File):
        File.return_value.type = 'image/png'
        File.return_value.file = 'data'
        extractor = google.GoogleCodeProjectExtractor(
            'my-project', 'project_info')
        extractor.page.find.return_value.get.return_value = 'http://example.com/foo/bar/my-logo.png'

        extractor.get_icon(self.project)

        extractor.page.find.assert_called_once_with(itemprop='image')
        File.assert_called_once_with(
            'http://example.com/foo/bar/my-logo.png', 'my-logo.png')
        M.ProjectFile.save_image.assert_called_once_with(
            'my-logo.png', 'data', 'image/png', square=True,
            thumbnail_size=(48, 48), thumbnail_meta={
                'project_id': self.project._id, 'category': 'icon'})

    @mock.patch.object(google, 'M')
    def test_get_license(self, M):
        self.project.trove_license = []
        extractor = google.GoogleCodeProjectExtractor(
            'my-project', 'project_info')
        extractor.page.find.return_value.findNext.return_value.find.return_value.text = '  New BSD License  '
        trove = M.TroveCategory.query.get.return_value

        extractor.get_license(self.project)

        extractor.page.find.assert_called_once_with(text='Code license')
        extractor.page.find.return_value.findNext.assert_called_once_with()
        extractor.page.find.return_value.findNext.return_value.find.assert_called_once_with(
            'a')
        self.assertEqual(self.project.trove_license, [trove._id])
        M.TroveCategory.query.get.assert_called_once_with(
            fullname='BSD License')

        M.TroveCategory.query.get.reset_mock()
        extractor.page.find.return_value.findNext.return_value.find.return_value.text = 'non-existent license'
        extractor.get_license(self.project)
        M.TroveCategory.query.get.assert_called_once_with(
            fullname='Other/Proprietary License')

    def _make_extractor(self, html):
        with mock.patch.object(base.ProjectExtractor, 'urlopen'):
            extractor = google.GoogleCodeProjectExtractor('allura-google-importer')
        extractor.page = BeautifulSoup(html)
        extractor.get_page = lambda pagename: extractor.page
        extractor.url = "http://test/source/browse"
        return extractor

    def test_get_repo_type_happy_path(self):
        extractor = self._make_extractor(
            '<span id="crumb_root">\nsvn/&nbsp;</span>')
        self.assertEqual('svn', extractor.get_repo_type())

    def test_get_repo_type_no_crumb_root(self):
        extractor = self._make_extractor('')
        with self.assertRaises(Exception) as cm:
            extractor.get_repo_type()
        self.assertEqual(str(cm.exception),
                         "Couldn't detect repo type: no #crumb_root in "
                         "http://test/source/browse")

    def test_get_repo_type_unknown_repo_type(self):
        extractor = self._make_extractor(
            '<span id="crumb_root">cvs</span>')
        with self.assertRaises(Exception) as cm:
            extractor.get_repo_type()
        self.assertEqual(str(cm.exception), "Unknown repo type: cvs")

    def test_empty_issue(self):
        empty_issue = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/empty-issue.html')).read()
        gpe = self._make_extractor(empty_issue)
        self.assertIsNone(gpe.get_issue_owner())
        self.assertEqual(gpe.get_issue_status(), '')
        self.assertEqual(gpe.get_issue_attachments(), [])
        self.assertEqual(list(gpe.iter_comments()), [])
        self.assertEqual(gpe.get_issue_mod_date(), 'Thu Aug  8 14:56:23 2013')

    @without_module('html2text')
    def test_get_issue_basic_fields(self):
        test_issue = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/test-issue.html')).read()
        gpe = self._make_extractor(test_issue)

        self.assertEqual(gpe.get_issue_creator().name, '*****@*****.**')
        self.assertEqual(gpe.get_issue_creator().url,
                         'http://code.google.com/u/101557263855536553789/')
        self.assertEqual(gpe.get_issue_owner().name, '*****@*****.**')
        self.assertEqual(gpe.get_issue_owner().url,
                         'http://code.google.com/u/101557263855536553789/')
        self.assertEqual(gpe.get_issue_status(), 'Started')
        self._p_soup.stop()
        self.assertEqual(gpe.get_issue_summary(), 'Test "Issue"')
        assert_equal(gpe.get_issue_description(),
                     'Test \\*Issue\\* for testing\n'
                     '\n'
                     '&nbsp; 1\\. Test List\n'
                     '&nbsp; 2\\. Item\n'
                     '\n'
                     '\\*\\*Testing\\*\\*\n'
                     '\n'
                     ' \\* Test list 2\n'
                     ' \\* Item\n'
                     '\n'
                     '\\# Test Section\n'
                     '\n'
                     '&nbsp;&nbsp;&nbsp; p = source\\.test\\_issue\\.post\\(\\)\n'
                     '&nbsp;&nbsp;&nbsp; p\\.count = p\\.count \\*5 \\#\\* 6\n'
                     '&nbsp;&nbsp;&nbsp; if p\\.count &gt; 5:\n'
                     '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
                     '\n'
                     'References: [issue 1](#1), [r2]\n'
                     '\n'
                     'That\'s all'
                     )
        self.assertEqual(gpe.get_issue_created_date(),
                         'Thu Aug  8 15:33:52 2013')
        self.assertEqual(gpe.get_issue_stars(), 1)

    @skipif(module_not_available('html2text'))
    def test_get_issue_basic_fields_html2text(self):
        test_issue = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/test-issue.html')).read()
        gpe = self._make_extractor(test_issue)
        self.assertEqual(gpe.get_issue_creator().name, '*****@*****.**')
        self.assertEqual(gpe.get_issue_creator().url,
                         'http://code.google.com/u/101557263855536553789/')
        self.assertEqual(gpe.get_issue_owner().name, '*****@*****.**')
        self.assertEqual(gpe.get_issue_owner().url,
                         'http://code.google.com/u/101557263855536553789/')
        self.assertEqual(gpe.get_issue_status(), 'Started')
        self._p_soup.stop()
        self.assertEqual(gpe.get_issue_summary(), 'Test "Issue"')
        assert_equal(gpe.get_issue_description(),
                     'Test \\*Issue\\* for testing\n'
                     '\n'
                     '&nbsp; 1. Test List\n'
                     '&nbsp; 2. Item\n'
                     '\n'
                     '\\*\\*Testing\\*\\*\n'
                     '\n'
                     ' \\* Test list 2\n'
                     ' \\* Item\n'
                     '\n'
                     '\\# Test Section\n'
                     '\n'
                     '&nbsp;&nbsp;&nbsp; p = source.test\\_issue.post\\(\\)\n'
                     '&nbsp;&nbsp;&nbsp; p.count = p.count \\*5 \\#\\* 6\n'
                     '&nbsp;&nbsp;&nbsp; if p.count &gt; 5:\n'
                     '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
                     '\n'
                     'References: [issue 1](#1), [r2]\n'
                     '\n'
                     'That\'s all'
                     )
        self.assertEqual(gpe.get_issue_created_date(),
                         'Thu Aug  8 15:33:52 2013')
        self.assertEqual(gpe.get_issue_stars(), 1)

    def test_get_issue_summary(self):
        html = u"""
        <div id="issueheader">
            <table>
                <tbody>
                    <tr>
                        <td></td>
                        <td><span>%s</span></td>
                    </tr>
                </tbody>
            </table>
        </div>
        """
        gpe = self._make_extractor(html % u'')
        self.assertEqual(gpe.get_issue_summary(), u'')
        gpe = self._make_extractor(html % u'My Summary')
        self.assertEqual(gpe.get_issue_summary(), u'My Summary')

    def test_get_issue_labels(self):
        test_issue = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/test-issue.html')).read()
        gpe = self._make_extractor(test_issue)
        self.assertEqual(gpe.get_issue_labels(), [
            'Type-Defect',
            'Priority-Medium',
            'Milestone-Release1.0',
            'OpSys-All',
            'Component-Logic',
            'Performance',
            'Security',
            'OpSys-Windows',
            'OpSys-OSX',
        ])

    @mock.patch.object(base, 'StringIO')
    def test_get_issue_attachments(self, StringIO):
        self.urlopen.return_value.info.return_value = {
            'content-type': 'text/plain; foo'}
        test_issue = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/test-issue.html')).read()
        gpe = self._make_extractor(test_issue)
        attachments = gpe.get_issue_attachments()
        self.assertEqual(len(attachments), 1)
        self.assertEqual(attachments[0].filename, 'at1.txt')
        self.assertEqual(attachments[0].url,
                         'http://allura-google-importer.googlecode.com/issues/attachment?aid=70000000&name=at1.txt'
                         '&token=3REU1M3JUUMt0rJUg7ldcELt6LA%3A1376059941255')
        self.assertEqual(attachments[0].type, 'text/plain')

    def test_get_issue_ids(self):
        extractor = google.GoogleCodeProjectExtractor(None)
        extractor.get_page = mock.Mock(side_effect=((1, 2, 3), (2, 3, 4), ()))
        self.assertItemsEqual(extractor.get_issue_ids(start=10), (1, 2, 3, 4))
        self.assertEqual(extractor.get_page.call_count, 3)
        extractor.get_page.assert_has_calls([
            mock.call('issues_csv', parser=google.csv_parser, start=10),
            mock.call('issues_csv', parser=google.csv_parser, start=110),
            mock.call('issues_csv', parser=google.csv_parser, start=210),
        ])
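
    # A hypothetical sketch, not the forgeimporters implementation, of the
    # pagination that test_get_issue_ids asserts above: request 'issues_csv'
    # pages, advancing `start` by an assumed page size of 100, until an
    # empty page comes back, and union the returned ids.
    @staticmethod
    def _get_issue_ids_sketch(get_page, start=0, page_size=100):
        issue_ids = set()
        while True:
            ids = get_page('issues_csv', parser=google.csv_parser, start=start)
            if not ids:
                break           # an empty page means we've walked off the end
            issue_ids.update(ids)
            start += page_size
        return issue_ids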

    @mock.patch.object(google.GoogleCodeProjectExtractor, 'get_page')
    @mock.patch.object(google.GoogleCodeProjectExtractor, 'get_issue_ids')
    def test_iter_issue_ids(self, get_issue_ids, get_page):
        get_issue_ids.side_effect = [set([1, 2]), set([2, 3, 4])]
        issue_ids = [i for i, e in
                     list(google.GoogleCodeProjectExtractor.iter_issues('foo'))]
        self.assertEqual(issue_ids, [1, 2, 3, 4])
        get_issue_ids.assert_has_calls([
            mock.call(start=0),
            mock.call(start=-8),
        ])

    @mock.patch.object(google.GoogleCodeProjectExtractor, '__init__')
    @mock.patch.object(google.GoogleCodeProjectExtractor, 'get_issue_ids')
    def test_iter_issue_ids_raises(self, get_issue_ids, __init__):
        get_issue_ids.side_effect = [set([1, 2, 3, 4, 5])]
        __init__.side_effect = [
            None,
            None,
            # should skip but keep going
            HTTPError('fourohfour', 404, 'fourohfour', {}, mock.Mock()),
            None,
            # should be re-raised
            HTTPError('fubar', 500, 'fubar', {}, mock.Mock()),
            None,
        ]
        issue_ids = []
        try:
            for issue_id, extractor in google.GoogleCodeProjectExtractor.iter_issues('foo'):
                issue_ids.append(issue_id)
        except HTTPError as e:
            self.assertEqual(e.code, 500)
        else:
            assert False, 'Missing expected raised exception'
        self.assertEqual(issue_ids, [1, 3])
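
    # A hypothetical sketch, not the actual iter_issues implementation, of
    # the error handling test_iter_issue_ids_raises exercises above: a 404
    # while building the extractor for a single issue is skipped so the
    # import keeps going, while any other HTTPError propagates to the
    # caller.  The constructor arguments here are illustrative only.
    @staticmethod
    def _iter_issues_sketch(extractor_cls, project_name, issue_ids):
        for issue_id in issue_ids:
            try:
                yield issue_id, extractor_cls(project_name, 'issue',
                                              issue_id=issue_id)
            except HTTPError as e:
                if e.code == 404:
                    continue    # missing/deleted issue: skip it
                raise           # anything else is a real failure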

    @mock.patch.object(google.requests, 'head')
    def test_check_readable(self, head):
        head.return_value.status_code = 200
        assert google.GoogleCodeProjectExtractor('my-project').check_readable()
        head.return_value.status_code = 404
        assert not google.GoogleCodeProjectExtractor('my-project').check_readable()
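
# A hypothetical sketch, not the forgeimporters code, of the readability
# check that test_check_readable exercises above: issue a HEAD request to
# the project's Google Code URL and treat only a 200 response as readable.
# `project_info_url` is an illustrative parameter name.
import requests

def check_readable_sketch(project_info_url):
    resp = requests.head(project_info_url)
    return resp.status_code == 200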
Beispiel #21
0
class TestMediawikiLoader(object):
    def setUp(self):
        setup_basic_test()
        self.options = mock.Mock()
        # need test project with installed wiki app
        self.options.nbhd = 'Adobe'
        self.options.project = '--init--'

        nbhd = M.Neighborhood.query.get(name=self.options.nbhd)
        h.set_context(self.options.project, 'wiki', neighborhood=nbhd)

        # monkey-patch MediawikiLoader for test
        def pages(self):
            yield 1
            yield 2

        def history(self, page_dir):
            data = {
                1: [
                    {
                        'title': 'Test title',
                        'text': "'''bold''' ''italics''",
                        'page_id': 1,
                        'timestamp': '20120808000001',
                        'username': '******'
                    },
                    {
                        'title': 'Test title',
                        'text': "'''bold'''",
                        'page_id': 1,
                        'timestamp': '20120809000001',
                        'username': '******'
                    },
                ],
                2: [
                    {
                        'title': 'Main',
                        'text': "Main text rev 1",
                        'page_id': 2,
                        'timestamp': '20120808000001',
                        'username': '******'
                    },
                    {
                        'title': 'Main',
                        'text': "Main text rev 2",
                        'page_id': 2,
                        'timestamp': '20120809000001',
                        'username': '******'
                    },
                ],
            }
            for page in data[page_dir]:
                yield page

        def talk(self, page_dir):
            data = {
                1: {
                    'text': "''Talk page'' for page 1.",
                    'username': '******',
                    'timestamp': '20120809000001'
                },
                2: {
                    'text': "''Talk page'' for page 2.",
                    'username': '******',
                    'timestamp': '20120809000001'
                },
            }
            return data[page_dir]

        def attachments(self, *args, **kwargs):
            # make 'empty' iterator
            if False:
                yield

        MediawikiLoader._pages = pages
        MediawikiLoader._history = history
        MediawikiLoader._talk = talk
        MediawikiLoader._attachments = attachments
        self.loader = MediawikiLoader(self.options)

    def get_page(self, title):
        return WM.Page.query.get(app_config_id=context.app.config._id,
                                 title=title)

    def get_post(self, title):
        page = self.get_page(title)
        thread = M.Thread.query.get(ref_id=page.index_id())
        return M.Post.query.get(discussion_id=thread.discussion_id,
                                thread_id=thread._id)

    @skipif(module_not_available('mediawiki'))
    @mock.patch('allura.model.discuss.g.director')
    def test_load_pages(self, director):
        """Test that pages, edit history and talk loaded properly"""
        self.loader.load_pages()
        page = self.get_page('Test title')

        assert page.mod_date == datetime.strptime('20120809000001',
                                                  self.loader.TIMESTAMP_FMT)
        assert page.authors()[0].username == 'test-user'
        assert '**bold**' in page.text
        # _italics_ should only be in the first revision of the page
        assert '_italics_' not in page.text

        page = page.get_version(1)
        assert '**bold** _italics_' in page.text
        assert page.mod_date == datetime.strptime('20120808000001',
                                                  self.loader.TIMESTAMP_FMT)
        assert page.authors()[0].username == 'test-user'

        page = self.get_page('Main')
        assert page.mod_date == datetime.strptime('20120809000001',
                                                  self.loader.TIMESTAMP_FMT)
        assert page.authors()[0].username == '*anonymous'
        assert 'Main text rev 2' in page.text

        page = page.get_version(1)
        assert page.mod_date == datetime.strptime('20120808000001',
                                                  self.loader.TIMESTAMP_FMT)
        assert page.authors()[0].username == '*anonymous'
        assert 'Main text rev 1' in page.text

        # Check that talk pages loaded
        post = self.get_post('Test title')
        assert post.timestamp == datetime.strptime('20120809000001',
                                                   self.loader.TIMESTAMP_FMT)
        assert post.author().username == 'test-user'
        assert '_Talk page_ for page 1.' in post.text

        post = self.get_post('Main')
        assert post.timestamp == datetime.strptime('20120809000001',
                                                   self.loader.TIMESTAMP_FMT)
        assert post.author().username == '*anonymous'
        assert '_Talk page_ for page 2.' in post.text
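
# The timestamps fed to the loader above ('20120809000001') are in
# MediaWiki's compact export form; TIMESTAMP_FMT is assumed here to be
# '%Y%m%d%H%M%S', which is what the assertions rely on:
from datetime import datetime

assert (datetime.strptime('20120809000001', '%Y%m%d%H%M%S')
        == datetime(2012, 8, 9, 0, 0, 1))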
Beispiel #22
0
class TestWithSetupForComments(TestCase):
    # The main test suite did too much patching for how we want these tests to work
    # These tests use iter_comments and 2 HTML pages of comments.

    def _create_extractor(self):
        test_issue = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/test-issue-first-page.html')).read()
        test_issue_older = open(pkg_resources.resource_filename(
            'forgeimporters', 'tests/data/google/test-issue-prev-page.html')).read()

        class LocalTestExtractor(google.GoogleCodeProjectExtractor):
            def urlopen(self, url, **kw):
                return self.urlopen_results.pop(0)

            def setup_urlopen_results(self, results):
                self.urlopen_results = results

        gpe = LocalTestExtractor('allura-google-importer')
        gpe.setup_urlopen_results([test_issue, test_issue_older])

        return gpe

    def test_get_issue_mod_date(self):
        gpe = self._create_extractor()
        gpe.get_page('detail?id=6')
        self.assertEqual(gpe.get_issue_mod_date(), 'Thu Aug  8 15:36:57 2013')

    @without_module('html2text')
    @mock.patch.object(base, 'StringIO')
    def test_iter_comments(self, StringIO):
        gpe = self._create_extractor()
        gpe.get_page('detail?id=6')

        with mock.patch.object(base.ProjectExtractor, 'urlopen'):  # for attachments, which end up using a different Extractor urlopen
            comments = list(gpe.iter_comments())

        self.assertEqual(len(comments), 6)
        expected = [
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:34:01 2013',
                'body': 'Simple comment',
                'updates': {},
                'attachments': [],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:34:09 2013',
                'body': 'Boring comment',
                'updates': {},
                'attachments': [],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:35:15 2013',
                'body': 'Test \\*comment\\* is a comment',
                'updates': {'Status:': 'Started', 'Labels:': '-OpSys-Linux OpSys-Windows'},
                'attachments': ['at2.txt'],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:35:34 2013',
                'body': 'Another comment with references: [issue 2](#2), [r1]',
                'updates': {},
                'attachments': [],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:36:39 2013',
                'body': 'Last comment',
                'updates': {},
                'attachments': ['at4.txt', 'at1.txt'],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:36:57 2013',
                'body': 'Oh, I forgot one \\(with an inter\\-project reference to '
                        '[issue other\\-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)',
                'updates': {'Labels:': 'OpSys-OSX'},
                'attachments': [],
            },
        ]
        for actual, expected in zip(comments, expected):
            self.assertEqual(actual.author.name, expected['author.name'])
            self.assertEqual(actual.author.url, expected['author.url'])
            self.assertEqual(actual.created_date, expected['created_date'])
            self.assertEqual(actual.body, expected['body'])
            self.assertEqual(actual.updates, expected['updates'])
            self.assertEqual(
                [a.filename for a in actual.attachments], expected['attachments'])

    @skipif(module_not_available('html2text'))
    @mock.patch.object(base, 'StringIO')
    def test_iter_comments_html2text(self, StringIO):
        gpe = self._create_extractor()
        gpe.get_page('detail?id=6')

        with mock.patch.object(base.ProjectExtractor, 'urlopen'):  # for attachments, which end up using a different Extractor urlopen
            comments = list(gpe.iter_comments())

        self.assertEqual(len(comments), 6)
        expected = [
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:34:01 2013',
                'body': 'Simple comment',
                'updates': {},
                'attachments': [],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:34:09 2013',
                'body': 'Boring comment',
                'updates': {},
                'attachments': [],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:35:15 2013',
                'body': 'Test \\*comment\\* is a comment',
                'updates': {'Status:': 'Started', 'Labels:': '-OpSys-Linux OpSys-Windows'},
                'attachments': ['at2.txt'],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:35:34 2013',
                'body': 'Another comment with references: [issue 2](#2), [r1]',
                'updates': {},
                'attachments': [],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:36:39 2013',
                'body': 'Last comment',
                'updates': {},
                'attachments': ['at4.txt', 'at1.txt'],
            },
            {
                'author.name': '*****@*****.**',
                'author.url': 'http://code.google.com/u/101557263855536553789/',
                'created_date': 'Thu Aug  8 15:36:57 2013',
                'body': 'Oh, I forgot one \\(with an inter-project reference to '
                        '[issue other-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)',
                'updates': {'Labels:': 'OpSys-OSX'},
                'attachments': [],
            },
        ]
        for actual, expected in zip(comments, expected):
            self.assertEqual(actual.author.name, expected['author.name'])
            self.assertEqual(actual.author.url, expected['author.url'])
            self.assertEqual(actual.created_date, expected['created_date'])
            self.assertEqual(actual.body, expected['body'])
            self.assertEqual(actual.updates, expected['updates'])
            self.assertEqual(
                [a.filename for a in actual.attachments], expected['attachments'])

from IPython.testing.decorators import module_not_available, skipif

from forgewiki import converters


@skipif(module_not_available('mediawiki'))
def test_mediawiki2markdown():
    mediawiki_text = """
'''bold''' ''italics''
== Getting started ==
* [http://www.mediawiki.org/wiki/Manual:Configuration_settings Configuration]
* [http://www.mediawiki.org/wiki/Manual:FAQ MediaWiki FAQ]
<plugin>.plugin()
    """
    mediawiki_output = converters.mediawiki2markdown(mediawiki_text)
    assert "**bold** _italics_" in mediawiki_output
    assert "## Getting started" in mediawiki_output
    assert ("* [MediaWiki FAQ](http://www.mediawiki.org/wiki/Manual:FAQ)"
            in mediawiki_output)
    assert '&lt;plugin&gt;.plugin()' in mediawiki_output
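
# A minimal regex-based sketch that reproduces what the test above asserts.
# It is a hypothetical stand-in, not forgewiki.converters.mediawiki2markdown
# (the real converter presumably relies on the `mediawiki` package, which is
# why the test is skipped when it is not installed); the function name and
# regexes are illustrative only.
import re

def mediawiki2markdown_sketch(text):
    # bold first ('''x''' -> **x**), since ''' also contains ''
    text = re.sub(r"'''(.+?)'''", r'**\1**', text)
    # italics: ''x'' -> _x_
    text = re.sub(r"''(.+?)''", r'_\1_', text)
    # level-2 headings: == Title == -> ## Title
    text = re.sub(r'^==\s*(.+?)\s*==\s*$', r'## \1', text, flags=re.MULTILINE)
    # external links: [url label] -> [label](url)
    text = re.sub(r'\[(\S+)\s+([^\]]+)\]', r'[\2](\1)', text)
    # escape raw angle brackets so '<plugin>' survives as literal text
    return text.replace('<', '&lt;').replace('>', '&gt;')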
Beispiel #24
0
class TestRootController(TestController):
    def setUp(self):
        super(TestRootController, self).setUp()
        n_adobe = M.Neighborhood.query.get(name='Adobe')
        assert n_adobe
        u_admin = M.User.query.get(username='******')
        assert u_admin
        n_adobe.register_project('adobe-2', u_admin)

    def test_index(self):
        response = self.app.get('/')
        assert_equal(
            response.html.find('h2', {
                'class': 'dark title'
            }).contents[0].strip(), 'All Neighborhoods')
        nbhds = response.html.findAll('td', {'class': 'nbhd'})
        assert nbhds[0].find('a').get('href') == '/adobe/'
        cat_links = response.html.find('div', {'id': 'sidebar'}).findAll('li')
        assert len(cat_links) == 4
        assert cat_links[0].find('a').get('href') == '/browse/clustering'
        assert cat_links[0].find('a').find('span').string == 'Clustering'

    def test_sidebar_escaping(self):
        # use this as a convenient way to get something in the sidebar
        M.ProjectCategory(name='test-xss', label='<script>alert(1)</script>')
        ThreadLocalORMSession.flush_all()

        response = self.app.get('/')
        # inject it into the sidebar data
        content = str(response.html.find('div', {'id': 'content_base'}))
        assert '<script>' not in content
        assert '&lt;script&gt;' in content

    def test_strange_accept_headers(self):
        hdrs = [
            'text/plain;text/html;text/*',
            'text/html,application/xhtml+xml,application/xml;q=0.9;text/plain;q=0.8,image/png,*/*;q=0.5'
        ]
        for hdr in hdrs:
            # malformed headers used to return 500, just make sure they don't
            # now
            self.app.get('/', headers=dict(Accept=hdr), validate_skip=True)

    def test_project_browse(self):
        com_cat = M.ProjectCategory.query.find(
            dict(label='Communications')).first()
        M.Project.query.find(
            dict(shortname='adobe-1')).first().category_id = com_cat._id
        response = self.app.get('/browse')
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-1/'})) == 1
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-2/'})) == 1
        response = self.app.get('/browse/communications')
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-1/'})) == 1
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-2/'})) == 0
        response = self.app.get('/browse/communications/fax')
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-1/'})) == 0
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-2/'})) == 0

    def test_neighborhood_home(self):
        setup_trove_categories()
        # Install home app
        nb = M.Neighborhood.query.get(name='Adobe')
        p = nb.neighborhood_project
        with push_config(c, user=M.User.query.get(username='******')):
            p.install_app('home', 'home', 'Home', ordinal=0)

        response = self.app.get('/adobe/')
        projects = response.html.findAll('div',
                                         {'class': 'list card proj_icon'})
        assert_equal(len(projects), 2)
        cat_links = response.html.find('div', {
            'id': 'sidebar'
        }).findAll('ul')[1].findAll('li')
        assert len(cat_links) == 3, cat_links
        assert cat_links[0].find('a').get('href') == '/adobe/browse/clustering'
        assert cat_links[0].find('a').find('span').string == 'Clustering'

    def test_neighborhood_project_browse(self):
        com_cat = M.ProjectCategory.query.find(
            dict(label='Communications')).first()
        fax_cat = M.ProjectCategory.query.find(dict(label='Fax')).first()
        M.Project.query.find(
            dict(shortname='adobe-1')).first().category_id = com_cat._id
        M.Project.query.find(
            dict(shortname='adobe-2')).first().category_id = fax_cat._id
        response = self.app.get('/adobe/browse')
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-1/'})) == 1
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-2/'})) == 1
        response = self.app.get('/adobe/browse/communications')
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-1/'})) == 1
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-2/'})) == 1
        response = self.app.get('/adobe/browse/communications/fax')
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-1/'})) == 0
        assert len(response.html.findAll('a',
                                         {'href': '/adobe/adobe-2/'})) == 1

    @td.with_wiki
    def test_markdown_to_html(self):
        n = M.Neighborhood.query.get(name='Projects')
        r = self.app.get(
            '/nf/markdown_to_html?markdown=*aaa*bb[wiki:Home]&project=test&app=bugs&neighborhood=%s'
            % n._id,
            validate_chunk=True)
        assert '<p><em>aaa</em>bb<a class="alink" href="/p/test/wiki/Home/">[wiki:Home]</a></p>' in r, r

    def test_slash_redirect(self):
        self.app.get('/p', status=301)
        self.app.get('/p/', status=302)

    @skipif(module_not_available('newrelic'))
    def test_newrelic_set_transaction_name(self):
        from allura.controllers.project import NeighborhoodController
        with mock.patch('newrelic.agent.callable_name') as callable_name,\
                mock.patch('newrelic.agent.set_transaction_name') as set_transaction_name:
            callable_name.return_value = 'foo'
            self.app.get('/p/')
            arg = callable_name.call_args[0][0]
            assert_equal(arg.undecorated,
                         NeighborhoodController.index.undecorated)
            set_transaction_name.assert_called_with('foo')
Beispiel #25
0
def test_split_select_field_options():
    assert_equals(h.split_select_field_options('"test message" test2'),
                  ['test message', 'test2'])
    assert_equals(h.split_select_field_options('"test message test2'),
                  ['test', 'message', 'test2'])
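
# A hypothetical sketch, not Allura's h.split_select_field_options, that
# reproduces the two cases asserted above: balanced double quotes group a
# multi-word option, while an unbalanced quote degrades to a plain
# whitespace split with the stray quote character dropped.
import re

def split_select_field_options_sketch(value):
    parts = re.findall(r'"([^"]*)"|(\S+)', value)
    tokens = [quoted or bare for quoted, bare in parts]
    return [t.lstrip('"') for t in tokens]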


def test_notifications_disabled():
    project = Mock(notifications_disabled=False)
    with h.notifications_disabled(project):
        assert_equals(project.notifications_disabled, True)
    assert_equals(project.notifications_disabled, False)
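
# A hypothetical sketch of a context manager with the behaviour the test
# above expects -- not Allura's h.notifications_disabled: flip the flag on
# entry and restore the previous value on exit, even if the block raises.
import contextlib

@contextlib.contextmanager
def notifications_disabled_sketch(project):
    previous = project.notifications_disabled
    project.notifications_disabled = True
    try:
        yield project
    finally:
        project.notifications_disabled = previous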


@skipif(module_not_available('html2text'))
def test_plain2markdown_with_html2text():
    """Test plain2markdown using html2text to escape markdown, if available."""
    text = '''paragraph

    4 spaces before this

    *blah*

here's a <tag> that should be <b>preserved</b>
Literal &gt; &Ograve; &frac14; &amp; &#38; &#x123F;
M & Ms - doesn't get escaped
http://blah.com/?x=y&a=b - not escaped either
'''

    expected = '''paragraph
Beispiel #26
0
from IPython.testing.decorators import module_not_available, skipif

from forgewiki import converters


@skipif(module_not_available("mediawiki"))
def test_mediawiki2markdown():
    mediawiki_text = """
'''bold''' ''italics''
== Getting started ==
* [http://www.mediawiki.org/wiki/Manual:Configuration_settings Configuration]
* [http://www.mediawiki.org/wiki/Manual:FAQ MediaWiki FAQ]
<plugin>.plugin()
    """
    mediawiki_output = converters.mediawiki2markdown(mediawiki_text)
    assert "**bold** _italics_" in mediawiki_output
    assert "## Getting started" in mediawiki_output
    assert "* [MediaWiki FAQ](http://www.mediawiki.org/wiki/Manual:FAQ)" in mediawiki_output
    assert "&lt;plugin&gt;.plugin()" in mediawiki_output


def test_convert_toc():
    """Test that Table of Contents (TOC) converts properly"""
    wiki_html = """<div>Some html before toc</div>
<div id="toc">
    Table of Contents
    <ul>
        <li><a href="#h1">Some heading</a></li>
    </ul>
</div>
<div>Some html after toc</div>
Beispiel #27
0
class TestSession(SessionTestCase):
    def test_msg(self):
        """message format"""
        msg = self.session.msg('execute')
        thekeys = set(
            'header parent_header metadata content msg_type msg_id'.split())
        s = set(msg.keys())
        self.assertEqual(s, thekeys)
        self.assertTrue(isinstance(msg['content'], dict))
        self.assertTrue(isinstance(msg['metadata'], dict))
        self.assertTrue(isinstance(msg['header'], dict))
        self.assertTrue(isinstance(msg['parent_header'], dict))
        self.assertTrue(isinstance(msg['msg_id'], str))
        self.assertTrue(isinstance(msg['msg_type'], str))
        self.assertEqual(msg['header']['msg_type'], 'execute')
        self.assertEqual(msg['msg_type'], 'execute')

    def test_serialize(self):
        msg = self.session.msg('execute', content=dict(a=10, b=1.1))
        msg_list = self.session.serialize(msg, ident=b'foo')
        ident, msg_list = self.session.feed_identities(msg_list)
        new_msg = self.session.deserialize(msg_list)
        self.assertEqual(ident[0], b'foo')
        self.assertEqual(new_msg['msg_id'], msg['msg_id'])
        self.assertEqual(new_msg['msg_type'], msg['msg_type'])
        self.assertEqual(new_msg['header'], msg['header'])
        self.assertEqual(new_msg['content'], msg['content'])
        self.assertEqual(new_msg['parent_header'], msg['parent_header'])
        self.assertEqual(new_msg['metadata'], msg['metadata'])
        # ensure floats don't come out as Decimal:
        self.assertEqual(type(new_msg['content']['b']),
                         type(msg['content']['b']))

    def test_default_secure(self):
        self.assertIsInstance(self.session.key, bytes)
        self.assertIsInstance(self.session.auth, hmac.HMAC)

    def test_send(self):
        ctx = zmq.Context.instance()
        A = ctx.socket(zmq.PAIR)
        B = ctx.socket(zmq.PAIR)
        A.bind("inproc://test")
        B.connect("inproc://test")

        msg = self.session.msg('execute', content=dict(a=10))
        self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])

        ident, msg_list = self.session.feed_identities(B.recv_multipart())
        new_msg = self.session.deserialize(msg_list)
        self.assertEqual(ident[0], b'foo')
        self.assertEqual(new_msg['msg_id'], msg['msg_id'])
        self.assertEqual(new_msg['msg_type'], msg['msg_type'])
        self.assertEqual(new_msg['header'], msg['header'])
        self.assertEqual(new_msg['content'], msg['content'])
        self.assertEqual(new_msg['parent_header'], msg['parent_header'])
        self.assertEqual(new_msg['metadata'], msg['metadata'])
        self.assertEqual(new_msg['buffers'], [b'bar'])

        content = msg['content']
        header = msg['header']
        header['date'] = datetime.now()
        parent = msg['parent_header']
        metadata = msg['metadata']
        msg_type = header['msg_type']
        self.session.send(A,
                          None,
                          content=content,
                          parent=parent,
                          header=header,
                          metadata=metadata,
                          ident=b'foo',
                          buffers=[b'bar'])
        ident, msg_list = self.session.feed_identities(B.recv_multipart())
        new_msg = self.session.deserialize(msg_list)
        self.assertEqual(ident[0], b'foo')
        self.assertEqual(new_msg['msg_id'], msg['msg_id'])
        self.assertEqual(new_msg['msg_type'], msg['msg_type'])
        self.assertEqual(new_msg['header'], msg['header'])
        self.assertEqual(new_msg['content'], msg['content'])
        self.assertEqual(new_msg['metadata'], msg['metadata'])
        self.assertEqual(new_msg['parent_header'], msg['parent_header'])
        self.assertEqual(new_msg['buffers'], [b'bar'])

        header['date'] = datetime.now()

        self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
        ident, new_msg = self.session.recv(B)
        self.assertEqual(ident[0], b'foo')
        self.assertEqual(new_msg['msg_id'], msg['msg_id'])
        self.assertEqual(new_msg['msg_type'], msg['msg_type'])
        self.assertEqual(new_msg['header'], msg['header'])
        self.assertEqual(new_msg['content'], msg['content'])
        self.assertEqual(new_msg['metadata'], msg['metadata'])
        self.assertEqual(new_msg['parent_header'], msg['parent_header'])
        self.assertEqual(new_msg['buffers'], [b'bar'])

        A.close()
        B.close()
        ctx.term()

    def test_args(self):
        """initialization arguments for Session"""
        s = self.session
        self.assertTrue(s.pack is ss.default_packer)
        self.assertTrue(s.unpack is ss.default_unpacker)
        self.assertEqual(s.username, os.environ.get('USER', u'username'))

        s = ss.Session()
        self.assertEqual(s.username, os.environ.get('USER', u'username'))

        self.assertRaises(TypeError, ss.Session, pack='hi')
        self.assertRaises(TypeError, ss.Session, unpack='hi')
        u = str(uuid.uuid4())
        s = ss.Session(username=u'carrot', session=u)
        self.assertEqual(s.session, u)
        self.assertEqual(s.username, u'carrot')

    def test_tracking(self):
        """test tracking messages"""
        a, b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
        s = self.session
        s.copy_threshold = 1
        stream = ZMQStream(a)
        msg = s.send(a, 'hello', track=False)
        self.assertTrue(msg['tracker'] is ss.DONE)
        msg = s.send(a, 'hello', track=True)
        self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
        M = zmq.Message(b'hi there', track=True)
        msg = s.send(a, 'hello', buffers=[M], track=True)
        t = msg['tracker']
        self.assertTrue(isinstance(t, zmq.MessageTracker))
        self.assertRaises(zmq.NotDone, t.wait, .1)
        del M
        t.wait(1)  # this will raise

    def test_unique_msg_ids(self):
        """test that messages receive unique ids"""
        ids = set()
        for i in range(2**12):
            h = self.session.msg_header('test')
            msg_id = h['msg_id']
            self.assertTrue(msg_id not in ids)
            ids.add(msg_id)

    def test_feed_identities(self):
        """scrub the front for zmq IDENTITIES"""
        theids = "engine client other".split()
        content = dict(code='whoda', stuff=object())
        themsg = self.session.msg('execute', content=content)
        pmsg = theids

    def test_session_id(self):
        session = ss.Session()
        # get bs before us
        bs = session.bsession
        us = session.session
        self.assertEqual(us.encode('ascii'), bs)
        session = ss.Session()
        # get us before bs
        us = session.session
        bs = session.bsession
        self.assertEqual(us.encode('ascii'), bs)
        # change propagates:
        session.session = 'something else'
        bs = session.bsession
        us = session.session
        self.assertEqual(us.encode('ascii'), bs)
        session = ss.Session(session='stuff')
        # get bs before us
        self.assertEqual(session.bsession, session.session.encode('ascii'))
        self.assertEqual(b'stuff', session.bsession)

    def test_zero_digest_history(self):
        session = ss.Session(digest_history_size=0)
        for i in range(11):
            session._add_digest(uuid.uuid4().bytes)
        self.assertEqual(len(session.digest_history), 0)

    def test_cull_digest_history(self):
        session = ss.Session(digest_history_size=100)
        for i in range(100):
            session._add_digest(uuid.uuid4().bytes)
        self.assertTrue(len(session.digest_history) == 100)
        session._add_digest(uuid.uuid4().bytes)
        self.assertTrue(len(session.digest_history) == 91)
        for i in range(9):
            session._add_digest(uuid.uuid4().bytes)
        self.assertTrue(len(session.digest_history) == 100)
        session._add_digest(uuid.uuid4().bytes)
        self.assertTrue(len(session.digest_history) == 91)
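
    # A hypothetical sketch, not the real Session._add_digest, of the culling
    # behaviour the two tests above expect: a size of 0 disables the digest
    # history entirely, and once the history exceeds its configured size
    # roughly 10% of it is dropped, which is why 100 entries become 91 after
    # one more add.
    @staticmethod
    def _add_digest_sketch(history, digest, size=100):
        if size == 0:
            return                      # digest history disabled
        history.add(digest)
        if len(history) > size:
            for _ in range(len(history) // 10):
                history.pop()           # real code would drop the oldest entries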

    def test_bad_pack(self):
        try:
            session = ss.Session(pack=_bad_packer)
        except ValueError as e:
            self.assertIn("could not serialize", str(e))
            self.assertIn("don't work", str(e))
        else:
            self.fail("Should have raised ValueError")

    def test_bad_unpack(self):
        try:
            session = ss.Session(unpack=_bad_unpacker)
        except ValueError as e:
            self.assertIn("could not handle output", str(e))
            self.assertIn("don't work either", str(e))
        else:
            self.fail("Should have raised ValueError")

    def test_bad_packer(self):
        try:
            session = ss.Session(packer=__name__ + '._bad_packer')
        except ValueError as e:
            self.assertIn("could not serialize", str(e))
            self.assertIn("don't work", str(e))
        else:
            self.fail("Should have raised ValueError")

    def test_bad_unpacker(self):
        try:
            session = ss.Session(unpacker=__name__ + '._bad_unpacker')
        except ValueError as e:
            self.assertIn("could not handle output", str(e))
            self.assertIn("don't work either", str(e))
        else:
            self.fail("Should have raised ValueError")

    def test_bad_roundtrip(self):
        with self.assertRaises(ValueError):
            session = ss.Session(unpack=lambda b: 5)

    def _datetime_test(self, session):
        content = dict(t=datetime.now())
        metadata = dict(t=datetime.now())
        p = session.msg('msg')
        msg = session.msg('msg',
                          content=content,
                          metadata=metadata,
                          parent=p['header'])
        smsg = session.serialize(msg)
        msg2 = session.deserialize(session.feed_identities(smsg)[1])
        assert isinstance(msg2['header']['date'], datetime)
        self.assertEqual(msg['header'], msg2['header'])
        self.assertEqual(msg['parent_header'], msg2['parent_header'])
        self.assertEqual(msg['parent_header'], msg2['parent_header'])
        assert isinstance(msg['content']['t'], datetime)
        assert isinstance(msg['metadata']['t'], datetime)
        assert isinstance(msg2['content']['t'], string_types)
        assert isinstance(msg2['metadata']['t'], string_types)
        self.assertEqual(msg['content'],
                         jsonutil.extract_dates(msg2['content']))
        self.assertEqual(msg['metadata'],
                         jsonutil.extract_dates(msg2['metadata']))

    def test_datetimes(self):
        self._datetime_test(self.session)

    def test_datetimes_pickle(self):
        session = ss.Session(packer='pickle')
        self._datetime_test(session)

    @skipif(module_not_available('msgpack'))
    def test_datetimes_msgpack(self):
        import msgpack

        session = ss.Session(
            pack=msgpack.packb,
            unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
        )
        self._datetime_test(session)

    def test_send_raw(self):
        ctx = zmq.Context.instance()
        A = ctx.socket(zmq.PAIR)
        B = ctx.socket(zmq.PAIR)
        A.bind("inproc://test")
        B.connect("inproc://test")

        msg = self.session.msg('execute', content=dict(a=10))
        msg_list = [
            self.session.pack(msg[part])
            for part in ['header', 'parent_header', 'metadata', 'content']
        ]
        self.session.send_raw(A, msg_list, ident=b'foo')

        ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
        new_msg = self.session.deserialize(new_msg_list)
        self.assertEqual(ident[0], b'foo')
        self.assertEqual(new_msg['msg_type'], msg['msg_type'])
        self.assertEqual(new_msg['header'], msg['header'])
        self.assertEqual(new_msg['parent_header'], msg['parent_header'])
        self.assertEqual(new_msg['content'], msg['content'])
        self.assertEqual(new_msg['metadata'], msg['metadata'])

        A.close()
        B.close()
        ctx.term()
        )
        entry.update(e)
        entry["updated_parsed"] = entry["updated"].timetuple()
        if "content" in entry:
            entry["content"] = [attrdict(type=entry["content_type"], value=entry["content"])]
        if "summary_detail" in entry:
            entry["summary_detail"] = attrdict(entry["summary_detail"])
        feed.entries.append(entry)

    return feed


_mock_feed.i = 0


@skipif(module_not_available("html2text"))
@mock.patch.object(feedparser, "parse")
def test_pull_rss_feeds(parsefeed):
    html_content = (
        "<p>1. foo</p>\n"
        "\n"
        "<p>\n"
        "#foo bar <a href='baz'>baz</a>\n"
        "foo bar\n"
        "</p>\n"
        "\n"
        "<p>#foo bar <a href='baz'>\n"
        "baz\n"
        "</a></p>\n"
    )