def news(request, source):
    if request.method == 'GET':
        finder = FeedFinder()
        try:
            feeds = finder.populate_feeds(source)
        except (SPARQLQueryProcessorError, SPARQLQueryBuilderError), e:
            return render_to_response('debian/error.html', {'reason': e})
        replydata = {'source': source, 'feeds': feeds}
        return render_to_response('debian/news.html', replydata)
class FeedFinderTest(unittest.TestCase):
    def setUp(self):
        self.finder = FeedFinder()
        self.mox = Mox()
        debian.services.RES_BASEURI = "base"

    def test__fetch_feeduris(self):
        unversionedsourceuri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(unversionedsourceuri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        binding1 = {'feeduri': {'value': "feed1"}}
        binding2 = {'feeduri': {'value': "feed2"}}
        bindings = [binding1, binding2]
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        feeds = self.finder._fetch_feeduris(unversionedsourceuri)
        self.mox.VerifyAll()
        self.assertEqual(2, len(feeds))
        self.assertEqual("feed1", feeds[0].feeduri)
        self.assertEqual("feed2", feeds[1].feeduri)

    # This can never happen in practice: homepages without alternatives
    # won't match the SPARQL patterns and won't be in the result set.
    def test__fetch_feeduris_no_feeduris_in_bindings(self):
        unversionedsourceuri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(unversionedsourceuri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        binding1 = {}
        binding2 = {}
        bindings = [binding1, binding2]
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        feeds = self.finder._fetch_feeduris(unversionedsourceuri)
        self.mox.VerifyAll()
        self.assertEqual(0, len(feeds))

    def test__fetch_feeduris_no_bindings(self):
        unversionedsourceuri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(unversionedsourceuri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        bindings = []
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        feeds = self.finder._fetch_feeduris(unversionedsourceuri)
        self.mox.VerifyAll()
        self.assertEqual(0, len(feeds))

    def test__fetch_feeditems(self):
        feeduri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(feeduri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        binding1 = {'title': {'value': "title1"}, 'link': {'value': "link1"}}
        binding2 = {'title': {'value': "title2"}}
        bindings = [binding1, binding2]
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        items = self.finder._fetch_feeditems(feeduri)
        self.mox.VerifyAll()
        self.assertEqual(2, len(items))
        self.assertEqual("title1", items[0]['title'])
        self.assertEqual("link1", items[0]['link'])
        self.assertEqual("title2", items[1]['title'])
        self.assertEqual(None, items[1]['link'])

    def test__fetch_feeditems_no_bindings(self):
        feeduri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(feeduri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        bindings = []
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        items = self.finder._fetch_feeditems(feeduri)
        self.mox.VerifyAll()
        self.assertEqual(0, len(items))

    def test__fill_feeds(self):
        input = [RSSFeed("uri1"), RSSFeed("uri2")]
        self.mox.StubOutWithMock(self.finder, "_fetch_feeditems")
        self.mox.StubOutWithMock(self.finder,
                                 "_fetch_feed_channel_information")
        uri1items = []
        uri2items = [{'title': "title21"}, {'title': "title22"}]
        uri1channel = {'title': "title1"}
        uri2channel = {'title': "title2"}
        self.finder._fetch_feed_channel_information("uri1").AndReturn(
            uri1channel)
        self.finder._fetch_feeditems("uri1").AndReturn(uri1items)
        self.finder._fetch_feed_channel_information("uri2").AndReturn(
            uri2channel)
        self.finder._fetch_feeditems("uri2").AndReturn(uri2items)
        self.mox.ReplayAll()
        feeds = self.finder._fill_feeds(input)
        self.assertEqual("uri1", feeds[0].feeduri)
        self.assertEqual("uri2", feeds[1].feeduri)
        self.assertEqual([], feeds[0].items)
        self.assertEqual(2, len(feeds[1].items))
        self.assertEqual("title21", feeds[1].items[0]['title'])
        self.assertEqual("title22", feeds[1].items[1]['title'])
        self.assertEqual("title1", feeds[0].channel['title'])
        self.assertEqual("title2", feeds[1].channel['title'])

    def test__fetch_feed_channel_information(self):
        feeduri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(feeduri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        binding1 = {'title': {'value': "title1"}}
        bindings = [binding1]
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        channel = self.finder._fetch_feed_channel_information(feeduri)
        self.mox.VerifyAll()
        self.assertEqual("title1", channel['title'])

    def test__fetch_feed_channel_information_channel_but_no_title(self):
        feeduri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(feeduri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        binding1 = {}
        bindings = [binding1]
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        channel = self.finder._fetch_feed_channel_information(feeduri)
        self.mox.VerifyAll()
        self.assertEqual(None, channel['title'])

    def test__fetch_feed_channel_information_no_channel(self):
        feeduri = "http://example.org/p"
        mock = self.mox.CreateMock(SPARQLQueryProcessor)
        expectedarg = r"SELECT.+\<%s\>.+" % re.escape(feeduri)
        mock.execute_query(Regex(expectedarg, flags=re.DOTALL))
        bindings = []
        fakeresults = {'results': {'bindings': bindings}}
        self.finder.processor = mock
        self.mox.ReplayAll()
        self.finder.processor.results = fakeresults
        channel = self.finder._fetch_feed_channel_information(feeduri)
        self.mox.VerifyAll()
        self.assertEqual(None, channel)

    def test_populate_feeds_forbidden_characters(self):
        self.assertRaises(SPARQLQueryBuilderPackageNameSchemeError,
                          self.finder.populate_feeds, "{}@")

    def test_populate_feeds_escape(self):
        srcpkgname = "source.+-"
        srcpkguri = "base/source/%s" % "source.%2B-"
        self.mox.StubOutWithMock(self.finder, "_fetch_feeduris")
        self.finder._fetch_feeduris(srcpkguri).AndReturn([])
        self.mox.StubOutWithMock(self.finder, "_fill_feeds")
        self.finder._fill_feeds([]).AndReturn([])
        self.mox.ReplayAll()
        data = self.finder.populate_feeds(srcpkgname)
        self.mox.VerifyAll()
        self.assertEqual(0, len(data))
            processor.execute_sanitized_query(query)
        except SPARQLQueryProcessorError, e:
            return render_to_response('debian/error.html', {'reason': e})
        if builder.source_search():
            results = processor.format_source_results()
        elif builder.binary_search():
            results = processor.format_binary_results()
        else:
            raise UnexpectedSituationError()
        sourcenames = [x.sourcename for x in results]
        if builder.binary_search():
            sourcenames = remove_duplicates(sourcenames, lambda x: x)
        finder = FeedFinder()
        aggregated_feeds = []
        for sourcename in sourcenames:
            aggregated_feeds.extend(finder.populate_feeds(sourcename))
        replydata = {'source': '', 'feeds': aggregated_feeds}
        return render_to_response('debian/news.html', replydata)
    else:
        return HttpResponse("405 - Method not allowed", status=405)


def source_detail(request, source, version):
    if request.method == 'GET':
        builder = SPARQLQueryBuilder()
        try:
            query = builder.create_binaries_query(source, version)
        except SPARQLQueryBuilderError, e: