def test_multifile_paste(self):
    """Several bad pastes are fetched, joined with per-file headers,
    and repasted as a single multi-file paste.
    """
    toRepaste = [
        pastebin.BadPaste(u'testbadpb', u'first'),
        pastebin.BadPaste(u'testbadpb', u'second'),
    ]
    wantedUrl = u'https://paste.example.com/outputid'
    self.fakeContentFromPaste.reset([
        b"first's content\n",
        b"second's content\n",
    ])
    self.fakeCreatePaste.reset([wantedUrl])
    self.repaster._cache._now = lambda: 1
    resultUrl = yield self.repaster.repaste(toRepaste)
    self.assertEqual(resultUrl, wantedUrl)
    # Each source paste gets a `### provider::id.py` header line.
    combined = (
        b'### testbadpb::first.py\n'
        b"first's content\n"
        b'\n'
        b'### testbadpb::second.py\n'
        b"second's content\n"
    )
    self.assertEqual(
        self.fakeContentFromPaste.calls,
        [sp.Call(paste) for paste in toRepaste],
    )
    self.assertEqual(
        self.fakeCreatePaste.calls,
        [sp.Call(combined, u'multi')],
    )
def test_multiple(self):
    """Every paste URL found in one message is reported, in order."""
    message = b' and '.join([
        b'https://pastebin.com/pwZA',
        b'http://pastebin.com/RwZA',
        b'hastebin.com/aasdfkjgog',
    ])
    self.assertResults(message, [
        pastebin.BadPaste(u'pastebin.com', u'pwZA'),
        pastebin.BadPaste(u'pastebin.com', u'RwZA'),
        pastebin.BadPaste(u'hastebin.com', u'aasdfkjgog'),
    ])
def test_dedupe(self):
    """Equivalent spellings of the same paste URL collapse to one result."""
    variants = [
        b'http://pastebin.com/Same',
        b'https://pastebin.com/Same',
        b'pastebin.com/Same',
        b'http://www.pastebin.com/Same',
        b'https://www.pastebin.com/Same',
        b'www.pastebin.com/Same',
    ]
    self.assertResults(
        b' '.join(variants),
        [pastebin.BadPaste(u'pastebin.com', u'Same')],
    )
def test_identify_delegates(self):
    """identifyPaste passes the URL path to the configured id finder."""
    finder = sp.SequentialReturner([u'1234'])
    paster = self.makeOne(finder)
    badPaste = paster.identifyPaste(
        u'paste.example.com', u'/id/path', u'', u'', u'')
    self.assertEqual(
        badPaste,
        pastebin.BadPaste(u'paste.example.com', u'1234'),
    )
    self.assertEqual(finder.calls, [sp.Call(u'/id/path')])
def test_cache_lru(self):
    """The repaste cache evicts its least-recently-used entry.

    The cache holds at most 10 entries. After every entry except
    the oldest is touched again, adding an eleventh paste must drop
    that untouched oldest entry.
    """
    ids = [unicode(n) for n in range(1, 11 + 1)]
    pastes = [pastebin.BadPaste(u'testbadpb', pid) for pid in ids]
    urls = tuple(
        u'https://paste.example.com/repasted{0}'.format(pid)
        for pid in ids
    )
    self.fakeCreatePaste.reset(urls)
    self.fakeContentFromPaste.reset(tuple(b'' for _ in ids))

    self.repaster._cache._now = lambda: 1
    # Fill the cache with the first ten pastes; hold back the last one.
    for paste, wanted in zip(pastes[:-1], urls):
        got = yield self.repaster.repaste([paste])
        self.assertEqual(got, wanted)
    # The cache is now at capacity.
    self.assertEqual(len(self.repaster._cache), 10)

    self.repaster._cache._now = lambda: 20
    # Touch every cached paste except the first, leaving the first
    # as the least recently used.
    for paste, wanted in zip(pastes[1:-1], urls[1:]):
        got = yield self.repaster.repaste([paste])
        self.assertEqual(got, wanted)
    self.assertEqual(len(self.repaster._cache), 10)  # unchanged
    # The first paste is still cached at this point.
    self.assertIn(pastes[0].identity, self.repaster._cache)

    # Adding a fresh entry forces an eviction...
    got = yield self.repaster.repaste([pastes[-1]])
    self.assertEqual(got, urls[-1])
    self.assertEqual(len(self.repaster._cache), 10)  # unchanged
    # ...and the victim is the untouched first paste.
    self.assertNotIn(pastes[0].identity, self.repaster._cache)
def test_basic_cacheing(self):
    """A repaste result is cached: an immediate repeat returns None,
    and the cached URL reappears after enough time has passed.
    """
    paste = pastebin.BadPaste(u'testbadpb', u'allgood')
    wantedUrl = u'https://paste.example.com/outputid'
    self.fakeContentFromPaste.reset([b'testing testing'])
    self.fakeCreatePaste.reset([wantedUrl])

    self.repaster._cache._now = lambda: 1
    result = yield self.repaster.repaste([paste])
    self.assertEqual(
        self.fakeContentFromPaste.calls,
        [sp.Call(paste)],
    )
    self.assertEqual(
        self.fakeCreatePaste.calls,
        [sp.Call(b'testing testing', u'python')],
    )
    self.assertEqual(result, wantedUrl)

    # Empty out the fakes: everything below must hit only the cache.
    self.fakeContentFromPaste.reset([])
    self.fakeCreatePaste.reset([])
    self.repaster._cache._now = lambda: 2
    result = yield self.repaster.repaste([paste])
    self.assertEqual(self.fakeContentFromPaste.calls, [])
    self.assertEqual(self.fakeCreatePaste.calls, [])
    # Repeating this soon gives None instead of the URL.
    self.assertIsNone(result)

    self.repaster._cache._now = lambda: 20
    result = yield self.repaster.repaste([paste])
    self.assertEqual(self.fakeContentFromPaste.calls, [])
    self.assertEqual(self.fakeCreatePaste.calls, [])
    # Once the delay has elapsed, the cached URL is returned again.
    self.assertEqual(result, wantedUrl)
def test_raw_url_paths(self, domain, path, pasteid):
    """A raw-view URL path still identifies the underlying paste id."""
    message = u'https://{0}{1}'.format(domain, path).encode('ascii')
    self.assertResults(
        message, [pastebin.BadPaste(domain, pasteid)])
def test_extra_netloc_crap(self, message):
    """Extra junk around the URL's host part does not hide the paste id."""
    expected = [pastebin.BadPaste(u'pastebin.com', u'idid')]
    self.assertResults(message, expected)
def test_scheme_optional(self, message, domain, pasteid):
    """Paste URLs are recognized with or without an explicit scheme."""
    self.assertResults(
        message, [pastebin.BadPaste(domain, pasteid)])
def test_contentFromPaste_rejects_foreign_badPaste(self):
    """Fetching a paste made by a different pastebin raises ValueError."""
    alien = pastebin.BadPaste(u'other.example.com', u'1234')
    # The u? makes the pattern tolerate both Python 2 and 3 reprs.
    pattern = (
        r"Cannot retrieve paste .*u?'other.example.com'.*, "
        r"not created by .*u?'paste.example.com'"
    )
    with self.assertRaisesRegexp(ValueError, pattern):
        yield self.bp.contentFromPaste(alien)
def test_contentFromPaste(self):
    """The paste body is returned for a paste this pastebin created."""
    own = pastebin.BadPaste(u'paste.example.com', u'1234')
    body = yield self.bp.contentFromPaste(own)
    self.assertEqual(body, b'content for 1234')
def test_identify_domains(self, domain):
    """Each accepted domain identifies pastes under the canonical name."""
    got = self.bp.identifyPaste(domain, u'/1234', None, None, None)
    self.assertEqual(
        got, pastebin.BadPaste(u'paste.example.com', u'1234'))
def test_hastebin_suffix_ignored(self, path, pasteid):
    """Any suffix on a hastebin path is ignored when extracting the id."""
    message = u'https://hastebin.com{0}'.format(path).encode('ascii')
    self.assertResults(
        message, [pastebin.BadPaste(u'hastebin.com', pasteid)])