def test_create_new_domain(self):
    """If the source has a URL set, extract its domain."""
    # bad URLs: missing, empty, unparseable, or blacklisted — no domain extracted
    for user_json in None, {}, {'url': 'not<a>url'}, {'url': 'http://t.co/foo'}:
        auth_entity = None
        if user_json is not None:
            auth_entity = testutil.FakeAuthEntity(
                id='x', user_json=json.dumps(user_json))
            auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([], source.domains)
        self.assertEqual([], source.domain_urls)

    # good URLs
    for url in ('http://foo.com/bar',
                'https://www.foo.com/bar',
                'http://foo.com/\nhttp://baz.com/',
                'http://FoO.cOm',  # should be normalized to lowercase
                ):
        auth_entity = testutil.FakeAuthEntity(
            id='x', user_json=json.dumps({'url': url}))
        auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        # only the first line of a multi-line url field is kept
        self.assertEqual([url.split('\n')[0]], source.domain_urls)
        self.assertEqual(['foo.com'], source.domains)

    # also look in urls field
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps({
        'url': 'not<a>url',
        'urls': [{'value': 'also<not>'}, {'value': 'http://foo.com/'}],
    }))
    auth_entity.put()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://foo.com/'], source.domain_urls)
    self.assertEqual(['foo.com'], source.domains)
def test_create_new_webmention(self):
    """We should subscribe to webmention sources in Superfeedr."""
    # the domain fetch finds no webmention endpoint
    self.expect_requests_get('http://primary/', 'no webmention endpoint',
                             verify=False)
    # expect a Superfeedr subscribe call for the new source
    self.mox.StubOutWithMock(superfeedr, 'subscribe')
    superfeedr.subscribe(mox.IsA(FakeSource), self.handler)

    self.mox.ReplayAll()
    FakeSource.create_new(self.handler,
                          features=['webmention'],
                          domains=['primary/'],
                          domain_urls=['http://primary/'])
def test_create_new_domain(self):
    """If the source has a URL set, extract its domain."""
    # bad URLs
    for user_json in (None, {}, {'url': 'not<a>url'},
                      # t.co is in the webmention blacklist
                      {'url': 'http://t.co/foo'}):
        auth_entity = None
        if user_json is not None:
            auth_entity = testutil.FakeAuthEntity(
                id='x', user_json=json.dumps(user_json))
            auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([], source.domains)
        self.assertEqual([], source.domain_urls)

    self.expect_requests_get('http://foo.com')
    self.expect_requests_get('https://www.foo.com')
    self.expect_requests_get('https://baj')
    self.mox.ReplayAll()

    # good URLs
    for url in ('http://foo.com/bar',
                'https://www.foo.com/bar',
                'http://FoO.cOm/',  # should be normalized to lowercase
                ):
        auth_entity = testutil.FakeAuthEntity(
            id='x', user_json=json.dumps({'url': url}))
        auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([url.lower()], source.domain_urls)
        self.assertEqual(['foo.com'], source.domains)

    # multiple good URLs and one that's in the webmention blacklist
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps({
        'url': 'http://foo.org',
        'urls': [{'value': u} for u in
                 ('http://bar.com', 'http://t.co/x', 'http://baz',
                  # utm_* query params should be stripped
                  'https://baj/biff?utm_campaign=x&utm_source=y')],
    }))
    auth_entity.put()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://foo.org/', 'http://bar.com/', 'http://baz/',
                      'https://baj/biff'],
                     source.domain_urls)
    self.assertEqual(['foo.org', 'bar.com', 'baz', 'baj'], source.domains)

    # a URL that redirects
    auth_entity = testutil.FakeAuthEntity(
        id='x', user_json=json.dumps({'url': 'http://orig'}))
    auth_entity.put()
    self.expect_requests_head('http://orig', redirected_url='http://final')
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://final/'], source.domain_urls)
    self.assertEqual(['final'], source.domains)
def _test_create_new(self, **kwargs):
    """Shared helper: create a source and verify the poll task it enqueues."""
    FakeSource.create_new(self.handler,
                          domains=['foo'],
                          domain_urls=['http://foo.com'],
                          webmention_endpoint='http://x/y',
                          **kwargs)
    self.assertEqual(1, FakeSource.query().count())

    # exactly one poll task should have been enqueued for the new source
    poll_tasks = self.taskqueue_stub.GetTasks('poll')
    self.assertEqual(1, len(poll_tasks))
    created = FakeSource.query().get()
    self.assertEqual('/_ah/queue/poll', poll_tasks[0]['url'])
    self.assertEqual(created.key.urlsafe(),
                     testutil.get_task_params(poll_tasks[0])['source_key'])
def test_create_new_webmention(self):
    """We should subscribe to webmention sources in Superfeedr."""
    self.expect_webmention_requests_get('http://primary/',
                                        'no webmention endpoint',
                                        verify=False)
    self.mox.StubOutWithMock(superfeedr, 'subscribe')

    def is_saved_fake_source(candidate):
        # the source passed to subscribe() must already be persisted
        assert isinstance(candidate, FakeSource)
        assert candidate.is_saved
        return True

    superfeedr.subscribe(mox.Func(is_saved_fake_source), self.handler)

    self.mox.ReplayAll()
    FakeSource.create_new(self.handler,
                          features=['webmention'],
                          domains=['primary/'],
                          domain_urls=['http://primary/'])
def test_create_new_domain_url_path_fails(self):
    """A profile URL whose root returns an error is still kept as-is."""
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': 'http://flaky/foo'}]}))
    # the rel=me check on the root fails with a server error
    self.expect_requests_get('http://flaky', status_code=500)
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://flaky/foo'], source.domain_urls)
    self.assertEqual(['flaky'], source.domains)
def test_create_new_domain_url_path_connection_fails(self):
    """A profile URL whose root can't be fetched at all is still kept as-is."""
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': 'http://flaky/foo'}]}))
    # the rel=me check on the root raises a connection-level error
    self.expect_requests_get('http://flaky').AndRaise(
        requests.ConnectionError('DNS lookup failed for URL: http://bad/'))
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://flaky/foo'], source.domain_urls)
    self.assertEqual(['flaky'], source.domains)
def test_create_new_dedupes_domains(self):
    """Duplicate profile URLs for the same domain should be collapsed."""
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': 'http://foo'},
                  {'value': 'https://foo/'},
                  {'value': 'http://foo/'},
                  {'value': 'http://foo'},
                  ]}))
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    # https wins over http when deduping the same domain
    self.assertEqual(['https://foo/'], source.domain_urls)
    self.assertEqual(['foo'], source.domains)
def test_create_new_domain_url_path_fails(self):
    """A profile URL whose root returns an error is still kept as-is."""
    auth_entity = testutil.FakeAuthEntity(
        id='x', user_json=json.dumps({'urls': [{
            'value': 'http://flaky/foo'
        }]}))
    # the rel=me check on the root fails with a server error
    self.expect_requests_get('http://flaky', status_code=500)
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://flaky/foo'], source.domain_urls)
    self.assertEqual(['flaky'], source.domains)
def test_create_new_domain_url_no_root_relme(self):
    """If a profile URL contains a path, check the root for a rel=me to the path."""
    auth_entity = testutil.FakeAuthEntity(
        id='x', user_json=json.dumps({'url': 'http://site/path'}))
    auth_entity.put()
    # root fetch returns no rel=me, so the original path URL is kept
    self.expect_requests_get('http://site')
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://site/path'], source.domain_urls)
    self.assertEqual(['site'], source.domains)
def test_create_new_domain_url_redirects_to_path(self):
    """If a profile URL is a root that redirects to a path, keep the root."""
    auth_entity = testutil.FakeAuthEntity(
        id='x', user_json=json.dumps({'url': 'http://site'}))
    auth_entity.put()
    self.expect_requests_head('http://site', redirected_url='https://site/path')
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    # the original root (with trailing slash) is kept, not the redirect target
    self.assertEqual(['http://site/'], source.domain_urls)
    self.assertEqual(['site'], source.domains)
def test_create_new_domain_url_path_connection_fails(self):
    """A profile URL whose root can't be fetched at all is still kept as-is."""
    auth_entity = testutil.FakeAuthEntity(
        id='x', user_json=json.dumps({'urls': [{
            'value': 'http://flaky/foo'
        }]}))
    # the rel=me check on the root raises a connection-level error
    self.expect_requests_get('http://flaky').AndRaise(
        requests.ConnectionError('DNS lookup failed for URL: http://bad/'))
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://flaky/foo'], source.domain_urls)
    self.assertEqual(['flaky'], source.domains)
def test_create_new_too_many_domains(self):
    """All URLs are stored, but only the first MAX_AUTHOR_URLS are fetched."""
    urls = ['http://%s/' % i for i in range(10)]
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': u} for u in urls]}))

    # we should only check the first models.MAX_AUTHOR_URLS
    for url in urls[:models.MAX_AUTHOR_URLS]:
        self.expect_requests_head(url)
    self.mox.ReplayAll()

    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(urls, source.domain_urls)
    self.assertEqual([str(i) for i in range(10)], source.domains)
def test_create_new_rereads_domains(self):
    """Re-creating an existing source should re-derive domains from auth info."""
    FakeSource.new(None, features=['listen'],
                   domain_urls=['http://foo'], domains=['foo']).put()

    # reuse the same string id so create_new finds the existing entity
    FakeSource.string_id_counter -= 1
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': 'http://bar'}, {'value': 'http://baz'}]}))
    self.expect_requests_get('http://bar', 'no webmention endpoint',
                             verify=False)

    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://bar', 'http://baz'], source.domain_urls)
    self.assertEqual(['bar', 'baz'], source.domains)
def test_create_new_rereads_domains(self):
    """Re-creating an existing source should re-derive domains from auth info."""
    FakeSource.new(None, features=['listen'],
                   domain_urls=['http://foo'], domains=['foo']).put()

    # reuse the same string id so create_new finds the existing entity
    FakeSource.string_id_counter -= 1
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': 'http://bar'}, {'value': 'http://baz'}]}))
    self.expect_webmention_requests_get('http://bar/', 'no webmention endpoint',
                                        verify=False)

    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://bar/', 'http://baz/'], source.domain_urls)
    self.assertEqual(['bar', 'baz'], source.domains)
def test_create_new_domain(self):
    """If the source has a URL set, extract its domain."""
    # bad URLs
    for user_json in (None, {}, {'url': 'not<a>url'},
                      # t.co is in the webmention blacklist
                      {'url': 'http://t.co/foo'}):
        auth_entity = None
        if user_json is not None:
            auth_entity = testutil.FakeAuthEntity(
                id='x', user_json=json.dumps(user_json))
            auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([], source.domains)
        self.assertEqual([], source.domain_urls)

    # good URLs
    for url in ('http://foo.com/bar',
                'https://www.foo.com/bar',
                'http://FoO.cOm',  # should be normalized to lowercase
                ):
        auth_entity = testutil.FakeAuthEntity(
            id='x', user_json=json.dumps({'url': url}))
        auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([url], source.domain_urls)
        self.assertEqual(['foo.com'], source.domains)

    # multiple good URLs, a bad URL, and a good URL that returns fails a HEAD
    # request.
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps({
        'url': 'http://foo.org',
        'urls': [{'value': u} for u in
                 ('http://bar.com', 'http://t.co/x', 'http://baz')],
    }))
    auth_entity.put()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://foo.org', 'http://bar.com', 'http://baz'],
                     source.domain_urls)
    self.assertEqual(['foo.org', 'bar.com', 'baz'], source.domains)
def test_create_new_domain(self):
    """If the source has a URL set, extract its domain."""
    # bad URLs
    for user_json in (None, {}, {'url': 'not<a>url'},
                      # t.co is in the webmention blacklist
                      {'url': 'http://t.co/foo'}):
        auth_entity = None
        if user_json is not None:
            auth_entity = testutil.FakeAuthEntity(
                id='x', user_json=json.dumps(user_json))
            auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([], source.domains)
        self.assertEqual([], source.domain_urls)

    # good URLs
    for url in ('http://foo.com/bar',
                'https://www.foo.com/bar',
                'http://FoO.cOm/',  # should be normalized to lowercase
                ):
        auth_entity = testutil.FakeAuthEntity(
            id='x', user_json=json.dumps({'url': url}))
        auth_entity.put()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEqual([url.lower()], source.domain_urls)
        self.assertEqual(['foo.com'], source.domains)

    # multiple good URLs and one that's in the webmention blacklist
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps({
        'url': 'http://foo.org',
        'urls': [{'value': u} for u in
                 ('http://bar.com', 'http://t.co/x', 'http://baz',
                  # utm_* query params should be stripped
                  'https://baj/biff?utm_campaign=x&utm_source=y')],
    }))
    auth_entity.put()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://foo.org/', 'http://bar.com/', 'http://baz/',
                      'https://baj/biff'],
                     source.domain_urls)
    self.assertEqual(['foo.org', 'bar.com', 'baz', 'baj'], source.domains)

    # a URL that redirects
    auth_entity = testutil.FakeAuthEntity(
        id='x', user_json=json.dumps({'url': 'http://orig'}))
    auth_entity.put()
    self.expect_requests_head('http://orig', redirected_url='http://final')
    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEqual(['http://final/'], source.domain_urls)
    self.assertEqual(['final'], source.domains)
def test_create_new_unicode_chars(self):
    """We should handle unusual unicode chars in the source's name ok."""
    # the invisible character in the middle is an unusual unicode character;
    # drop the unused local — we only care that create_new doesn't blow up
    FakeSource.create_new(self.handler, name=u'a ✁ b')
def test_create_new_publish(self):
    """If a source is publish only, we shouldn't insert a poll task."""
    FakeSource.create_new(self.handler, features=['publish'])
    poll_tasks = self.taskqueue_stub.GetTasks('poll')
    self.assertEqual(0, len(poll_tasks))
def test_create_new_unicode_chars(self):
    """We should handle unusual unicode chars in the source's name ok."""
    # the invisible character in the middle is an unusual unicode character
    FakeSource.create_new(self.handler, name=u'a ✁ b')
def test_create_new_publish(self):
    """If a source is publish only, we shouldn't insert a poll task."""
    FakeSource.create_new(self.handler, features=['publish'])
    # neither the regular poll queue nor the poll-now queue should have tasks
    for queue in 'poll', 'poll-now':
        self.assertEqual(0, len(self.taskqueue_stub.GetTasks(queue)))