def test_already_published(self):
  """We shouldn't allow duplicating an existing, *completed* publish."""
  page = PublishedPage(id='http://foo.com/bar')

  # these are all fine
  Publish(parent=page.key, source=self.source.key, status='new').put()
  Publish(parent=page.key, source=self.source.key, status='failed').put()
  Publish(parent=page.key, source=self.source.key, status='complete',
          type='preview').put()

  for i in range(2):
    self.expect_requests_get('http://foo.com/bar', self.post_html % 'foo')
  self.mox.ReplayAll()

  # first attempt should work
  self.assert_created('foo - http://foo.com/bar')
  self.assertEquals(4, Publish.query().count())
  self.assertEquals(2, Publish.query(Publish.status == 'complete').count())

  # now that there's a complete Publish entity, more attempts should fail
  self.assert_error("Sorry, you've already published that page")

  # try again to test for a bug we had where a second try would succeed
  self.assert_error("Sorry, you've already published that page")

  # should still be able to preview though
  self.assert_success('preview of foo', preview=True)
def _get_or_add_publish_entity(self, source_url):
  page = PublishedPage.get_or_insert(source_url)

  # Detect concurrent publish request for the same page
  # https://github.com/snarfed/bridgy/issues/996
  pending = Publish.query(Publish.status == 'new', Publish.type != 'preview',
                          Publish.source == self.source.key,
                          ancestor=page.key).get()
  if pending:
    logging.warning(
      f'Collided with publish: {pending.key.urlsafe().decode()}')
    raise CollisionError()

  entity = Publish.query(Publish.status == 'complete',
                         Publish.type != 'preview',
                         Publish.source == self.source.key,
                         ancestor=page.key).get()
  if entity is None:
    entity = Publish(parent=page.key, source=self.source.key)
    if self.PREVIEW:
      entity.type = 'preview'
    entity.put()

  logging.debug("Publish entity: '%s'", entity.key.urlsafe().decode())
  return entity
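# The variant above raises CollisionError when another in-flight publish of the
# same page is detected (https://github.com/snarfed/bridgy/issues/996). Below
# is a minimal, hypothetical sketch of how a caller might map that collision to
# a retryable error. CollisionError and handle_publish here are illustrative
# stand-ins, not the project's actual exception class or handler.

class CollisionError(RuntimeError):
  """Stand-in: raised when a publish of the same page is already in progress."""


def handle_publish(get_or_add_publish_entity, source_url):
  """Calls the entity helper and maps a collision to an HTTP-style response."""
  try:
    entity = get_or_add_publish_entity(source_url)
  except CollisionError:
    # another request is already publishing this page; ask the client to retry
    return None, ('Already publishing that page, please try again', 409)
  return entity, None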
def test_source_missing_mf2(self):
  self.expect_requests_get('http://foo.com/bar', '')
  self.mox.ReplayAll()
  self.assert_error('No microformats2 data found in http://foo.com/')

  self.assertTrue(PublishedPage.get_by_id('http://foo.com/bar'))
  publish = Publish.query().get()
  self.assertEquals('failed', publish.status)
  self.assertEquals(self.source.key, publish.source)
def _check_entity(self):
  self.assertTrue(PublishedPage.get_by_id('http://foo.com/bar'))
  publish = Publish.query().get()
  self.assertEquals(self.source.key, publish.source)
  self.assertEquals('complete', publish.status)
  self.assertEquals('post', publish.type)
  self.assertEquals('FakeSource post label', publish.type_label)
  expected_html = (self.post_html % 'foo') + self.backlink
  self.assertEquals(expected_html, publish.html)
  self.assertEquals({'id': 'fake id', 'url': 'http://fake/url',
                     'content': 'foo - http://foo.com/bar'},
                    publish.published)
def test_user_page_publish_url_with_unicode_char(self):
  """Check the custom mf2 we render on social user pages."""
  self.sources[0].features = ['publish']
  self.sources[0].put()

  url = 'https://ptt.com/ransomw…ocks-user-access/'
  Publish(parent=PublishedPage(id=url).key,
          source=self.sources[0].key).put()

  user_url = self.sources[0].bridgy_path()
  resp = self.client.get(user_url)
  self.assertEqual(200, resp.status_code)

  parsed = util.parse_mf2(resp.get_data(as_text=True), user_url)
  publish = parsed['items'][0]['children'][0]
def test_user_page_publish_url_with_unicode_char(self):
  """Check the custom mf2 we render on social user pages."""
  self.sources[0].features = ['publish']
  self.sources[0].put()

  url = u'https://ptt.com/ransomw…ocks-user-access/'
  Publish(parent=PublishedPage(id=url.encode('utf-8')).key,
          source=self.sources[0].key).put()

  user_url = self.sources[0].bridgy_path()
  resp = app.application.get_response(user_url)
  self.assertEquals(200, resp.status_int)

  parsed = util.mf2py_parse(resp.body, user_url)
  publish = parsed['items'][0]['children'][0]
def _get_or_add_publish_entity(self, source_url):
  page = PublishedPage.get_or_insert(source_url)
  entity = Publish.query(Publish.status == 'complete',
                         Publish.type != 'preview',
                         Publish.source == self.source.key,
                         ancestor=page.key).get()
  if entity is None:
    entity = Publish(parent=page.key, source=self.source.key)
    if self.PREVIEW:
      entity.type = 'preview'
    entity.put()

  logging.debug("Publish entity: '%s'", entity.key.urlsafe().decode())
  return entity
def test_success(self):
  html = '<article class="h-entry"><p class="e-content">foo</p></article>'
  self.expect_requests_get('http://foo.com/bar', html)
  self.mox.ReplayAll()
  self.assert_success('foo - http://foo.com/bar')

  self.assertTrue(PublishedPage.get_by_id('http://foo.com/bar'))
  publish = Publish.query().get()
  self.assertEquals(self.source.key, publish.source)
  self.assertEquals('complete', publish.status)
  self.assertEquals('post', publish.type)
  self.assertEquals('FakeSource post label', publish.type_label)
  self.assertEquals(html, publish.html)
  self.assertEquals({'id': 'fake id', 'url': 'http://fake/url',
                     'content': 'foo - http://foo.com/bar'},
                    publish.published)
def _check_entity(self, content='foo', html_content=None):
  if html_content is None:
    html_content = content
  self.assertTrue(PublishedPage.get_by_id('http://foo.com/bar'))
  publish = Publish.query().get()
  self.assertEquals(self.source.key, publish.source)
  self.assertEquals('complete', publish.status)
  self.assertEquals('post', publish.type)
  self.assertEquals('FakeSource post label', publish.type_label)
  expected_html = (self.post_html % html_content) + self.backlink
  self.assertEquals(expected_html, publish.html)
  self.assertEquals({
    'id': 'fake id',
    'url': 'http://fake/url',
    'content': '%s - http://foo.com/bar' % content,
    'granary_message': 'granary message',
  }, publish.published)
def get_or_add_publish_entity(self, source_url):
  """Creates and stores Publish and (if necessary) PublishedPage entities.

  Args:
    source_url: string
  """
  page = PublishedPage.get_or_insert(source_url)
  entity = Publish.query(Publish.status == 'complete',
                         Publish.type != 'preview',
                         Publish.source == self.source.key,
                         ancestor=page.key).get()
  if entity is None:
    entity = Publish(parent=page.key, source=self.source.key)
    if self.PREVIEW:
      entity.type = 'preview'
    entity.put()

  logging.debug('Publish entity: %s', entity.key.urlsafe())
  return entity
def get_or_add_publish_entity(self, source_url):
  """Creates and stores :class:`models.Publish` entity.

  ...and if necessary, :class:`models.PublishedPage` entity.

  Args:
    source_url: string
  """
  page = PublishedPage.get_or_insert(source_url)
  entity = Publish.query(Publish.status == 'complete',
                         Publish.type != 'preview',
                         Publish.source == self.source.key,
                         ancestor=page.key).get()
  if entity is None:
    entity = Publish(parent=page.key, source=self.source.key)
    if self.PREVIEW:
      entity.type = 'preview'
    entity.put()

  logging.debug("Publish entity: '%s'", entity.key.urlsafe().decode())
  return entity
def get_or_add_publish_entity(self, source_url):
  """Creates and stores :class:`models.Publish` entity.

  ...and if necessary, :class:`models.PublishedPage` entity.

  Args:
    source_url: string
  """
  page = PublishedPage.get_or_insert(native_str(source_url.encode('utf-8')))
  entity = Publish.query(Publish.status == 'complete',
                         Publish.type != 'preview',
                         Publish.source == self.source.key,
                         ancestor=page.key).get()
  if entity is None:
    entity = Publish(parent=page.key, source=self.source.key)
    if self.PREVIEW:
      entity.type = 'preview'
    entity.put()

  logging.debug("Publish entity: '%s'", entity.key.urlsafe())
  return entity
def setUp(self):
  super(ModelsTest, self).setUp()

  # sources
  auth_entities = [
    FakeAuthEntity(key=ndb.Key('FakeAuthEntity', '01122334455'),
                   user_json=json_dumps({
                     'id': '0123456789',
                     'name': 'Fake User',
                     'url': 'http://fakeuser.com/',
                   })),
    FakeAuthEntity(key=ndb.Key('FakeAuthEntity', '0022446688'),
                   user_json=json_dumps({
                     'id': '0022446688',
                     'name': 'Another Fake',
                     'url': 'http://anotherfake.com/',
                   })),
  ]
  for entity in auth_entities:
    entity.put()

  self.sources = [
    FakeSource.new(None, auth_entity=auth_entities[0]),
    FakeSource.new(None, auth_entity=auth_entities[1]),
  ]
  for entity in self.sources:
    entity.features = ['listen']
    entity.put()

  # activities
  self.activities = [{
    'id': 'tag:source.com,2013:%s' % id,
    'url': 'http://fa.ke/post/url',
    'object': {
      'objectType': 'note',
      'id': 'tag:source.com,2013:%s' % id,
      'url': 'http://fa.ke/post/url',
      'content': 'foo http://target1/post/url bar',
      'to': [{'objectType': 'group', 'alias': '@public'}],
      'replies': {
        'items': [{
          'objectType': 'comment',
          'id': 'tag:source.com,2013:1_2_%s' % id,
          'url': 'http://fa.ke/comment/url',
          'content': 'foo bar',
        }],
        'totalItems': 1,
      },
      'tags': [{
        'objectType': 'activity',
        'verb': 'like',
        'id': 'tag:source.com,2013:%s_liked_by_alice' % id,
        'object': {'url': 'http://example.com/abc'},
        'author': {'url': 'http://example.com/alice'},
      }, {
        'id': 'tag:source.com,2013:%s_reposted_by_bob' % id,
        'objectType': 'activity',
        'verb': 'share',
        'object': {'url': 'http://example.com/def'},
        'author': {'url': 'http://example.com/bob'},
      }, {
        'id': 'tag:source.com,2013:%s_scissors_by_bob' % id,
        'objectType': 'activity',
        'verb': 'react',
        'content': '✁',
        'object': {'url': 'http://example.com/def'},
        'author': {'url': 'http://example.com/bob'},
      }],
    },
  } for id in ('a', 'b', 'c')]
  FakeGrSource.activities = self.activities

  # responses
  self.responses = []
  created = datetime.datetime.utcnow() - datetime.timedelta(days=10)

  for activity in self.activities:
    obj = activity['object']
    pruned_activity = {
      'id': activity['id'],
      'url': 'http://fa.ke/post/url',
      'object': {
        'content': 'foo http://target1/post/url bar',
      },
    }

    comment = obj['replies']['items'][0]
    self.responses.append(Response(
      id=comment['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(comment),
      type='comment',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += datetime.timedelta(hours=1)

    like = obj['tags'][0]
    self.responses.append(Response(
      id=like['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(like),
      type='like',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += datetime.timedelta(hours=1)

    share = obj['tags'][1]
    self.responses.append(Response(
      id=share['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(share),
      type='repost',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += datetime.timedelta(hours=1)

    reaction = obj['tags'][2]
    self.responses.append(Response(
      id=reaction['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(reaction),
      type='react',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += datetime.timedelta(hours=1)

  # publishes
  self.publishes = [Publish(
    parent=PublishedPage(id='https://post').key,
    source=self.sources[0].key,
    status='complete',
    published={'url': 'http://fa.ke/syndpost'},
  )]

  # blogposts
  self.blogposts = [BlogPost(
    id='https://post',
    source=self.sources[0].key,
    status='complete',
    feed_item={'title': 'a post'},
    sent=['http://a/link'],
  )]
def setUp(self):
  super().setUp()
  FakeGrSource.clear()
  util.now_fn = lambda: NOW

  # add FakeSource everywhere necessary
  util.BLOCKLIST.add('fa.ke')

  util.webmention_endpoint_cache.clear()

  self.stubbed_create_task = False
  tasks_client.create_task = lambda *args, **kwargs: Task(name='foo')

  self.client = self.app.test_client()
  self.client.__enter__()

  self.clear_datastore()
  self.ndb_context = ndb_client.context()
  self.ndb_context.__enter__()

  # sources
  self.auth_entities = [
    FakeAuthEntity(key=ndb.Key('FakeAuthEntity', '01122334455'),
                   user_json=json_dumps({
                     'id': '0123456789',
                     'name': 'Fake User',
                     'url': 'http://fakeuser.com/',
                   })),
    FakeAuthEntity(key=ndb.Key('FakeAuthEntity', '0022446688'),
                   user_json=json_dumps({
                     'id': '0022446688',
                     'name': 'Another Fake',
                     'url': 'http://anotherfake.com/',
                   })),
  ]

  self.sources = [
    FakeSource.new(auth_entity=self.auth_entities[0]),
    FakeSource.new(auth_entity=self.auth_entities[1]),
  ]
  for entity in self.sources:
    entity.features = ['listen']

  with self.app.test_request_context():
    self.source_bridgy_url = self.sources[0].bridgy_url()

  self.actor = FakeGrSource.actor = {
    'objectType': 'person',
    'id': 'tag:fa.ke,2013:212038',
    'username': '******',
    'displayName': 'Ryan B',
    'url': 'https://snarfed.org/',
    'image': {'url': 'http://pic.ture/url'},
  }

  # activities
  self.activities = FakeGrSource.activities = [{
    'id': f'tag:source.com,2013:{id}',
    'url': 'http://fa.ke/post/url',
    'object': {
      'objectType': 'note',
      'id': f'tag:source.com,2013:{id}',
      'url': 'http://fa.ke/post/url',
      'content': 'foo http://target1/post/url bar',
      'to': [{'objectType': 'group', 'alias': '@public'}],
      'replies': {
        'items': [{
          'objectType': 'comment',
          'id': f'tag:source.com,2013:1_2_{id}',
          'url': 'http://fa.ke/comment/url',
          'content': 'foo bar',
        }],
        'totalItems': 1,
      },
      'tags': [{
        'objectType': 'activity',
        'verb': 'like',
        'id': f'tag:source.com,2013:{id}_liked_by_alice',
        'object': {'url': 'http://example.com/abc'},
        'author': {
          'id': 'tag:source.com,2013:alice',
          'url': 'http://example.com/alice',
        },
      }, {
        'id': f'tag:source.com,2013:{id}_reposted_by_bob',
        'objectType': 'activity',
        'verb': 'share',
        'object': {'url': 'http://example.com/def'},
        'author': {'url': 'http://example.com/bob'},
      }, {
        'id': f'tag:source.com,2013:{id}_scissors_by_bob',
        'objectType': 'activity',
        'verb': 'react',
        'content': '✁',
        'object': {'url': 'http://example.com/def'},
        'author': {'url': 'http://example.com/bob'},
      }],
    },
  } for id in ('a', 'b', 'c')]

  # responses
  self.responses = []
  created = datetime.now(timezone.utc) - timedelta(days=10)

  for activity in self.activities:
    obj = activity['object']
    pruned_activity = {
      'id': activity['id'],
      'url': 'http://fa.ke/post/url',
      'object': {
        'content': 'foo http://target1/post/url bar',
      },
    }

    comment = obj['replies']['items'][0]
    self.responses.append(Response(
      id=comment['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(comment),
      type='comment',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += timedelta(hours=1)

    like = obj['tags'][0]
    self.responses.append(Response(
      id=like['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(like),
      type='like',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += timedelta(hours=1)

    share = obj['tags'][1]
    self.responses.append(Response(
      id=share['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(share),
      type='repost',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += timedelta(hours=1)

    reaction = obj['tags'][2]
    self.responses.append(Response(
      id=reaction['id'],
      activities_json=[json_dumps(pruned_activity)],
      response_json=json_dumps(reaction),
      type='react',
      source=self.sources[0].key,
      unsent=['http://target1/post/url'],
      created=created))
    created += timedelta(hours=1)

  # publishes
  self.publishes = [Publish(
    parent=PublishedPage(id='https://post').key,
    source=self.sources[0].key,
    status='complete',
    published={'url': 'http://fa.ke/syndpost'},
  )]

  # blogposts
  self.blogposts = [BlogPost(
    id='https://post',
    source=self.sources[0].key,
    status='complete',
    feed_item={'title': 'a post'},
    sent=['http://a/link'],
  )]