def setUp(self):
    """Create a TumblrAuth with a primary blog, a Tumblr source, and stub Disqus creds."""
    super().setUp()
    blog_list = [{'url': 'other'},
                 {'url': 'http://primary/', 'primary': True}]
    self.auth_entity = TumblrAuth(
        id='name', user_json=json_dumps({'user': {'blogs': blog_list}}))
    self.tumblr = Tumblr(id='my id', disqus_shortname='my-disqus-name')
    tumblr.DISQUS_API_KEY = 'my key'
    tumblr.DISQUS_API_SECRET = 'my secret'
    tumblr.DISQUS_ACCESS_TOKEN = 'my token'
def test_new(self):
    """Tumblr.new() populates fields from the auth entity's primary blog."""
    created = Tumblr.new(auth_entity=self.auth_entity)
    self.assertEqual(self.auth_entity.key, created.auth_entity)
    self.assertEqual('name', created.name)
    self.assertEqual(['http://primary/'], created.domain_urls)
    self.assertEqual(['primary'], created.domains)
    self.assertEqual('http://api.tumblr.com/v2/blog/primary/avatar/512',
                     created.picture)
def _test_verify_finds_disqus(self, snippet):
    """Helper: verify() should detect the Disqus shortname embedded in the page."""
    page = f'<html>\nstuff\n{snippet}\n</html>'
    self.expect_requests_get('http://primary/', page)
    self.mox.ReplayAll()
    source = Tumblr.new(auth_entity=self.auth_entity, features=['webmention'])
    source.verify()
    self.assertEqual('my-disqus-name', source.disqus_shortname)
def test_new(self):
    """Tumblr.new() populates fields from the auth entity's primary blog."""
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity)
    # assertEquals is a long-deprecated alias of assertEqual; use the real name.
    self.assertEqual(self.auth_entity.key, t.auth_entity)
    self.assertEqual('name', t.name)
    self.assertEqual(['http://primary/'], t.domain_urls)
    self.assertEqual(['primary'], t.domains)
    self.assertEqual('http://api.tumblr.com/v2/blog/primary/avatar/512',
                     t.picture)
def test_verify_without_disqus(self):
    """verify() leaves disqus_shortname unset when the page has no Disqus embed."""
    self.expect_webmention_requests_get('http://primary/', 'no disqus here!',
                                        verify=False)
    self.mox.ReplayAll()
    source = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                        features=['webmention'])
    source.verify()
    self.assertIsNone(source.disqus_shortname)
def test_verify_without_disqus(self):
    """verify() leaves disqus_shortname unset when no Disqus embed is present."""
    self.expect_requests_get('http://primary/', 'no disqus here!')
    self.mox.ReplayAll()
    source = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                        features=['webmention'])
    source.verify()
    self.assertIsNone(source.disqus_shortname)
def _test_verify_finds_disqus(self, snippet):
    """Helper: verify() should find the Disqus shortname in the fetched page.

    The requests.get here is issued by webmention-tools.
    """
    self.expect_requests_get(
        'http://primary/', '<html>\nstuff\n%s\n</html>' % snippet, verify=False)
    self.mox.ReplayAll()
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                   features=['webmention'])
    t.verify()
    # assertEquals is a long-deprecated alias of assertEqual; use the real name.
    self.assertEqual('my-disqus-name', t.disqus_shortname)
def test_new_with_blog_name(self):
    """Tumblr.new() picks the blog matching blog_name instead of the first one."""
    blogs = [{'url': 'foo'},
             {'name': 'bar', 'url': 'baz'},
             {'name': 'biff', 'url': 'http://boff/'},
             ]
    self.auth_entity.user_json = json_dumps({'user': {'blogs': blogs}})
    got = Tumblr.new(auth_entity=self.auth_entity, blog_name='biff')
    self.assertEqual(['http://boff/'], got.domain_urls)
    self.assertEqual(['boff'], got.domains)
def test_new_with_blog_name(self):
    """Tumblr.new() picks the blog matching blog_name instead of the first one."""
    self.auth_entity.user_json = json.dumps({
        'user': {'blogs': [{'url': 'foo'},
                           {'name': 'bar', 'url': 'baz'},
                           {'name': 'biff', 'url': 'http://boff/'},
                           ]}})
    got = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                     blog_name='biff')
    # assertEquals is a long-deprecated alias of assertEqual; use the real name.
    self.assertEqual(['http://boff/'], got.domain_urls)
    self.assertEqual(['boff'], got.domains)
def setUp(self):
    """Build a TumblrAuth with a primary blog, a Tumblr source, and Disqus creds."""
    super(TumblrTest, self).setUp()
    blog_list = [{'url': 'other'},
                 {'url': 'http://primary/', 'primary': True}]
    self.auth_entity = TumblrAuth(
        id='name', user_json=json.dumps({'user': {'blogs': blog_list}}))
    self.tumblr = Tumblr(id='my id', disqus_shortname='my-disqus-name')
    appengine_config.DISQUS_API_KEY = 'my key'
    appengine_config.DISQUS_API_SECRET = 'my secret'
    appengine_config.DISQUS_ACCESS_TOKEN = 'my token'
def _test_verify_finds_disqus(self, snippet):
    """Helper: verify() should find the Disqus shortname in the fetched page.

    The requests.get here is issued by webmention-tools.
    """
    self.expect_webmention_requests_get(
        'http://primary/', '<html>\nstuff\n%s\n</html>' % snippet)
    self.mox.ReplayAll()
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                   features=['webmention'])
    t.verify()
    # assertEquals is a long-deprecated alias of assertEqual; use the real name.
    self.assertEqual('my-disqus-name', t.disqus_shortname)
def parse():
    """Flask view: turn a submitted Tumblr post URL into its mp4 video URL.

    GET renders the empty form; POST parses the submitted URL, looks up the
    post via the Tumblr client, and re-renders the form with the video URL
    or an error message.
    """
    url, err = '', ''
    if request.method == 'GET':
        return render_template('index.html', url=url, err=err)
    url = request.form.get('url', '').strip()
    # Raw string so \w and \d are real regex escapes (non-raw forms are
    # deprecated string escapes); the dot before "com" is escaped so it only
    # matches a literal '.'.
    pattern = re.compile(r'https?://(\w+)\.tumblr\.com/post/(\d+)/?.+')
    match = pattern.search(url)
    if not match:
        # Bug fix: the original fell through and called .groups() on None,
        # so the intended error message was replaced by an AttributeError
        # string. Return the user-facing message immediately instead.
        return render_template('index.html', url='', err="地址错误")
    try:
        post_author, post_id = match.groups()
        url = Tumblr(post_id, post_author).get_mp4_video_url()
    except Exception as e:
        url, err = '', str(e)
    return render_template('index.html', url=url, err=err)
def test_new_no_primary_blog(self):
    """Without a primary blog, Tumblr.new() returns None and reports an error."""
    self.auth_entity.user_json = json.dumps(
        {'user': {'blogs': [{'url': 'foo'}]}})
    self.assertIsNone(Tumblr.new(self.handler, auth_entity=self.auth_entity))
    self.assertIn('Tumblr blog not found', next(iter(self.handler.messages)))
def test_verify(self):
    """verify() extracts the Disqus shortname from the blog's home page.

    Based on http://snarfed.tumblr.com/. The requests.get here is issued by
    webmention-tools.
    """
    self.expect_requests_get(
        'http://primary/',
        """ <html><body> some stuff <script charset="utf-8" type="text/javascript" src="http://disqus.com/forums/my-disqus-name/get_num_replies.js?url131=...&"></script> </body></html>""",
        verify=False)
    self.mox.ReplayAll()
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                   features=['webmention'])
    t.verify()
    # assertEquals is a long-deprecated alias of assertEqual; use the real name.
    self.assertEqual('my-disqus-name', t.disqus_shortname)
# NOTE(review): collapsed/flattened scrape of a webapp2-era user-page handler
# method. It builds the template context: platform promo flags when the
# source lacks the 'webmention' feature, up to 10 public responses (loop also
# caps at i > 200), recent publishes, blog posts, and blog webmentions.
# Kept byte-identical: the flattened one-line formatting makes a safe restyle
# impractical -- recover the original multi-line form before editing logic.
def template_vars(self): if not self.source: return {} vars = super(UserHandler, self).template_vars() vars.update({ 'source': self.source, 'epoch': util.EPOCH, }) # Blog webmention promos if 'webmention' not in self.source.features: if self.source.SHORT_NAME in ('blogger', 'tumblr', 'wordpress'): vars[self.source.SHORT_NAME + '_promo'] = True else: for domain in self.source.domains: if ('.blogspot.' in domain and # Blogger uses country TLDs not Blogger.query(Blogger.domains == domain).get()): vars['blogger_promo'] = True elif (domain.endswith('tumblr.com') and not Tumblr.query(Tumblr.domains == domain).get()): vars['tumblr_promo'] = True elif (domain.endswith('wordpress.com') and not WordPress.query(WordPress.domains == domain).get()): vars['wordpress_promo'] = True # Responses if 'listen' in self.source.features: vars['responses'] = [] for i, r in enumerate(Response.query() .filter(Response.source == self.source.key)\ .order(-Response.updated)): r.response = json.loads(r.response_json) if r.activity_json: # handle old entities r.activities_json.append(r.activity_json) r.activities = [json.loads(a) for a in r.activities_json] if (not gr_source.Source.is_public(r.response) or not all(gr_source.Source.is_public(a) for a in r.activities)): continue r.actor = r.response.get('author') or r.response.get('actor', {}) if not r.response.get('content'): phrases = { 'like': 'liked this', 'repost': 'reposted this', 'rsvp-yes': 'is attending', 'rsvp-no': 'is not attending', 'rsvp-maybe': 'might attend', 'invite': 'is invited', } r.response['content'] = '%s %s.' 
% ( r.actor.get('displayName') or '', phrases.get(r.type) or phrases.get(r.response.get('verb'))) # convert image URL to https if we're serving over SSL image_url = r.actor.setdefault('image', {}).get('url') if image_url: r.actor['image']['url'] = util.update_scheme(image_url, self) # generate original post links r.links = self.process_webmention_links(r) vars['responses'].append(r) if len(vars['responses']) >= 10 or i > 200: break # Publishes if 'publish' in self.source.features: publishes = Publish.query().filter(Publish.source == self.source.key)\ .order(-Publish.updated)\ .fetch(10) for p in publishes: p.pretty_page = util.pretty_link( p.key.parent().id(), a_class='original-post', new_tab=True) vars['publishes'] = publishes if 'webmention' in self.source.features: # Blog posts blogposts = BlogPost.query().filter(BlogPost.source == self.source.key)\ .order(-BlogPost.created)\ .fetch(10) for b in blogposts: b.links = self.process_webmention_links(b) try: text = b.feed_item.get('title') except ValueError: text = None b.pretty_url = util.pretty_link(b.key.id(), text=text, a_class='original-post', max_length=40, new_tab=True) # Blog webmentions webmentions = BlogWebmention.query()\ .filter(BlogWebmention.source == self.source.key)\ .order(-BlogWebmention.updated)\ .fetch(10) for w in webmentions: w.pretty_source = util.pretty_link(w.source_url(), a_class='original-post', new_tab=True) try: target_is_source = (urlparse.urlparse(w.target_url()).netloc in self.source.domains) except BaseException: target_is_source = False w.pretty_target = util.pretty_link(w.target_url(), a_class='original-post', new_tab=True, keep_host=target_is_source) vars.update({'blogposts': blogposts, 'webmentions': webmentions}) return vars
# NOTE(review): collapsed/flattened scrape of a Flask user-page view. It looks
# up the source by site + id (falling back to an ndb.OR query over several
# properties, redirecting 301 on a hit), 404s when not found, then builds the
# template context: promo flags, paginated responses (responses_before /
# responses_after ISO8601 params), publishes, blog posts, and blog
# webmentions, and renders '<SHORT_NAME>_user.html'. Kept byte-identical:
# the flattened one-line formatting makes a safe restyle impractical --
# recover the original multi-line form before editing logic.
def user(site, id): """View for a user page.""" cls = models.sources.get(site) if not cls: return render_template('user_not_found.html'), 404 source = cls.lookup(id) if not source: key = cls.query( ndb.OR(*[ ndb.GenericProperty(prop) == id for prop in ('domains', 'inferred_username', 'name', 'username') ])).get(keys_only=True) if key: return redirect(cls(key=key).bridgy_path(), code=301) if not source or not source.features: return render_template('user_not_found.html'), 404 source.verify() source = util.preprocess_source(source) vars = { 'source': source, 'logs': logs, 'REFETCH_HFEED_TRIGGER': models.REFETCH_HFEED_TRIGGER, 'RECENT_PRIVATE_POSTS_THRESHOLD': RECENT_PRIVATE_POSTS_THRESHOLD, } # Blog webmention promos if 'webmention' not in source.features: if source.SHORT_NAME in ('blogger', 'medium', 'tumblr', 'wordpress'): vars[source.SHORT_NAME + '_promo'] = True else: for domain in source.domains: if ('.blogspot.' in domain and # Blogger uses country TLDs not Blogger.query(Blogger.domains == domain).get()): vars['blogger_promo'] = True elif (util.domain_or_parent_in(domain, ['tumblr.com']) and not Tumblr.query(Tumblr.domains == domain).get()): vars['tumblr_promo'] = True elif (util.domain_or_parent_in(domain, 'wordpress.com') and not WordPress.query(WordPress.domains == domain).get()): vars['wordpress_promo'] = True # Responses if 'listen' in source.features or 'email' in source.features: vars['responses'] = [] query = Response.query().filter(Response.source == source.key) # if there's a paging param (responses_before or responses_after), update # query with it def get_paging_param(param): val = request.values.get(param) try: return util.parse_iso8601(val.replace(' ', '+')) if val else None except BaseException: error(f"Couldn't parse {param}, {val!r} as ISO8601") before = get_paging_param('responses_before') after = get_paging_param('responses_after') if before and after: error("can't handle both responses_before and responses_after") elif after: query = 
query.filter(Response.updated > after).order( Response.updated) elif before: query = query.filter( Response.updated < before).order(-Response.updated) else: query = query.order(-Response.updated) query_iter = query.iter() for i, r in enumerate(query_iter): r.response = json_loads(r.response_json) r.activities = [json_loads(a) for a in r.activities_json] if (not source.is_activity_public(r.response) or not all( source.is_activity_public(a) for a in r.activities)): continue elif r.type == 'post': r.activities = [] verb = r.response.get('verb') r.actor = (r.response.get('object') if verb == 'invite' else r.response.get('author') or r.response.get('actor')) or {} activity_content = '' for a in r.activities + [r.response]: if not a.get('content'): obj = a.get('object', {}) a['content'] = activity_content = ( obj.get('content') or obj.get('displayName') or # historical, from a Reddit bug fixed in granary@4f9df7c obj.get('name') or '') response_content = r.response.get('content') phrases = { 'like': 'liked this', 'repost': 'reposted this', 'rsvp-yes': 'is attending', 'rsvp-no': 'is not attending', 'rsvp-maybe': 'might attend', 'rsvp-interested': 'is interested', 'invite': 'is invited', } phrase = phrases.get(r.type) or phrases.get(verb) if phrase and (r.type != 'repost' or activity_content.startswith(response_content)): r.response[ 'content'] = f'{r.actor.get("displayName") or ""} {phrase}.' 
# convert image URL to https if we're serving over SSL image_url = r.actor.setdefault('image', {}).get('url') if image_url: r.actor['image']['url'] = util.update_scheme( image_url, request) # generate original post links r.links = process_webmention_links(r) r.original_links = [ util.pretty_link(url, new_tab=True) for url in r.original_posts ] vars['responses'].append(r) if len(vars['responses']) >= 10 or i > 200: break vars['responses'].sort(key=lambda r: r.updated, reverse=True) # calculate new paging param(s) new_after = (before if before else vars['responses'][0].updated if vars['responses'] and query_iter.probably_has_next() and (before or after) else None) if new_after: vars[ 'responses_after_link'] = f'?responses_after={new_after.isoformat()}#responses' new_before = (after if after else vars['responses'][-1].updated if vars['responses'] and query_iter.probably_has_next() else None) if new_before: vars[ 'responses_before_link'] = f'?responses_before={new_before.isoformat()}#responses' vars['next_poll'] = max( source.last_poll_attempt + source.poll_period(), # lower bound is 1 minute from now util.now_fn() + datetime.timedelta(seconds=90)) # Publishes if 'publish' in source.features: publishes = Publish.query().filter(Publish.source == source.key)\ .order(-Publish.updated)\ .fetch(10) for p in publishes: p.pretty_page = util.pretty_link( p.key.parent().id(), attrs={'class': 'original-post u-url u-name'}, new_tab=True) vars['publishes'] = publishes if 'webmention' in source.features: # Blog posts blogposts = BlogPost.query().filter(BlogPost.source == source.key)\ .order(-BlogPost.created)\ .fetch(10) for b in blogposts: b.links = process_webmention_links(b) try: text = b.feed_item.get('title') except ValueError: text = None b.pretty_url = util.pretty_link( b.key.id(), text=text, attrs={'class': 'original-post u-url u-name'}, max_length=40, new_tab=True) # Blog webmentions webmentions = BlogWebmention.query()\ .filter(BlogWebmention.source == source.key)\ 
.order(-BlogWebmention.updated)\ .fetch(10) for w in webmentions: w.pretty_source = util.pretty_link( w.source_url(), attrs={'class': 'original-post'}, new_tab=True) try: target_is_source = (urllib.parse.urlparse( w.target_url()).netloc in source.domains) except BaseException: target_is_source = False w.pretty_target = util.pretty_link( w.target_url(), attrs={'class': 'original-post'}, new_tab=True, keep_host=target_is_source) vars.update({'blogposts': blogposts, 'webmentions': webmentions}) return render_template(f'{source.SHORT_NAME}_user.html', **vars)
def emogirls():
    """Instantiate Tumblr for 'emogirls' and run it."""
    Tumblr("emogirls").run()
def unpoco():
    """Instantiate Tumblr for 'unpoco' and run it."""
    Tumblr("unpoco").run()
def monorainbows():
    """Instantiate Tumblr for 'monorainbows' and run it."""
    Tumblr("monorainbows").run()
def onehandedtypist():
    """Instantiate Tumblr for 'onehandedtypist' and run it."""
    Tumblr("onehandedtypist").run()
def melc():
    """Instantiate Tumblr for 'melc' and run it."""
    Tumblr("melc").run()
def test_new_no_primary_blog(self):
    """Without a primary blog, Tumblr.new() returns None and flashes a message."""
    self.auth_entity.user_json = json_dumps(
        {'user': {'blogs': [{'url': 'foo'}]}})
    with app.test_request_context():
        self.assertIsNone(Tumblr.new(auth_entity=self.auth_entity))
        self.assertIn('Tumblr blog not found', get_flashed_messages()[0])
def inspireawesome():
    """Instantiate Tumblr for 'inspireawesome' and run it."""
    Tumblr("inspireawesome").run()
def max07min():
    """Instantiate Tumblr for 'max07min' and run it."""
    Tumblr("max07min").run()
def nylonfoxie():
    """Instantiate Tumblr for 'nylonfoxie' and run it."""
    Tumblr("nylonfoxie").run()
def milestone():
    """Instantiate Tumblr for 'milestone' and run it."""
    Tumblr("milestone").run()
def renka():
    """Instantiate Tumblr for 'renka' and run it."""
    Tumblr("renka").run()
def labialounge():
    """Instantiate Tumblr for 'labialounge' and run it."""
    Tumblr("labialounge").run()
def kenjikee():
    """Instantiate Tumblr for 'kenjikee' and run it."""
    Tumblr("kenjikee").run()
def djangomango():
    """Instantiate Tumblr for 'djangomango' and run it."""
    Tumblr("djangomango").run()
# NOTE(review): collapsed/flattened scrape of a whole unit-test class for the
# Tumblr source: setUp fixtures, Disqus param/thread-details helpers, and
# tests covering Tumblr.new(), verify() shortname detection across several
# theme styles, and create_comment() via the Disqus API. Kept byte-identical:
# the flattened one-line formatting makes a safe restyle impractical --
# recover the original multi-line form before editing. The assertEquals
# calls are the deprecated alias of assertEqual; fix once reformatted.
class TumblrTest(testutil.HandlerTest): def setUp(self): super(TumblrTest, self).setUp() self.auth_entity = TumblrAuth(id='name', user_json=json.dumps({ 'user': {'blogs': [{'url': 'other'}, {'url': 'http://primary/', 'primary': True}]}})) self.tumblr = Tumblr(id='my id', disqus_shortname='my-disqus-name') appengine_config.DISQUS_API_KEY = 'my key' appengine_config.DISQUS_API_SECRET = 'my secret' appengine_config.DISQUS_ACCESS_TOKEN = 'my token' def disqus_params(self, params): params.update({ 'api_key': 'my key', 'api_secret': 'my secret', 'access_token': 'my token', }) return params def expect_thread_details(self, resp=None, **kwargs): if resp is None: resp = {'response': {'id': '87654'}} self.expect_requests_get( tumblr.DISQUS_API_THREAD_DETAILS_URL, json.dumps(resp), params=self.disqus_params({'forum': 'my-disqus-name', 'thread':'link:http://primary/post/123999'}), **kwargs) def test_new(self): t = Tumblr.new(self.handler, auth_entity=self.auth_entity) self.assertEquals(self.auth_entity.key, t.auth_entity) self.assertEquals('name', t.name) self.assertEquals(['http://primary/'], t.domain_urls) self.assertEquals(['primary'], t.domains) self.assertEquals('http://api.tumblr.com/v2/blog/primary/avatar/512', t.picture) def test_new_no_primary_blog(self): self.auth_entity.user_json = json.dumps({'user': {'blogs': [{'url': 'foo'}]}}) self.assertIsNone(Tumblr.new(self.handler, auth_entity=self.auth_entity)) self.assertIn('Tumblr blog not found', next(iter(self.handler.messages))) def test_new_with_blog_name(self): self.auth_entity.user_json = json.dumps({ 'user': {'blogs': [{'url': 'foo'}, {'name': 'bar', 'url': 'baz'}, {'name': 'biff', 'url': 'http://boff/'}, ]}}) got = Tumblr.new(self.handler, auth_entity=self.auth_entity, blog_name='biff') self.assertEquals(['http://boff/'], got.domain_urls) self.assertEquals(['boff'], got.domains) def test_verify_default(self): # based on http://snarfed.tumblr.com/ self._test_verify_finds_disqus('<script 
src="http://disqus.com/forums/my-disqus-name/get_num_replies.js?url131=...&"></script>') def test_verify_inspirewell_theme_1(self): # based on http://circusriot.tumblr.com/ self._test_verify_finds_disqus(" var disqus_shortname = 'my-disqus-name';") def test_verify_inspirewell_theme_2(self): # based on http://circusriot.tumblr.com/ self._test_verify_finds_disqus(' disqusUsername = "******";') def test_verify_require_aorcsik_theme(self): # based on http://require.aorcsik.com/ self._test_verify_finds_disqus( ' dsq.src = "http://my-disqus-name.disqus.com/embed.js";') def _test_verify_finds_disqus(self, snippet): # this requests.get is called by webmention-tools self.expect_requests_get( 'http://primary/', '<html>\nstuff\n%s\n</html>' % snippet, verify=False) self.mox.ReplayAll() t = Tumblr.new(self.handler, auth_entity=self.auth_entity, features=['webmention']) t.verify() self.assertEquals('my-disqus-name', t.disqus_shortname) def test_verify_without_disqus(self): self.expect_requests_get('http://primary/', 'no disqus here!', verify=False) self.mox.ReplayAll() t = Tumblr.new(self.handler, auth_entity=self.auth_entity, features=['webmention']) t.verify() self.assertIsNone(t.disqus_shortname) def test_create_comment(self): self.expect_thread_details() self.expect_requests_post( tumblr.DISQUS_API_CREATE_POST_URL, json.dumps({'response': {'ok': 'sgtm'}}), params=self.disqus_params({ 'thread': '87654', 'message': '<a href="http://who">who</a>: foo bar'})) self.mox.ReplayAll() resp = self.tumblr.create_comment('http://primary/post/123999/xyz_abc?asdf', 'who', 'http://who', 'foo bar') self.assertEquals({'ok': 'sgtm'}, resp) def test_create_comment_with_unicode_chars(self): self.expect_thread_details() self.expect_requests_post( tumblr.DISQUS_API_CREATE_POST_URL, json.dumps({}), params=self.disqus_params({ 'thread': '87654', 'message': '<a href="http://who">Degenève</a>: foo Degenève bar'})) self.mox.ReplayAll() resp = 
self.tumblr.create_comment('http://primary/post/123999/xyz_abc', u'Degenève', 'http://who', u'foo Degenève bar') self.assertEquals({}, resp) def test_create_comment_finds_disqus_shortname(self): self.tumblr.disqus_shortname = None self.expect_requests_get('http://primary/post/123999', "fooo var disqus_shortname = 'my-disqus-name';") self.expect_thread_details() self.expect_requests_post(tumblr.DISQUS_API_CREATE_POST_URL, json.dumps({}), params=mox.IgnoreArg()) self.mox.ReplayAll() self.tumblr.create_comment('http://primary/post/123999', '', '', '') self.assertEquals('my-disqus-name', self.tumblr.key.get().disqus_shortname) def test_create_comment_doesnt_find_disqus_shortname(self): self.tumblr.disqus_shortname = None self.expect_requests_get('http://primary/post/123999', 'no shortname here') self.mox.ReplayAll() self.assertRaises( exc.HTTPBadRequest,#("Bridgy hasn't found your Disqus account yet. " #"See http://localhost/tumblr/name for details."), self.tumblr.create_comment, 'http://primary/post/123999', '', '', '')
def sukoyaka():
    """Instantiate Tumblr for 'sukoyaka' and run it."""
    Tumblr("sukoyaka").run()
def backseam():
    """Instantiate Tumblr for 'backseam' and run it."""
    Tumblr("backseam").run()
def paste():
    """Instantiate Tumblr for 'paste' and run it."""
    Tumblr("paste").run()
def uhya_hya():
    """Instantiate Tumblr for 'uhya-hya' and run it."""
    Tumblr("uhya-hya").run()
def tumblr_run(blog):
    """Instantiate Tumblr for the given blog name and run it."""
    Tumblr(blog).run()
# NOTE(review): Python 2 demo script (bare `print` statements) exercising a
# Tumblr blog client: isItMe(), authenticate(), getTags(), getCategories(),
# setTitle(), setContent(). It contains hard-coded credentials -- replace
# them with placeholders or environment variables before publishing.
# Kept byte-identical: the collapsed scrape formatting (and the trailing
# comment cut off mid-sentence) make in-place edits unsafe.
#coding=utf-8 '''Demo program which uses the live-journal module.''' import sys from tumblr import Tumblr #username='******', password='******' b = Tumblr('http://peter4test.tumblr.com/', None, '*****@*****.**', 'peter2win') if not b.isItMe(): print 'isItMe() False: invalid url' sys.exit() if not b.authenticate(): print "Authentication failed" sys.exit() print "My available tags:" for tag in b.getTags(): print tag print "My available categories:" for cat in b.getCategories(): print cat b.setTitle('draft title') b.setContent('draft content') # technology exists and was retrieved through getCategories, but # business doesn't exist so behind the scenes it must be added to
def keepithotfordaddy():
    """Instantiate Tumblr for 'keepithotfordaddy' and run it."""
    Tumblr("keepithotfordaddy").run()
def thegirlnextdoor():
    """Instantiate Tumblr for 'thegirlnextdoor' and run it."""
    Tumblr("thegirlnextdoor").run()
def nico2():
    """Instantiate Tumblr for 'nico2' and run it."""
    Tumblr("nico2").run()
# NOTE(review): collapsed/flattened scrape of an earlier variant of the
# TumblrTest class (Tumblr constructed without an id here): setUp fixtures,
# Disqus param/thread-details helpers, and tests for Tumblr.new(), verify()
# shortname detection, and create_comment(). Kept byte-identical: the
# flattened one-line formatting makes a safe restyle impractical -- recover
# the original multi-line form before editing. The assertEquals calls are
# the deprecated alias of assertEqual; fix once reformatted.
class TumblrTest(testutil.HandlerTest): def setUp(self): super(TumblrTest, self).setUp() self.auth_entity = TumblrAuth(id='name', user_json=json.dumps({ 'user': {'blogs': [{'url': 'other'}, {'url': 'http://primary/', 'primary': True}]}})) self.tumblr = Tumblr(disqus_shortname='my-disqus-name') appengine_config.DISQUS_API_KEY = 'my key' appengine_config.DISQUS_API_SECRET = 'my secret' appengine_config.DISQUS_ACCESS_TOKEN = 'my token' def disqus_params(self, params): params.update({ 'api_key': 'my key', 'api_secret': 'my secret', 'access_token': 'my token', }) return params def expect_thread_details(self, resp=None, **kwargs): if resp is None: resp = {'response': {'id': '87654'}} self.expect_requests_get( tumblr.DISQUS_API_THREAD_DETAILS_URL, json.dumps(resp), params=self.disqus_params({'forum': 'my-disqus-name', 'thread':'link:http://primary/post/123999'}), **kwargs) def test_new(self): t = Tumblr.new(self.handler, auth_entity=self.auth_entity) self.assertEquals(self.auth_entity.key, t.auth_entity) self.assertEquals('name', t.name) self.assertEquals(['http://primary/'], t.domain_urls) self.assertEquals(['primary'], t.domains) self.assertEquals('http://api.tumblr.com/v2/blog/primary/avatar/512', t.picture) def test_new_no_primary_blog(self): self.auth_entity.user_json = json.dumps({'user': {'blogs': [{'url': 'foo'}]}}) self.assertIsNone(Tumblr.new(self.handler, auth_entity=self.auth_entity)) self.assertIn('Tumblr blog not found', next(iter(self.handler.messages))) def test_new_with_blog_name(self): self.auth_entity.user_json = json.dumps({ 'user': {'blogs': [{'url': 'foo'}, {'name': 'bar', 'url': 'baz'}, {'name': 'biff', 'url': 'http://boff/'}, ]}}) got = Tumblr.new(self.handler, auth_entity=self.auth_entity, blog_name='biff') self.assertEquals(['http://boff/'], got.domain_urls) self.assertEquals(['boff'], got.domains) def test_verify(self): # based on http://snarfed.tumblr.com/ # this requests.get is called by webmention-tools 
self.expect_requests_get('http://primary/', """ <html><body> some stuff <script charset="utf-8" type="text/javascript" src="http://disqus.com/forums/my-disqus-name/get_num_replies.js?url131=...&"></script> </body></html>""", verify=False) self.mox.ReplayAll() t = Tumblr.new(self.handler, auth_entity=self.auth_entity, features=['webmention']) t.verify() self.assertEquals('my-disqus-name', t.disqus_shortname) def test_verify_without_disqus(self): self.expect_requests_get('http://primary/', 'no disqus here!', verify=False) self.mox.ReplayAll() t = Tumblr.new(self.handler, auth_entity=self.auth_entity, features=['webmention']) t.verify() self.assertIsNone(t.disqus_shortname) def test_create_comment(self): self.expect_thread_details() self.expect_requests_post( tumblr.DISQUS_API_CREATE_POST_URL, json.dumps({'response': {'ok': 'sgtm'}}), params=self.disqus_params({ 'thread': '87654', 'message': '<a href="http://who">who</a>: foo bar'})) self.mox.ReplayAll() resp = self.tumblr.create_comment('http://primary/post/123999/xyz_abc?asdf', 'who', 'http://who', 'foo bar') self.assertEquals({'ok': 'sgtm'}, resp) def test_create_comment_with_unicode_chars(self): self.expect_thread_details() self.expect_requests_post( tumblr.DISQUS_API_CREATE_POST_URL, json.dumps({}), params=self.disqus_params({ 'thread': '87654', 'message': '<a href="http://who">Degenève</a>: foo Degenève bar'})) self.mox.ReplayAll() resp = self.tumblr.create_comment('http://primary/post/123999/xyz_abc', u'Degenève', 'http://who', u'foo Degenève bar')
# NOTE(review): collapsed/flattened scrape of a later UserHandler
# template_vars variant: adds Instagram/IndieAuth handling, paginated
# responses (responses_before / responses_after ISO8601 params with
# next/prev links), next_poll computation, publishes, blog posts, and blog
# webmentions. Kept byte-identical: the flattened one-line formatting makes
# a safe restyle impractical -- recover the original multi-line form before
# editing logic. The bare `except:` in get_paging_param should be narrowed
# once reformatted.
def template_vars(self): vars = super(UserHandler, self).template_vars() vars.update({ 'source': self.source, 'EPOCH': util.EPOCH, 'REFETCH_HFEED_TRIGGER': models.REFETCH_HFEED_TRIGGER, 'RECENT_PRIVATE_POSTS_THRESHOLD': RECENT_PRIVATE_POSTS_THRESHOLD, }) if not self.source: return vars if isinstance(self.source, instagram.Instagram): auth = self.source.auth_entity vars['indieauth_me'] = ( auth.id if isinstance(auth, indieauth.IndieAuth) else self.source.domain_urls[0] if self.source.domain_urls else None) # Blog webmention promos if 'webmention' not in self.source.features: if self.source.SHORT_NAME in ('blogger', 'tumblr', 'wordpress'): vars[self.source.SHORT_NAME + '_promo'] = True else: for domain in self.source.domains: if ('.blogspot.' in domain and # Blogger uses country TLDs not Blogger.query(Blogger.domains == domain).get()): vars['blogger_promo'] = True elif (domain.endswith('tumblr.com') and not Tumblr.query(Tumblr.domains == domain).get()): vars['tumblr_promo'] = True elif (domain.endswith('wordpress.com') and not WordPress.query(WordPress.domains == domain).get()): vars['wordpress_promo'] = True # Responses if 'listen' in self.source.features: vars['responses'] = [] query = Response.query().filter(Response.source == self.source.key) # if there's a paging param (responses_before or responses_after), update # query with it def get_paging_param(param): val = self.request.get(param) try: return util.parse_iso8601(val) if val else None except: msg = "Couldn't parse %s %r as ISO8601" % (param, val) logging.exception(msg) self.abort(400, msg) before = get_paging_param('responses_before') after = get_paging_param('responses_after') if before and after: self.abort(400, "can't handle both responses_before and responses_after") elif after: query = query.filter(Response.updated > after).order(Response.updated) elif before: query = query.filter(Response.updated < before).order(-Response.updated) else: query = query.order(-Response.updated) query_iter = query.iter() 
for i, r in enumerate(query_iter): r.response = json.loads(r.response_json) r.activities = [json.loads(a) for a in r.activities_json] if (not self.source.is_activity_public(r.response) or not all(self.source.is_activity_public(a) for a in r.activities)): continue elif r.type == 'post': r.activities = [] r.actor = r.response.get('author') or r.response.get('actor', {}) for a in r.activities + [r.response]: if not a.get('content'): a['content'] = a.get('object', {}).get('content') if not r.response.get('content'): phrases = { 'like': 'liked this', 'repost': 'reposted this', 'rsvp-yes': 'is attending', 'rsvp-no': 'is not attending', 'rsvp-maybe': 'might attend', 'rsvp-interested': 'is interested', 'invite': 'is invited', } r.response['content'] = '%s %s.' % ( r.actor.get('displayName') or '', phrases.get(r.type) or phrases.get(r.response.get('verb'))) # convert image URL to https if we're serving over SSL image_url = r.actor.setdefault('image', {}).get('url') if image_url: r.actor['image']['url'] = util.update_scheme(image_url, self) # generate original post links r.links = self.process_webmention_links(r) r.original_links = [util.pretty_link(url, new_tab=True) for url in r.original_posts] vars['responses'].append(r) if len(vars['responses']) >= 10 or i > 200: break vars['responses'].sort(key=lambda r: r.updated, reverse=True) # calculate new paging param(s) new_after = ( before if before else vars['responses'][0].updated if vars['responses'] and query_iter.probably_has_next() and (before or after) else None) if new_after: vars['responses_after_link'] = ('?responses_after=%s#responses' % new_after.isoformat()) new_before = ( after if after else vars['responses'][-1].updated if vars['responses'] and query_iter.probably_has_next() else None) if new_before: vars['responses_before_link'] = ('?responses_before=%s#responses' % new_before.isoformat()) vars['next_poll'] = max( self.source.last_poll_attempt + self.source.poll_period(), # lower bound is 1 minute from now 
util.now_fn() + datetime.timedelta(seconds=90)) # Publishes if 'publish' in self.source.features: publishes = Publish.query().filter(Publish.source == self.source.key)\ .order(-Publish.updated)\ .fetch(10) for p in publishes: p.pretty_page = util.pretty_link( p.key.parent().id(), attrs={'class': 'original-post u-url u-name'}, new_tab=True) vars['publishes'] = publishes if 'webmention' in self.source.features: # Blog posts blogposts = BlogPost.query().filter(BlogPost.source == self.source.key)\ .order(-BlogPost.created)\ .fetch(10) for b in blogposts: b.links = self.process_webmention_links(b) try: text = b.feed_item.get('title') except ValueError: text = None b.pretty_url = util.pretty_link( b.key.id(), text=text, attrs={'class': 'original-post u-url u-name'}, max_length=40, new_tab=True) # Blog webmentions webmentions = BlogWebmention.query()\ .filter(BlogWebmention.source == self.source.key)\ .order(-BlogWebmention.updated)\ .fetch(10) for w in webmentions: w.pretty_source = util.pretty_link( w.source_url(), attrs={'class': 'original-post'}, new_tab=True) try: target_is_source = (urlparse.urlparse(w.target_url()).netloc in self.source.domains) except BaseException: target_is_source = False w.pretty_target = util.pretty_link( w.target_url(), attrs={'class': 'original-post'}, new_tab=True, keep_host=target_is_source) vars.update({'blogposts': blogposts, 'webmentions': webmentions}) return vars
def curvature():
    """Instantiate Tumblr for 'curvature' and run it."""
    Tumblr("curvature").run()
class TumblrTest(testutil.HandlerTest):
  """Unit tests for the Tumblr source class and its Disqus-backed comments.

  Covers Tumblr.new() (blog/domain discovery from the OAuth user JSON),
  verify() (detecting a Disqus shortname embedded in the blog's HTML), and
  create_comment() (posting via the Disqus API).
  """

  def setUp(self):
    super(TumblrTest, self).setUp()
    # Auth entity with two blogs; only the second is marked primary, so
    # Tumblr.new() should pick http://primary/.
    self.auth_entity = TumblrAuth(id='name', user_json=json.dumps({
        'user': {'blogs': [{'url': 'other'},
                           {'url': 'http://primary/', 'primary': True}]}}))
    self.tumblr = Tumblr(id='my id', disqus_shortname='my-disqus-name')
    # Inject fake Disqus API credentials so requests built by the code under
    # test use known values that disqus_params() can match.
    appengine_config.DISQUS_API_KEY = 'my key'
    appengine_config.DISQUS_API_SECRET = 'my secret'
    appengine_config.DISQUS_ACCESS_TOKEN = 'my token'

  def disqus_params(self, params):
    """Return params with the fake Disqus API credentials merged in.

    NOTE: mutates and returns the passed-in dict.
    """
    params.update({
        'api_key': 'my key',
        'api_secret': 'my secret',
        'access_token': 'my token',
    })
    return params

  def expect_thread_details(self, resp=None, **kwargs):
    """Queue a mox expectation for the Disqus thread-details GET request.

    Args:
      resp: response payload to return; defaults to a thread with id 87654.
      **kwargs: passed through to expect_requests_get().
    """
    if resp is None:
      resp = {'response': {'id': '87654'}}
    self.expect_requests_get(
        tumblr.DISQUS_API_THREAD_DETAILS_URL,
        json.dumps(resp),
        params=self.disqus_params({'forum': 'my-disqus-name',
                                   'thread': 'link:http://primary/post/123999'}),
        **kwargs)

  def test_new(self):
    # The primary blog's URL becomes the domain URL; its hostname becomes the
    # domain and is embedded in the avatar picture URL.
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity)
    self.assertEquals(self.auth_entity.key, t.auth_entity)
    self.assertEquals('name', t.name)
    self.assertEquals(['http://primary/'], t.domain_urls)
    self.assertEquals(['primary'], t.domains)
    self.assertEquals('http://api.tumblr.com/v2/blog/primary/avatar/512',
                      t.picture)

  def test_new_no_primary_blog(self):
    # Without a blog marked primary, Tumblr.new() returns None and reports
    # an error message via the handler.
    self.auth_entity.user_json = json.dumps(
        {'user': {'blogs': [{'url': 'foo'}]}})
    self.assertIsNone(Tumblr.new(self.handler, auth_entity=self.auth_entity))
    self.assertIn('Tumblr blog not found', next(iter(self.handler.messages)))

  def test_new_with_blog_name(self):
    # An explicit blog_name selects the matching blog instead of the primary.
    self.auth_entity.user_json = json.dumps({
        'user': {'blogs': [{'url': 'foo'},
                           {'name': 'bar', 'url': 'baz'},
                           {'name': 'biff', 'url': 'http://boff/'},
                           ]}})
    got = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                     blog_name='biff')
    self.assertEquals(['http://boff/'], got.domain_urls)
    self.assertEquals(['boff'], got.domains)

  def test_verify_default(self):
    # based on http://snarfed.tumblr.com/
    self._test_verify_finds_disqus(
        '<script src="http://disqus.com/forums/my-disqus-name/get_num_replies.js?url131=...&"></script>')

  def test_verify_inspirewell_theme_1(self):
    # based on http://circusriot.tumblr.com/
    self._test_verify_finds_disqus(" var disqus_shortname = 'my-disqus-name';")

  def test_verify_inspirewell_theme_2(self):
    # based on http://circusriot.tumblr.com/
    self._test_verify_finds_disqus(' disqusUsername = "******";')

  def test_verify_require_aorcsik_theme(self):
    # based on http://require.aorcsik.com/
    self._test_verify_finds_disqus(
        ' dsq.src = "http://my-disqus-name.disqus.com/embed.js";')

  def _test_verify_finds_disqus(self, snippet):
    """Assert that verify() extracts the Disqus shortname from page HTML
    containing the given snippet."""
    # this requests.get is called by webmention-tools
    self.expect_webmention_requests_get(
        'http://primary/', '<html>\nstuff\n%s\n</html>' % snippet)
    self.mox.ReplayAll()
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                   features=['webmention'])
    t.verify()
    self.assertEquals('my-disqus-name', t.disqus_shortname)

  def test_verify_without_disqus(self):
    # A page with no Disqus markup leaves disqus_shortname unset.
    self.expect_webmention_requests_get('http://primary/', 'no disqus here!')
    self.mox.ReplayAll()
    t = Tumblr.new(self.handler, auth_entity=self.auth_entity,
                   features=['webmention'])
    t.verify()
    self.assertIsNone(t.disqus_shortname)

  def test_create_comment(self):
    # Looks up the Disqus thread for the post URL (query string and slug are
    # dropped), then posts the comment to that thread.
    self.expect_thread_details()
    self.expect_requests_post(
        tumblr.DISQUS_API_CREATE_POST_URL,
        json.dumps({'response': {'ok': 'sgtm'}}),
        params=self.disqus_params({
            'thread': '87654',
            'message': '<a href="http://who">who</a>: foo bar'}))
    self.mox.ReplayAll()
    resp = self.tumblr.create_comment(
        'http://primary/post/123999/xyz_abc?asdf', 'who', 'http://who',
        'foo bar')
    self.assertEquals({'ok': 'sgtm'}, resp)

  def test_create_comment_with_unicode_chars(self):
    # The comment message is UTF-8 encoded before being sent to Disqus.
    self.expect_thread_details()
    self.expect_requests_post(
        tumblr.DISQUS_API_CREATE_POST_URL,
        json.dumps({}),
        params=self.disqus_params({
            'thread': '87654',
            'message': '<a href="http://who">Degenève</a>: foo Degenève bar'.encode('utf-8'),
        }))
    self.mox.ReplayAll()
    resp = self.tumblr.create_comment(
        'http://primary/post/123999/xyz_abc', 'Degenève', 'http://who',
        'foo Degenève bar')
    self.assertEquals({}, resp)

  def test_create_comment_finds_disqus_shortname(self):
    # When the source has no stored shortname, create_comment() fetches the
    # post page, scrapes the shortname, and persists it on the entity.
    self.tumblr.disqus_shortname = None
    self.expect_requests_get(
        'http://primary/post/123999',
        "fooo var disqus_shortname = 'my-disqus-name';")
    self.expect_thread_details()
    self.expect_requests_post(tumblr.DISQUS_API_CREATE_POST_URL,
                              json.dumps({}), params=mox.IgnoreArg())
    self.mox.ReplayAll()
    self.tumblr.create_comment('http://primary/post/123999', '', '', '')
    self.assertEquals('my-disqus-name',
                      self.tumblr.key.get().disqus_shortname)

  def test_create_comment_doesnt_find_disqus_shortname(self):
    # If the post page has no Disqus markup, create_comment() raises
    # HTTPBadRequest.
    self.tumblr.disqus_shortname = None
    self.expect_requests_get('http://primary/post/123999', 'no shortname here')
    self.mox.ReplayAll()
    self.assertRaises(
        exc.HTTPBadRequest,
        # ("Bridgy hasn't found your Disqus account yet. "
        # "See http://localhost/tumblr/name for details."),
        self.tumblr.create_comment, 'http://primary/post/123999', '', '', '')
def deltaso():
    """Download the "deltaso" Tumblr blog by constructing a Tumblr
    downloader for it and running it."""
    Tumblr("deltaso").run()
def test_new_no_primary_blog(self):
    """When no blog in the auth entity is marked primary, Tumblr.new()
    returns None and flashes a 'blog not found' message."""
    user_info = {'user': {'blogs': [{'url': 'foo'}]}}
    self.auth_entity.user_json = json.dumps(user_info)

    result = Tumblr.new(self.handler, auth_entity=self.auth_entity)

    self.assertIsNone(result)
    first_message = next(iter(self.handler.messages))
    self.assertIn('Tumblr blog not found', first_message)
  def template_vars(self):
    """Build the template variable dict for a source's user page.

    Populates, depending on which features the source has enabled:
      * 'responses' plus paging links (listen feature),
      * 'publishes' (publish feature),
      * 'blogposts' and 'webmentions' (webmention feature),
      * per-silo blog webmention promo flags,
      * 'indieauth_me' for Instagram sources.

    Returns:
      dict of template variables.
    """
    vars = super(UserHandler, self).template_vars()
    vars.update({
        'source': self.source,
        'EPOCH': util.EPOCH,
        'REFETCH_HFEED_TRIGGER': models.REFETCH_HFEED_TRIGGER,
        'RECENT_PRIVATE_POSTS_THRESHOLD': RECENT_PRIVATE_POSTS_THRESHOLD,
    })
    # Nothing more to add without a source.
    if not self.source:
      return vars

    if isinstance(self.source, instagram.Instagram):
      # Prefer the IndieAuth id; otherwise fall back to the first domain URL.
      auth = self.source.auth_entity
      vars['indieauth_me'] = (
          auth.id if isinstance(auth, indieauth.IndieAuth)
          else self.source.domain_urls[0] if self.source.domain_urls
          else None)

    # Blog webmention promos: suggest enabling webmentions for sources whose
    # domains look like hosted blogs that aren't registered yet.
    if 'webmention' not in self.source.features:
      if self.source.SHORT_NAME in ('blogger', 'medium', 'tumblr', 'wordpress'):
        vars[self.source.SHORT_NAME + '_promo'] = True
      else:
        for domain in self.source.domains:
          if ('.blogspot.' in domain and  # Blogger uses country TLDs
              not Blogger.query(Blogger.domains == domain).get()):
            vars['blogger_promo'] = True
          elif (domain.endswith('tumblr.com') and
                not Tumblr.query(Tumblr.domains == domain).get()):
            vars['tumblr_promo'] = True
          elif (domain.endswith('wordpress.com') and
                not WordPress.query(WordPress.domains == domain).get()):
            vars['wordpress_promo'] = True

    # Responses
    if 'listen' in self.source.features:
      vars['responses'] = []
      query = Response.query().filter(Response.source == self.source.key)

      # if there's a paging param (responses_before or responses_after), update
      # query with it
      def get_paging_param(param):
        # Parse an ISO8601 request param into a datetime, or None if absent.
        # Aborts the request with 400 on a malformed value.
        val = self.request.get(param)
        try:
          return util.parse_iso8601(val) if val else None
        except:
          msg = "Couldn't parse %s %r as ISO8601" % (param, val)
          logging.exception(msg)
          self.abort(400, msg)

      before = get_paging_param('responses_before')
      after = get_paging_param('responses_after')
      if before and after:
        self.abort(400, "can't handle both responses_before and responses_after")
      elif after:
        # Paging forward: ascending order starting just after the cursor.
        query = query.filter(Response.updated > after).order(Response.updated)
      elif before:
        # Paging backward: descending order starting just before the cursor.
        query = query.filter(Response.updated < before).order(-Response.updated)
      else:
        query = query.order(-Response.updated)

      query_iter = query.iter()
      for i, r in enumerate(query_iter):
        r.response = json.loads(r.response_json)
        r.activities = [json.loads(a) for a in r.activities_json]

        # Skip anything that isn't fully public.
        if (not self.source.is_activity_public(r.response) or
            not all(self.source.is_activity_public(a) for a in r.activities)):
          continue
        elif r.type == 'post':
          r.activities = []

        r.actor = r.response.get('author') or r.response.get('actor', {})

        # Fall back to the object's content when the activity has none.
        for a in r.activities + [r.response]:
          if not a.get('content'):
            a['content'] = a.get('object', {}).get('content')

        # Synthesize display content for content-less responses (likes,
        # reposts, RSVPs, invites) from the actor name and response type/verb.
        if not r.response.get('content'):
          phrases = {
              'like': 'liked this',
              'repost': 'reposted this',
              'rsvp-yes': 'is attending',
              'rsvp-no': 'is not attending',
              'rsvp-maybe': 'might attend',
              'rsvp-interested': 'is interested',
              'invite': 'is invited',
          }
          r.response['content'] = '%s %s.' % (
              r.actor.get('displayName') or '',
              phrases.get(r.type) or phrases.get(r.response.get('verb')))

        # convert image URL to https if we're serving over SSL
        image_url = r.actor.setdefault('image', {}).get('url')
        if image_url:
          r.actor['image']['url'] = util.update_scheme(image_url, self)

        # generate original post links
        r.links = self.process_webmention_links(r)
        r.original_links = [util.pretty_link(url, new_tab=True)
                            for url in r.original_posts]

        vars['responses'].append(r)
        # Show at most 10 responses; also cap how many rows we scan.
        if len(vars['responses']) >= 10 or i > 200:
          break

      vars['responses'].sort(key=lambda r: r.updated, reverse=True)

      # calculate new paging param(s)
      new_after = (
          before if before else
          vars['responses'][0].updated if
          vars['responses'] and query_iter.probably_has_next() and (before or after)
          else None)
      if new_after:
        vars['responses_after_link'] = ('?responses_after=%s#responses' %
                                        new_after.isoformat())

      new_before = (
          after if after else
          vars['responses'][-1].updated if
          vars['responses'] and query_iter.probably_has_next()
          else None)
      if new_before:
        vars['responses_before_link'] = ('?responses_before=%s#responses' %
                                         new_before.isoformat())

      vars['next_poll'] = max(
          self.source.last_poll_attempt + self.source.poll_period(),
          # lower bound is 1 minute from now
          util.now_fn() + datetime.timedelta(seconds=90))

    # Publishes
    if 'publish' in self.source.features:
      publishes = Publish.query().filter(Publish.source == self.source.key)\
                                 .order(-Publish.updated)\
                                 .fetch(10)
      for p in publishes:
        # The Publish entity's parent key id is the original page URL.
        p.pretty_page = util.pretty_link(
            p.key.parent().id().decode('utf-8'),
            attrs={'class': 'original-post u-url u-name'},
            new_tab=True)

      vars['publishes'] = publishes

    if 'webmention' in self.source.features:
      # Blog posts
      blogposts = BlogPost.query().filter(BlogPost.source == self.source.key)\
                                  .order(-BlogPost.created)\
                                  .fetch(10)
      for b in blogposts:
        b.links = self.process_webmention_links(b)
        try:
          text = b.feed_item.get('title')
        except ValueError:
          # NOTE(review): presumably feed_item can fail to deserialize;
          # fall back to showing just the URL.
          text = None
        b.pretty_url = util.pretty_link(
            b.key.id(), text=text,
            attrs={'class': 'original-post u-url u-name'},
            max_length=40, new_tab=True)

      # Blog webmentions
      webmentions = BlogWebmention.query()\
          .filter(BlogWebmention.source == self.source.key)\
          .order(-BlogWebmention.updated)\
          .fetch(10)
      for w in webmentions:
        w.pretty_source = util.pretty_link(
            w.source_url(), attrs={'class': 'original-post'}, new_tab=True)
        # Show the full host in the target link only when the target is on
        # one of the source's own domains.
        try:
          target_is_source = (urlparse.urlparse(w.target_url()).netloc in
                              self.source.domains)
        except BaseException:
          target_is_source = False
        w.pretty_target = util.pretty_link(
            w.target_url(), attrs={'class': 'original-post'}, new_tab=True,
            keep_host=target_is_source)

      vars.update({'blogposts': blogposts, 'webmentions': webmentions})

    return vars
def beautifully():
    """Download the "beautifully" Tumblr blog by constructing a Tumblr
    downloader for it and running it."""
    Tumblr("beautifully").run()