Example 1
    def test_create_new_rereads_domains(self):
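        """Checks that create_new() replaces an existing source's stored
        domain_urls and domains with the URLs in the auth entity profile."""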
        FakeSource.new(None,
                       features=['listen'],
                       domain_urls=['http://foo'],
                       domains=['foo']).put()

        FakeSource.string_id_counter -= 1
        auth_entity = testutil.FakeAuthEntity(id='x',
                                              user_json=json.dumps({
                                                  'urls': [{
                                                      'value':
                                                      'http://bar'
                                                  }, {
                                                      'value':
                                                      'http://baz'
                                                  }]
                                              }))
        self.expect_webmention_requests_get('http://bar/',
                                            'no webmention endpoint',
                                            verify=False)

        self.mox.ReplayAll()
        source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
        self.assertEquals(['http://bar/', 'http://baz/'], source.domain_urls)
        self.assertEquals(['bar', 'baz'], source.domains)
Example 2
    def test_create_new_already_exists(self):
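        """Checks that create_new() on an existing source preserves its stored
        properties and adds the new feature."""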
        long_ago = datetime.datetime(year=1901, month=2, day=3)
        props = {
            'created': long_ago,
            'last_webmention_sent': long_ago + datetime.timedelta(days=1),
            'last_polled': long_ago + datetime.timedelta(days=2),
            'last_hfeed_refetch': long_ago + datetime.timedelta(days=3),
            'last_syndication_url': long_ago + datetime.timedelta(days=4),
            'superfeedr_secret': 'asdfqwert',
        }
        FakeSource.new(None, features=['listen'], **props).put()
        self.assert_equals(['listen'], FakeSource.query().get().features)

        FakeSource.string_id_counter -= 1
        auth_entity = testutil.FakeAuthEntity(id='x',
                                              user_json=json.dumps(
                                                  {'url': 'http://foo.com/'}))
        auth_entity.put()
        self._test_create_new(auth_entity=auth_entity, features=['publish'])

        source = FakeSource.query().get()
        self.assert_equals(['listen', 'publish'], source.features)
        for prop, value in props.items():
            self.assert_equals(value, getattr(source, prop), prop)

        self.assert_equals(
            {
                "Updated fake (FakeSource). Try previewing a post from your web site!"
            }, self.handler.messages)

        task_params = testutil.get_task_params(
            self.taskqueue_stub.GetTasks('poll')[0])
        self.assertEqual('1901-02-05-00-00-00', task_params['last_polled'])
Example 3
  def test_create_new_already_exists(self):
    long_ago = datetime.datetime(year=1901, month=2, day=3)
    props = {
      'created': long_ago,
      'last_webmention_sent': long_ago + datetime.timedelta(days=1),
      'last_polled': long_ago + datetime.timedelta(days=2),
      'last_hfeed_fetch': long_ago + datetime.timedelta(days=3),
      'last_syndication_url': long_ago + datetime.timedelta(days=4),
      'superfeedr_secret': 'asdfqwert',
      }
    FakeSource.new(None, features=['listen'], **props).put()
    self.assert_equals(['listen'], FakeSource.query().get().features)

    FakeSource.string_id_counter -= 1
    auth_entity = testutil.FakeAuthEntity(
      id='x', user_json=json.dumps({'url': 'http://foo.com/'}))
    auth_entity.put()
    self._test_create_new(auth_entity=auth_entity, features=['publish'])

    source = FakeSource.query().get()
    self.assert_equals(['listen', 'publish'], source.features)
    for prop, value in props.items():
      self.assert_equals(value, getattr(source, prop), prop)

    self.assert_equals(
      {"Updated fake (FakeSource). Try previewing a post from your web site!"},
      self.handler.messages)

    task_params = testutil.get_task_params(self.taskqueue_stub.GetTasks('poll')[0])
    self.assertEqual('1901-02-05-00-00-00', task_params['last_polled'])
Example 4
  def test_create_new_rereads_domains(self):
    FakeSource.new(None, features=['listen'],
                   domain_urls=['http://foo'], domains=['foo']).put()

    FakeSource.string_id_counter -= 1
    auth_entity = testutil.FakeAuthEntity(id='x', user_json=json.dumps(
        {'urls': [{'value': 'http://bar'}, {'value': 'http://baz'}]}))
    self.expect_requests_get('http://bar', 'no webmention endpoint',
                             verify=False)

    self.mox.ReplayAll()
    source = FakeSource.create_new(self.handler, auth_entity=auth_entity)
    self.assertEquals(['http://bar', 'http://baz'], source.domain_urls)
    self.assertEquals(['bar', 'baz'], source.domains)
Example 5
  def setUp(self):
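    """Stores a FakeSource and several SyndicatedPost relationships, including
    duplicates and entries missing an original or a syndication URL."""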
    super(SyndicatedPostTest, self).setUp()

    self.source = FakeSource.new(None)
    self.source.put()

    self.relationships = []
    self.relationships.append(
        SyndicatedPost(parent=self.source.key,
                       original='http://original/post/url',
                       syndication='http://silo/post/url'))
    # two syndications for the same original
    self.relationships.append(
        SyndicatedPost(parent=self.source.key,
                       original='http://original/post/url',
                       syndication='http://silo/another/url'))
    # two originals for the same syndication
    self.relationships.append(
        SyndicatedPost(parent=self.source.key,
                       original='http://original/another/post',
                       syndication='http://silo/post/url'))
    self.relationships.append(
        SyndicatedPost(parent=self.source.key,
                       original=None,
                       syndication='http://silo/no-original'))
    self.relationships.append(
        SyndicatedPost(parent=self.source.key,
                       original='http://original/no-syndication',
                       syndication=None))

    for r in self.relationships:
      r.put()
Example 6
    def test_get_activities_injects_web_site_urls_into_user_mentions(self):
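        """Checks that the source's web site URLs are added to person tags
        that mention the user in fetched activities."""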
        source = FakeSource.new(None,
                                domain_urls=['http://site1/', 'http://site2/'])
        source.put()

        mention = {
            'object': {
                'tags': [{
                    'objectType': 'person',
                    'id': 'tag:fa.ke,2013:%s' % source.key.id(),
                    'url': 'https://fa.ke/me',
                }, {
                    'objectType': 'person',
                    'id': 'tag:fa.ke,2013:bob',
                }],
            },
        }
        FakeGrSource.activities = [mention]

        # check that we inject their web sites
        got = super(FakeSource, source).get_activities_response()
        mention['object']['tags'][0]['urls'] = [{
            'value': 'http://site1/'
        }, {
            'value': 'http://site2/'
        }]
        self.assert_equals([mention], got['items'])
Example 7
    def test_get_comment_injects_web_site_urls_into_user_mentions(self):
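        """Checks that the source's web site URLs are added to the user's
        person tag in a fetched comment."""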
        source = FakeSource.new(None,
                                domain_urls=['http://site1/', 'http://site2/'])
        source.put()

        user_id = 'tag:fa.ke,2013:%s' % source.key.id()
        FakeGrSource.comment = {
            'id': 'tag:fa.ke,2013:a1-b2.c3',
            'tags': [
                {
                    'id': 'tag:fa.ke,2013:nobody'
                },
                {
                    'id': user_id
                },
            ],
        }

        # check that we inject their web sites
        self.assert_equals(
            {
                'id': 'tag:fa.ke,2013:%s' % source.key.id(),
                'urls': [{
                    'value': 'http://site1/'
                }, {
                    'value': 'http://site2/'
                }],
            },
            super(FakeSource, source).get_comment('x')['tags'][1])
Example 8
    def setUp(self):
        super(SyndicatedPostTest, self).setUp()

        self.source = FakeSource.new(None)
        self.source.put()

        self.relationships = []
        self.relationships.append(
            SyndicatedPost(parent=self.source.key,
                           original='http://original/post/url',
                           syndication='http://silo/post/url'))
        # two syndications for the same original
        self.relationships.append(
            SyndicatedPost(parent=self.source.key,
                           original='http://original/post/url',
                           syndication='http://silo/another/url'))
        # two originals for the same syndication
        self.relationships.append(
            SyndicatedPost(parent=self.source.key,
                           original='http://original/another/post',
                           syndication='http://silo/post/url'))
        self.relationships.append(
            SyndicatedPost(parent=self.source.key,
                           original=None,
                           syndication='http://silo/no-original'))
        self.relationships.append(
            SyndicatedPost(parent=self.source.key,
                           original='http://original/no-syndication',
                           syndication=None))

        for r in self.relationships:
            r.put()
Example 9
  def test_replace_poll_tasks(self):
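    """Checks that the replace_poll_tasks cron handler enqueues a new poll
    task only for the one enabled source that needs it."""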
    self.assertEqual([], self.taskqueue_stub.GetTasks('poll'))
    now = datetime.datetime.now()

    # a bunch of sources, one needs a new poll task
    five_min_ago = now - datetime.timedelta(minutes=5)
    day_and_half_ago = now - datetime.timedelta(hours=36)
    month_ago = now - datetime.timedelta(days=30)
    defaults = {
      'features': ['listen'],
      'last_webmention_sent': day_and_half_ago,
      }
    sources = [
      # doesn't need a new poll task
      FakeSource.new(None, last_poll_attempt=now, **defaults).put(),
      FakeSource.new(None, last_poll_attempt=five_min_ago, **defaults).put(),
      FakeSource.new(None, status='disabled', **defaults).put(),
      FakeSource.new(None, status='disabled', **defaults).put(),
      # needs a new poll task
      FakeSource.new(None, status='enabled', **defaults).put(),
      # not signed up for listen
      FakeSource.new(None, last_webmention_sent=day_and_half_ago).put(),
      # never sent a webmention, past grace period. last polled is older than 2x
      # fast poll, but within 2x slow poll.
      FakeSource.new(None, features=['listen'], created=month_ago,
                     last_poll_attempt=day_and_half_ago).put(),
      ]
    resp = cron.application.get_response('/cron/replace_poll_tasks')
    self.assertEqual(200, resp.status_int)

    tasks = self.taskqueue_stub.GetTasks('poll')
    self.assertEqual(1, len(tasks))
    self.assert_equals(sources[4].urlsafe(),
                       testutil.get_task_params(tasks[0])['source_key'])
Example 10
  def test_has_bridgy_webmention_endpoint(self):
    source = FakeSource.new(None)
    for endpoint, has in ((None, False),
                          ('http://foo', False),
                          ('https://brid.gy/webmention/fake', True),
                          ('https://www.brid.gy/webmention/fake', True),
                          ):
      source.webmention_endpoint = endpoint
      self.assertEquals(has, source.has_bridgy_webmention_endpoint(), endpoint)
Example 11
  def test_get_comment(self):
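    """Checks that get_comment() delegates to the underlying as_source."""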
    comment_obj = {'objectType': 'comment', 'content': 'qwert'}
    source = FakeSource.new(None)
    source.as_source = self.mox.CreateMock(as_source.Source)
    source.as_source.get_comment('123', activity_id=None, activity_author_id=None
                                 ).AndReturn(comment_obj)

    self.mox.ReplayAll()
    self.assert_equals(comment_obj, source.get_comment('123'))
Example 12
  def test_is_beta_user(self):
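    """Checks that is_beta_user() is true only when the source's bridgy_path
    is in util.BETA_USER_PATHS."""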
    source = FakeSource.new(self.handler)
    self.assertFalse(source.is_beta_user())

    self.mox.stubs.Set(util, 'BETA_USER_PATHS', set())
    self.assertFalse(source.is_beta_user())

    self.mox.stubs.Set(util, 'BETA_USER_PATHS', set([source.bridgy_path()]))
    self.assertTrue(source.is_beta_user())
Example 13
  def test_verify_without_webmention_endpoint(self):
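    """Checks that verify() leaves webmention_endpoint unset when the site
    doesn't advertise one."""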
    self.expect_webmention_requests_get(
      'http://primary/', 'no webmention endpoint here!', verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://primary/'], domains=['primary'])
    source.verify()
    self.assertIsNone(source.webmention_endpoint)
Example 14
  def test_verify_without_webmention_endpoint(self):
    self.expect_requests_get('http://primary/', 'no webmention endpoint here!',
                             verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://primary/'], domains=['primary'])
    source.verify()
    self.assertIsNone(source.webmention_endpoint)
Example 15
  def test_verify(self):
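    """Checks that verify() discovers and stores the webmention endpoint
    advertised in the site's HTML."""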
    # this requests.get is called by webmention-tools
    self.expect_webmention_requests_get('http://primary/', """
<html><meta>
<link rel="webmention" href="http://web.ment/ion">
</meta></html>""", verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://primary/'], domains=['primary'])
    source.verify()
    self.assertEquals('http://web.ment/ion', source.webmention_endpoint)
Example 16
  def test_verify_checks_blacklist(self):
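    """Checks that verify() skips the blacklisted bad.app domain and only
    fetches the remaining URL."""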
    self.expect_webmention_requests_get('http://good/', """
<html><meta>
<link rel="webmention" href="http://web.ment/ion">
</meta></html>""", verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://bad.app/', 'http://good/'],
                            domains=['bad.app', 'good'])
    source.verify()
    self.assertEquals('http://web.ment/ion', source.webmention_endpoint)
Example 17
  def test_verify(self):
    # this requests.get is called by webmention-tools
    self.expect_requests_get('http://primary/', """
<html><meta>
<link rel="webmention" href="http://web.ment/ion">
</meta></html>""", verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://primary/'], domains=['primary'])
    source.verify()
    self.assertEquals('http://web.ment/ion', source.webmention_endpoint)
Example 18
  def test_put_updates(self):
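    """Checks that put_updates() writes the queued updates and that
    source.updates survives the pre-put hook."""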
    source = FakeSource.new(None)
    source.put()
    updates = source.updates = {'status': 'disabled'}

    try:
      # check that source.updates is preserved through pre-put hook since some
      # Source subclasses (e.g. FacebookPage) use it.
      FakeSource._pre_put_hook = lambda fake: self.assertEquals(updates, fake.updates)
      Source.put_updates(source)
      self.assertEquals('disabled', source.key.get().status)
    finally:
      del FakeSource._pre_put_hook
Example 19
    def test_verify_checks_blacklist(self):
        self.expect_webmention_requests_get('http://good/',
                                            """
<html><meta>
<link rel="webmention" href="http://web.ment/ion">
</meta></html>""",
                                            verify=False)
        self.mox.ReplayAll()

        source = FakeSource.new(
            self.handler,
            features=['webmention'],
            domain_urls=['http://bad.app/', 'http://good/'],
            domains=['bad.app', 'good'])
        source.verify()
        self.assertEquals('http://web.ment/ion', source.webmention_endpoint)
Example 20
  def test_verify_unicode_characters(self):
    """Older versions of BS4 had an issue where it would check short HTML
    documents to make sure the user wasn't accidentally passing a URL,
    but converting the utf-8 document to ascii caused exceptions in some cases.
    """
    # this requests.get is called by webmention-tools
    self.expect_webmention_requests_get(
      'http://primary/', """\xef\xbb\xbf<html><head>
<link rel="webmention" href="http://web.ment/ion"></head>
</html>""", verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://primary/'],
                            domains=['primary'])
    source.verify()
    self.assertEquals('http://web.ment/ion', source.webmention_endpoint)
Example 21
  def test_verify_unicode_characters(self):
    """Older versions of BS4 had an issue where it would check short HTML
    documents to make sure the user wasn't accidentally passing a URL,
    but converting the utf-8 document to ascii caused exceptions in some cases.
    """
    # this requests.get is called by webmention-tools
    self.expect_requests_get(
      'http://primary/', """\xef\xbb\xbf<html><head>
<link rel="webmention" href="http://web.ment/ion"></head>
</html>""", verify=False)
    self.mox.ReplayAll()

    source = FakeSource.new(self.handler, features=['webmention'],
                            domain_urls=['http://primary/'],
                            domains=['primary'])
    source.verify()
    self.assertEquals('http://web.ment/ion', source.webmention_endpoint)
Example 22
  def test_get_comment_injects_web_site_urls_into_user_mentions(self):
    source = FakeSource.new(None, domain_urls=['http://site1/', 'http://site2/'])
    source.put()

    user_id = 'tag:fa.ke,2013:%s' % source.key.id()
    FakeGrSource.comment = {
      'id': 'tag:fa.ke,2013:a1-b2.c3',
      'tags': [
        {'id': 'tag:fa.ke,2013:nobody'},
        {'id': user_id},
      ],
    }

    # check that we inject their web sites
    self.assert_equals({
      'id': 'tag:fa.ke,2013:%s' % source.key.id(),
      'urls': [{'value': 'http://site1/'}, {'value': 'http://site2/'}],
    }, super(FakeSource, source).get_comment('x')['tags'][1])
Example 23
  def test_poll_period(self):
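    """Checks that poll_period() varies with the source's age, webmention
    activity, and rate limiting."""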
    source = FakeSource.new(None)
    source.put()

    self.assertEqual(source.FAST_POLL, source.poll_period())

    source.created = datetime.datetime(2000, 1, 1)
    self.assertEqual(source.SLOW_POLL, source.poll_period())

    now = datetime.datetime.now()
    source.last_webmention_sent = now - datetime.timedelta(days=8)
    self.assertEqual(source.FAST_POLL * 10, source.poll_period())

    source.last_webmention_sent = now
    self.assertEqual(source.FAST_POLL, source.poll_period())

    source.rate_limited = True
    self.assertEqual(source.RATE_LIMITED_POLL, source.poll_period())
Example 24
    def test_poll_period(self):
        source = FakeSource.new(None)
        source.put()

        self.assertEqual(source.FAST_POLL, source.poll_period())

        source.created = datetime.datetime(2000, 1, 1)
        self.assertEqual(source.SLOW_POLL, source.poll_period())

        now = datetime.datetime.now()
        source.last_webmention_sent = now - datetime.timedelta(days=8)
        self.assertEqual(source.FAST_POLL * 10, source.poll_period())

        source.last_webmention_sent = now
        self.assertEqual(source.FAST_POLL, source.poll_period())

        source.rate_limited = True
        self.assertEqual(source.RATE_LIMITED_POLL, source.poll_period())
Example 25
  def test_propagate_blogpost(self):
    """Blog post propagate task."""
    source_key = FakeSource.new(None, domains=['fake']).put()
    links = ['http://fake/post', '/no/domain', 'http://ok/one.png',
             'http://ok/two', 'http://ok/two', # repeated
             ]
    blogpost = models.BlogPost(id='x', source=source_key, unsent=links)
    blogpost.put()

    self.expect_requests_head('http://ok/two')
    self.expect_webmention(source_url='x', target='http://ok/two').AndReturn(True)
    self.expect_requests_head('http://ok/one.png', content_type='image/png')
    self.mox.ReplayAll()

    self.post_url = '/_ah/queue/propagate-blogpost'
    super(PropagateTest, self).post_task(
      expected_status=200,
      params={'key': blogpost.key.urlsafe()})
    self.assert_response_is('complete', NOW + LEASE_LENGTH,
                            sent=['http://ok/two'], response=blogpost)
    self.assert_equals(NOW, source_key.get().last_webmention_sent)
Example 26
  def test_should_refetch(self):
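    """Checks that should_refetch() respects the REFETCH_HFEED_TRIGGER
    override and the fast and slow refetch periods."""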
    source = FakeSource.new(None)  # haven't found a synd url yet
    self.assertFalse(source.should_refetch())

    source.last_hfeed_refetch = models.REFETCH_HFEED_TRIGGER  # override
    self.assertTrue(source.should_refetch())

    source.last_syndication_url = source.last_hfeed_refetch = testutil.NOW  # too soon
    self.assertFalse(source.should_refetch())

    source.last_poll_attempt = testutil.NOW  # too soon
    self.assertFalse(source.should_refetch())

    hour = datetime.timedelta(hours=1)
    source.last_hfeed_refetch -= (Source.FAST_REFETCH + hour)
    self.assertTrue(source.should_refetch())

    source.last_syndication_url -= datetime.timedelta(days=15)  # slow refetch
    self.assertFalse(source.should_refetch())

    source.last_hfeed_refetch -= (Source.SLOW_REFETCH + hour)
    self.assertTrue(source.should_refetch())
Example 27
  def test_get_activities_injects_web_site_urls_into_user_mentions(self):
    source = FakeSource.new(None, domain_urls=['http://site1/', 'http://site2/'])
    source.put()

    mention = {
      'object': {
        'tags': [{
          'objectType': 'person',
          'id': 'tag:fa.ke,2013:%s' % source.key.id(),
          'url': 'https://fa.ke/me',
        }, {
          'objectType': 'person',
          'id': 'tag:fa.ke,2013:bob',
        }],
      },
    }
    FakeGrSource.activities = [mention]

    # check that we inject their web sites
    got = super(FakeSource, source).get_activities_response()
    mention['object']['tags'][0]['urls'] = [
      {'value': 'http://site1/'}, {'value': 'http://site2/'}]
    self.assert_equals([mention], got['items'])