# Example 1
def test_auto_sub(ctx):
    """Indexing a feed that advertises a <link rel="hub"/> entry should
    trigger an automatic pubsubhubbub subscription at that hub.

    Spawns a subscription callback client, a feed indexer, and a fake hub,
    pushes a minimal Atom feed through the indexer, then checks that the
    hub verified our callback and that the stored RemoteFeed recorded the
    hub info.
    """
    from datetime import datetime
    from eventlet import sleep, spawn
    from melkman.db import RemoteFeed
    from melkman.fetch import push_feed_index
    from melkman.fetch.pubsubhubbub import WSGISubClient, callback_url_for
    from melkman.fetch.worker import run_feed_indexer

    w = WSGISubClient(ctx)
    client = spawn(w.run)
    indexer = spawn(run_feed_indexer, ctx)

    # NOTE(review): FakeHub and rfc3339_date come from the enclosing test
    # module — not visible in this chunk.
    hub = FakeHub()
    hub_proc = spawn(hub.run)
    hub_url = 'http://localhost:%d/' % hub.port

    feed_url = 'http://www.example.org/feeds/12'

    content = """<?xml version="1.0" encoding="utf-8"?>
      <feed xmlns="http://www.w3.org/2005/Atom">
      <id>%s</id>
      <title>Blah</title>
      <link rel="self" href="%s"/>
      <link rel="hub" href="%s" />
      <updated>%s</updated>
      <author>
        <name>Joop Doderer</name>
      </author>
      </feed>
    """ % (feed_url, feed_url, hub_url,
           rfc3339_date(datetime.utcnow()))

    try:
        # push content in...
        push_feed_index(feed_url, content, ctx)
        sleep(.5)

        # check for automatic subscription...
        cb = callback_url_for(feed_url, ctx)
        assert hub.is_verified(cb, feed_url)

        rf = RemoteFeed.get_by_url(feed_url, ctx)
        assert rf.hub_info.enabled
        assert rf.hub_info.hub_url == hub_url
    finally:
        # Always tear down the background greenlets, even when an assertion
        # fails — otherwise they leak into subsequent tests (this mirrors
        # the try/finally cleanup style used by test_push_index).
        client.kill()
        client.wait()
        indexer.kill()
        indexer.wait()
        hub_proc.kill()
        hub_proc.wait()
# Example 2
def test_push_index(ctx):
    """Entries pushed through push_feed_index end up indexed on the feed.

    Runs a feed indexer greenlet, pushes a random Atom feed at it, and
    verifies every melk id from the pushed document appears in the stored
    RemoteFeed's entries.
    """
    from melkman.db.remotefeed import RemoteFeed
    from melkman.fetch import push_feed_index
    from melkman.fetch.worker import run_feed_indexer
    from eventlet import sleep, spawn

    # background indexer consumes what we push
    indexer_proc = spawn(run_feed_indexer, ctx)

    try:
        feed_url = 'http://www.example.com/feeds/2'
        feed_doc = random_atom_feed(feed_url, 10)
        expected_ids = melk_ids_in(feed_doc, feed_url)

        push_feed_index(feed_url, feed_doc, ctx)
        sleep(.5)  # give the indexer greenlet a chance to process

        feed = RemoteFeed.get_by_url(feed_url, ctx)
        missing = [mid for mid in expected_ids if mid not in feed.entries]
        assert not missing
    finally:
        indexer_proc.kill()
        indexer_proc.wait()
# Example 3
def test_push_index_digest(ctx):
    """Hub-originated pushes are only indexed with a valid sha1 digest.

    Covers the pubsubhubbub signature check:
    * while the feed has no hub secret stored, every hub push is rejected —
      wrong digest, missing digest, and even the "correct" digest;
    * once a secret is stored, wrong/missing digests are still rejected and
      only the matching sha1 digest gets the entries indexed.
    """
    from melk.util.nonce import nonce_str
    from melkman.db.remotefeed import RemoteFeed
    from melkman.fetch import push_feed_index
    from melkman.fetch.worker import run_feed_indexer
    from eventlet import sleep, spawn
    from melkman.fetch.pubsubhubbub import psh_digest

    # start a feed indexer
    indexer = spawn(run_feed_indexer, ctx)

    try:
        url = 'http://www.example.com/feeds/2'
        rf = RemoteFeed.create_from_url(url, ctx)
        rf.hub_info.enabled = True
        rf.hub_info.subscribed = True
        rf.save()

        secret = nonce_str()

        content = random_atom_feed(url, 10)
        ids = melk_ids_in(content, url)

        correct_digest = 'sha1=%s' % psh_digest(content, secret)
        wrong_digest = 'wrong digest'

        def _push_as_hub(digest):
            # push the content as if it came from the hub, wait for the
            # indexer to process it, and return the refreshed feed document
            push_feed_index(url, content, ctx, digest=digest, from_hub=True)
            sleep(.5)
            return RemoteFeed.get_by_url(url, ctx)

        def _assert_rejected(digest):
            # a rejected push leaves no pushed entry in the stored feed
            feed = _push_as_hub(digest)
            for iid in ids:
                assert iid not in feed.entries

        #
        # no hub secret is specified on the feed: everything is rejected,
        # even the correct digest, since no digest has been set
        #
        _assert_rejected(wrong_digest)
        _assert_rejected(None)
        _assert_rejected(correct_digest)

        #
        # now set the hub secret
        #
        rf = RemoteFeed.get_by_url(url, ctx)
        rf.hub_info.secret = secret
        rf.save()

        _assert_rejected(wrong_digest)
        _assert_rejected(None)

        # finally, the correct digest should work now...
        rf = _push_as_hub(correct_digest)
        for iid in ids:
            assert iid in rf.entries
    finally:
        # tear down the indexer even on assertion failure, consistent with
        # the try/finally cleanup in test_push_index
        indexer.kill()
        indexer.wait()