def fx_test_feeds():
    """Return two revisions of the same feed (same ``id``).

    The first is the original; the second has a later ``updated_at``
    and carries one entry, so merge/update logic can be exercised.

    """
    authors = [Person(name='vio')]
    # Consistency fix: use Text(value=...) for the title, as every
    # other Feed/Entry construction in this module does (Text is the
    # declared title element type).
    feed = Feed(id='http://feedone.com/', authors=authors,
                title=Text(value='Feed One'),
                updated_at=datetime.datetime(2013, 10, 29, 20, 55, 30,
                                             tzinfo=utc))
    updated_feed = Feed(id='http://feedone.com/', authors=authors,
                        title=Text(value='Feed One'),
                        updated_at=datetime.datetime(2013, 10, 30, 20, 55, 30,
                                                     tzinfo=utc))
    entry = Entry(id='http://feedone.com/1', authors=authors,
                  title=Text(value='Test Entry'),
                  updated_at=datetime.datetime(2013, 10, 30, 20, 55, 30,
                                               tzinfo=utc))
    updated_feed.entries.append(entry)
    return feed, updated_feed
def test_subscription_set_subscribe(subs):
    """Subscribing a feed registers a subscription with the expected
    attributes; after removal, re-subscribing picks the icon uri up
    from a ``shortcut icon`` link instead of the keyword argument.

    """
    feed = Feed(
        id='urn:earthreader:test:test_subscription_set_subscribe',
        title=Text(value='Feed title')
    )
    feed.links.extend([
        Link(uri='http://example.com/index.xml',
             relation='self',
             mimetype='application/atom+xml'),
        Link(uri='http://example.com/',
             relation='alternate',
             mimetype='text/html'),
    ])
    # Icon supplied explicitly through the keyword argument.
    returned = subs.subscribe(feed, icon_uri='http://example.com/favicon.ico')
    subscription = next(iter(subs))
    assert returned is subscription
    assert subscription.feed_id == \
        '0691e2f0c3ea1d7fa9da48e14a46ac8077815ad3'
    assert subscription.icon_uri == 'http://example.com/favicon.ico'
    assert subscription.label == 'Feed title'
    assert subscription.feed_uri == 'http://example.com/index.xml'
    assert subscription.alternate_uri == 'http://example.com/'
    subs.remove(subscription)
    assert not subs
    # Icon discovered from the feed's own "shortcut icon" link this time.
    feed.links.append(
        Link(uri='http://example.com/favicon.ico',
             relation='shortcut icon')
    )
    returned = subs.subscribe(feed)
    assert returned is next(iter(subs))
    assert returned == subscription
def test_add_as_subscription(subs):
    """``CrawlResult.add_as_subscription`` inserts exactly one
    subscription carrying the result's url, the feed title as its
    label, and the result's icon url.

    """
    links = [
        Link(relation='self',
             mimetype='application/atom+xml',
             uri='http://example.com/atom.xml'),
    ]
    feed = Feed(
        id='urn:earthreader:test:test_subscription_set_subscribe',
        title=Text(value='Feed title'),
        links=links
    )
    crawl_result = CrawlResult(
        'http://example.com/atom.xml',
        feed,
        hints={},
        icon_url='http://example.com/favicon.ico'
    )
    subscription = crawl_result.add_as_subscription(subs)
    assert len(subs) == 1
    assert next(iter(subs)) is subscription
    assert subscription.feed_uri == crawl_result.url
    assert subscription.label == feed.title.value
    assert subscription.icon_uri == crawl_result.icon_url
def fx_filtering_entries(fx_test_stage):
    """Load the stage with a ten-entry feed for read/starred filtering.

    Entries 0-4 carry a read mark and entries 3-6 a starred mark, so
    the two mark sets overlap on entries 3-4.

    """
    authors = [Person(name='vio')]
    now = datetime.datetime(2013, 10, 30, 20, 55, 30, tzinfo=utc)
    one_day = datetime.timedelta(days=1)
    feed = Feed(id='http://feedone.com/feed/atom/', authors=authors,
                title=Text(value='Feed One'), updated_at=now)
    for seq in range(10):
        feed.entries.append(
            Entry(id='http://feedone.com/feed/atom/' + str(seq) + '/',
                  authors=authors,
                  title=Text(value=str(seq + 1)),
                  updated_at=now + one_day * seq)
        )
    for seq in range(5):
        feed.entries[seq].read = Mark(marked=True, updated_at=now)
    for seq in range(3, 7):
        feed.entries[seq].starred = Mark(marked=True, updated_at=now)
    with fx_test_stage as stage:
        stage.feeds[get_hash('http://feedone.com/feed/atom/')] = feed
        stage.subscriptions = read(SubscriptionList, opml_for_filtering)
def xmls_for_next(request, fx_test_stage):
    """Populate the stage with four feeds of descending-dated entries
    plus a subscription list, for pagination ("next page") tests.

    Feeds one/two get 25 entries each, feed three 20, feed four 50.
    Some entries of feeds two and four are pre-marked as read.

    """
    # NOTE(review): the fourth outline reuses Feed Three's title and
    # xmlUrl while feed_four (feedfour.com) is never referenced by the
    # OPML — presumably a deliberate duplicate-subscription case, but
    # worth confirming against the tests that consume this fixture.
    opml = '''
    <opml version="1.0">
      <head>
        <title>test opml</title>
      </head>
      <body>
        <outline text="categoryone" title="categoryone">
          <outline type="atom" text="Feed One" title="Feed One"
                   xmlUrl="http://feedone.com/" />
          <outline type="atom" text="Feed Two" title="Feed Two"
                   xmlUrl="http://feedtwo.com/" />
        </outline>
        <outline type="atom" text="Feed Three" title="Feed Three"
                 xmlUrl="http://feedthree.com/" />
        <outline type="atom" text="Feed Four" title="Feed Three"
                 xmlUrl="http://feedthree.com/" />
      </body>
    </opml>
    '''
    authors = [Person(name='vio')]
    updated = datetime.datetime(2013, 10, 30, 20, 55, 30, tzinfo=utc)
    later = datetime.datetime(2013, 10, 30, 21, 55, 30, tzinfo=utc)
    earlier = datetime.datetime(2013, 10, 30, 19, 55, 30, tzinfo=utc)
    one_day = datetime.timedelta(days=1)
    feed_one = Feed(id='http://feedone.com/', authors=authors,
                    title=Text(value='Feed One'), updated_at=updated)
    feed_two = Feed(id='http://feedtwo.com/', authors=authors,
                    title=Text(value='Feed Two'), updated_at=later)
    feed_three = Feed(id='http://feedthree.com/', authors=authors,
                      title=Text(value='Feed Three'), updated_at=later)
    feed_four = Feed(id='http://feedfour.com/', authors=authors,
                     title=Text(value='Feed Four'), updated_at=later)
    # Entry ids and titles count down; timestamps step back one day
    # per entry so each feed is already in reverse-chronological order.
    for offset in range(25):
        feed_one.entries.append(
            Entry(id='http://feedone.com/' + str(24 - offset),
                  authors=authors,
                  title=Text(value='Feed One: Entry ' + str(24 - offset)),
                  updated_at=updated - one_day * offset)
        )
        feed_two.entries.append(
            Entry(id='http://feedtwo.com/' + str(24 - offset),
                  authors=authors,
                  title=Text(value='Feed Two: Entry ' + str(24 - offset)),
                  updated_at=earlier - one_day * offset)
        )
    for offset in range(20):
        feed_three.entries.append(
            Entry(id='http://feedthree.com/' + str(19 - offset),
                  authors=authors,
                  title=Text(value='Feed Three: Entry ' + str(19 - offset)),
                  updated_at=updated - one_day * offset)
        )
    for offset in range(50):
        feed_four.entries.append(
            Entry(id='http://feedfour.com/' + str(49 - offset),
                  authors=authors,
                  title=Text(value='Feed Four: Entry ' + str(49 - offset)),
                  updated_at=updated - one_day * offset)
        )
    # Pre-read marks: feed two's entries 0-4 and 15-19, feed four's 20-49.
    for offset in range(5):
        feed_two.entries[offset].read = True
        feed_two.entries[offset + 15].read = True
    for offset in range(20, 50):
        feed_four.entries[offset].read = True
    subscriptions = read(SubscriptionList, opml)
    with fx_test_stage as stage:
        stage.subscriptions = subscriptions
        stage.feeds[get_hash('http://feedone.com/')] = feed_one
        stage.feeds[get_hash('http://feedtwo.com/')] = feed_two
        stage.feeds[get_hash('http://feedthree.com/')] = feed_three
        stage.feeds[get_hash('http://feedfour.com/')] = feed_four