Example #1
    def test_rm_feed(self):
        """ Tests rm_feed.

        Add a feed to the collector.
        Add a feed to the indexer.

        Get the first item:
          1- Check if the webpage URL exists in the indexer.

        Remove the feed with the query:
          2- Check if the same query returns no results.

        """
        name, url, tag, _ = Manager.get_feeds_info()[0]
        self.co.add_feed(name, url, tag)
        self.indexer.add_feed(name)
        _, item = self.co.get_items(name).next()

        field = "webpage_url"
        keyword = item.webpage_url
        
        query = self.indexer._Indexer__query(field, keyword)

        with self.indexer.ix.searcher() as s:
            self.assertEquals(len(s.search(query)), 1) # 1

        self.indexer.rm_feed(field, keyword, print_search=False)

        with self.indexer.ix.searcher() as s:
            self.assertEquals(len(s.search(query)), 0) # 2
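The ix.searcher() / s.search(query) calls in these tests follow the Whoosh API. The sketch below shows the kind of index the Indexer is assumed to wrap; the Whoosh backend, the schema, and the document values are assumptions made for illustration, not taken from the project.

import tempfile

from whoosh import index
from whoosh.fields import Schema, ID, TEXT
from whoosh.query import Term

# Hypothetical schema: a unique webpage_url field plus a title field.
schema = Schema(webpage_url=ID(stored=True, unique=True), title=TEXT(stored=True))

# Build a throwaway index holding a single document.
ix = index.create_in(tempfile.mkdtemp(), schema)
writer = ix.writer()
writer.add_document(webpage_url=u"http://example.com/post", title=u"A post")
writer.commit()

# Same shape as the assertions above: build a term query, open a searcher, count hits.
query = Term("webpage_url", u"http://example.com/post")
with ix.searcher() as s:
    assert len(s.search(query)) == 1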
Example #2
    def test_add_feed(self):
        """ Tests add_feed. 

        Add a feed to the collector.
        Add a feed to the indexer.

        For each item added:
         1- Check if the webpage URL exists in the indexer.

        """
        name, url, tag, _ = Manager.get_feeds_info()[0]
        self.co.add_feed(name, url, tag)
        self.indexer.add_feed(name)

        for _, item in self.co.get_items(name):
            query = self.indexer._Indexer__query("webpage_url", item.webpage_url)

            with self.indexer.ix.searcher() as s:
                self.assertGreater(len(s.search(query)), 0)
Example #3
    def test___query(self):
        """ Tests __query.

        Add a feed to the collector.
        Add a feed to the indexer.

        Build a query:
          1- Check if the webpage URL exists in the indexer.

        """
        name, url, tag, _ = Manager.get_feeds_info()[0]
        self.co.add_feed(name, url, tag)
        self.indexer.add_feed(name)
        _, item = self.co.get_items(name).next()

        field = "webpage_url"
        keyword = item.webpage_url

        query = self.indexer._Indexer__query(field, keyword)
        with self.indexer.ix.searcher() as s:
            self.assertEquals(len(s.search(query)), 1) # 1
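The tests reach the private __query method through its mangled name, _Indexer__query: Python rewrites a double-underscore attribute defined inside a class body to _ClassName__name, so outside the class that is the only spelling that resolves. A minimal illustration with a hypothetical Indexer stub:

class Indexer(object):
    def __query(self, field, keyword):
        # Stand-in body; the real method presumably builds a search query object.
        return (field, keyword)

idx = Indexer()
# Only the mangled name works from outside the class.
print(idx._Indexer__query("webpage_url", "http://example.com"))
# idx.__query(...) would raise AttributeError here.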
Example #4
    def test_add_feeds(self):
        """ Tests add_feeds.

        Add feeds to the collector.
        Add feeds to the indexer.

        For each feed added, get the first item:
          1- Check if the webpage URL exists in the indexer.

        """
        for name, url, tag, _ in Manager.get_feeds_info():
            self.co.add_feed(name, url, tag)

        self.indexer.add_feeds()

        for name, _ in self.co.get_feeds():
            _, item = self.co.get_items(name).next()
            query = self.indexer._Indexer__query("webpage_url", item.webpage_url)

            with self.indexer.ix.searcher() as s:
                self.assertEquals(len(s.search(query)), 1)
Example #5
    def test_search_feeds(self):
        """ Tests search_feeds.

        Add a feed to the collector.
        Add a feed to the indexer.

        1- Search feeds.
        2- Search feeds by setting a query.
        The result is visual and must be the same in both cases.

        """
        name, url, tag, _ = Manager.get_feeds_info()[0]
        self.co.add_feed(name, url, tag)
        self.indexer.add_feed(name)
        _, item = self.co.get_items(name).next()

        field = "webpage_url"
        keyword = item.webpage_url

        self.indexer.search_feeds(field, keyword) # 1

        query = self.indexer._Indexer__query(field, keyword)
        self.indexer.search_feeds(field, keyword, query) # 2
Example #6
 def setUp(self):
     self.co = Collector()
     self.c = Classifier(CleanTextUtil("french"))
     self.m = Manager(self.co, self.c)
     self.feeds_info = Manager.get_feeds_info()
Example #7
 def setUp(self):
     self.co = Collector()
     self.feed_info = Manager.get_feeds_info()
Example #8
    def setUp(self):
        feed_info = Manager.get_feeds_info()
        _, url, _, _ = feed_info[0]
        feed_parsed = feedparser.parse(url)

        self.item_entry = feed_parsed["entries"][0]
Example #9
 def setUp(self):
     feed_info = Manager.get_feeds_info()
     self.name, self.url, self.tag, _ = feed_info[0]
Example #10
            item_data (feedparser.FeedParserDict): A dictionary of item information.
            
        Returns:
            int: the hash.

        """
        return abs(hash(item_data["title"]+item_data["title_detail"]["base"]))
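As a quick check of what this hash is built from, the same value can be computed straight from a parsed feed; the feed URL below is a placeholder and the feed is assumed to have at least one entry:

import feedparser

feed = feedparser.parse("http://example.com/rss")  # placeholder URL
entry = feed["entries"][0]
# Same recipe as above: abs() of the hash of the title concatenated with the detail base URL.
print(abs(hash(entry["title"] + entry["title_detail"]["base"])))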

        
###########################################################################
# Collector Example
###########################################################################

if __name__ == "__main__":
    from manager import Manager
    feeds = Manager.get_feeds_info()
    collector = Collector()
    
    def add_feeds_test():
        logging.info("add feeds")
        for name, url, tag, _ in feeds:
            collector.add_feed(name, url, tag)
            
    def update_feeds_test():
        logging.info("update feeds")
        for name, _, _, _ in feeds:
            collector.update_feed(name)

    def rm_feeds_test():
        logging.info("remove feeds")
        for name, _, _, _ in feeds:
Example #11
 def print_random_feed_items_test():
     feeds = Manager.get_feeds_info()
     name, _, _, _ = random.choice(feeds)
     c.print_items(name)