Example #1
    def ImportFeed(self):

        view = self.view

        dir = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(dir, "osaf.blog.rss")

        data = file(path).read()

        channel = FeedChannel(itsView=view)
        count = channel.parse(data)

        self.assertEqual(channel.displayName, "OSAF News")
        self.assertEqual(5, len(channel))

        # Test successful lookup
        url = URL("http://www.osafoundation.org/archives/000966.html")
        item = indexes.valueLookup(channel, "link", "link", url)
        self.assertEqual(item.displayName, "OSAF Welcomes Priscilla Chung")

        # Test unsuccessful lookup
        nonExistent = URL("http://www.osafoundation.org/nonexistent/")
        item = indexes.valueLookup(channel, "link", "link", nonExistent)
        self.assertEqual(item, None)

        # Although the channels module doesn't allow duplicate links, let's
        # test the lookup mechanism's ability to return dupes.  We'll add a
        # duplicate, then pass the "multiple=True" arg to valueLookup.

        url = URL("http://www.osafoundation.org/archives/000964.html")
        item = FeedItem(itsView=view, link=url)
        channel.add(item)

        items = indexes.valueLookup(channel, "link", "link", url, multiple=True)
        self.assertEqual(len(items), 2)
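
Taken together, the calls in Example #1 show the lookup pattern these snippets exercise: indexes.valueLookup(collection, indexName, attributeName, value) returns the single matching item or None, and a list of matches when multiple=True is passed. Below is a minimal sketch of that pattern, assuming only what the example above demonstrates (including that the multiple keyword defaults to False); the helper name lookupLink is illustrative, not part of the Chandler API.

def lookupLink(channel, url, allowMultiple=False):
    # Thin wrapper around the calls above: on a FeedChannel, "link" is both
    # the index name and the indexed attribute name.
    return indexes.valueLookup(channel, "link", "link", url,
                               multiple=allowMultiple)

# For example:
#   lookupLink(channel, URL("http://www.osafoundation.org/archives/000966.html"))
#   lookupLink(channel, url, allowMultiple=True)   # returns a list of matches
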
Example #2
def findUID(view, uid):
    """
    Return the master event whose icalUID matches uid, or None.
    """
    iCalendarItems = schema.ns("osaf.sharing", view).iCalendarItems
    return indexes.valueLookup(iCalendarItems, 'icalUID',
                               pim.Note.icalUID.name, uid)
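
A hypothetical caller, sketched only to show how findUID is used; the view is assumed to be an open repository view as in the function above, and the UID string is a made-up placeholder:

# Sketch only -- the UID below is a placeholder, not real data.
uid = "3F23C147-0000-0000-0000-000000000000"
master = findUID(view, uid)
if master is None:
    # Nothing with that icalUID has been imported through sharing yet.
    pass
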
Example #3
    def ImportFeed(self):

        view = self.view

        dir = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(dir, "osaf.blog.rss")

        data = file(path).read()

        channel = FeedChannel(itsView=view)
        count = channel.parse(data)

        self.assertEqual(channel.displayName, "OSAF News")
        self.assertEqual(5, len(channel))

        # Test successful lookup
        url = URL("http://www.osafoundation.org/archives/000966.html")
        item = indexes.valueLookup(channel, "link", "link", url)
        self.assertEqual(item.displayName, "OSAF Welcomes Priscilla Chung")

        # Test unsuccessful lookup
        nonExistent = URL("http://www.osafoundation.org/nonexistent/")
        item = indexes.valueLookup(channel, "link", "link", nonExistent)
        self.assertEqual(item, None)

        # Although the channels module doesn't allow duplicate links, let's
        # test the lookup mechanism's ability to return dupes.  We'll add a
        # duplicate, then pass the "multiple=True" arg to valueLookup.

        url = URL("http://www.osafoundation.org/archives/000964.html")
        item = FeedItem(itsView=view, link=url)
        channel.add(item)

        items = indexes.valueLookup(channel,
                                    "link",
                                    "link",
                                    url,
                                    multiple=True)
        self.assertEqual(len(items), 2)
Example #4
    def NonASCII(self):

        view = self.view

        dir = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(dir, "japanese.rdf")

        data = file(path).read()

        channel = FeedChannel(itsView=view)
        count = channel.parse(data)

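        # The expected displayName below is the feed's Japanese title,
        # u"近藤淳也の新ネットコミュニティ論" (roughly, "Junya Kondo's take on
        # new net communities").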
        self.assertEqual(channel.displayName, u"\u8fd1\u85e4\u6df3\u4e5f\u306e\u65b0\u30cd\u30c3\u30c8\u30b3\u30df\u30e5\u30cb\u30c6\u30a3\u8ad6")

        self.assertEqual(14, len(channel))
        url = "http://blog.japan.cnet.com/kondo/archives/002364.html"
        item = indexes.valueLookup(channel, "link", "link", url)
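        # The expected title decodes to "コモンセンスの確認" (roughly,
        # "Confirming common sense").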
        self.assertEqual(item.displayName, u"\u30b3\u30e2\u30f3\u30bb\u30f3\u30b9\u306e\u78ba\u8a8d")
Example #5
    def NonASCII(self):

        view = self.view

        dir = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(dir, "japanese.rdf")

        data = file(path).read()

        channel = FeedChannel(itsView=view)
        count = channel.parse(data)

        self.assertEqual(
            channel.displayName,
            u"\u8fd1\u85e4\u6df3\u4e5f\u306e\u65b0\u30cd\u30c3\u30c8\u30b3\u30df\u30e5\u30cb\u30c6\u30a3\u8ad6"
        )

        self.assertEqual(14, len(channel))
        url = "http://blog.japan.cnet.com/kondo/archives/002364.html"
        item = indexes.valueLookup(channel, "link", "link", url)
        self.assertEqual(
            item.displayName,
            u"\u30b3\u30e2\u30f3\u30bb\u30f3\u30b9\u306e\u78ba\u8a8d")
Example #6
 def _parseItems(self, items):
     """
     This method parses all the news items in the RSS feed.
     """
     view = self.itsView
     
     count = 0
     
     for newItem in items:
         # Convert date to datetime object
         if getattr(newItem, "date_parsed", None):
             try:
                 # date_parsed is a tuple of 9 integers, like gmtime( )
                 # returns...
                 # date_parsed seems to always be converted to GMT, so
                 # let's make a datetime object using values from
                 # date_parsed, coupled with a GMT tzinfo...
                 kwds = dict(tzinfo=view.tzinfo.UTC)
                 itemDate = datetime(*newItem.date_parsed[:5], **kwds)
                 # logger.debug("%s, %s, %s" % \
                 #     (newItem.date, newItem.date_parsed, itemDate))
                 newItem.date = itemDate
             except:
                 logger.exception("Could not get date: %s (%s)" % \
                     (newItem.date, newItem.date_parsed))
                 newItem.date = None
         # Get the item content, using the "content" attribute first,
         # falling back to what's in "description"
         content = newItem.get("content")
         if content:
             content = content[0]["value"]
         else:
             content = newItem.get("description")
         title = newItem.get("title")
         matchingItem = None
         link = getattr(newItem, "link", None)
         if link:
             # Find all FeedItems that have this link
             matchingItem = indexes.valueLookup(self, "link", "link", link)
         # If there is no matching item (based on link), it's new.
         # If there is a matching item and its title, date, or content has
         # changed, update it and mark it unread.
         if matchingItem is None:
             feedItem = FeedItem(itsView=view)
             feedItem.refresh(newItem)
             self.addFeedItem(feedItem)
             logger.debug("Added new item: %s", title)
             count += 1
         else:
             # A FeedItem exists within this Channel that has the
             # same link.  @@@MOR For now I am only going to allow one
             # FeedItem at a time (per Channel) to link to the same place,
             # since it seems like that gets the behavior we want.
             oldTitle = matchingItem.displayName
             titleDifferent = (oldTitle != title)
             # If no date in the item, just consider it a matching date;
             # otherwise do compare datestamps:
             dateDifferent = False
             haveFeedDate = "date" in newItem
             if haveFeedDate:
                 if matchingItem.date != newItem.date:
                     dateDifferent = True
             if not self.ignoreContentChanges:
                 oldContent = matchingItem.content.getReader().read()
                 contentDifferent = (oldContent != content)
             else:
                 contentDifferent = False
             if contentDifferent or titleDifferent or dateDifferent:
                 matchingItem.refresh(newItem)
                 if matchingItem.read:
                     matchingItem.updated = True
                 matchingItem.read = False
                 msg = "Updated item: %s (content %s, title %s, date %s)"
                 logger.debug(msg, title, contentDifferent, titleDifferent,
                              dateDifferent)
     return count
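
The update branch above decides whether an existing FeedItem should be refreshed by comparing the title, the date (only when the feed supplies one), and the content (unless the channel ignores content changes). Restated as a pure function for clarity; this is an illustrative sketch, not part of the feeds module, and the parameter names are invented for the example.

def itemChanged(oldTitle, newTitle,
                oldDate, newDate, haveFeedDate,
                oldContent, newContent, ignoreContentChanges):
    # Mirrors the checks in _parseItems: a missing feed date counts as
    # "unchanged", and content comparison can be switched off per channel.
    titleDifferent = (oldTitle != newTitle)
    dateDifferent = haveFeedDate and (oldDate != newDate)
    contentDifferent = (not ignoreContentChanges) and (oldContent != newContent)
    return titleDifferent or dateDifferent or contentDifferent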