Code Example #1
File: FeedProvider.py  Project: avsm/py-shelf
    def getFeed(self, feed_url ):
        if not self.feed_url and self.provider.isDuplicateFeed( feed_url ):
            self.dead = True
            self.changed()
        else:
            self.feed_url = feed_url
            # if we have a feed object, then I'm not interested in re-parsing a stale file.
            wantStale = not self.feed
            Cache.getContentOfUrlAndCallback( callback = self.gotFeed, url = feed_url, username = self.username(), password = self.password(), timeout = self.timeout(), wantStale = wantStale, failure = self.failed )
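The call above hands the download off to two callbacks, self.gotFeed and self.failed. The snippet below is a hypothetical sketch, not code from py-shelf: it only illustrates the shape such a callback pair might take, and the single-argument signatures, the use of feedparser, and the dead/changed() bookkeeping are all assumptions made for illustration.

import feedparser  # assumption: the fetched feed body is parsed with feedparser

class FeedCallbacksSketch(object):
    """Hypothetical receiver for gotFeed/failed -- not the real FeedProvider."""

    def __init__(self):
        self.feed = None
        self.dead = False

    def gotFeed(self, data):
        # parse the downloaded feed body and keep the result around
        self.feed = feedparser.parse(data)
        self.changed()

    def failed(self, error):
        # mark the provider dead so callers can surface the failure
        self.dead = True
        self.changed()

    def changed(self):
        # stand-in for whatever change notification the real provider sends
        pass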
Code Example #2
File: Extractor.py  Project: rcarmo/shelf
    def getSocialGraphFor( self, url, more_urls = [] ):
        if not re.match(r'http', url): return

        if url in Extractor.SOCIAL_GRAPH_CACHE:
            print_info("using cached social graph data")
            self.addClues( Extractor.SOCIAL_GRAPH_CACHE[url], more_urls )
            return
        api = "http://socialgraph.apis.google.com/lookup?pretty=1&fme=1&edo=1&edi=1"
        api += "&q=" + quote( url, '' )
        print_info("Social graph API call to " + api )
        # TODO - respect more_urls here
        Cache.getContentOfUrlAndCallback( callback = self.gotSocialGraphData, url = api, timeout = 3600 * 48 ) # huge timeout here
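The quote( url, '' ) call above comes from Python 2's urllib; the empty second argument (the "safe" characters) matters because it forces even '/' and ':' to be percent-encoded, so the whole target URL survives as a single q= query value. A minimal demonstration, written against the equivalent Python 3 urllib.parse.quote, with a made-up profile URL:

from urllib.parse import quote  # the Python 2 code above uses: from urllib import quote

url = "http://example.com/people/bob"  # hypothetical profile URL
api = "http://socialgraph.apis.google.com/lookup?pretty=1&fme=1&edo=1&edi=1"
api += "&q=" + quote(url, '')  # safe='' -- encode '/' and ':' as well
print(api)
# ...lookup?pretty=1&fme=1&edo=1&edi=1&q=http%3A%2F%2Fexample.com%2Fpeople%2Fbob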
Code Example #3
File: DopplrProvider.py  Project: avsm/py-shelf
    def __init__(self, provider, url):
        ProviderAtom.__init__( self, provider, url )
        self.username = re.search(r'/traveller/([^/]+)', self.url).group(1)
        self.name = "Dopplr / %s"%self.username
        self.response = None
        self.fail = None
        
        self.token = NSUserDefaults.standardUserDefaults().stringForKey_("dopplrToken")
        if not self.token: return

        url = "https://www.dopplr.com/api/traveller_info.js?token=%s&traveller=%s"%( self.token, self.username )
        Cache.getContentOfUrlAndCallback( callback = self.gotDopplrData, url = url, timeout = 3600, wantStale = True, failure = self.failed )
Code Example #4
File: FeedProvider.py  Project: avsm/py-shelf
    def getFeedUrl(self):
        # it's very unlikely that the feed source will move
        # TODO - check stale cache first. Man, the feed provider is too complicated.
        special = self.specialCaseFeedUrl( self.url )
        # return None to mean 'no special case', blank string to mean "no feed here"
        if special is not None:
            if len(special) > 0:
                print_info("special-case feed url %s"%special)
                self.getFeed( special )
            else:
                # bad feed
                self.dead = True
                self.changed()
            return

        Cache.getContentOfUrlAndCallback( callback = self.gotMainPage, url = self.url, timeout = self.timeout() * 10, wantStale = False, failure = self.failed ) # TODO - use stale version somehow
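All four examples funnel their network access through Cache.getContentOfUrlAndCallback. The sketch below is a simplified, hypothetical stand-in for that module, not the real Cache from py-shelf/shelf: it is synchronous and in-memory, it reads wantStale as "an expired cached copy is acceptable", and the basic-auth handling is an assumption; only the parameter names are taken from the call sites above.

import base64
import time
import urllib.request

_cache = {}  # url -> (fetched_at, content)

def getContentOfUrlAndCallback(callback, url, timeout=3600, wantStale=False,
                               failure=None, username=None, password=None):
    # serve a cached copy if it is still fresh, or if the caller accepts stale data
    now = time.time()
    cached = _cache.get(url)
    if cached is not None:
        fetched_at, content = cached
        if wantStale or now - fetched_at < timeout:
            callback(content)
            return

    try:
        request = urllib.request.Request(url)
        if username is not None:
            # assumption: credentials are sent as pre-emptive HTTP basic auth
            token = base64.b64encode(("%s:%s" % (username, password or "")).encode()).decode()
            request.add_header("Authorization", "Basic " + token)
        content = urllib.request.urlopen(request, timeout=30).read()
    except Exception as error:
        # report the failure the way the failure= callbacks above expect
        if failure is not None:
            failure(error)
        return

    _cache[url] = (now, content)
    callback(content)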