Example #1
def delicious_data(self, url):
    delicious_api = "http://feeds.delicious.com/v2/json/urlinfo/data?url=%s&type=json" % url
    logging.info("trying to fetch delicious info %s" % delicious_api)
    json = LinkUtils.getJsonFromApi(delicious_api)
    link = Links()
    if json:
        try:
            # the delicious urlinfo feed returns a one-element list
            if not link.title:
                link.title = json[0]["title"].strip()
            link.categories = db.Text(unicode(simplejson.dumps(json[0]["top_tags"])))
            if link.categories is not None:
                # hand the tags off to the category worker queue
                taskqueue.add(
                    queue_name="category-queue",
                    url="/link/category/delicious",
                    params={"url": url, "categories": link.categories},
                )
            link.delicious_count = Cast.toInt(json[0]["total_posts"], 0)
            logging.info("delicious count %s" % link.delicious_count)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info("key error [[%s, %s]] in %s" % (e0, e1, json))
    return link
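The helpers `LinkUtils.getJsonFromApi` and `Cast.toInt` are used throughout these examples but are not shown on this page. A minimal sketch of what they might look like, assuming `getJsonFromApi` fetches a URL and parses the body as JSON (returning `None` on any failure) and `toInt` coerces with a default; the real project's implementations may differ:

# Hypothetical reconstructions of helpers referenced by the examples.
import logging
import urllib2
import simplejson

class LinkUtils(object):
    @staticmethod
    def getJsonFromApi(api_url, timeout=10):
        """Fetch api_url and parse the body as JSON; None on any failure."""
        try:
            return simplejson.loads(urllib2.urlopen(api_url, timeout=timeout).read())
        except Exception:
            logging.info('failed to fetch/parse %s' % api_url)
            return None

class Cast(object):
    @staticmethod
    def toInt(value, default):
        """Coerce value to int, falling back to default."""
        try:
            return int(value)
        except (TypeError, ValueError):
            return default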
Example #2
def get(self):
    logging.info('fetching more user links ...')
    cookie = self.request.get('cookie', None)
    offset = Cast.toInt(self.request.get('offset', None), 0)
    logging.info('row offset %s' % offset)
    offset = offset * self.link_batch
    ud = UserUtils.getUserDetailsFromCookie(cookie)
    sessions = SessionModel.gql('WHERE instaright_account = :1 ORDER BY date DESC',
                                ud.instaright_account).fetch(self.link_batch, offset)
    if not sessions:
        logging.info('returned no sessions for offset %s' % offset)
        self.response.headers["Content-type"] = "application/json"
        self.response.out.write('{}')
        return
    logging.info('fetched %s sessions for user %s' % (len(sessions), ud.instaright_account))
    # group the sessions by calendar day
    d = {}
    for d_te, j in itertools.groupby(sessions, key=lambda s: s.date.date()):
        ss = [{'t': s.title, 'l': s.url, 'd': s.domain, 'h': s.url_hash} for s in j]
        d[str(d_te)] = ss
    import operator  # normally imported at module level
    # order by dates desc
    dates_sorted = sorted(d.iteritems(), key=operator.itemgetter(0), reverse=True)
    self.response.headers["Content-type"] = "application/json"
    self.response.out.write(simplejson.dumps(dates_sorted))
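The handler serializes a list of (date, sessions) pairs rather than a dict, so the client receives the days already ordered. A standalone sketch of the same groupby-then-sort pattern, assuming ISO-formatted date strings (for which lexicographic order matches chronological order):

import datetime
import itertools
import operator
import simplejson

sessions = [
    {'date': datetime.date(2011, 5, 2), 'title': 'a'},
    {'date': datetime.date(2011, 5, 2), 'title': 'b'},
    {'date': datetime.date(2011, 5, 1), 'title': 'c'},
]
# groupby only merges adjacent items, so the input must already be sorted by date
d = {}
for day, group in itertools.groupby(sessions, key=lambda s: s['date']):
    d[str(day)] = [s['title'] for s in group]
# ISO date strings ('YYYY-MM-DD') sort lexicographically == chronologically
dates_sorted = sorted(d.iteritems(), key=operator.itemgetter(0), reverse=True)
print simplejson.dumps(dates_sorted)
# [["2011-05-02", ["a", "b"]], ["2011-05-01", ["c"]]]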
Example #3
def getAllData(self, url, count=0):

    domain = RequestUtils.getDomain(url)
    logging.info('from %s domain %s' % (url, domain))
    url = urllib2.quote(url.encode('utf-8'))
    url_hash = LinkUtils.getUrlHash(url)

    topsy_api = 'http://otter.topsy.com/stats.json?url=%s' % url
    tweet_meme_api = 'http://api.tweetmeme.com/url_info.json?url=%s' % url
    delicious_api = 'http://feeds.delicious.com/v2/json/urlinfo/data?url=%s&type=json' % url
    digg_api = 'http://services.digg.com/1.0/endpoint?method=story.getAll&link=%s&type=json' % url
    reddit_api = 'http://www.reddit.com/api/info.json?url=%s' % url
    facebook_api = 'https://api.facebook.com/method/fql.query?query=select%20%20like_count,share_count%20from%20link_stat%20where%20url=%22' + url + '%22&format=json'
    linkedin_api = 'http://www.linkedin.com/cws/share-count?url=%s' % url
    stumble_upon_api = 'http://www.stumbleupon.com/services/1.01/badge.getinfo?url=%s' % url
    buzz_api = 'https://www.googleapis.com/buzz/v1/activities/count?alt=json&url=%s' % url
    alternate_api = 'http://api.sharedcount.com/?url=%s' % url

    link = None
    alternate_twitter_score = None
    alternate_buzz_score = None
    alternate_digg_score = None
    alternate_facebook_share_score = None
    alternate_facebook_like_score = None
    alternate_su_score = None
    alternate_linkedin_score = None

    try:
        link = Links.gql('WHERE url_hash = :1', url_hash).get()
        if link is None:
            link = Links.gql('WHERE url = :1', url).get()
    except BadValueError:
        logging.info('url property too long')
    if link is None:
        link = Links()
        link.domain = domain
        link.instapaper_count = Cast.toInt(count, 0)
        link.url = urllib2.unquote(url).decode('utf-8')
        link.url_hash = LinkUtils.getUrlHash(link.url)
        link.redditups = 0
        link.redditdowns = 0
        link.tweets = 0
        link.diggs = 0
        link.delicious_count = 0
        link.overall_score = 0
        link.shared = False
    else:
        link.date_updated = datetime.datetime.now().date()
        link.domain = domain
        if link.title:
            link.title = link.title.strip()[:199]
        if link.url_hash is None:
            link.url_hash = url_hash

    # relaxation is bumped whenever an API response is missing expected fields
    link.relaxation = 0

    logging.info('trying to fetch shared count info %s' % alternate_api)
    json = LinkUtils.getJsonFromApi(alternate_api)
    if json:
        try:
            alternate_twitter_score = Cast.toInt(json['Twitter'], 0)
            alternate_buzz_score = Cast.toInt(json['Buzz'], 0)
            alternate_digg_score = Cast.toInt(json['Diggs'], 0)
            facebook_info = LinkUtils.getJsonFieldSimple(json, "Facebook")
            logging.info('facebook alternate info %s' % facebook_info)
            # SharedCount returns either a bare share count or a dict of counts
            if type(facebook_info) is int:
                alternate_facebook_share_score = Cast.toInt(facebook_info, 0)
            elif type(facebook_info) is dict:
                logging.info('likes: %s' % LinkUtils.getJsonFieldSimple(facebook_info, "like_count"))
                logging.info('shares: %s' % LinkUtils.getJsonFieldSimple(facebook_info, "share_count"))
                alternate_facebook_like_score = Cast.toInt(LinkUtils.getJsonFieldSimple(facebook_info, "like_count"), 0)
                alternate_facebook_share_score = Cast.toInt(LinkUtils.getJsonFieldSimple(facebook_info, "share_count"), 0)
            logging.info('alternate fb likes %s fb share %s' % (alternate_facebook_like_score, alternate_facebook_share_score))
            alternate_su_score = Cast.toInt(json['StumbleUpon'], 0)
            alternate_linkedin_score = Cast.toInt(json['LinkedIn'], 0)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))

    logging.info('trying to fetch topsy info %s' % topsy_api)
    json = LinkUtils.getJsonFromApi(topsy_api)
    if json:
        try:
            link.influence_score = Cast.toInt(json['response']['influential'], 0)
            link.all_score = Cast.toInt(json['response']['all'], 0)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))

    logging.info('trying to fetch digg info %s' % digg_api)
    json = LinkUtils.getJsonFromApi(digg_api)
    if json:
        try:
            link.diggs = Cast.toInt(json['count'], 0)
            logging.info('diggs %s' % link.diggs)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))
    elif alternate_digg_score is not None:
        logging.info('using alternate digg score %s' % alternate_digg_score)
        link.diggs = alternate_digg_score
    if link.diggs is not None:
        link.overall_score += link.diggs

    logging.info('trying to fetch tweet_meme info %s' % tweet_meme_api)
    json = LinkUtils.getJsonFromApi(tweet_meme_api)
    if json and 'story' in json:
        try:
            link.tweets = Cast.toInt(json['story']['url_count'], 0)
            if json['story']['title'] is not None:
                link.title = json['story']['title'].strip()[:199]
            if 'excerpt' in json['story']:
                logging.info('getting excerpt')
                link.excerpt = db.Text(unicode(json['story']['excerpt']))
            logging.info('tweets %s' % link.tweets)
        except KeyError:
            link.relaxation = link.relaxation + 1
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))
    elif alternate_twitter_score is not None:
        logging.info('using alternate twitter score %s' % alternate_twitter_score)
        link.tweets = alternate_twitter_score
    if link.tweets is not None:
        link.overall_score += self.tw_factor * link.tweets

    logging.info('trying to fetch delicious info %s' % delicious_api)
    json = LinkUtils.getJsonFromApi(delicious_api)
    if json:
        try:
            if not link.title and json[0]['title']:
                link.title = json[0]['title'].strip()[:199]
            link.categories = db.Text(unicode(simplejson.dumps(json[0]['top_tags'])))
            link.delicious_count = Cast.toInt(json[0]['total_posts'], 0)
            logging.info('delicious count %s' % link.delicious_count)
            if link.delicious_count is not None:
                link.overall_score += link.delicious_count
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))

    logging.info('trying to fetch reddit info %s' % reddit_api)
    json = LinkUtils.getJsonFromApi(reddit_api)
    if json and 'data' in json:
        try:
            data = json['data']['children']
            # take the submission with the most upvotes
            top_upped = sorted(data, key=lambda child: child['data']['ups'], reverse=True)
            if top_upped:
                link.redditups = Cast.toInt(top_upped[0]['data']['ups'], 0)
                link.redditdowns = Cast.toInt(top_upped[0]['data']['downs'], 0)
                link.created = Cast.toInt(top_upped[0]['data']['created'], 0)
                logging.info('reddit ups %s' % link.redditups)
                if link.redditups is not None:
                    link.overall_score += link.redditups
                if link.redditdowns is not None:
                    link.overall_score -= link.redditdowns
        except KeyError:
            link.relaxation = link.relaxation + 1
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))

    logging.info('trying to fetch facebook info %s' % facebook_api)
    json = LinkUtils.getJsonFromApi(facebook_api)
    if json:
        try:
            link.facebook_like = Cast.toInt(json[0]['like_count'], 0)
            link.facebook_share = Cast.toInt(json[0]['share_count'], 0)
            logging.info('facebook likes %s' % link.facebook_like)
            logging.info('facebook share %s' % link.facebook_share)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('request: %s == more info: key error [[%s, %s]] in %s' % (facebook_api, e0, e1, json))
    elif alternate_facebook_like_score is not None or alternate_facebook_share_score is not None:
        # fall back to the SharedCount numbers when the FQL call returned nothing
        logging.info('using alternate facebook like %s share %s' % (alternate_facebook_like_score, alternate_facebook_share_score))
        if alternate_facebook_like_score is not None:
            link.facebook_like = alternate_facebook_like_score
        if alternate_facebook_share_score is not None:
            link.facebook_share = alternate_facebook_share_score
    if link.facebook_like is not None:
        link.overall_score += self.fb_factor * link.facebook_like
    if link.facebook_share is not None:
        link.overall_score += link.facebook_share

    logging.info('trying to fetch stumble upon link %s' % stumble_upon_api)
    json = LinkUtils.getJsonFromApi(stumble_upon_api)
    if json:
        try:
            link.stumble_upons = Cast.toInt(json['result']['views'], 0)
            logging.info('stumble_score %s' % link.stumble_upons)
            if not link.title and json['result']['title']:
                link.title = json['result']['title'].strip()[:199]
                logging.info('setting stumble title: %s' % link.title)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('request: %s == more info: key error [[%s, %s]] in %s' % (stumble_upon_api, e0, e1, json))
    elif alternate_su_score is not None:
        logging.info('using alternate su score %s' % alternate_su_score)
        link.stumble_upons = alternate_su_score
    if link.stumble_upons is not None:
        link.overall_score += link.stumble_upons

    # LinkedIn is handled specially since the response is JSONP
    logging.info('trying to fetch linkedin link %s' % linkedin_api)
    try:
        dta = urllib2.urlopen(linkedin_api)
        res = dta.read()
        # strip the JSONP callback wrapper before parsing
        res = res.replace('IN.Tags.Share.handleCount(', '')
        res = res.replace(');', '')
        json = simplejson.loads(res)

        link.linkedin_share = Cast.toInt(json['count'], 0)
        logging.info('linked in shares %s' % link.linkedin_share)
        if link.linkedin_share is not None:
            link.overall_score += link.linkedin_share
    except Exception:
        e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
        logging.info('request: %s == more info: [[%s, %s]]' % (linkedin_api, e0, e1))

    if link.linkedin_share is None and alternate_linkedin_score is not None:
        logging.info('using alternate linkedin score %s' % alternate_linkedin_score)
        link.linkedin_share = alternate_linkedin_score
        link.overall_score += alternate_linkedin_score

    logging.info('trying to fetch buzz link %s' % buzz_api)
    json = LinkUtils.getJsonFromApi(buzz_api)
    if json:
        try:
            # the counts dict is keyed by the queried url
            link.buzz_count = Cast.toInt(json['data']['counts']["%s" % url][0]["count"], 0)
            logging.info('buzz share %s' % link.buzz_count)
        except KeyError:
            e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
            logging.info('request: %s == more info: key error [[%s, %s]] in %s' % (buzz_api, e0, e1, json))
    elif alternate_buzz_score is not None:
        logging.info('using alternate buzz score %s' % alternate_buzz_score)
        link.buzz_count = alternate_buzz_score
    if link.buzz_count is not None:
        link.overall_score += link.buzz_count

    return link
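The LinkedIn endpoint answers with JSONP (`IN.Tags.Share.handleCount({...});`), which is why the method strips the callback wrapper by hand before parsing. A slightly more defensive way to unwrap an arbitrary JSONP payload, sketched as a standalone helper (the function name is ours, not the project's):

import re
import simplejson

def parse_jsonp(body):
    """Extract and parse the JSON wrapped in a JSONP callback.

    Accepts both plain JSON and callback(...) payloads.
    """
    match = re.match(r'^\s*[\w.$]+\s*\((.*)\)\s*;?\s*$', body, re.DOTALL)
    payload = match.group(1) if match else body
    return simplejson.loads(payload)

# e.g. parse_jsonp('IN.Tags.Share.handleCount({"count": 827});')['count'] == 827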
Example #4
def post(self):
    url = self.request.get('url', None)
    if url is None:
        logging.info('no link in request. skipping')
        return
    url_hash = LinkUtils.getUrlHash(url)
    category_api = 'http://access.alchemyapi.com/calls/url/URLGetCategory?apikey=%s&url=%s&outputMode=json' % (self.alchemy_key, urllib2.quote(url.encode('utf-8')))
    logging.info('trying to fetch category info %s' % category_api)
    link = None
    language = None
    category = None

    try:
        link = Links.gql('WHERE url_hash = :1', url_hash).get()
        if link is None:
            link = Links.gql('WHERE url = :1', url).get()
    except BadValueError:
        logging.info('url property too long')
    if link is None:
        link = Links()
    else:
        link.date_updated = datetime.datetime.now().date()
    json = LinkUtils.getJsonFromApi(category_api)
    if json is None:
        logging.info('alchemy api returned no category. skipping')
        return
    try:
        language = json['language']
        category = json['category']
        score = Cast.toFloat(json['score'], 0)
        # only trust sufficiently confident categorizations
        if score is not None and score > 0.5 and category is not None:
            logging.info('category %s score %s' % (category, score))
            cats = category.split("_")
            if not cats:
                logging.info('no categories. exit')
                return
            memcache_key = url_hash + '_category'
            current_categories = memcache.get(memcache_key)
            merge_cat = []
            if current_categories is not None:
                logging.info('merging with existing cats %s' % current_categories)
                merge_cat.extend(current_categories)
                merge_cat.extend(cats)
            else:
                merge_cat = cats
            model = None
            try:
                model = SessionModel.gql('WHERE url_hash = :1 ORDER BY date DESC', url_hash).get()
                if model is None:
                    model = SessionModel.gql('WHERE url = :1 ORDER BY date DESC', url).get()
            except BadValueError:
                logging.info('url too long ... %s' % url)
            if model is None:
                logging.info('model not defined ... skipping')
                return

            linkDetail = Links.gql('WHERE url_hash = :1', url_hash).get()
            if linkDetail is None:
                linkDetail = Links.gql('WHERE url = :1', url).get()
            if linkDetail is not None and linkDetail.categories is not None:
                logging.info('category found from link details %s' % linkDetail.categories)
                # categories were stored via simplejson.dumps, so parse them
                # back with loads rather than eval
                delic_cats = simplejson.loads(linkDetail.categories)
                merge_cat.extend(delic_cats)
            merge_cat = set(merge_cat)
            logging.info('caching cats %s for url %s' % (merge_cat, url))
            memcache.set(memcache_key, list(merge_cat)[:4])

            for c in merge_cat:
                taskqueue.add(queue_name='message-broadcast-queue', url='/category/stream', params={'category': c, 'url': url_hash})
                existingLinkCat = LinkCategory.gql('WHERE url_hash = :1 AND category = :2', url_hash, c).get()
                if existingLinkCat is None:
                    existingLinkCat = LinkCategory.gql('WHERE url = :1 AND category = :2', url, c).get()
                if existingLinkCat is not None:
                    existingLinkCat.updated = datetime.datetime.now()
                    if existingLinkCat.url_hash is None:
                        existingLinkCat.url_hash = url_hash
                    existingLinkCat.put()
                    logging.info('updated existing url(%s) category(%s) update time %s' % (url, c, existingLinkCat.updated))
                else:
                    logging.info('new pair: url(%s) category(%s)' % (url, c))
                    linkCategory = LinkCategory()
                    linkCategory.url = url
                    linkCategory.url_hash = url_hash
                    linkCategory.category = c
                    if model is not None:
                        linkCategory.model_details = model.key()
                    linkCategory.put()

        if language is not None:
            link.language = language
        link.url = url
        link.url_hash = url_hash
        link.put()
    except KeyError:
        e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
        logging.info('key error [[%s, %s]] in %s' % (e0, e1, json))
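The url_hash + category pair is upserted with a query-then-put, which can race when two tasks process the same link concurrently. On App Engine's old db API the usual alternative is a deterministic key name plus `Model.get_or_insert`, sketched here under the assumption that `LinkCategory` is a plain `db.Model`; the key format and helper name are illustrative, not from the project:

import datetime
from google.appengine.ext import db

def upsert_link_category(url, url_hash, category, model_key=None):
    """Create-or-update a LinkCategory atomically via a deterministic key name."""
    key_name = 'k:%s:%s' % (url_hash, category)  # illustrative key format
    link_cat = LinkCategory.get_or_insert(key_name,
                                          url=url,
                                          url_hash=url_hash,
                                          category=category)
    link_cat.updated = datetime.datetime.now()
    if model_key is not None:
        link_cat.model_details = model_key
    link_cat.put()
    return link_cat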