def get(self, user):
    """Render the user-info page for a URL-encoded instapaper account name.

    Flow: validate input, resolve the UserDetails record, serve from the
    per-day memcache entry when present, otherwise look the user up by
    mail and either queue a background fetch (new user) or cache and
    render the found record.
    """
    # Guard against empty path component.
    if user is None or len(user) == 0:
        logging.error('Empty user. Skipping')
        return
    user_decoded = urllib.unquote(user)
    logging.info('user: %s' %user_decoded)
    ud=UserDetails.gql('WHERE instapaper_account = :1' , user_decoded).get()
    if ud is None:
        logging.info('non existing user. redirect to home')
        self.redirect('/')
        return
    # sanity check: backfill instaright_account from instapaper_account
    if ud.instaright_account is None:
        ud.instaright_account = ud.instapaper_account
        ud.put()
    # Cache key includes today's date so the entry naturally rolls over daily.
    memcache_key ='user_info_' + user_decoded+'_'+str(datetime.datetime.now().date())
    sessions = SessionModel.gql('WHERE instaright_account = :1 ORDER by date desc ' , user_decoded).fetch(100)
    links = [ s for s in sessions if s is not None ]
    cached_info = memcache.get(memcache_key)
    if cached_info:
        # Cached UserDetails — only the link list is re-fetched above.
        logging.info('getting from cache' )
        template_variables = {'user':cached_info,'links':links}
        path= os.path.join(os.path.dirname(__file__), 'templates/user_info.html')
        self.response.headers["Content-type"] = "text/html"
        self.response.out.write(template.render(path,template_variables))
        return
    user_detail= UserDetails.gql('WHERE mail = :1', user_decoded).get()
    if user_detail is None:
        # Unknown mail: queue a background fetch for richer info and render
        # a provisional record in the meantime.
        logging.info('new user %s added to queue' %user_decoded)
        fetch_url = '/user/'+user+'/fetch'
        taskqueue.add(queue_name='user-info', url= fetch_url)
        ud = UserDetails()
        ud.name = user_decoded
        ud.instapaper_account = user_decoded
        ud.instaright_account = user_decoded
        ud.links_added = SessionModel.countAllForUser(user_decoded)
        # tmp put until we find more info for user
        ud.put()
        template_variables = {'user':ud, 'links': links}
        path= os.path.join(os.path.dirname(__file__), 'templates/user_info.html')
        self.response.headers["Content-type"] = "text/html"
        self.response.headers["Accept-Charset"] = "utf-8"
        self.response.out.write(template.render(path,template_variables))
        return
    # Backfill missing instaright_account on the mail-matched record.
    if user_detail.instaright_account is None:
        user_detail.instaright_account = user_decoded
        user_detail.put()
    memcache.set(memcache_key, user_detail)
    template_variables = {'user':user_detail, "links" : links}
    path= os.path.join(os.path.dirname(__file__), 'templates/user_info.html')
    self.response.headers["Content-type"] = "text/html"
    self.response.out.write(template.render(path,template_variables))
def test_update_encode(self):
    """Dry-run of re-encoding url_counter_id into base-26 for stored sessions.

    Logs the before/after encode26 value for up to 1000 rows; the actual
    persistence (`s.put()`) is commented out, so this only exercises the
    encoder. The final assertion is a trivially-true placeholder.
    """
    e = EncodeUtils()
    ss = SessionModel.gql('ORDER by url_counter_id desc').fetch(1000)
    test = SessionModel.countAllForUser('*****@*****.**')
    logging.info('count for user %s' % test)
    logging.info("fetch %s " %len(ss))
    for s in ss:
        cnt = s.url_counter_id
        encode26 = e.enbase(cnt)
        logging.info("e26: before %s after %s" % (s.url_encode26, encode26))
        s.url_encode26 = encode26
        #s.put()
    self.assertEquals(True, True)
def _create_session(self, data=None) -> str:
    """Create a new session and return its id.

    Args:
        data: Optional initial payload stored as the session data.
            Defaults to a fresh empty dict.

    Return:
        (str) Id under which the session was stored.
    """
    # BUG FIX: the previous signature used a mutable default (data={});
    # the same dict object would be shared across all calls that omit the
    # argument, so one session's mutations could leak into another.
    if data is None:
        data = {}
    sid = uuid.uuid4().hex
    session = SessionModel(sid=sid, session_data=data)
    session.save()
    return sid
def get(self):
    """Serve the latest 10 sessions as an Atom feed or JSON, with caching.

    `format` query param selects 'json', 'xml' or 'valid_xml'; default is
    the plain XML template. JSON responses are served from memcache when a
    cached entry exists; otherwise the entries are fetched and cached for
    ~5 minutes (absolute expiry timestamp).
    """
    memcache_key='feed_json_cache'
    cached_feed= memcache.get(memcache_key)
    format = self.request.get('format', None);
    # Absolute expiry timestamp ~5 minutes from now for memcache.set below.
    cache_exp = datetime.datetime.now() + datetime.timedelta(minutes=5)
    cache_exp_ts = time.mktime(cache_exp.timetuple())
    userUtil = UserUtils()
    if format == 'json' and cached_feed:
        logging.info('getting json from cache')
        self.response.headers['Content-Type'] = "application/json"
        # NOTE(review): the inner dict spells key 'u' twice (the outer wrapper
        # key and the timestamp) — the later timestamp value wins in the inner
        # dict literal; confirm consumers expect that.
        self.response.out.write(simplejson.dumps(cached_feed, default=lambda o: {'u':{'id':str(o.key()), 't':unicode(o.title), 'dd': LinkUtils.generate_domain_link(o.domain), 'd':o.domain, 'user': urllib.unquote(o.instaright_account), 'source': o.client, 'u': int(time.mktime(o.date.timetuple())), 'l':LinkUtils.generate_instaright_link(o.url_encode26,LinkUtils.make_title(o.title)),'a':userUtil.getAvatar(o.instaright_account),'ol':o.url, 'lc':LinkUtils.getLinkCategory(o), 'html_lc':LinkUtils.getLinkCategoryHTML(o), 'e': o.embeded, 'n': int(time.mktime(datetime.datetime.now().timetuple()))}}))
        return
    entries = SessionModel.gql('ORDER by date DESC').fetch(10)
    memcache.set(memcache_key, entries, time = cache_exp_ts)
    if not entries:
        self.response.out.write('Nothing here')
    #now = datetime.datetime.now().strftime("%Y-%m-%dT%H\:%i\:%sZ")
    if format is None or format == 'xml' or format == 'valid_xml':
        # Tuples consumed positionally by the feed templates.
        updated_entries = [ (str(o.key()), unicode(o.title), LinkUtils.generate_domain_link(o.domain), LinkUtils.generate_instaright_link(o.url_encode26,LinkUtils.make_title(o.title)),userUtil.getAvatar(o.instaright_account), o.date, LinkUtils.generate_instaright_link(o.url_encode26,LinkUtils.make_title(o.title)) ) for o in entries ]
        template_variables = { 'entries' : updated_entries, 'dateupdated' : datetime.datetime.today()}
        if format == 'valid_xml':
            path= os.path.join(os.path.dirname(__file__), 'templates/feed_valid.html')
        else:
            path= os.path.join(os.path.dirname(__file__), 'templates/feed.html')
        self.response.headers['Content-Type'] = "application/atom+xml"
        self.response.out.write(template.render(path,template_variables))
        return
    if format == 'json':
        self.response.headers['Content-Type'] = "application/json"
        self.response.out.write(simplejson.dumps(entries, default=lambda o: {'u':{'id':str(o.key()), 't':unicode(o.title), 'dd': LinkUtils.generate_domain_link(o.domain), 'd':o.domain, 'user': o.instaright_account, 'u': int(time.mktime(o.date.timetuple())), 'l':LinkUtils.generate_instaright_link(o.url_encode26,LinkUtils.make_title(o.title)), 'a':userUtil.getAvatar(o.instaright_account),'ol':o.url, 'source': o.client, 'e': o.embeded, 'lc':LinkUtils.getLinkCategory(o), 'html_lc':LinkUtils.getLinkCategoryHTML(o), 'n': int(time.mktime(datetime.datetime.now().timetuple()))}}))
        return
def yearStats(self, tDate):
    """Aggregate yearly per-domain counts and persist them as YearDomainStats.

    Args:
        tDate: date string "YYYY-MM-DD", or None/"None" for the default
            window used by SessionModel.getYearStats.

    Domain totals are cached per-day in memcache; a per-domain key guards
    against writing the same domain twice in one day.
    """
    try:
        if tDate is None or tDate == "None":
            targetDate = None
        else:
            targetDate = datetime.datetime.strptime(tDate, "%Y-%m-%d").date()
        allYearStats = SessionModel.getYearStats(targetDate)
        logging.info('yearly stats for %s ' % targetDate)
        if allYearStats:
            logging.info('retieved %s ' % len(allYearStats))
            memcache_domains_key = "domains_year" + str(datetime.datetime.now().date())
            if memcache.get(memcache_domains_key):
                logging.info('year stats::geting domain list from cache')
                allStats = memcache.get(memcache_domains_key)
            else:
                # (domain, total) pairs; O(n^2) over the stats list but the
                # fetched list is small.
                allStats = [ (stat.domain, sum([ x.count for x in allYearStats if x.domain == stat.domain])) for stat in allYearStats if stat.domain ]
                memcache.set(memcache_domains_key, allStats)
            for s in allStats:
                domain = s[0]
                count = s[1]
                memcache_year_key = 'year'+str(datetime.datetime.now().date())+domain
                if memcache.get(memcache_year_key):
                    logging.info('found entry in cache. skipping %s' % domain)
                    continue
                logging.info(' %s %s ' %(domain , count))
                ys = YearDomainStats()
                ys.domain = domain
                ys.count = count
                ys.put()
                memcache.set(memcache_year_key,count)
    # FIX: narrowed the bare `except:` (which also swallowed SystemExit /
    # KeyboardInterrupt) and corrected the message — this is the yearly task,
    # the old text said "weekly".
    except Exception:
        e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
        logging.error('Error while running yearly cron task. %s \n %s' %( e0, e1))
def post(self):
    """Broadcast a newly-saved link to live listeners and XMPP subscribers.

    Expects form fields: title, link, domain, user_id (datastore key of the
    SessionModel row), updated, e (embed flag), link_category, and a JSON
    'subscribers' list.
    """
    broadcaster = BroadcastMessage()
    userUtil = UserUtils()
    title = self.request.get('title', None)
    link = self.request.get('link', None)
    domain = self.request.get('domain', None)
    user_id = self.request.get('user_id', None)
    updated = self.request.get('updated', None)
    embeded = self.request.get('e', None)
    link_category = self.request.get('link_category', None)
    # NOTE(review): simplejson.loads(None) raises if 'subscribers' is absent —
    # confirm callers always send it.
    subscribers = simplejson.loads(self.request.get('subscribers', None))
    message = Message( title = title, link = link , domain = domain)
    # 'user' is actually the SessionModel row keyed by user_id.
    user = SessionModel.gql('WHERE __key__ = :1', db.Key(user_id)).get()
    if user is None:
        logging.info('can\'t determine user by id: %s' % user_id)
        return
    logging.info('user %s' % user.instaright_account)
    avatar = userUtil.getAvatar(user.instaright_account)
    logging.info('avatar %s' %avatar)
    messageAsJSON = [{'u':{'id':user_id, 't':title,'ol':link, 'l':LinkUtils.generate_instaright_link(user.url_encode26, LinkUtils.make_title(title)),'d':domain,'dd': LinkUtils.generate_domain_link(domain), 'a':avatar, 'u':updated, 'source': user.client, 'lc':link_category, 'html_lc':LinkUtils.getLinkCategoryHTML(user), 'e': embeded, 'n': int(time.mktime(datetime.datetime.now().timetuple()))}}]
    logging.info('sending message %s ' %messageAsJSON)
    broadcaster.send_message(messageAsJSON)
    xmpp_handler.send_message(subscribers, message)
def get(self, url_hash, title):
    """Render the article page for a base-26 url hash, canonicalizing the title.

    Redirects home when the hash is unknown; redirects to the canonical
    /article/<hash>/<title> URL when the supplied title slug differs from
    the generated one. Any exception falls through to a redirect home.
    """
    try:
        self.redirect_perm()
        self.get_user()
        url_hash = urllib.unquote(url_hash)
        logging.info('url hash: %s' % url_hash)
        logging.info('category screen_name %s' %self.screen_name)
        category=None
        if self.avatar is None:
            self.avatar='/static/images/noavatar.png'
        sessionModel = SessionModel.gql('where url_encode26 = :1', url_hash).get()
        if sessionModel is None:
            logging.info('not article with hash %s ... redirecting' % url_hash)
            self.redirect('/')
            return
        # Canonical-title redirect keeps one URL per article.
        generated_title = LinkUtils.make_title(sessionModel.title)
        if title != generated_title:
            self.redirect('/article/'+url_hash+'/'+generated_title)
            return
        instaright_link = LinkUtils.generate_instaright_link(url_hash, generated_title)
        links = Links.gql('where url_hash = :1', url_hash).get()
        userUtil = UserUtils()
        # NOTE(review): 'category' / 'sessionTitle' are computed but not passed
        # to the template below — confirm whether that is intentional.
        if links is not None:
            category = links.categories
        sessionTitle = LinkUtils.generateUrlTitle(sessionModel.title)
        template_variables = {'page_footer': PageUtils.get_footer(), 'user':self.screen_name, 'logout_url':'/account/logout', 'avatar':self.avatar,'story_avatar': userUtil.getAvatar(sessionModel.instaright_account), 'story_user': sessionModel.instaright_account, 'domain': sessionModel.domain, 'title':sessionModel.title, 'link': sessionModel.url, 'updated':sessionModel.date, 'id': str(sessionModel.key()), 'instaright_link': instaright_link, 'category': LinkUtils.getLinkCategoryHTML(sessionModel), 'dd': LinkUtils.generate_domain_link(sessionModel.domain)}
        path = os.path.join(os.path.dirname(__file__), 'templates/article.html')
        self.response.headers["Content-Type"] = "text/html; charset=utf-8"
        self.response.out.write(template.render(path, template_variables))
    except:
        e,e0 = sys.exc_info()[0], sys.exc_info()[1]
        logging.error('handled error : %s, %s ' %( e, e0 ))
        self.redirect('/')
def getBadge(self):
    """Award the '5' badge when the user was active on each of the last 4 days.

    Returns None when the badge was already granted, the addon version is
    too old, or any of the checked days has no session.
    """
    badge_days = 5
    already_assigned = UserBadge.gql('WHERE user = :1 and badge = :2', self.user, badge_days).get()
    if already_assigned is not None:
        logging.info('Already assigned 5 day usage badge. Skipping.')
        return None
    if self.version is None:
        logging.info('Older version of addon not usage badge defined!')
        return None
    today = datetime.datetime.now().date()
    # Check yesterday back through four days ago; bail on the first gap.
    for days_back in range(1, 5):
        day = today - datetime.timedelta(days=days_back)
        session = SessionModel.gql('WHERE date = :1 and instaright_account = :2', day, self.user).get()
        if session is None:
            logging.info('user %s NOT active for date %s' %(self.user, day))
            return None
        logging.info('user %s active for date %s' %(self.user, day))
    logging.info('user %s has been active in last %s' %(self.user, badge_days))
    return '5'
def getnytbadge(self):
    """Return 'ny' once today's NY-domain saves reach the threshold, else None."""
    midnight = datetime.datetime.now().date()
    total = SessionModel.gql('where domain in :1 and date >= :2 and instaright_account = :3', self.nyDomains, midnight, self.user).count()
    logging.info('site specific badger(NY): fetched stats %s' % total)
    if total < self.ny_tresshold:
        logging.info('for user %s still tresshold of %s still not reached %s' %(self.user, self.ny_tresshold, total))
        return None
    logging.info('setting ny badge for user %s ' %self.user)
    return 'ny'
def getmoviebadge(self):
    """Return 'movie' once today's movie-domain saves reach the threshold, else None."""
    midnight = datetime.datetime.now().date()
    total = SessionModel.gql('where domain in :1 and date >= :2 and instaright_account = :3', self.movieDomains, midnight, self.user).count()
    logging.info('site specific badger(movie): fetched stats %s' % total)
    if total < self.movie_tresshold:
        logging.info('for user %s still tresshold of %s still not reached %s' %(self.user, self.movie_tresshold, total))
        return None
    logging.info('setting movie badge for user %s ' %self.user)
    return 'movie'
def get(self, c):
    """Backfill model_details on the 50 most recent links of category *c*.

    For each LinkCategory row missing model_details, look up the matching
    SessionModel by url, then feed_url, then short_url, and store its key.
    """
    logging.info('updates for category %s' % c)
    lc=LinkCategory.gql('WHERE category = :1 order by updated desc', c).fetch(50)
    for l in lc:
        if hasattr(l,'model_details') and l.model_details is not None:
            logging.info('url %s already has details, skipping update' %l.url)
            continue
        logging.info('updating url details %s ' %l.url)
        s=SessionModel.gql('WHERE url = :1', l.url).get()
        if s is None:
            s=SessionModel.gql('WHERE feed_url = :1', l.url).get()
        if s is None:
            # BUG FIX: this fallback previously repeated the feed_url query
            # verbatim, so short links were never resolved. SessionModel
            # stores the original short link in short_url (see the short-link
            # transform handler), so query that field here.
            s=SessionModel.gql('WHERE short_url = :1', l.url).get()
        if s is None:
            logging.info('ERROR: no session model url for %s' % l.url)
            continue
        logging.info('session model for url %s FOUND' %l.url)
        l.model_details=s.key()
        l.put()
def post(self):
    """Resolve the posted session key and delegate aggregation up to the limit date."""
    raw_key = self.request.get("sessionKey", None)
    limit_date = self.request.get("update_limit_date", None)
    model = None
    session_key = db.Key(raw_key)
    if session_key is not None:
        model = SessionModel.gql("WHERE __key__ = :1", session_key).get()
    if model is None:
        logging.info("Can't process None session model")
        return
    self.aggregateData(model, limit_date)
def get(self, user):
    """Render the user_links template with the URLs of a user's last 1000 sessions."""
    if not user:
        logging.error('Empty user. Skipping')
        return
    account = urllib.unquote(user)
    recent = SessionModel.gql('WHERE instaright_account = :1 ORDER by date desc ' , account).fetch(1000)
    template_variables = {'links': [entry.url for entry in recent]}
    path = os.path.join(os.path.dirname(__file__), 'templates/user_links.html')
    self.response.headers["Content-type"] = "text/html"
    self.response.out.write(template.render(path, template_variables))
def login_view(request):
    """Display the login form and authenticate POSTed credentials.

    On success, creates a SessionModel token, sets it as the
    'session_token' cookie and redirects to /feed/. On failure,
    re-renders the appropriate template with invalid=True.
    """
    date = datetime.now()
    if request.method == "POST":
        form = LoginForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            user = SignUpModel.objects.filter(username=username).first()
            if user:
                # check for the password
                if check_password(password, user.password):
                    token = SessionModel(user=user)
                    token.create_token()
                    token.save()
                    response = redirect('/feed/')
                    response.set_cookie(key='session_token', value=token.session_token)
                    return response
                else:
                    # BUG FIX: removed ctypes.windll.user32.MessageBoxW — a
                    # Windows-only GUI popup in server-side view code; it
                    # crashes on non-Windows hosts and blocks the worker.
                    print("Incorrect Username or Password")
                    return render(request, 'login.html', {'invalid': True, 'Date': date})
            else:
                print("User does not exist")
                return render(request, 'index.html', {'invalid': True, 'Date': date})
        else:
            print("Error: Invalid form")
    else:
        form = LoginForm()
    # BUG FIX: render() was previously called with the context split across
    # two positional dicts; the fourth positional argument of
    # django.shortcuts.render is content_type, not extra context, so
    # {'Date': date} was silently misused. Merge into one context dict.
    return render(request, 'login.html', {'form': form, 'Date': date})
def getBadge(self):
    """Return the highest club-badge tier ('1000'/'5000'/'10000') the user qualifies for."""
    query = SessionModel.all()
    query.filter("instaright_account =", self.user)
    total = query.count(10000)
    logging.info('club badger: fetched stats %s' % total)
    for tier in (10000, 5000, 1000):
        if total >= tier:
            return str(tier)
    logging.info('club badger %s: not initialized' % self.user )
    return None
def getsportbadge(self):
    """Return 'sport' once today's sport-domain saves plus sport-category links
    reach the threshold, else None.

    Combines a domain-based session count with LinkCategory rows whose
    underlying session belongs to this user.
    """
    midnight = datetime.datetime.now().date()
    currentCount=SessionModel.gql('where domain in :1 and date >= :2 and instaright_account = :3', self.sportDomains, midnight, self.user).count()
    categoryCount = LinkCategory.gql('WHERE category in :1 and date >= :2', self.sportCategories, midnight).fetch(1000)
    categoryRefined = [ lc for lc in categoryCount if lc.model_details.instaright_account == self.user ]
    cat_user_count = len(categoryRefined)
    total = currentCount + cat_user_count
    logging.info('site specific badger(sport): fetched stats %s and category count %s' % (currentCount, cat_user_count))
    if total >= self.sport_tresshold:
        # FIX: log previously said 'news badge' inside the sport badger.
        logging.info('setting sport badge for user %s ' %self.user)
        return 'sport'
    else:
        # FIX: log the combined total actually compared against the
        # threshold, not just the domain count.
        logging.info('for user %s still tresshold of %s still not reached %s' %(self.user, self.sport_tresshold, total))
        return None
def get(self):
    """Page through one day of sessions and enqueue a consolidation task per page.

    Defaults to yesterday when no 'date' parameter is given; subsequent
    pages pass the datastore cursor along to the task.
    """
    date_param = self.request.get("date", None)
    if date_param is None:
        target_date = datetime.datetime.now().date() - datetime.timedelta(days=1)
    else:
        target_date = datetime.datetime.strptime(date_param, "%Y-%m-%d").date()
    day_after = target_date + datetime.timedelta(days=1)
    query = SessionModel.gql(" WHERE date >= :1 and date < :2 ", target_date, day_after)
    batch = query.fetch(1000)
    logging.info("initial fetch got: %s" % len(batch))
    taskqueue.add(queue_name="data-consolidation", url="/user_consolidation", params={"date": target_date})
    logging.info("added to queue task")
    # A full page means there may be more rows: continue from the cursor.
    while len(batch) == 1000:
        cursor = query.cursor()
        query = SessionModel.gql(" WHERE date >= :1 and date < :2 ", target_date, day_after).with_cursor(cursor)
        batch = query.fetch(1000)
        logging.info("fetch got: %s" % len(batch))
        taskqueue.add(
            queue_name="data-consolidation", url="/user_consolidation", params={"date": target_date, "last_cursor": cursor}
        )
        logging.info("added to queue task")
def getBadge(self):
    """Return the highest daily-volume badge ('25'..'105') earned today, else None."""
    midnight = datetime.datetime.now().date()
    daily_total = SessionModel.gql('WHERE date >= :1 and instaright_account = :2', midnight, self.user).count()
    logging.info('current daily user count : %s -> %s' %(self.user, daily_total))
    for level in (105, 65, 55, 25):
        if daily_total >= level:
            return str(level)
    logging.info('speed limit badge %s: not initialized' %self.user)
    return None
def get(self):
    """Re-derive and persist url_encode26 for sessions with url_counter_id <= 6000."""
    encoder = EncodeUtils()
    query = SessionModel.all()
    query.filter("url_counter_id <= " ,6000)
    query.order("-url_counter_id")
    rows = query.fetch(1000)
    logging.info("fetch %s " %query.count())
    for row in rows:
        counter = row.url_counter_id
        obfuscated = encoder.encode(counter)
        enbased = encoder.enbase(obfuscated)
        logging.info("cnt: %s => encoded: %s enbased %s (before %s)" % (str(row.url_counter_id), str(obfuscated), enbased, row.url_encode26 ))
        row.url_encode26 = enbased
        row.put()
def _update_session(self, data: dict, sid: str):
    """Replace the stored data of the session identified by *sid*.

    Args:
        data: (dict) Information that should be stored in the database
        sid: (str) Session id where the data should be stored on

    Does nothing when no session with that id exists.
    """
    matches = SessionModel.objects(sid=sid)
    if not matches:
        return
    record = matches[0]
    record.session_data = data
    record.save()
def get(self):
    """Enqueue a transform task for each feedproxy link saved before today.

    A per-link, per-day memcache flag prevents re-queueing the same link.
    """
    proxy_domain = "feedproxy.google.com"
    # 00:00:00
    today = datetime.datetime.now().date()
    rows = SessionModel.gql("WHERE domain= :1 and date < :2", proxy_domain, today).fetch(5000)
    logging.info("fetched feedproxy links %s" % len(rows))
    for row in rows:
        cache_key = "link_transform" + str(row.key()) + "_" + str(datetime.datetime.now().date())
        if memcache.get(cache_key):
            logging.info("allready processed link %s" % row.url)
            continue
        logging.info("transforming link %s" % row.url)
        taskqueue.add(queue_name="default", url="/link/transform/feed", params={"key": row.key()})
        memcache.set(cache_key, 1)
def get(self):
    """Backfill model_details for every known category's 50 most recent links.

    For each LinkCategory row missing model_details, resolve the matching
    SessionModel by url, then feed_url, then short_url, and store its key.
    """
    allData=LinkCategory.getAll()
    all_categories= [ c.category for c in allData if c is not None ]
    uniq_categories = set(all_categories)
    for c in uniq_categories:
        logging.info('updates for category %s' % c)
        lc=LinkCategory.gql('WHERE category = :1 order by updated desc', c).fetch(50)
        for l in lc:
            if hasattr(l,'model_details') and l.model_details is not None:
                #logging.info('url %s already has details, skipping update' %l.url)
                continue
            logging.info('updating url details %s ' %l.url)
            s=SessionModel.gql('WHERE url = :1 order by date desc', l.url).get()
            if s is None:
                logging.info('no session model for url %s trying feed url' %l.url)
                s=SessionModel.gql('WHERE feed_url = :1', l.url).get()
            if s is None:
                logging.info('no session model for url %s trying shprt url' %l.url)
                # BUG FIX: the "short url" fallback previously repeated the
                # feed_url query verbatim, so short links never resolved.
                # SessionModel keeps the original short link in short_url
                # (see the short-link transform handler).
                s=SessionModel.gql('WHERE short_url = :1', l.url).get()
            if s is None:
                logging.info('ERROR: no session model url for %s' % l.url)
                continue
            l.model_details=s.key()
            l.put()
def get(self):
    """Enqueue an expand task for each bit.ly link saved before today.

    A per-link, per-day memcache flag prevents re-queueing the same link.
    """
    shortener_domain = "bit.ly"
    # 00:00:00
    today = datetime.datetime.now().date()
    rows = SessionModel.gql("WHERE domain= :1 and date < :2", shortener_domain, today).fetch(5000)
    # TODO identify other shortners
    logging.info("fetched short links %s" % len(rows))
    for row in rows:
        cache_key = "link_transform" + str(row.key()) + "_" + str(datetime.datetime.now().date())
        if memcache.get(cache_key):
            logging.info("allready processed link %s" % row.url)
            continue
        logging.info("transforming link %s" % row.url)
        taskqueue.add(queue_name="link-queue", url="/link/transform/short", params={"key": row.key()})
        memcache.set(cache_key, 1)
def get(self, domain):
    """Serve the 10 newest sessions for *domain* as JSON.

    Only format=json is materialized; any other format returns an empty
    JSON array placeholder.
    """
    format=self.request.get('format',None)
    if domain is None or len(domain) == 0:
        logging.info('not category in request. return empty')
        return
    if format == 'json':
        logging.info('domain %s json feed' % domain)
        userUtil = UserUtils()
        entries = SessionModel.gql('WHERE domain = :1 order by date desc', domain).fetch(10)
        self.response.headers['Content-Type'] = "application/json"
        #TODO insert categories for domain's view
        # NOTE(review): generate_instaright_link is called with a third
        # argument (o.url) here, unlike the two-argument calls elsewhere in
        # this file — confirm the helper accepts it.
        self.response.out.write(simplejson.dumps(entries, default=lambda o: {'u':{'id':str(o.key()), 't':unicode(o.title), 'l': LinkUtils.generate_instaright_link(o.url_encode26, LinkUtils.make_title(o.title), o.url), 'user': urllib.unquote(o.instaright_account), 'source': o.client, 'html_lc':LinkUtils.getLinkCategoryHTML(o), 'd': o.domain, 'lc': LinkUtils.getLinkCategory(o), 'dd':LinkUtils.generate_domain_link(o.domain), 'u': o.date.strftime("%Y-%m-%dT%I:%M:%SZ"), 'a':userUtil.getAvatar(o.instaright_account),'ol':o.url}}))
        return
    # Fallback for unsupported formats: empty JSON array.
    self.response.headers['Content-Type'] = "application/json"
    self.response.out.write("[{}]")
def dailyStats(self, tDate):
    """Compute and persist per-domain stats for one day.

    Args:
        tDate: date string "YYYY-MM-DD", or None/"None" for the default
            window used by SessionModel.getDailyStats.
    """
    try:
        if tDate is None or tDate == "None":
            targetDate = None
        else:
            targetDate = datetime.datetime.strptime(tDate, "%Y-%m-%d")
        allStats = SessionModel.getDailyStats(targetDate)
        logging.info('daily stats for %s ' % targetDate )
        if allStats:
            logging.info('retieved %s ' % len(allStats))
            self.calculateStatsPerDomain(allStats,'daily', targetDate)
    # FIX: narrowed the bare `except:` — it also swallowed SystemExit and
    # KeyboardInterrupt; the logging behavior is unchanged.
    except Exception:
        e0, e1 = sys.exc_info()[0], sys.exc_info()[1]
        logging.error('Error while running daily cron task. %s. More info %s' % (e0, e1))
def get(self):
    """Render the user dashboard: recent links, weekly score and badges.

    Requires the cookie-derived identity set by get_user(); redirects to the
    login flow when missing. The weekly score is cached in memcache for a day.
    """
    #redirect from appengine domain
    self.redirect_perm()
    self.get_user()
    if self.user_uuid is None or len(str(self.user_uuid)) == 0 or self.screen_name is None or self.user_detail_key is None:
        logging.info('No cookies, redirecting to home page')
        self.redirect('/?redirect=/user/dashboard&show_login=1')
        return
    logging.info('user: %s' %self.instaright_account)
    sessions = SessionModel.gql('WHERE instaright_account = :1 ORDER by date desc ' , self.instaright_account).fetch(self.link_batch)
    score = 0
    links = None
    if sessions is not None:
        links = [ s for s in sessions if s is not None ]
    ud_key=db.Key(self.user_detail_key)
    logging.info('user detail key %s' % self.user_detail_key)
    template_variables=[]
    now=datetime.datetime.now().date()
    #start_of_week= time.asctime(time.strptime('%s %s 1' %(now.year, now.isocalendar()[1]), '%Y %W %w'))
    # Monday of the current ISO week, via the %W/%w strptime trick.
    start_of_week= datetime.datetime.strptime('%s %s 1' %(now.year, now.isocalendar()[1]), '%Y %W %w')
    memcache_key='user_'+self.user_detail_key+'_score'
    cached_score=memcache.get(memcache_key)
    if cached_score is not None:
        logging.info('got score from cache( %s ): %s' %( memcache_key, cached_score ))
        score=cached_score
    else:
        # Sum this week's daily scores and cache the result for ~1 day.
        logging.info('parameters: start of week %s now %s for user_key %s ' % ( start_of_week,now, ud_key))
        score_entities = ScoreUsersDaily.gql('WHERE user = :1 and date >= :2', ud_key, start_of_week).fetch(100)
        #score_entities = ScoreUsersDaily.gql('WHERE user = :1 and date <= :2 and date >= :3', ud_key, now , start_of_week).fetch(100)
        logging.info('got %s score entities' % len(score_entities))
        if score_entities is not None:
            scores = [ s.score for s in score_entities if s is not None ]
            score=sum(scores)
        logging.info('calculated score : %s' % score )
        exp_ts=time.mktime((datetime.datetime.now() + datetime.timedelta(days=1)).timetuple())
        memcache.set(memcache_key, score, time=exp_ts)
    badges = None
    all_badges = UserBadge.gql('WHERE user = :1 order by date desc', self.instaright_account).fetch(1000)
    if all_badges is not None:
        # De-duplicate to (badge, description) pairs.
        badges = set([ (b.badge, b.badge_property.badge_desc) for b in all_badges if b is not None and b.badge_property is not None ])
    template_variables = {'user':self.screen_name, 'avatar':self.avatar,'instaright_account':self.instaright_account,'facebook_token':self.facebook_oauth_token,'facebook_profile': self.facebook_profile, 'twitter_profile': self.twitter_profile, 'twitter_token': self.twitter_oauth_token, 'google_profile': self.google_profile, 'google_token':self.google_oauth_token, 'picplz_profile': self.picplz_name, 'picplz_token': self.picplz_oauth_token, 'evernote_profile': self.evernote_name, 'evernote_token': self.evernote_oauth_token, 'links':links, 'score': score, 'visible_items_num': self.link_batch, 'badges': badges,'logout_url':'/account/logout'}
    logging.info('templates %s' %template_variables)
    path= os.path.join(os.path.dirname(__file__), 'templates/user_info.html')
    self.response.headers["Content-type"] = "text/html"
    self.response.out.write(template.render(path,template_variables))
def _get_session(self, sid: str) -> dict:
    """Load the stored data for a session.

    Args:
        sid: Session id

    Return:
        (dict) A copy of the session's data with the id added under the
        '_sid' key, or an empty dict (without '_sid') when no session
        with that id exists.
    """
    model = SessionModel.objects(sid=sid)
    if not model:
        return {}
    # Copy so callers can't mutate the stored document in place.
    model = dict(model[0].session_data)
    model.update({'_sid': sid})
    return model
def post(self):
    """Queue a category-detection task for every stored link of a domain.

    A memcache flag (expiring in 2 days) marks the domain as processed so
    repeated posts within that window are no-ops.
    """
    domain = self.request.get('domain',None)
    if domain is None:
        logging.info('no domain in request')
        # BUG FIX: previously fell through and queried the datastore with
        # domain=None instead of stopping.
        return
    logging.info('fetching categories for domain %s' % domain)
    memcache_key='domain_lookup_%s' % domain
    logging.info('checking cache for key %s' %memcache_key)
    # BUG FIX: the condition was inverted — it returned when the key was
    # ABSENT (treating never-seen domains as "already processed") and
    # re-processed domains that WERE cached.
    if memcache.get(memcache_key) is not None:
        logging.info('domain already processed skipping. key %s expires %s' % (memcache_key, memcache.get(memcache_key)))
        return
    # Mark as processed for 2 days (absolute expiry timestamp).
    next_week=datetime.datetime.now().date() + datetime.timedelta(days=2)
    next_week_ts=time.mktime(next_week.timetuple())
    memcache.set(memcache_key,1,time=next_week_ts)
    sessions = SessionModel.gql('WHERE domain = :1', domain).fetch(1000)
    for s in sessions:
        logging.info('task: determine categories for url %s ( domain: %s)' % (s.url, domain))
        taskqueue.add(queue_name='category-queue', url='/link/category/task', params={'url':s.url, 'domain':domain})
def __init__(self, controller, response_factory, key, **params):
    """Configure the proxy controller from a stored session, then restart it.

    Loads the session named by params['session_id'], applies its
    upstreamHost/upstreamPort to the controller, and chains
    stop -> start -> start_session -> respond as deferred callbacks.
    """
    ServerOperation.__init__(self, controller, response_factory, key)
    self.params = params
    # Raises KeyError if the caller omits session_id.
    session_id = params['session_id']
    self.session = SessionModel({"_id": session_id})
    controller.upstream_host = self.session["upstreamHost"]
    self.response.add_message("Upstream Host set to: " + str(controller.upstream_host))
    controller.upstream_port = self.session["upstreamPort"]
    self.response.add_message("Upstream Port set to: " + str(controller.upstream_port))
    stop_op = StopProxy(controller, response_factory, key)
    start_op = StartProxy(controller, response_factory, key)
    # Restart the proxy before binding the session and responding.
    d = self.addCallback(stop_op.stop).addCallback(start_op.start)
    d.addCallback(self.start_session)
    d.addCallback(self.respond)
def post(self):
    """Expand a shortened link's URL in place and refresh its stats.

    Skips the row when expansion fails or resolves to an iTunes scheme;
    otherwise moves the short URL into short_url and stores the long one.
    """
    raw_key = self.request.get('key')
    entity = SessionModel.gql('WHERE __key__ = :1', db.Key(raw_key)).get()
    util = LinkUtils()
    expanded = util.getShortOriginalUrl(entity.url)
    if expanded is None:
        logging.info('could not retrieve long link.skipping')
        return
    logging.info('expanded url: %s' % expanded)
    if expanded.startswith('itms://'):
        logging.info('Skipping itunes item: %s' % expanded)
        return
    entity.short_url = entity.url
    entity.url = expanded
    entity.domain = RequestUtils.getDomain(expanded)
    entity.put()
    util.updateStats(entity)
def post(self):
    """Forward a saved session to the user's enabled external services.

    Expects 'user_details_key' and 'session_key' form fields; looks up the
    user's service tokens and pushes the session to Evernote (when enabled
    and a selection exists) and picplz (when the session is an image).
    """
    user_details_key = self.request.get("user_details_key", None)
    if user_details_key is None:
        logging.info("user details key not defined ... skipping services submit")
        return
    session_key = self.request.get("session_key", None)
    if session_key is None:
        logging.info("session key not defined ... skipping services submit")
        return
    session = SessionModel.gql("WHERE __key__ = :1", db.Key(session_key)).get()
    # BUG FIX: session was never checked for None before its attributes
    # (session.selection / session.isImage()) were accessed below, which
    # raised AttributeError for stale keys.
    if session is None:
        logging.info("skipping service submit no session found")
        return
    user_token = UserTokens.gql("WHERE user_details = :1", db.Key(user_details_key)).get()
    if user_token is None:
        logging.info("skipping service submit no tokens found")
        return
    service_util = ServiceUtil()
    evernote_token = user_token.evernote_token
    evernote_token_additional_info = user_token.evernote_additional_info
    evernote_enabled = user_token.evernote_enabled
    flickr_token = user_token.flickr_token
    flickr_token_additional_info = user_token.flickr_additional_info
    flickr_enabled = user_token.flickr_enabled
    facebook_token = user_token.facebook_token
    facebook_enabled = user_token.facebook_enabled
    twitter_token = user_token.twitter_token
    twitter_secret = user_token.twitter_secret
    twitter_enabled = user_token.twitter_enabled
    picplz_token = user_token.picplz_token
    picplz_enabled = user_token.picplz_enabled
    # Evernote only receives sessions that carry an explicit text selection.
    if (
        evernote_token is not None
        and evernote_enabled == True
        and session.selection is not None
        and session.selection != "None"
    ):
        service_util.send_to_evernote(urllib.unquote(evernote_token), session, evernote_token_additional_info)
    if picplz_token is not None and session.isImage():
        service_util.send_to_picplz(picplz_token, session)
def domainScore(cls, user, domain):
    """Return the configured bonus when *user* saves *domain* for the first time.

    A memcache flag per (user, domain) short-circuits the datastore lookup
    on subsequent calls; returns 0 for repeat visits or missing input.
    """
    score = 0
    if user is None or domain is None:
        logging.info('domain score not enpugh data ... skipping')
        return score
    logging.info('domain score calc for for user %s' %user)
    config = ConfigParser.ConfigParser()
    config.read(os.path.split(os.path.realpath(__file__))[0] + '/../properties/score.ini')
    new_domain_points = int(config.get('domain_points', 'new_domain'))
    cache_key = 'visit_' + user + '_domain_' + domain
    seen = memcache.get(cache_key)
    if seen is None:
        seen = SessionModel.gql('WHERE domain = :1 and instaright_account = :2', domain, user).get()
        if seen is None:
            logging.info('new domain %s score for %s ' %(domain, user))
            score = new_domain_points
            memcache.set(cache_key, '1')
    else:
        logging.info('user %s already visited domain %s ' %(user, domain))
    return score
def post(self):
    """Resolve a feedproxy link to its original URL in place and refresh stats.

    Moves the proxy URL into feed_url, stores the resolved URL and its
    domain, then updates link statistics.
    """
    raw_key = self.request.get('key', None)
    if raw_key is None:
        logging.info('error key has not been specified')
        return
    entity_key = db.Key(raw_key)
    if entity_key is None:
        logging.info('error not valid key')
        return
    entity = SessionModel.gql('WHERE __key__ = :1', entity_key).get()
    logging.info('feedproxt url %s' % unicode(entity.url))
    util = LinkUtils()
    original = util.getFeedOriginalUrl(entity.url)
    if original is None:
        logging.info('could not fetch original url. skipping.')
        return
    logging.info('original url %s' % original)
    entity.domain = RequestUtils.getDomain(original)
    entity.feed_url = entity.url
    entity.url = original
    entity.put()
    util.updateStats(entity)
def get(self):
    """Return the next page of a user's links grouped by day, as JSON.

    'offset' is a page index multiplied by link_batch; the response is a
    list of (date-string, [link dicts]) pairs sorted by date descending.
    """
    logging.info('fetching more user links ...')
    cookie = self.request.get('cookie', None)
    offset = Cast.toInt(self.request.get('offset', None), 0)
    logging.info('row offset %s' % offset)
    # Page index -> row offset.
    offset = offset * self.link_batch
    ud = UserUtils.getUserDetailsFromCookie(cookie)
    sessions = SessionModel.gql('WHERE instaright_account = :1 ORDER by date desc ', ud.instaright_account ).fetch(self.link_batch,offset)
    if sessions is None or len(sessions) == 0:
        logging.info('returned no sessions for offset %s' %offset)
        self.response.headers["Content-type"] = "application/json"
        self.response.out.write('{}')
        return
    logging.info('fetched %s sessions for user %s' %(len(sessions), ud.instaright_account))
    d = {}
    # groupby only groups adjacent items — correct here because the query
    # already orders sessions by date desc.
    for d_te, j in itertools.groupby(sessions, key= lambda s: s.date.date()):
        ss = [ {'t':ss.title,'l':ss.url,'d':ss.domain,'h':ss.url_hash} for ss in list(j) ]
        d[str(d_te)] = ss
    import operator
    #order by dates desc
    dates_sorted=sorted(d.iteritems(), key=operator.itemgetter(0), reverse=True)
    self.response.headers["Content-type"] = "application/json"
    self.response.out.write(simplejson.dumps(dates_sorted))
def countDailySessions(self, tDate):
    """Persist a StatsModel row with the link and distinct-user counts for one day.

    Args:
        tDate: date string "YYYY-MM-DD"; None/"None" means yesterday.
    """
    try:
        if tDate is None or tDate == 'None':
            today = datetime.date.today()
            logging.info('Started session count for %s' % today)
            targetDate=datetime.date.today() - datetime.timedelta(days=1)
        else:
            targetDate = datetime.datetime.strptime(tDate, "%Y-%m-%d").date()
        logging.info('targetDate: %s', targetDate)
        dailyData=SessionModel.getDailyStats(targetDate)
        #totalCount=SessionModel.countAll()
        stats=StatsModel()
        if dailyData:
            stats.totalDailyNumber=len(dailyData)
            # Distinct users with at least one session that day.
            users = [ d.instaright_account for d in dailyData if d.instaright_account is not None ]
            user_set = set(users)
            stats.totalUserNumber = len(user_set)
            stats.date=targetDate
            stats.put()
            logging.info('Link volume for %s : link= %s users=%s' % (tDate , stats.totalDailyNumber , stats.totalUserNumber ))
    # FIX: narrowed the bare `except:` — it also swallowed SystemExit and
    # KeyboardInterrupt; the logging behavior is unchanged.
    except Exception:
        e = sys.exc_info()[1]
        logging.error('Error while running stats cron task. %s' % e)
def _destroy_session(self, sid: str) -> None:
    """Delete session from the database.

    Args:
        sid: Id of the session to remove.

    Note: the previous `-> dict` annotation was wrong — the delete() result
    is discarded and this returns None.
    """
    SessionModel.objects(sid=sid).delete()