Example #1
 def page2search(self, link, text, page_type, date, all_tags = None):
     if all_tags is None:
         all_tags = memcache.get('previous_searches')
     found_tags = []
     if link != '' and text != '' and all_tags is not None:
         logging.info('Searching tags for link: ' + link)
         for tag in all_tags:
             if re.search('\\b'+tag[0]+'\\b', text.lower()) or re.search('\\b'+tag[0].replace('_', ' ')+'\\b', text.lower()):
                 found_tags.append(tag[0])
                 logging.info('Link: ' + link + ' found tag: ' + tag[0])
                 tag_links = memcache.get('search_' + tag[0])
                 if len(text) > 200:
                     element = [link, page_type, date, text[:200]+'...']
                 else:
                     element = [link, page_type, date, text]
                 if tag_links is None:
                     memcache.add('search_' + tag[0], [element])
                 else:
                     found = False
                     for ele in tag_links:
                         if element[0] == ele[0]:
                             ele[3] = element[3]
                             found = True
                     if not found:
                         tag_links.append(element)
                     memcache.replace('search_' + tag[0], tag_links)
     return found_tags
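A note on the regex above: each tag is interpolated directly into the pattern, so a tag containing regex metacharacters (for example "c++") would make re.search raise. A minimal standalone helper using re.escape and raw strings performs the same whole-word check more defensively (a sketch, not part of the original project; like the code above, it assumes tags begin and end with word characters):

import re

def tag_matches(tag, text):
    """True if tag, or tag with underscores replaced by spaces, appears as a whole word in text."""
    text = text.lower()
    for variant in (tag, tag.replace('_', ' ')):
        # re.escape keeps metacharacters inside the tag from being treated as regex syntax
        if re.search(r'\b' + re.escape(variant) + r'\b', text):
            return True
    return False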
Example #2
File: lostab.py Project: lostab/log
	def post(self):
		key = self.request.get('key')
		if users.is_current_user_admin() and key:
			try:
				post = Post.all().order("-__key__").filter('__key__ =', db.Key(key)).get()
				if post:
					title = self.request.get('title').strip()
					content = self.request.get('content').strip()
					if content:
						post.title = title
						post.content = content
						post.put()
						mempost = memcache.get("post-" + key)
						if mempost is not None:
							memcache.replace("post-" + key, post)
						memposts = memcache.get("posts")
						if memposts is not None:
							for item in memposts:
								if item.key().__str__() == key:
									item.title = title
									item.content = content
							memcache.replace("posts", memposts)
					self.redirect('/post/' + key)
				else:
					self.redirect('/')
			except:
				self.redirect('/')
		else:
			self.redirect('/')
Example #3
		def get(self):
			user=users.get_current_user()
			smile=models.Smiley.get_by_id(int(self.request.get("id"))) 
			if smile:
				eVote=0
				existFlag=None  # initialise here so the check further down is safe when no user is signed in
				if user:
					existVote=memcache.get("user"+user.nickname()+"smile"+str(smile.key().id()))
					if existVote is None:
						existVoteQ = models.db.GqlQuery("SELECT * FROM Votes WHERE voter = :1 AND smiley_id = :2 ",user,smile.key().id())		
						existVote=existVoteQ.get()
					if existVote:
						if not memcache.set("user"+user.nickname()+"smile"+str(smile.key().id()),existVote,3600):
							logging.error("Memcache set failed. Checking Vote")
						eVote=existVote.vote
					existFlag=memcache.get("user"+user.nickname()+"smile"+str(smile.key().id())+"flag")
					if existFlag is None:
						existFlagQ = models.db.GqlQuery("SELECT * FROM Flags WHERE voter = :1 AND smiley_id = :2 ",user,smile.key().id())		
						existFlag=existFlagQ.get()
					if existFlag:
						if not memcache.set("user"+user.nickname()+"smile"+str(smile.key().id())+"flag",existFlag,3600):
							logging.error("Memcache set failed. Adding Flag")				
				exist='neutral'
				eFlag=0
				if existFlag:
					eFlag=existFlag.flag
				if eVote>0:
					exist='up'
				elif eVote<0:
					exist='down'
				self.response.out.write("{'ups':'"+str(smile.ups)+"','downs':'"+str(smile.downs)+"','myVote':'"+str(exist)+"','flag':'"+str(eFlag)+"'}")
Example #4
def Popular():
    """
    RETURN POPULAR ARTISTS FROM MTV WITH PANDORA URL AND ARTIST IMAGERY
    """
    if memcache.get('popular'):
        return memcache.get('popular')
    else:
        url = 'https://www.mtv.com/artists/popular/'
        html = urlfetch.fetch(url, validate_certificate=False, headers={
                              'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.65 Safari/537.31'}).content
        s = BeautifulSoup(html)
        html = []
        artists = s.findAll('div', {'class': 'title multiline'})
        for i in artists:
            artist = i.text
            html.append(i.text)
        for i in html[:30]:
            try:
                explorer = json.loads(getArtist(
                    i))['artistExplorer']['similar']
                for similar in explorer:
                    html.append(similar['@name'])
            except:
                pass
        d = []
        for i in list(set(html)):
            try:
                explorer = json.loads(getArtist(i))['artistExplorer']
                d.append({'artist': i, 'art': explorer[
                         '@artUrl'], 'pandora_url': explorer['@shareUrl']})
            except:
                pass
        memcache.set('popular', {'data': d})
        return {'data': d}
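Popular() checks and reads the cache with two separate memcache.get calls and stores the result with no expiry, so it only refreshes when memcache happens to evict the key. The caching shell can be reduced to a single lookup plus an explicit TTL; in the sketch below the scraping body is left as a hypothetical _build_popular_artists() helper, and the one-hour TTL is an assumption rather than something the original specifies:

def Popular():
    cached = memcache.get('popular')
    if cached:
        return cached
    data = {'data': _build_popular_artists()}  # hypothetical name for the MTV/Pandora scraping code above
    memcache.set('popular', data, 3600)        # assumed one-hour refresh interval
    return data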
Example #5
File: lostab.py Project: lostab/log
	def get(self):
		site = memcache.get("site")
		if site is None:
			site = Site.all().get()
			memcache.add("site", site)
		if not site:
			self.redirect('/config')
		else:
			site.url = site.url.rstrip("/")
			posts = memcache.get("posts")
			if posts is None:
				query = Post.gql('ORDER BY time DESC, __key__ ASC')
				posts = query.fetch(PAGESIZE)
			
			for item in posts:
				item.content = re.sub(ur'<code(?P<index>.*)>(?P<content>[\s\S]*)</code(?P=index)>', lambda m: '<code>' + cgi.escape(m.group('content')) + '</code>', item.content)
				item.time += timedelta(hours=+8)

			template_values = {
				'site': site,
				'posts': posts
			}

			path = os.path.join(os.path.dirname(__file__), 'template/feed.xml')
			self.response.headers['Content-type'] = 'application/xml;charset=UTF-8'
			self.response.out.write(template.render(path, template_values))
Example #6
 def member(self):
     if not hasattr(self, "_member"):
         ip = False
         member = False
         if 'X-Real-IP' in self.request.headers:
             ip = self.request.headers['X-Real-IP']
         else:
             ip = self.request.remote_addr
         cookies = Cookies(self, max_age = 86400 * 14, path = '/')
         if 'auth' in cookies:
             auth = cookies['auth']
             member_num = memcache.get(auth)
             if member_num > 0:
                 member = memcache.get('Member_' + str(member_num))
                 if member is None:
                     q = db.GqlQuery("SELECT * FROM Member WHERE num = :1", member_num)
                     if q.count() == 1:
                         member = q[0]
                         memcache.set('Member_' + str(member_num), member, 86400 * 14)
                 if member:
                     member.ip = ip
             else:
                 q = db.GqlQuery("SELECT * FROM Member WHERE auth = :1", auth)
                 if (q.count() == 1):
                     member_num = q[0].num
                     member = q[0]
                     memcache.set(auth, member_num, 86400 * 14)
                     memcache.set('Member_' + str(member_num), member, 86400 * 14)
                     member.ip = ip
         self._member = member
     return self._member
Example #7
    def test_delete_rules(self):
        rules = [
            ('topic_1', 'name_1', True),
            ('topic_1', 'name_2', True),
            ('topic_2', 'name_1', False),
        ]
        user_acl = AclRules.insert_or_update(area='test', user='******', rules=rules)

        # Fetch the record again, and compare.
        user_acl = AclRules.get_by_area_and_user('test', 'test')
        self.assertEqual(user_acl.rules, rules)

        key_name = AclRules.get_key_name('test', 'test')
        acl = Acl('test', 'test')

        cached = memcache.get(key_name, namespace=AclRules.__name__)
        self.assertEqual(key_name in _rules_map, True)
        self.assertEqual(cached, _rules_map[key_name])

        user_acl.delete()
        user_acl2 = AclRules.get_by_area_and_user('test', 'test')

        cached = memcache.get(key_name, namespace=AclRules.__name__)
        self.assertEqual(user_acl2, None)
        self.assertEqual(key_name not in _rules_map, True)
        self.assertEqual(cached, None)
Example #8
    def _test_with_lock(self, lock_timeout, update_fsa_fxn=None, api_json=None):
        expected_json = 'fake service account json'
        if not api_json:
            api_json = expected_json
        caller_uid = self._random_subject_id()
        real_user_id = self._random_subject_id()

        ftvm = FenceTokenVendingMachine(self._mock_fence_api(api_json), self._mock_sam_api(real_user_id, "*****@*****.**"),
                                        self._mock_oauth_adapter("fake_token"), provider_name)

        TokenStore.save(real_user_id, "fake_refresh_token", datetime.datetime.now(), "*****@*****.**", provider_name)

        fsa_key = ftvm._fence_service_account_key(real_user_id)
        stored_fsa = FenceServiceAccount(key_json="expired json",
                                         expires_at=datetime.datetime.now() - datetime.timedelta(days=5),
                                         update_lock_timeout=lock_timeout,
                                         key=fsa_key)
        stored_fsa.put()

        self.assertIsNone(memcache.get(namespace=provider_name, key=caller_uid))

        if update_fsa_fxn:
            threading.Thread(target=update_fsa_fxn, args=(fsa_key, expected_json)).start()

        service_account_json = ftvm.get_service_account_key_json(
            UserInfo(caller_uid, "*****@*****.**", "fake_token_too", 10))

        self.assertEqual(expected_json, service_account_json)
        self.assertIsNotNone(memcache.get(namespace=provider_name, key=caller_uid))
        stored_fsa = fsa_key.get()
        self.assertIsNotNone(stored_fsa)
        self.assertIsNone(stored_fsa.update_lock_timeout)
        self.assertEqual(expected_json, stored_fsa.key_json)
        self.assertGreater(stored_fsa.expires_at, datetime.datetime.now())
Example #9
    def test_no_service_account(self):
        expected_json = 'fake service account json'
        caller_uid = self._random_subject_id()
        real_user_id = self._random_subject_id()

        ftvm = FenceTokenVendingMachine(self._mock_fence_api(expected_json),
                                        self._mock_sam_api(real_user_id, "*****@*****.**"),
                                        self._mock_oauth_adapter("fake_token"), provider_name)

        TokenStore.save(real_user_id, "fake_refresh_token", datetime.datetime.now(), "*****@*****.**", provider_name)

        self.assertIsNone(memcache.get(namespace=provider_name, key=caller_uid))
        fsa_key = ftvm._fence_service_account_key(real_user_id)
        self.assertIsNone(fsa_key.get())

        service_account_json = ftvm.get_service_account_key_json(
            UserInfo(caller_uid, "*****@*****.**", "fake_token_too", 10))

        self.assertEqual(expected_json, service_account_json)
        self.assertIsNotNone(memcache.get(namespace=provider_name, key=caller_uid))
        stored_fsa = fsa_key.get()
        self.assertIsNotNone(stored_fsa)
        self.assertIsNone(stored_fsa.update_lock_timeout)
        self.assertEqual(expected_json, stored_fsa.key_json)
        self.assertGreater(stored_fsa.expires_at, datetime.datetime.now())
Example #10
    def test_updates_last_updated_when_popped(self):
        old_cache_val = memcache.get(self.datastore_dict.cache_key)

        self.datastore_dict._pop('key')

        new_cache_val = memcache.get(self.datastore_dict.cache_key)
        self.assertEqual(old_cache_val + 1, new_cache_val)
Example #11
def test_memcache():
    assert memcache.get(key1) is None
    memcache.set(key1, data1)
    assert memcache.get(key1) == data1
    assert memcache.set(key1, data1, timeout)
    time.sleep(timeout + timeout_tolerance)
    assert memcache.get(key1) is None
Example #12
	def post(self):
		# Gets the parameters.
		user_id = self.request.get('user_id')
		timestamp = self.request.get('timestamp')
		index = int(self.request.get('resource_index'))

		memcache.set(get_key(self, 'counter'), index)

		# Moves to the next phase.
		if get_phase(self) == 3:
			memcache.set(get_key(self, 'counter'), 0)
			memcache.incr(get_key(self, 'phase'))

		# Terminates the process if canceled.
		if get_phase(self) != 4:
			logging.info('This request was canceled.')
			return

		if index < int(memcache.get(get_key(self, 'resource_num')) or 0):
			# Fetches a resource file.
			resource = fetch_content(memcache.get(get_resources_key(self, index)))

			# Updates download size.
			bytes = int(memcache.get(get_key(self, 'resources_bytes')) or 0)
			bytes += len(resource)
			memcache.set(get_key(self, 'resources_bytes'), bytes)

			taskqueue.add(url='/smartfm/memento/tasks/fetch', params={'user_id': user_id, 'timestamp': timestamp, 'resource_index': index + 1})

		else:
			# Executes the next task to move to the next phase.
			taskqueue.add(url='/smartfm/memento/tasks/divide', params={'user_id': user_id, 'timestamp': timestamp})
Example #13
File: main.py Project: zuoang/g2fts
    def get(self):
        try:
            mycfg=memcache.get("keyword")
            if not mycfg:
                return
            lastpost=""
            try:
                result = urlfetch.fetch(url="https://plus.google.com/"+self.request.get("id")+"/posts") 
            except Exception,e:
                logging.error("Error fetching last post of %s,Exception:%s" % (self.request.get("id"),str(e)))
                return
            cnt= result.content.split("%s\">" % (mycfg.encode("utf-8")))[:2]
            if len(cnt)>=2:
                cnt= cnt[1].split("</div>")[:1]
                if len(cnt)>=1:
                    lastpost=cnt[0].decode('utf-8')[:200] # keep only the first 200 characters

            conf=self.request.get("id")
            #logging.info("lastpost of %s (%s) memcache:(%s)" % (conf,lastpost,memcache.get(conf)))
            
            cnt=lastpost
            oldcnt=memcache.get(conf)
            if (oldcnt==None ) or (cnt != memcache.get(conf)):
                logging.info("need sync %s (%s) and (%s) not match " % (conf,cnt,memcache.get(conf)))
                res=urlfetch.fetch(url="http://gplus2ft.appspot.com/sync?id="+conf)
                if int(res.status_code)==200:
                    memcache.set(conf,cnt)
                else:
                    logging.error("Server error of %s:%d,will check next time!" % (conf,int(res.status_code)))
Example #14
 def get_content(self):
   """Get the HTML output for the main page."""
   result = memcache.get(Key.WaterLevelStatus)
   if result is None:
     DataHandler().fetch_and_cache()
     result = memcache.get(Key.WaterLevelStatus)
   if (datetime.now(Eastern) - result['time']) > timedelta(hours=3):
     send_admin_message("Stale Data", "Data is more than three hours old")
   status = result['status']
   if status == Status.Permit:
     logging.info("Status is Permit")
     text = "not without a permit"
   elif status == Status.Vest:
     logging.info("Status is Vest")
     text = "yes, but bring a vest"
   else:
     logging.info("Status is OK")
     text = "sure, go for it"
   v = {
     'text': text,
     'class': 'status'+str(status),
     'time': result['time'].strftime('%B %d at %I:%M %p %Z')
   }
   t = j.get_template('main.html')
   return t.render(v)
Example #15
    def get(self, login_user=None, template_values=None):
        # use a fresh dict instead of a shared mutable default argument
        if template_values is None:
            template_values = {}

        # is import or update going on?
        template_values['update_status'] = memcache.get('update_status')
        template_values['import_status'] = memcache.get('import_status')

        # Check for existing data.
        try:
            counter = StopMeta.get(Key.from_path('StopMeta', 1))
            template_values['production_num_stops'] = counter.counter_stop_no_confirm
            template_values['production_num_stations'] = counter.counter_station_no_confirm
            template_values['production_num_places'] = counter.counter_place_no_confirm
            # confirmation outstanding
            template_values['update_num_stops'] = counter.counter_stop_update_confirm
            template_values['update_num_stations'] = counter.counter_station_update_confirm
            template_values['update_num_places'] = counter.counter_place_update_confirm
            template_values['new_num_stops'] = counter.counter_stop_new_confirm
            template_values['new_num_stations'] = counter.counter_station_new_confirm
            template_values['new_num_places'] = counter.counter_place_new_confirm
            # Administrative hierarchy
            template_values['gov_num'] = Comuna.all().count()+Region.all().count()+Country.all().count()
        except AttributeError:
            # no data in database. Redirect to import page
            self.redirect('/import')
            return

        template_values['upload_url'] = blobstore.create_upload_url('/update/upload')
        path = os.path.join(os.path.dirname(__file__), "pages/update.html")
        self.response.out.write(template.render(path, template_values))
        return
Example #16
  def getAnnouncement(self, request):
      """Return Announcement from memcache."""
 
      announcement = ""
      if memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY):
          announcement = memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY)
      return StringMessage(data=announcement)
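getAnnouncement calls memcache.get twice for the same key; since memcache.get returns None on a miss, one lookup with a fallback is equivalent and saves a cache round trip. A minimal variant, reusing the StringMessage and MEMCACHE_ANNOUNCEMENTS_KEY names from the example (a sketch, not code from the original project):

  def getAnnouncement(self, request):
      """Return Announcement from memcache with a single lookup."""
      announcement = memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY) or ""
      return StringMessage(data=announcement)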
Example #17
def CheckAuth(handler):
    ip = GetIP(handler)
    cookies = handler.request.cookies
    if 'auth' in cookies:
        auth = cookies['auth']
        member_num = memcache.get(auth)
        if (member_num > 0):
            member = memcache.get('Member_' + str(member_num))
            if member is None:
                q = db.GqlQuery("SELECT * FROM Member WHERE num = :1", member_num)
                if q.count() == 1:
                    member = q[0]
                    memcache.set(auth, member.num)
                    memcache.set('Member_' + str(member_num), member)
                else:
                    member = False
            if member:
                member.ip = ip
            return member
        else:
            q = db.GqlQuery("SELECT * FROM Member WHERE auth = :1", auth)
            if (q.count() == 1):
                member_num = q[0].num
                member = q[0]
                memcache.set(auth, member_num)
                memcache.set('Member_' + str(member_num), member)
                member.ip = ip
                return member
            else:
                return False
    else:
        return False
Example #18
    def get(self):
        self._require_admin()

        self.template_values['memcache_stats'] = memcache.get_stats()
        self.template_values['databasequery_stats'] = {
            'hits': sum(filter(None, [memcache.get(key) for key in DatabaseQuery.DATABASE_HITS_MEMCACHE_KEYS])),
            'misses': sum(filter(None, [memcache.get(key) for key in DatabaseQuery.DATABASE_MISSES_MEMCACHE_KEYS]))
        }

        # Gets the 5 recently created users
        users = Account.query().order(-Account.created).fetch(5)
        self.template_values['users'] = users

        self.template_values['suggestions_count'] = Suggestion.query().filter(
            Suggestion.review_state == Suggestion.REVIEW_PENDING).count()

        # version info
        try:
            fname = os.path.join(os.path.dirname(__file__), '../../version_info.json')

            with open(fname, 'r') as f:
                data = json.loads(f.read().replace('\r\n', '\n'))

            self.template_values['git_branch_name'] = data['git_branch_name']
            self.template_values['build_time'] = data['build_time']

            commit_parts = re.split("[\n]+", data['git_last_commit'])
            self.template_values['commit_hash'] = commit_parts[0].split(" ")
            self.template_values['commit_author'] = commit_parts[1]
            self.template_values['commit_date'] = commit_parts[2]
            self.template_values['commit_msg'] = commit_parts[3]

        except Exception, e:
            logging.warning("version_info.json parsing failed: %s" % e)
            pass
Example #19
 def get(self):
     if (self.request.get('msg') and self.request.get('nickname')):
         chats = []
         last_key = 0
         if memcache.get('lastid'):
             last_key = memcache.get('lastid')
         else:
             result = Chats.gql('ORDER BY cid').fetch(1000)
             chats.extend(result)
             if len(result) > 0:
                 last_key = result[-1].cid
             else:
                 last_key = 1
         while True:
             result = Chats.gql('WHERE cid > :1 ORDER BY cid', last_key).fetch(1000)
             chats.extend(result)
             if len(result) > 0:
                 last_key = result[-1].cid
             if len(result) < 1000:
                 break
         
         memcache.set('lastid', last_key, time=60000)
         chat = Chats(message = self.request.get('msg'), nickname = self.request.get('nickname'))
         chat.cid = last_key + 1
         chat.put()
Example #20
def setActiveServerInZoneGroup(zonegroup, instancesInZoneGroup):
	active = memcache.get('active-server-' + zonegroup)
	for instance in instancesInZoneGroup:
		ok = True
		instanceLoad = memcache.get('load-' + instance['name']) # Get the load for the server
		instanceStatus = memcache.get("status-" + instance['name'])
		if instanceLoad != None: # If we have no load info on this instance, it probably just started. `ok` stays True...
			if instanceStatus == 'sloping':
				for key, threshold in thresholds(PROJECT_ID).iteritems(): # Loop through all threshold checks:
					if re.match('^[0-9]+$', instanceLoad[key]) and int(instanceLoad[key]) > int(float(threshold['max']) / 100.0 * float(threshold['slope'])): 
						ok = False
			else:
				for key, threshold in thresholds(PROJECT_ID).iteritems(): # Loop through all threshold checks:
					if re.match('^[0-9]+$', instanceLoad[key]) and int(instanceLoad[key]) > int(float(threshold['max'])): 
						ok = False
						memcache.set("status-" + instance['name'], 'sloping')
		if ok:
			if active == None or not 'name' in active or instanceStatus != 'active' or active['name'] != instance['name']:
				logging.debug('Active server changed (or was not set).')
				memcache.set("status-" + instance['name'], 'active')
				memcache.set('active-server-' + zonegroup, instance)
				return True # True = we need to announce that something changed
			else:
				logging.debug('No change in active server.')
			return False # False = we do not need to announce anything
	logging.debug('There is no good candidate for being the active server.')
	return False # False = we do not need to announce anything
Example #21
def announceActiveServers():

	zoning = zoningConfig(PROJECT_ID)

	logging.debug('Announcing our instances');

	# Make HTTP POST request to the announce urls with active ip:s for each zonegroup.
	payload = {}
	for key, zonegroup in zoning.iteritems():
		instance = memcache.get("active-server-" + key)
		if instance is not None and 'ip' in instance:
			payload[key] = instance['ip']
			payload[key + "_data"] = ""
			logging.debug('Active server for %s is %s.' % (zonegroup, instance['ip']))
			instanceLoad = memcache.get('load-' + instance['name'])
			if instanceLoad is not None and 'data' in instanceLoad:
				payload[key + "_data"] = instanceLoad['data']

	for url in config(PROJECT_ID).announceUrls:
		logging.debug(' - Announcing to: %s' % url);
		urlfetch.fetch(
			url = url,
			method = urlfetch.POST,
			headers = { 'Content-Type': 'application/x-www-form-urlencoded' },
			payload = urllib.urlencode(payload)
		)
Example #22
    def get(self):
        tag_id = self.request.get('tag')
        tag = None
        if tag_id:
            tag = Tag.get_by_id(int(tag_id))

        if tag:
            mem_string = 'picture_tag_'+str(tag_id)
            data = memcache.get(mem_string)
            if data is None:

                data = Picture.all().filter('tags =',tag.title)
                memcache.set(mem_string, data)
        else:
            data = memcache.get('pictures_all')
            if data is None:
                self.pictures_update()
                data = memcache.get('pictures_all')
        
        self.template_values['pictures'] = data

        tags_data = memcache.get('tags_all')
        if tags_data is None:
            tags_data = Tag.all().order('-count').fetch(30)
            memcache.set('tags_all', tags_data)

        self.template_values['tags'] = tags_data
        self.template_values['tags_list'] = [x.title for x in tags_data]
        template = jinja_environment.get_template('index.html')
        self.response.out.write(template.render(self.template_values))
Example #23
    def test_update_or_insert(self):
        some_branch = Branch.create_if_possible('some-branch', 'Some Branch')
        some_platform = Platform.create_if_possible('some-platform', 'Some Platform')
        some_builder = Builder.get(Builder.create('some-builder', 'Some Builder'))
        some_test = Test.update_or_insert('some-test', some_branch, some_platform)
        self.assertThereIsNoInstanceOf(Runs)

        runs = Runs.update_or_insert(some_branch, some_platform, some_test)
        self.assertOnlyInstance(runs)
        self.assertEqual(runs.json_runs, '')
        self.assertEqual(runs.json_averages, '')
        self.assertEqual(runs.json_min, None)
        self.assertEqual(runs.json_max, None)
        old_memcache_value = memcache.get(Runs._key_name(some_branch.id, some_platform.id, some_test.id))
        self.assertTrue(old_memcache_value)

        runs.delete()
        self.assertThereIsNoInstanceOf(Runs)

        builds, results = self._create_results(some_branch, some_platform, some_builder, 'some-test', [50.0])
        runs = Runs.update_or_insert(some_branch, some_platform, some_test)
        self.assertOnlyInstance(runs)
        self.assertTrue(runs.json_runs.startswith('[5, [4, 0, 100, null],'))
        self.assertEqual(json.loads('{' + runs.json_averages + '}'), {"100": 50.0})
        self.assertEqual(runs.json_min, 50.0)
        self.assertEqual(runs.json_max, 50.0)
        self.assertNotEqual(memcache.get(Runs._key_name(some_branch.id, some_platform.id, some_test.id)), old_memcache_value)
Example #24
def CheckAuth(handler):
    ip = GetIP(handler)
    cookies = Cookies(handler, max_age=86400 * 14, path="/")
    if "auth" in cookies:
        auth = cookies["auth"]
        member_num = memcache.get(auth)
        if member_num > 0:
            member = memcache.get("Member_" + str(member_num))
            if member is None:
                q = db.GqlQuery("SELECT * FROM Member WHERE num = :1", member_num)
                if q.count() == 1:
                    member = q[0]
                    memcache.set("Member_" + str(member_num), member, 86400 * 14)
                else:
                    member = False
            if member:
                member.ip = ip
            return member
        else:
            q = db.GqlQuery("SELECT * FROM Member WHERE auth = :1", auth)
            if q.count() == 1:
                member_num = q[0].num
                member = q[0]
                memcache.set(auth, member_num, 86400 * 14)
                memcache.set("Member_" + str(member_num), member, 86400 * 14)
                member.ip = ip
                return member
            else:
                return False
    else:
        return False
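Examples #6, #17 and #24 are variations of the same two-level lookup: the auth cookie resolves to a member number, the member number resolves to a cached Member entity, and each level falls back to the datastore and re-primes memcache. Stripped of the request and IP handling, the shared pattern is roughly the following (a sketch assembled from the examples above, not copied from any of the projects; member_from_auth is a made-up name, and the Member model is assumed to be importable):

from google.appengine.api import memcache
from google.appengine.ext import db

MEMBER_TTL = 86400 * 14  # two weeks, as used in Examples #6 and #24

def member_from_auth(auth):
    """Resolve an auth token to a Member via two memcache keys, falling back to the datastore."""
    member_num = memcache.get(auth)
    if member_num:
        member = memcache.get('Member_' + str(member_num))
        if member is None:
            q = db.GqlQuery("SELECT * FROM Member WHERE num = :1", member_num)
            if q.count() == 1:
                member = q[0]
                memcache.set('Member_' + str(member_num), member, MEMBER_TTL)
        return member or False
    q = db.GqlQuery("SELECT * FROM Member WHERE auth = :1", auth)
    if q.count() == 1:
        member = q[0]
        memcache.set(auth, member.num, MEMBER_TTL)
        memcache.set('Member_' + str(member.num), member, MEMBER_TTL)
        return member
    return False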
Example #25
  def testMemcacheLocking(self):
    # See issue 66.  http://goo.gl/ANBns
    self.ctx.set_cache_policy(False)

    # Prepare: write some entity using Context.put().
    class EmptyModel(model.Model):
      pass
    key = model.Key(EmptyModel, 1)
    mkey = self.ctx._memcache_prefix + key.urlsafe()
    ent = EmptyModel(key=key)
    put_fut = self.ctx.put(ent)

    eventloop.run0()
    self.assertTrue(self.ctx._memcache_set_batcher._queues)
    eventloop.run0()
    self.assertTrue(self.ctx._memcache_set_batcher._running)
    while self.ctx._memcache_set_batcher._running:
      eventloop.run0()

    # Verify that memcache now contains the special _LOCKED value.
    val = memcache.get(mkey)
    self.assertEqual(val, context._LOCKED)

    put_fut.check_success()
    # Verify that the memcache _LOCKED value has been removed.
    val = memcache.get(mkey)
    self.assertEqual(val, None)
Example #26
def _get_default_entity(entity, start_result, process_result, set_namespace, addr):
    result = start_result
    namespace = memcache.get(set_namespace + addr)

    if namespace is not None:
        for network_key in namespace.split(" / "):
            network_result = memcache.get("N" + set_namespace + network_key)
            if network_result is None:
                network_result = ""
                key = ndb.Key(urlsafe=network_key)
                for x in entity.query(entity.network == key):
                    network_result += process_result(x) + '\n'
                memcache.add("N" + set_namespace + network_key, network_result)
            result += network_result
        return result

    namespace = ""
    addr_ = netaddr.IPAddress(addr)

    for network in Network.query().order(Network.addr):
        if addr_ in network.netaddr:
            namespace += network.key.urlsafe() + " / "
            network_result = ""
            for x in entity.query(entity.network == network.key):
                network_result += process_result(x) + '\n'
            result += network_result
            memcache.add("N" + set_namespace + network.key.urlsafe(), network_result)
    if namespace != "":
        memcache.set(set_namespace + addr, namespace[:-3])  # remove final ' / '
    return result
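_get_default_entity deliberately mixes memcache.add for the per-network results and memcache.set for the per-address namespace string. The distinction matters under concurrency: add stores a value only when the key is not already present (and reports failure otherwise), while set overwrites unconditionally. A tiny standalone illustration:

from google.appengine.api import memcache

memcache.set('k', 'first')           # unconditional write
added = memcache.add('k', 'second')  # no-op: the key already exists
assert not added
assert memcache.get('k') == 'first'
memcache.set('k', 'third')           # overwrites the existing value
assert memcache.get('k') == 'third'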
Example #27
 def test_user_by_id_memcache(self):
     '''Get user by id should memcache the answer'''
     user_id = "memcache_ubi_user_id"
     user = SFUser.create(user_id, "fb_token", "secret")
     self.assertNone(memcache.get(USER_KEY % user_id))
     self.assertEqual(user.key(), SFUser.user_by_id(user_id).key())
     self.assertEqual(user.key(), memcache.get(USER_KEY % user_id).key())
Example #28
 def canvas(self):
     if not self.facebook.check_session(self.request):
         self.redirect(self.facebook.get_login_url(next=[self.request.path,""][self.request.path == '/']))
         return
     else:
         stats = memcache.get("%s_stats"%self.facebook.uid)
         links = memcache.get("%s_links"%self.facebook.uid)
         if stats and links:
             self.redirect("http://apps.facebook.com/link_stats/")
             return
         
         self.response.out.write("NOT CACHED!")
         try:
             #raise Exception
             t1 = time.time()
             stats = simplejson.loads(self.request.get("fb_sig_link_stats"))
             links = simplejson.loads(self.request.get("fb_sig_user_stats"))
             logging.info("Simplejson taken %s"%str(time.time()-t1))
         except Exception,e:
             self.response.out.write("Error, could not get your link stats! Please refresh")
             return
         memcache.set("%s_stats"%self.facebook.uid,stats,time=240)
         memcache.set("%s_links"%self.facebook.uid,links,time=240)
         self.response.out.write("Done!")
         self.render_stats(stats,links)
Example #29
 def get(self):
     c = 0
     c = memcache.get('planes_c')
     s = ''
     s = memcache.get('planes')
     if (s == None):
         c = 0
         s = ''
         q = db.GqlQuery("SELECT * FROM Section ORDER BY nodes DESC")
         if (q.count() > 0):
             for section in q:
                 q2 = db.GqlQuery("SELECT * FROM Node WHERE section_num = :1 ORDER BY topics DESC", section.num)
                 n = ''
                 if (q2.count() > 0):
                     nodes = []
                     i = 0
                     for node in q2:
                         nodes.append(node)
                         i = i + 1
                     random.shuffle(nodes)
                     for node in nodes:
                         fs = random.randrange(12, 16)
                         n = n + '<a href="/go/' + node.name + '" class="item_node">' + node.title + '</a>'
                         c = c + 1
                 s = s + '<div class="sep20"></div><div class="box"><div class="cell"><div class="fr"><strong class="snow">' + section.title_alternative + u'</strong><small class="snow"> • ' + str(section.nodes) + ' nodes</small></div>' + section.title + '</div><div class="inner">' + n + '</div></div>'
         memcache.set('planes', s, 3600)
         memcache.set('planes_c', c, 3600)
     self.values['c'] = c
     self.values['s'] = s
     self.values['page_title'] = self.site.title.decode('utf-8') + u' › ' + self.l10n.planes.decode('utf-8')
     self.finalize(template_name='planes')
Example #30
    def test_get_and_put_cached(self):
        storage = appengine.StorageByKeyName(
            appengine.CredentialsModel, 'foo', 'credentials', cache=memcache)

        self.assertEqual(None, storage.get())
        self.credentials.set_store(storage)

        http = http_mock.HttpMock(data=BASIC_RESP)
        self.credentials._refresh(http)
        credmodel = appengine.CredentialsModel.get_by_key_name('foo')
        self.assertEqual(BASIC_TOKEN, credmodel.credentials.access_token)

        # Now remove the item from the cache.
        memcache.delete('foo')

        # Check that getting refreshes the cache.
        credentials = storage.get()
        self.assertEqual(BASIC_TOKEN, credentials.access_token)
        self.assertNotEqual(None, memcache.get('foo'))

        # Deleting should clear the cache.
        storage.delete()
        credentials = storage.get()
        self.assertEqual(None, credentials)
        self.assertEqual(None, memcache.get('foo'))

        # Verify mock.
        self._verify_basic_refresh(http)
Example #31
def get_count(key):
  """Returns the counter"""
  count = memcache.get('simple24_%s_total' % key)
  if count is None:
    return full_count(key)
  return count
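get_count is the read half of a cache-aside counter: try memcache, fall back to recomputing with full_count on a miss. A more general version of the same idea also re-caches the recomputed value so the next read is cheap (cached_value is a hypothetical helper, not part of the project):

from google.appengine.api import memcache

def cached_value(key, compute, ttl=600):
    """Return the memcached value for key, recomputing and re-caching it on a miss."""
    value = memcache.get(key)
    if value is None:
        value = compute()
        # best effort: if the set fails, the next call simply recomputes again
        memcache.set(key, value, ttl)
    return value

# usage against the example above (assuming full_count is importable):
# total = cached_value('simple24_%s_total' % key, lambda: full_count(key))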
Example #32
 def get(self, path):
     if path.find('/') != -1:
         # Page
         parts = path.split('/')
         if len(parts) == 2:
             minisite_name = parts[0]
             page_name = parts[1]
             minisite = GetKindByName('Minisite', minisite_name)
             if minisite is not False:
                 page = memcache.get(path)
                 if page is None:
                     q = db.GqlQuery("SELECT * FROM Page WHERE name = :1 AND minisite = :2", page_name, minisite)
                     if q.count() == 1:
                         page = q[0]
                         memcache.set(path, page, 864000)
                 if page.mode == 1:
                     # Dynamic embedded page
                     template_values = {}
                     site = GetSite()
                     template_values['site'] = site
                     member = CheckAuth(self)
                     if member:
                         template_values['member'] = member
                     l10n = GetMessages(self, member, site)
                     template_values['l10n'] = l10n
                     template_values['rnd'] = random.randrange(1, 100)
                     template_values['page'] = page
                     template_values['minisite'] = page.minisite
                     template_values['page_title'] = site.title + u' › ' + page.minisite.title.decode('utf-8') + u' › ' + page.title.decode('utf-8')
                     taskqueue.add(url='/hit/page/' + str(page.key()))
                     path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'page.html')
                     output = template.render(path, template_values)
                     self.response.out.write(output)
                 else:
                     # Static standalone page
                     taskqueue.add(url='/hit/page/' + str(page.key()))
                     expires_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
                     expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
                     self.response.headers.add_header("Expires", expires_str)
                     self.response.headers['Cache-Control'] = 'max-age=864000, must-revalidate'
                     self.response.headers['Content-Type'] = page.content_type
                     self.response.out.write(page.content)
     else:
         # Site
         page = memcache.get(path + '/index.html')
         if page:
             taskqueue.add(url='/hit/page/' + str(page.key()))
             if page.mode == 1:
                 # Dynamic embedded page
                 template_values = {}
                 site = GetSite()
                 template_values['site'] = site
                 member = CheckAuth(self)
                 if member:
                     template_values['member'] = member
                 l10n = GetMessages(self, member, site)
                 template_values['l10n'] = l10n
                 template_values['rnd'] = random.randrange(1, 100)
                 template_values['page'] = page
                 template_values['minisite'] = page.minisite
                 template_values['page_title'] = site.title + u' › ' + page.minisite.title.decode('utf-8') + u' › ' + page.title.decode('utf-8')
                 taskqueue.add(url='/hit/page/' + str(page.key()))
                 path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'page.html')
                 output = template.render(path, template_values)
                 self.response.out.write(output)
             else:
                 expires_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
                 expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
                 self.response.headers.add_header("Expires", expires_str)
                 self.response.headers['Cache-Control'] = 'max-age=864000, must-revalidate'
                 self.response.headers['Content-Type'] = page.content_type
                 self.response.out.write(page.content)
Example #33
    def get(self, node_name):
        site = GetSite()
        browser = detect(self.request)
        self.session = Session()
        template_values = {}
        template_values['site'] = site
        template_values['rnd'] = random.randrange(1, 100)
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        if member:
            template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        node = GetKindByName('Node', node_name)
        template_values['node'] = node
        pagination = False
        pages = 1
        page = 1
        page_size = 15
        start = 0
        has_more = False
        more = 1
        has_previous = False
        previous = 1
        if node:
            template_values['canonical'] = 'http://' + site.domain + '/go/' + node.name
            if member:
                favorited = member.hasFavorited(node)
                template_values['favorited'] = favorited
                recent_nodes = memcache.get('member::' + str(member.num) + '::recent_nodes')
                recent_nodes_ids = memcache.get('member::' + str(member.num) + '::recent_nodes_ids')
                if recent_nodes and recent_nodes_ids:
                    if (node.num in recent_nodes_ids) is not True:
                        recent_nodes.insert(0, node)
                        recent_nodes_ids.insert(0, node.num)
                        memcache.set('member::' + str(member.num) + '::recent_nodes', recent_nodes, 7200)
                        memcache.set('member::' + str(member.num) + '::recent_nodes_ids', recent_nodes_ids, 7200)
                else:
                    recent_nodes = []
                    recent_nodes.append(node)
                    recent_nodes_ids = []
                    recent_nodes_ids.append(node.num)
                    memcache.set('member::' + str(member.num) + '::recent_nodes', recent_nodes, 7200)
                    memcache.set('member::' + str(member.num) + '::recent_nodes_ids', recent_nodes_ids, 7200)
                template_values['recent_nodes'] = recent_nodes
            template_values['page_title'] = site.title + u' › ' + node.title
            # Pagination
            if node.topics > page_size:
                pagination = True
            else:
                pagination = False
            if pagination:
                if node.topics % page_size == 0:
                    pages = int(node.topics / page_size)
                else:
                    pages = int(node.topics / page_size) + 1
                page = self.request.get('p')
                if (page == '') or (page is None):
                    page = 1
                else:
                    page = int(page)
                    if page > pages:
                        page = pages
                    else:
                        if page < 1:
                            page = 1
                if page < pages:
                    has_more = True
                    more = page + 1
                if page > 1:
                    has_previous = True
                    previous = page - 1
                start = (page - 1) * page_size
                template_values['canonical'] = 'http://' + site.domain + '/go/' + node.name + '?p=' + str(page)
        else:
            template_values['page_title'] = site.title + u' › 节点未找到'
        template_values['pagination'] = pagination
        template_values['pages'] = pages
        template_values['page'] = page
        template_values['page_size'] = page_size
        template_values['has_more'] = has_more
        template_values['more'] = more
        template_values['has_previous'] = has_previous
        template_values['previous'] = previous
        section = False
        if node:
#            section = GetKindByNum('Section', node.section_num)
            section = Node.all().filter('name =',node.category)
        template_values['section'] = section
        topics = False
        if node:
            q3 = db.GqlQuery("SELECT * FROM Topic WHERE node_num = :1 ORDER BY last_touched DESC LIMIT " + str(start) + ", " + str(page_size), node.num)
            topics = q3
        template_values['topics'] = topics
        if browser['ios']:
            if (node):
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'node.html')
            else:
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'node_not_found.html')
        else:
            if (node):
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'node.html')
            else:
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'node_not_found.html')
        output = template.render(path, template_values)
        self.response.out.write(output)
Example #34
    def get(self):
        host = self.request.headers['Host']
        site = GetSite()
        browser = detect(self.request)
        template_values = {}
        template_values['site'] = GetSite()
        template_values['canonical'] = 'http://' + site.domain + '/'
        template_values['rnd'] = random.randrange(1, 100)
        template_values['page_title'] = site.title
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if member:
            self.response.headers['Set-Cookie'] = 'auth=' + member.auth + '; expires=' + (datetime.datetime.now() + datetime.timedelta(days=365)).strftime("%a, %d-%b-%Y %H:%M:%S GMT") + '; path=/'
            template_values['member'] = member
            try:
                blocked = pickle.loads(member.blocked.encode('utf-8'))
            except:
                blocked = []
            if (len(blocked) > 0):
                template_values['blocked'] = ','.join(map(str, blocked))
        if member:
            recent_nodes = memcache.get('member::' + str(member.num) + '::recent_nodes')
            if recent_nodes:
                template_values['recent_nodes'] = recent_nodes
        nodes_new = []
        nodes_new = memcache.get('home_nodes_new')
        if nodes_new is None:
            nodes_new = []
            qnew = db.GqlQuery("SELECT * FROM Node ORDER BY created DESC LIMIT 10")
            if (qnew.count() > 0):
                i = 0
                for node in qnew:
                    nodes_new.append(node)
                    i = i + 1
            memcache.set('home_nodes_new', nodes_new, 3600)
        template_values['nodes_new'] = nodes_new
        if browser['ios']:
            s = ''
            s = memcache.get('home_sections_neue')
            if (s == None):
                s = ''
#                q = db.GqlQuery("SELECT * FROM Section ORDER BY created ASC")
                q = Node.all().filter('category =','')
                if (q.count() > 0):
                    for section in q:
                        q2 = db.GqlQuery("SELECT * FROM Node WHERE section_num = :1 ORDER BY created ASC", section.num)
                        n = ''
                        if (q2.count() > 0):
                            nodes = []
                            i = 0
                            for node in q2:
                                nodes.append(node)
                                i = i + 1
                            random.shuffle(nodes)
                            for node in nodes:
                                fs = random.randrange(12, 16)
                                n = n + '<a href="/go/' + node.name + '" style="font-size: ' + str(fs) + 'px;">' + node.title + '</a>&nbsp; '
                        s = s + '<div class="section">' + section.title + '</div><div class="cell">' + n + '</div>'
                memcache.set('home_sections_neue', s, 600)
            template_values['s'] = s
        if browser['ios']:
            home_rendered = memcache.get('home_rendered_mobile')
            if home_rendered is None:
                latest = memcache.get('q_latest_16')
                if (latest):
                    template_values['latest'] = latest
                else:
                    q2 = db.GqlQuery("SELECT * FROM Topic ORDER BY last_touched DESC LIMIT 16")
                    memcache.set('q_latest_16', q2, 600)
                    latest = q2
                    template_values['latest'] = latest
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'portion', 'home_mobile.html')
                home_rendered = template.render(path, template_values)
                memcache.set('home_rendered_mobile', home_rendered, 600)
            template_values['home'] = home_rendered
        else:
            home_rendered = memcache.get('home_rendered')
            if home_rendered is None:
                latest = memcache.get('q_latest_16')
                if (latest):
                    template_values['latest'] = latest
                else:
                    q2 = db.GqlQuery("SELECT * FROM Topic ORDER BY last_touched DESC LIMIT 16")
                    memcache.set('q_latest_16', q2, 600)
                    latest = q2
                    template_values['latest'] = latest
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'portion', 'home.html')
                home_rendered = template.render(path, template_values)
                memcache.set('home_rendered', home_rendered, 600)
            template_values['home'] = home_rendered
        member_total = memcache.get('member_total')
        if member_total is None:
            q3 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'member.total'")
            if (q3.count() > 0):
                member_total = q3[0].value
            else:
                member_total = 0
            memcache.set('member_total', member_total, 600)
        template_values['member_total'] = member_total
        topic_total = memcache.get('topic_total')
        if topic_total is None:
            q4 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'topic.total'")
            if (q4.count() > 0):
                topic_total = q4[0].value
            else:
                topic_total = 0
            memcache.set('topic_total', topic_total, 600)
        template_values['topic_total'] = topic_total
        reply_total = memcache.get('reply_total')
        if reply_total is None:
            q5 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'reply.total'")
            if (q5.count() > 0):
                reply_total = q5[0].value
            else:
                reply_total = 0
            memcache.set('reply_total', reply_total, 600)
        template_values['reply_total'] = reply_total
        if (browser['ios']):
            path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'index.html')
        else:
            hottest = memcache.get('index_hottest_sidebar')
            if hottest is None:
                qhot = db.GqlQuery("SELECT * FROM Node ORDER BY topics DESC LIMIT 25")
                hottest = u''
                for node in qhot:
                    hottest = hottest + '<a href="/go/' + node.name + '" class="item_node">' + node.title + '</a>'
                memcache.set('index_hottest_sidebar', hottest, 5000)
            template_values['index_hottest_sidebar'] = hottest
            c = memcache.get('index_categories')
            if c is None:
                c = ''
                i = 0
                categoriesdb = db.GqlQuery("SELECT * FROM Node where category = ''")
                for cate in categoriesdb:
                    category = cate.name.strip()
                    i = i + 1
                    if i == categoriesdb.count():
                        css_class = 'inner'
                    else:
                        css_class = 'cell'
                    c = c + '<div class="' + css_class + '"><table cellpadding="0" cellspacing="0" border="0"><tr><td align="right" width="80"><span class="fade">' + category + '</span></td><td style="line-height: 200%; padding-left: 15px;">'
                    qx = db.GqlQuery("SELECT * FROM Node WHERE category = :1 ORDER BY topics DESC", category)
                    for node in qx:
                        c = c + '<a href="/go/' + node.name + '" style="font-size: 14px;">' + node.title + '</a>&nbsp; &nbsp; '
                    c = c + '</td></tr></table></div>'
                    memcache.set('index_categories', c, 3600)
            template_values['c'] = c
            path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'index.html')
        output = template.render(path, template_values)
        self.response.out.write(output)
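Example #34 caches at several levels with short TTLs: raw query results (q_latest_16), rendered HTML fragments (home_rendered, home_rendered_mobile, index_categories, index_hottest_sidebar) and simple counters, so each piece refreshes itself within minutes. The fragment-caching step it repeats can be factored into a small helper (a sketch reusing the template module already imported in the example; cached_fragment is a made-up name):

def cached_fragment(cache_key, template_path, template_values, ttl=600):
    """Render a template once and serve the result from memcache for ttl seconds."""
    html = memcache.get(cache_key)
    if html is None:
        html = template.render(template_path, template_values)
        memcache.set(cache_key, html, ttl)
    return html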
Example #35
def GetCachedIsInternalUser(username):
    return memcache.get(_IsInternalUserCacheKey(username))
Example #36
    def testContext_NamespaceBonanza(self):
        # Test that memcache ops issued for datastore caching use the
        # correct namespace.
        def assertNone(expr):
            self.assertTrue(expr is None, repr(expr))

        def assertNotNone(expr):
            self.assertTrue(expr is not None, repr(expr))

        def assertLocked(expr):
            self.assertTrue(expr is context._LOCKED, repr(expr))

        def assertProtobuf(expr, ent):
            self.assertEqual(
                expr,
                ent._to_pb(set_key=False).SerializePartialToString())

        class Foo(model.Model):
            pass

        k1 = model.Key(Foo, 1, namespace='a')
        k2 = model.Key(Foo, 2, namespace='b')
        mk1 = self.ctx._memcache_prefix + k1.urlsafe()
        mk2 = self.ctx._memcache_prefix + k2.urlsafe()
        e1 = Foo(key=k1)
        e2 = Foo(key=k2)
        self.ctx.set_cache_policy(False)
        self.ctx.set_memcache_policy(True)

        self.ctx.set_datastore_policy(False)  # This will vary in subtests

        # Test put with datastore policy off
        k1 = self.ctx.put(e1).get_result()
        k2 = self.ctx.put(e2).get_result()
        # Nothing should be in the empty namespace
        assertNone(memcache.get(mk1, namespace=''))
        assertNone(memcache.get(mk2, namespace=''))
        # Only k1 is found in namespace 'a'
        assertProtobuf(memcache.get(mk1, namespace='a'), e1)
        assertNone(memcache.get(mk2, namespace='a'))
        # Only k2 is found in namespace 'b'
        assertNone(memcache.get(mk1, namespace='b'))
        assertProtobuf(memcache.get(mk2, namespace='b'), e2)

        memcache.flush_all()
        self.ctx.set_datastore_policy(True)

        # Test put with datastore policy on
        k1_fut = self.ctx.put(e1)
        while not self.ctx._put_batcher._running:
            eventloop.run0()
        # Nothing should be in the empty namespace
        assertNone(memcache.get(mk1, namespace=''))
        assertNone(memcache.get(mk2, namespace=''))
        # Only k1 is found in namespace 'a', as _LOCKED
        assertLocked(memcache.get(mk1, namespace='a'))
        assertNone(memcache.get(mk2, namespace='a'))
        self.assertEqual(k1_fut.get_result(), k1)
        # Have to test one at a time, otherwise _LOCKED value may not be set
        k2_fut = self.ctx.put(e2)
        while not self.ctx._put_batcher._running:
            eventloop.run0()
        # Only k2 is found in namespace 'b', as _LOCKED
        assertNone(memcache.get(mk1, namespace='b'))
        assertLocked(memcache.get(mk2, namespace='b'))
        # Keys should be identical
        self.assertEqual(k2_fut.get_result(), k2)

        memcache.flush_all()

        # Test get with cold cache
        e1 = self.ctx.get(k1).get_result()
        e2 = self.ctx.get(k2).get_result()
        eventloop.run()  # Wait for memcache RPCs to run
        # Neither is found in the empty namespace
        assertNone(memcache.get(mk1, namespace=''))
        assertNone(memcache.get(mk2, namespace=''))
        # Only k1 is found in namespace 'a'
        assertProtobuf(memcache.get(mk1, namespace='a'), e1)
        assertNone(memcache.get(mk2, namespace='a'))
        # Only k2 is found in namespace 'b'
        assertNone(memcache.get(mk1, namespace='b'))
        assertProtobuf(memcache.get(mk2, namespace='b'), e2)

        self.ctx.set_datastore_policy(False)

        # Test get with warm cache
        self.ctx.get(k1).get_result()
        self.ctx.get(k2).get_result()
        eventloop.run()  # Wait for memcache RPCs to run
        # Neither is found in the empty namespace
        assertNone(memcache.get(mk1, namespace=''))
        assertNone(memcache.get(mk2, namespace=''))
        # Only k1 is found in namespace 'a'
        assertNotNone(memcache.get(mk1, namespace='a'))
        assertNone(memcache.get(mk2, namespace='a'))
        # Only k2 is found in namespace 'b'
        assertNone(memcache.get(mk1, namespace='b'))
        assertNotNone(memcache.get(mk2, namespace='b'))

        self.ctx.set_datastore_policy(True)

        # Test delete
        self.ctx.delete(k1).check_success()
        self.ctx.delete(k2).check_success()
        # Nothing should be in the empty namespace
        assertNone(memcache.get(mk1, namespace=''))
        assertNone(memcache.get(mk2, namespace=''))
        # Only k1 is found in namespace 'a', as _LOCKED
        assertLocked(memcache.get(mk1, namespace='a'))
        assertNone(memcache.get(mk2, namespace='a'))
        # Only k2 is found in namespace 'b', as _LOCKED
        assertNone(memcache.get(mk1, namespace='b'))
        assertLocked(memcache.get(mk2, namespace='b'))

        memcache.flush_all()

        # Test _clear_memcache (it doesn't delete, it locks, like put)
        self.ctx._clear_memcache([k1, k2]).check_success()
        # Nothing should be in the empty namespace
        assertNone(memcache.get(mk1, namespace=''))
        assertNone(memcache.get(mk2, namespace=''))
        # Only k1 is found in namespace 'a', as _LOCKED
        assertLocked(memcache.get(mk1, namespace='a'))
        assertNone(memcache.get(mk2, namespace='a'))
        # Only k2 is found in namespace 'b', as _LOCKED
        assertNone(memcache.get(mk1, namespace='b'))
        assertLocked(memcache.get(mk2, namespace='b'))
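testContext_NamespaceBonanza relies on memcache namespaces: the same key string holds independent values under namespace 'a', 'b' and the default ''. The namespace keyword is part of the ordinary memcache API, for example:

from google.appengine.api import memcache

memcache.set('counter', 1, namespace='a')
memcache.set('counter', 2, namespace='b')
assert memcache.get('counter', namespace='a') == 1
assert memcache.get('counter', namespace='b') == 2
assert memcache.get('counter') is None  # the default namespace was never written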
Example #37
def proxy(url):
    '''
    Make a proxy call and write response to current http request.
    
    For example, a remote resource 'http://www.google.com/images/logo.gif' 
    can be proxied as '/util/proxy/http%3A%2F%2Fwww.google.com%2Fimages%2Flogo.gif'.
    
    Raw url can also work such as '/util/proxy/http://www.google.com/images/logo.gif'
    
    Host name can be encoded as 'www_google_com', so this also works: 
    '/util/proxy/http://www_google_com/images/logo.gif'
    
    Returns:
        None, and data was wrote to current http response before returning.
    '''
    if not (url.startswith('http://') or url.startswith('https://')):
        raise HttpNotFoundError()

    root_pos = url.find('/', 8)
    if root_pos == (-1):
        url = url.replace('_', '.')
    else:
        url = url[:root_pos].replace('_', '.') + url[root_pos:]

    cached_data = memcache.get(url)
    if cached_data is not None:
        logging.info('Got cached content for url fetch: %s' % url)
        __write_to_response(context.response, cached_data)
        return

    # get cache time:
    cached_time = 0
    cached_param = context.query.get('__cache__', '')
    if cached_param:
        cached_time = int(cached_param)

    # fetch url:
    fetch_headers = {'User-Agent': USER_AGENT, 'Accept': '*/*', 'Referer': url}
    try:
        result = urlfetch.fetch(url,
                                headers=fetch_headers,
                                follow_redirects=False)
    except (urlfetch.Error, apiproxy_errors.Error):
        raise HttpNotFoundError()

    response = context.response
    response.set_status(result.status_code)

    if (result.status_code != 200):
        return

    # build cache object:
    buffer = []
    for key, value in result.headers.iteritems():
        if key.lower() not in IGNORE_HEADERS:
            response.headers[key] = value
            buffer.append(key + ': ' + value)
    if cached_time > 0:
        response.headers['Cache-Control'] = 'max-age=' + str(cached_time)
        buffer.append('Cache-Control: max-age=' + str(cached_time))
    else:
        response.headers['Cache-Control'] = 'max-age=' + MAX_AGE
    buffer.append('\n')
    buffer.append(result.content)
    cached_data = '\n'.join(buffer)
    if cached_time > 0:
        memcache.set(url, cached_data, cached_time)
        logging.info('Put cached content after url fetch: %s' % url)

    # write to context.response:
    response.out.write(result.content)
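A note on the host decoding above: only the host portion (everything before the first '/' after the scheme) has its underscores turned back into dots, so path segments keep any '_'. A minimal standalone sketch of that step; decode_proxy_url is a name made up here for illustration:

def decode_proxy_url(url):
    # Hypothetical helper mirroring the decoding done by proxy() above:
    # only the host part between the scheme and the first '/' is rewritten.
    if not (url.startswith('http://') or url.startswith('https://')):
        raise ValueError('unsupported scheme: %s' % url)
    root_pos = url.find('/', 8)  # first '/' after the scheme prefix
    if root_pos == -1:
        return url.replace('_', '.')
    return url[:root_pos].replace('_', '.') + url[root_pos:]

# decode_proxy_url('http://www_google_com/images/my_logo.gif')
# -> 'http://www.google.com/images/my_logo.gif'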
Example #38
0
 def get(cls, key):
     return memcache.get(key)
Example #39
0
def GenerateGraphData(SID, startdate, enddate, localization):
    values = {}
    if not localization:
        values[
            'categories1'] = "'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'"
        values[
            'categories2'] = "'Jan 2012','Feb 2012','Mar 2012','Apr 2012','May 2012','Jun 2012','Jul 2012','Aug 2012','Sep 2012','Oct 2012','Nov 2012','Dec 2012'"
        values['LocalSelected1'] = ' selected'
        values['TotalSalesLang'] = 'Total Sales'
        values['ProductSalesLang'] = 'Product Sales'
        values['NewMembershipsLang'] = 'New Memberships'
        values['OnlineBookingsLang'] = 'Online Bookings'
        values['AttendanceLang'] = 'Attendance'
        values['FirstVisitsLang'] = 'First Visits'
        values['SalesTitle'] = 'Sales - Year over Year'
        values['OnlineTitle'] = 'Online Bookings'
        values['ActMemTitle'] = 'Active Members'
        values['MonthLang'] = 'Month'
        values['YearLang'] = 'Year'
    if localization == 'EN':
        values[
            'categories1'] = "'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'"
        values[
            'categories2'] = "'Jan 2012','Feb 2012','Mar 2012','Apr 2012','May 2012','Jun 2012','Jul 2012','Aug 2012','Sep 2012','Oct 2012','Nov 2012','Dec 2012'"
        values['LocalSelected1'] = ' selected'
        values['TotalSalesLang'] = 'Total Sales'
        values['ProductSalesLang'] = 'Product Sales'
        values['NewMembershipsLang'] = 'New Memberships'
        values['OnlineBookingsLang'] = 'Online Bookings'
        values['AttendanceLang'] = 'Attendance'
        values['FirstVisitsLang'] = 'First Visits'
        values['SalesTitle'] = 'Sales - Year over Year'
        values['OnlineTitle'] = 'Online Bookings'
        values['ActMemTitle'] = 'Active Members'
        values['MonthLang'] = 'Month'
        values['YearLang'] = 'Year'
    if localization == 'SP':
        values[
            'categories1'] = "'Enero','Feb','Marzo','Abr','Mayo','Jun','Jul','Agosto','Sept','Oct','Nov','Dic'"
        values[
            'categories2'] = "'Enero 2012','Feb 2012','Marzo 2012','Abr 2012','Mayo 2012','Jun 2012','Jul 2012','Agosto 2012','Sept 2012','Oct 2012','Nov 2012','Dic 2012'"
        values['LocalSelected2'] = ' selected'
        values['TotalSalesLang'] = 'Las Ventas Totales'
        values['ProductSalesLang'] = 'Venta de Productos'
        values['NewMembershipsLang'] = 'Nuevas Usuarios'
        values['OnlineBookingsLang'] = 'Reservas Online'
        values['AttendanceLang'] = 'Publico'
        values['FirstVisitsLang'] = 'Primeras Visitas'
        values['SalesTitle'] = 'Ventas - Ano tras Ano'
        values['OnlineTitle'] = 'Reservas Online'
        values['ActMemTitle'] = 'Miembros Activos'
        values['MonthLang'] = 'Mes'
        values['YearLang'] = 'Ano'
    if localization == 'DE':
        values[
            'categories1'] = "'Jan','Feb','Marz','Apr','Mai','Juni','Juli','Aug','Sept','Okt','Nov','Dez'"
        values[
            'categories2'] = "'Jan 2012','Feb 2012','Marz 2012','Apr 2012','Mai 2012','Juni 2012','Juli 2012','Aug 2012','Sept 2012','Okt 2012','Nov 2012','Dez 2012'"
        values['LocalSelected3'] = ' selected'

    values['SID'] = SID
    values['startdate'] = startdate
    values['enddate'] = enddate

    #Sales
    salessql = "SELECT isnull(SUM(case when Sales.SaleDate BETWEEN '" + startdate + "' AND '" + enddate + "' then tblSDPayments.SDPaymentAmount - tblSDPayments.ItemTax1 - tblSDPayments.ItemTax2 - tblSDPayments.ItemTax3 - tblSDPayments.ItemTax4 - tblSDPayments.ItemTax5 end),0) AS KDPValue, isnull(SUM(case when Sales.SaleDate BETWEEN dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')) AND '" + startdate + "' then tblSDPayments.SDPaymentAmount - tblSDPayments.ItemTax1 - tblSDPayments.ItemTax2 - tblSDPayments.ItemTax3 - tblSDPayments.ItemTax4 - tblSDPayments.ItemTax5 end),0) AS ChangeValue FROM [Sales Details] INNER JOIN Sales ON [Sales Details].SaleID = Sales.SaleID INNER JOIN tblPayments ON Sales.SaleID = tblPayments.SaleID INNER JOIN tblSDPayments ON [Sales Details].SDID = tblSDPayments.SDID AND tblPayments.PaymentID = tblSDPayments.PaymentID INNER JOIN [Payment Types] ON tblPayments.PaymentMethod = [Payment Types].Item# WHERE (Sales.SaleDate BETWEEN dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')) AND '" + enddate + "') AND ([Payment Types].CashEQ = 1) AND ([Sales Details].CategoryID != 21)"

    salesapi = ApiCall(SID, salessql)
    x = minidom.parseString(salesapi.read())
    TotalSales = ''
    TotalSalesNum = 0
    SalesChangeNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            TotalSales += format(
                int(
                    math.floor(float(
                        row.childNodes[0].childNodes[0].nodeValue))), ",d")
            TotalSalesNum = float(row.childNodes[0].childNodes[0].nodeValue)
            SalesChangeNum = float(row.childNodes[1].childNodes[0].nodeValue)
        except:
            pass
    if TotalSales == '':
        TotalSales = '0'
    values['TotalSales'] = '$' + TotalSales

    # #Sales Change
    # saleschangesql = "SELECT SUM(tblSDPayments.SDPaymentAmount - tblSDPayments.ItemTax1 - tblSDPayments.ItemTax2 - tblSDPayments.ItemTax3 - tblSDPayments.ItemTax4 - tblSDPayments.ItemTax5) AS KDPValue FROM [Sales Details] INNER JOIN Sales ON [Sales Details].SaleID = Sales.SaleID INNER JOIN tblPayments ON Sales.SaleID = tblPayments.SaleID INNER JOIN tblSDPayments ON [Sales Details].SDID = tblSDPayments.SDID AND tblPayments.PaymentID = tblSDPayments.PaymentID INNER JOIN [Payment Types] ON tblPayments.PaymentMethod = [Payment Types].Item# WHERE (Sales.SaleDate BETWEEN dateadd(day,datediff(day,'"+startdate+"', '"+enddate+"')*-1, dateadd(day,-1,'"+startdate+"')) AND '"+startdate+"') AND ([Payment Types].CashEQ = 1) AND ([Sales Details].CategoryID != 21)"

    # saleschangeapi = ApiCall(SID,saleschangesql)
    # x =minidom.parseString(saleschangeapi.read())
    # SalesChangeNum = 0
    # for row in x.getElementsByTagName("Row"):
    # 	try:
    # 		SalesChangeNum=float(row.childNodes[0].childNodes[0].nodeValue)
    # 	except:
    # 		pass
    if not SalesChangeNum:
        values['SalesChange'] = u"▲100%"
        values['SalesChangeColor'] = '(0, 170, 0)'
    if TotalSalesNum == SalesChangeNum:
        values['SalesChange'] = u"►0%"
        values['SalesChangeColor'] = '(255, 241, 13)'
    elif SalesChangeNum == 0:
        values['SalesChange'] = u"▲100%"
        values['SalesChangeColor'] = '(0, 170, 0)'
    elif TotalSalesNum == 0:
        values['SalesChange'] = u"▼100%"
        values['SalesChangeColor'] = '(255, 0, 0)'
    else:
        SalesCalc = TotalSalesNum - SalesChangeNum
        SalesCalc = SalesCalc / SalesChangeNum
        SalesCalc = SalesCalc * 100

        if SalesCalc < 0:
            values['SalesChange'] = u"▼" + format(
                abs(int(math.floor(SalesCalc))), ",d") + "%"
            values['SalesChangeColor'] = '(255, 0, 0)'
        elif SalesCalc > 0:
            values['SalesChange'] = u"▲" + format(
                abs(int(math.floor(SalesCalc))), ",d") + "%"
            values['SalesChangeColor'] = '(0, 170, 0)'

    #Product Sales
    prodsql = "SELECT SUM(tblSDPayments.SDPaymentAmount - tblSDPayments.ItemTax1 - tblSDPayments.ItemTax2 - tblSDPayments.ItemTax3 - tblSDPayments.ItemTax4 - tblSDPayments.ItemTax5) AS KDPValue FROM [Sales Details] INNER JOIN Sales ON [Sales Details].SaleID = Sales.SaleID INNER JOIN tblPayments ON Sales.SaleID = tblPayments.SaleID INNER JOIN tblSDPayments ON [Sales Details].SDID = tblSDPayments.SDID AND tblPayments.PaymentID = tblSDPayments.PaymentID INNER JOIN [Payment Types] ON tblPayments.PaymentMethod = [Payment Types].Item# WHERE (Sales.SaleDate BETWEEN '" + startdate + "' AND '" + enddate + "') AND ([Payment Types].CashEQ = 1) AND ([Sales Details].CategoryID != 21) and [sales details].categoryid >25"

    prodapi = ApiCall(SID, prodsql)
    x = minidom.parseString(prodapi.read())
    ProductSales = ''
    TotalProdNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            ProductSales += format(
                int(
                    math.floor(float(
                        row.childNodes[0].childNodes[0].nodeValue))), ",d")
            TotalProdNum = int(
                math.floor(float(row.childNodes[0].childNodes[0].nodeValue)))
        except:
            pass
    if ProductSales == '':
        ProductSales = '0'
    values['ProductSales'] = '$' + ProductSales

    #Prod Change
    prodchangesql = "SELECT SUM(tblSDPayments.SDPaymentAmount - tblSDPayments.ItemTax1 - tblSDPayments.ItemTax2 - tblSDPayments.ItemTax3 - tblSDPayments.ItemTax4 - tblSDPayments.ItemTax5) AS KDPValue FROM [Sales Details] INNER JOIN Sales ON [Sales Details].SaleID = Sales.SaleID INNER JOIN tblPayments ON Sales.SaleID = tblPayments.SaleID INNER JOIN tblSDPayments ON [Sales Details].SDID = tblSDPayments.SDID AND tblPayments.PaymentID = tblSDPayments.PaymentID INNER JOIN [Payment Types] ON tblPayments.PaymentMethod = [Payment Types].Item# WHERE (Sales.SaleDate BETWEEN dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')) AND '" + startdate + "') AND ([Payment Types].CashEQ = 1) AND ([Sales Details].CategoryID != 21) and [sales details].categoryid >25"

    prodchangeapi = ApiCall(SID, prodchangesql)
    x = minidom.parseString(prodchangeapi.read())
    ProdChangeNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            ProdChangeNum = float(row.childNodes[0].childNodes[0].nodeValue)
        except:
            pass
    if not ProdChangeNum:
        values['ProdChange'] = u"▲100%"
        values['ProdChangeColor'] = '(0, 170, 0)'
    if TotalProdNum == ProdChangeNum:
        values['ProdChange'] = u"►0%"
        values['ProdChangeColor'] = '(255, 241, 13)'
    elif ProdChangeNum == 0:
        values['ProdChange'] = u"▲100%"
        values['ProdChangeColor'] = '(0, 170, 0)'
    elif TotalProdNum == 0:
        values['ProdChange'] = u"▼100%"
        values['ProdChangeColor'] = '(255, 0, 0)'
    else:
        ProdCalc = TotalProdNum - ProdChangeNum
        ProdCalc = ProdCalc / ProdChangeNum
        ProdCalc = ProdCalc * 100

        if ProdCalc < 0:
            values['ProdChange'] = u"▼" + format(
                abs(int(math.floor(ProdCalc))), ",d") + "%"
            values['ProdChangeColor'] = '(255, 0, 0)'
        elif ProdCalc > 0:
            values['ProdChange'] = u"▲" + format(
                abs(int(math.floor(ProdCalc))), ",d") + "%"
            values['ProdChangeColor'] = '(0, 170, 0)'

    #New Members
    memsql = "SELECT Count(*) AS KPIValue FROM (SELECT tblClientContracts.ClientID,Isnull(Sales.LocationID, CLIENTS.HomeStudio) AS LocationID FROM CLIENTS INNER JOIN tblClientContracts ON CLIENTS.ClientID = tblClientContracts.ClientID LEFT OUTER JOIN Sales INNER JOIN [Sales Details] ON Sales.SaleID = [Sales Details].SaleID ON tblClientContracts.ClientContractID = [Sales Details].ClientContractID WHERE ( tblClientContracts.AgreementDate between '" + startdate + "' and '" + enddate + "' ) AND ( tblClientContracts.Deleted = 0 ) AND ( tblClientContracts.AutoRenewClientContractID IS NULL ) GROUP  BY tblClientContracts.ClientID, Isnull(Sales.LocationID, CLIENTS.HomeStudio)) AS NewContract"

    memapi = ApiCall(SID, memsql)
    x = minidom.parseString(memapi.read())
    NewMemberships = ''
    TotalMemNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            NewMemberships += format(
                int(
                    math.floor(float(
                        row.childNodes[0].childNodes[0].nodeValue))), ",d")
            TotalMemNum = int(
                math.floor(float(row.childNodes[0].childNodes[0].nodeValue)))
        except:
            pass
    if NewMemberships == '':
        NewMemberships = '0'
    values['NewMemberships'] = NewMemberships

    #Mem Change
    memchangesql = "SELECT Count(*) AS KPIValue FROM (SELECT tblClientContracts.ClientID,Isnull(Sales.LocationID, CLIENTS.HomeStudio) AS LocationID FROM CLIENTS INNER JOIN tblClientContracts ON CLIENTS.ClientID = tblClientContracts.ClientID LEFT OUTER JOIN Sales INNER JOIN [Sales Details] ON Sales.SaleID = [Sales Details].SaleID ON tblClientContracts.ClientContractID = [Sales Details].ClientContractID WHERE ( tblClientContracts.AgreementDate between dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')) AND '" + startdate + "') AND ( tblClientContracts.Deleted = 0 ) AND ( tblClientContracts.AutoRenewClientContractID IS NULL ) GROUP  BY tblClientContracts.ClientID, Isnull(Sales.LocationID, CLIENTS.HomeStudio)) AS NewContract"

    memchangeapi = ApiCall(SID, memchangesql)
    x = minidom.parseString(memchangeapi.read())
    MemChangeNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            MemChangeNum = float(row.childNodes[0].childNodes[0].nodeValue)
        except:
            pass
    if not MemChangeNum:
        values['MemChange'] = u"▲100%"
        values['MemChangeColor'] = '(0, 170, 0)'
    if TotalMemNum == MemChangeNum:
        values['MemChange'] = u"►0%"
        values['MemChangeColor'] = '(255, 241, 13)'
    elif MemChangeNum == 0:
        values['MemChange'] = u"▲100%"
        values['MemChangeColor'] = '(0, 170, 0)'
    elif TotalMemNum == 0:
        values['MemChange'] = u"▼100%"
        values['MemChangeColor'] = '(255, 0, 0)'
    else:
        MemCalc = TotalMemNum - MemChangeNum
        MemCalc = MemCalc / MemChangeNum
        MemCalc = MemCalc * 100

        if MemCalc < 0:
            values['MemChange'] = u"▼" + format(abs(int(math.floor(MemCalc))),
                                                ",d") + "%"
            values['MemChangeColor'] = '(255, 0, 0)'
        elif MemCalc > 0:
            values['MemChange'] = u"▲" + format(abs(int(math.floor(MemCalc))),
                                                ",d") + "%"
            values['MemChangeColor'] = '(0, 170, 0)'

    #Online Bookings
    onlinesql = "SELECT COUNT(*) AS KPIValue FROM [VISIT DATA] INNER JOIN tblTypeGroup ON [VISIT DATA].TypeGroup = tblTypeGroup.TypeGroupID WHERE (([VISIT DATA].ClassDate between '" + startdate + "' and '" + enddate + "') OR ([VISIT DATA].RequestDate between '" + startdate + "' and '" + enddate + "'))  AND ([VISIT DATA].Cancelled = 0) AND ([VISIT DATA].Missed = 0) AND ([VISIT DATA].WebScheduler = 1) AND (NOT ([VISIT DATA].TypeGroup IS NULL)) AND (NOT ([VISIT DATA].ClassDate IS NULL))"

    memapi = ApiCall(SID, onlinesql)
    x = minidom.parseString(memapi.read())
    OnlineBookings = ''
    TotalOnlineNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            OnlineBookings += format(
                int(
                    math.floor(float(
                        row.childNodes[0].childNodes[0].nodeValue))), ",d")
            TotalOnlineNum = int(
                math.floor(float(row.childNodes[0].childNodes[0].nodeValue)))
        except:
            pass
    if OnlineBookings == '':
        OnlineBookings = '0'
    values['OnlineBookings'] = OnlineBookings

    #Online Change
    onlinechangesql = "SELECT COUNT(*) AS KPIValue FROM [VISIT DATA] INNER JOIN tblTypeGroup ON [VISIT DATA].TypeGroup = tblTypeGroup.TypeGroupID WHERE (([VISIT DATA].ClassDate between dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')) OR ([VISIT DATA].RequestDate between dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "'))  AND ([VISIT DATA].Cancelled = 0) AND ([VISIT DATA].Missed = 0) AND ([VISIT DATA].WebScheduler = 1) AND (NOT ([VISIT DATA].TypeGroup IS NULL)) AND (NOT ([VISIT DATA].ClassDate IS NULL))"

    onlinechangeapi = ApiCall(SID, onlinechangesql)
    x = minidom.parseString(onlinechangeapi.read())
    OnlineChangeNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            OnlineChangeNum = float(row.childNodes[0].childNodes[0].nodeValue)
        except:
            pass
    if not OnlineChangeNum:
        values['OnlineChange'] = u"▲100%"
        values['OnlineChangeColor'] = '(0, 170, 0)'
    if TotalOnlineNum == OnlineChangeNum:
        values['OnlineChange'] = u"►0%"
        values['OnlineChangeColor'] = '(255, 241, 13)'
    elif OnlineChangeNum == 0:
        values['OnlineChange'] = u"▲100%"
        values['OnlineChangeColor'] = '(0, 170, 0)'
    elif TotalOnlineNum == 0:
        values['OnlineChange'] = u"▼100%"
        values['OnlineChangeColor'] = '(255, 0, 0)'
    else:
        OnlineCalc = TotalOnlineNum - OnlineChangeNum
        OnlineCalc = OnlineCalc / OnlineChangeNum
        OnlineCalc = OnlineCalc * 100

        if OnlineCalc < 0:
            values['OnlineChange'] = u"▼" + format(
                abs(int(math.floor(OnlineCalc))), ",d") + "%"
            values['OnlineChangeColor'] = '(255, 0, 0)'
        elif OnlineCalc > 0:
            values['OnlineChange'] = u"▲" + format(
                abs(int(math.floor(OnlineCalc))), ",d") + "%"
            values['OnlineChangeColor'] = '(0, 170, 0)'

    #Attendance
    attendsql = "SELECT COUNT(*) AS KPIValue FROM [VISIT DATA] INNER JOIN tblTypeGroup ON [VISIT DATA].TypeGroup = tblTypeGroup.TypeGroupID WHERE (([VISIT DATA].ClassDate between '" + startdate + "' and '" + enddate + "') OR ([VISIT DATA].RequestDate between '" + startdate + "' and '" + enddate + "'))  AND ([VISIT DATA].Cancelled = 0) AND ([VISIT DATA].Missed = 0) AND (NOT ([VISIT DATA].TypeGroup IS NULL)) AND (NOT ([VISIT DATA].ClassDate IS NULL))"

    attendapi = ApiCall(SID, attendsql)
    x = minidom.parseString(attendapi.read())
    Attendance = ''
    TotalAttendNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            Attendance += format(
                int(
                    math.floor(float(
                        row.childNodes[0].childNodes[0].nodeValue))), ",d")
            TotalAttendNum = int(
                math.floor(float(row.childNodes[0].childNodes[0].nodeValue)))
        except:
            pass
    if Attendance == '':
        Attendance = '0'
    values['Attendance'] = Attendance

    #Attendance Change
    attendchangesql = "SELECT COUNT(*) AS KPIValue FROM [VISIT DATA] INNER JOIN tblTypeGroup ON [VISIT DATA].TypeGroup = tblTypeGroup.TypeGroupID WHERE (([VISIT DATA].ClassDate between dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')) OR ([VISIT DATA].RequestDate between dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "'))  AND ([VISIT DATA].Cancelled = 0) AND ([VISIT DATA].Missed = 0) AND (NOT ([VISIT DATA].TypeGroup IS NULL)) AND (NOT ([VISIT DATA].ClassDate IS NULL))"

    attendchangeapi = ApiCall(SID, attendchangesql)
    x = minidom.parseString(attendchangeapi.read())
    AttendChangeNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            AttendChangeNum = float(row.childNodes[0].childNodes[0].nodeValue)
        except:
            pass
    if not AttendChangeNum:
        values['AttendChange'] = u"▲100%"
        values['AttendChangeColor'] = '(0, 170, 0)'
    if TotalAttendNum == AttendChangeNum:
        values['AttendChange'] = u"►0%"
        values['AttendChangeColor'] = '(255, 241, 13)'
    elif AttendChangeNum == 0:
        values['AttendChange'] = u"▲100%"
        values['AttendChangeColor'] = '(0, 170, 0)'
    elif TotalAttendNum == 0:
        values['AttendChange'] = u"▼100%"
        values['AttendChangeColor'] = '(255, 0, 0)'
    else:
        AttendCalc = TotalAttendNum - AttendChangeNum
        AttendCalc = AttendCalc / AttendChangeNum
        AttendCalc = AttendCalc * 100

        if AttendCalc < 0:
            values['AttendChange'] = u"▼" + format(
                abs(int(math.floor(AttendCalc))), ",d") + "%"
            values['AttendChangeColor'] = '(255, 0, 0)'
        elif AttendCalc > 0:
            values['AttendChange'] = u"▲" + format(
                abs(int(math.floor(AttendCalc))), ",d") + "%"
            values['AttendChangeColor'] = '(0, 170, 0)'

    #FirstVisits
    firstsql = "SELECT Count(*) AS KPIValue FROM CLIENTS WHERE (CLIENTS.Deleted = 0) AND ((NOT(CLIENTS.FirstClassDate IS NULL)) OR (NOT(CLIENTS.FirstApptDate IS NULL))) AND Case WHEN CLIENTS.FirstApptDate IS NULL THEN CLIENTS.FirstClassDate ELSE CLIENTS.FirstApptDate END between '" + startdate + "' and '" + enddate + "'"

    firstapi = ApiCall(SID, firstsql)
    x = minidom.parseString(firstapi.read())
    FirstVisits = ''
    TotalFVNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            FirstVisits += format(
                int(
                    math.floor(float(
                        row.childNodes[0].childNodes[0].nodeValue))), ",d")
            TotalFVNum = int(
                math.floor(float(row.childNodes[0].childNodes[0].nodeValue)))
        except:
            pass
    if FirstVisits == '':
        FirstVisits = '0'
    values['FirstVisits'] = FirstVisits

    #First Visits Change
    fvchangesql = "SELECT Count(*) AS KPIValue FROM CLIENTS WHERE (CLIENTS.Deleted = 0) AND ((NOT(CLIENTS.FirstClassDate IS NULL)) OR (NOT(CLIENTS.FirstApptDate IS NULL))) AND Case WHEN CLIENTS.FirstApptDate IS NULL THEN CLIENTS.FirstClassDate ELSE CLIENTS.FirstApptDate END between dateadd(day,datediff(day,'" + startdate + "', '" + enddate + "')*-1, dateadd(day,-1,'" + startdate + "')"

    fvchangeapi = ApiCall(SID, fvchangesql)
    x = minidom.parseString(fvchangeapi.read())
    FVChangeNum = 0
    for row in x.getElementsByTagName("Row"):
        try:
            FVChangeNum = float(row.childNodes[0].childNodes[0].nodeValue)
        except:
            pass
    if not FVChangeNum:
        values['FVChange'] = u"▲100%"
        values['FVChangeColor'] = '(0, 170, 0)'
    if TotalFVNum == FVChangeNum:
        values['FVChange'] = u"►0%"
        values['FVChangeColor'] = '(255, 241, 13)'
    elif FVChangeNum == 0:
        values['FVChange'] = u"▲100%"
        values['FVChangeColor'] = '(0, 170, 0)'
    elif TotalFVNum == 0:
        values['FVChange'] = u"▼100%"
        values['FVChangeColor'] = '(255, 0, 0)'
    else:
        FVCalc = TotalFVNum - FVChangeNum
        FVCalc = FVCalc / FVChangeNum
        FVCalc = FVCalc * 100

        if FVCalc < 0:
            values['FVChange'] = u"▼" + format(abs(int(math.floor(FVCalc))),
                                               ",d") + "%"
            values['FVChangeColor'] = '(255, 0, 0)'
        elif FVCalc > 0:
            values['FVChange'] = u"▲" + format(abs(int(math.floor(FVCalc))),
                                               ",d") + "%"
            values['FVChangeColor'] = '(0, 170, 0)'

    if memcache.get('SID') == SID:
        values['SalesYear2011'] = memcache.get('SalesYear2011')
        values['SalesYear2012'] = memcache.get('SalesYear2012')
        values['SalesYear2013'] = memcache.get('SalesYear2013')

        values['OnlineYear2011'] = memcache.get('OnlineYear2011')
        values['OnlineYear2012'] = memcache.get('OnlineYear2012')
        values['OnlineYear2013'] = memcache.get('OnlineYear2013')

        values['MembershipYOY'] = memcache.get('MembershipYOY')

    else:
        memcache.set('SID', SID)
        #Sales YOY
        Sales2011YOY = {
            '01/2011': 0,
            '02/2011': 0,
            '03/2011': 0,
            '04/2011': 0,
            '05/2011': 0,
            '06/2011': 0,
            '07/2011': 0,
            '08/2011': 0,
            '09/2011': 0,
            '10/2011': 0,
            '11/2011': 0,
            '12/2011': 0
        }

        Sales2012YOY = {
            '01/2012': 0,
            '02/2012': 0,
            '03/2012': 0,
            '04/2012': 0,
            '05/2012': 0,
            '06/2012': 0,
            '07/2012': 0,
            '08/2012': 0,
            '09/2012': 0,
            '10/2012': 0,
            '11/2012': 0,
            '12/2012': 0
        }

        Sales2013YOY = {
            '01/2013': 0,
            '02/2013': 0,
            '03/2013': 0,
            '04/2013': 0,
            '05/2013': 0,
            '06/2013': 0,
            '07/2013': 0,
            '08/2013': 0,
            '09/2013': 0,
            '10/2013': 0,
            '11/2013': 0,
            '12/2013': 0
        }

        sales2011 = ''
        sales2012 = ''
        sales2013 = ''

        syoysql = "SELECT right('00'+cast(datepart(month,Sales.SaleDate) as nvarchar(max)),2) + '/' + cast(datepart(year,Sales.SaleDate) as nvarchar(max)) as date, SUM(tblSDPayments.SDPaymentAmount - tblSDPayments.ItemTax1 - tblSDPayments.ItemTax2 - tblSDPayments.ItemTax3 - tblSDPayments.ItemTax4 - tblSDPayments.ItemTax5) AS KDPValue FROM [Sales Details] INNER JOIN Sales ON [Sales Details].SaleID = Sales.SaleID INNER JOIN tblPayments ON Sales.SaleID = tblPayments.SaleID INNER JOIN tblSDPayments ON [Sales Details].SDID = tblSDPayments.SDID AND tblPayments.PaymentID = tblSDPayments.PaymentID INNER JOIN [Payment Types] ON tblPayments.PaymentMethod = [Payment Types].Item# WHERE (Sales.SaleDate BETWEEN '1/1/2011' AND cast(getdate() as date)) AND ([Payment Types].CashEQ = 1) AND ([Sales Details].CategoryID != 21) GROUP BY right('00'+cast(datepart(month,Sales.SaleDate) as nvarchar(max)),2) + '/' + cast(datepart(year,Sales.SaleDate) as nvarchar(max))"

        syoy = ApiCall(SID, syoysql)
        x = minidom.parseString(syoy.read())
        SalesYear = ''
        for row in x.getElementsByTagName("Row"):
            if str(row.childNodes[0].childNodes[0].nodeValue) in Sales2011YOY:
                Sales2011YOY[str(
                    row.childNodes[0].childNodes[0].nodeValue)] = str(
                        int(
                            math.floor(
                                float(row.childNodes[1].childNodes[0].nodeValue
                                      ))))
            if str(row.childNodes[0].childNodes[0].nodeValue) in Sales2012YOY:
                Sales2012YOY[str(
                    row.childNodes[0].childNodes[0].nodeValue)] = str(
                        int(
                            math.floor(
                                float(row.childNodes[1].childNodes[0].nodeValue
                                      ))))
            if str(row.childNodes[0].childNodes[0].nodeValue) in Sales2013YOY:
                Sales2013YOY[str(
                    row.childNodes[0].childNodes[0].nodeValue)] = str(
                        int(
                            math.floor(
                                float(row.childNodes[1].childNodes[0].nodeValue
                                      ))))
        for e, v in sorted(Sales2011YOY.items()):
            sales2011 += str(v) + ","
        for e, v in sorted(Sales2012YOY.items()):
            sales2012 += str(v) + ","
        for e, v in sorted(Sales2013YOY.items()):
            sales2013 += str(v) + ","

        values['SalesYear2011'] = sales2011[:-1]
        values['SalesYear2012'] = sales2012[:-1]
        values['SalesYear2013'] = sales2013[:-1]

        memcache.set('SalesYear2011', sales2011[:-1])
        memcache.set('SalesYear2012', sales2012[:-1])
        memcache.set('SalesYear2013', sales2013[:-1])

        #Online YOY
        Online2011YOY = {
            '01/2011': 0,
            '02/2011': 0,
            '03/2011': 0,
            '04/2011': 0,
            '05/2011': 0,
            '06/2011': 0,
            '07/2011': 0,
            '08/2011': 0,
            '09/2011': 0,
            '10/2011': 0,
            '11/2011': 0,
            '12/2011': 0
        }

        Online2012YOY = {
            '01/2012': 0,
            '02/2012': 0,
            '03/2012': 0,
            '04/2012': 0,
            '05/2012': 0,
            '06/2012': 0,
            '07/2012': 0,
            '08/2012': 0,
            '09/2012': 0,
            '10/2012': 0,
            '11/2012': 0,
            '12/2012': 0
        }

        Online2013YOY = {
            '01/2013': 0,
            '02/2013': 0,
            '03/2013': 0,
            '04/2013': 0,
            '05/2013': 0,
            '06/2013': 0,
            '07/2013': 0,
            '08/2013': 0,
            '09/2013': 0,
            '10/2013': 0,
            '11/2013': 0,
            '12/2013': 0
        }

        online2011 = ''
        online2012 = ''
        online2013 = ''

        oyoysql = "SELECT RIGHT('00' + Cast(Datepart(month, [VISIT DATA].ClassDate) AS NVARCHAR(max)), 2) + '/' + Cast(Datepart(year, [VISIT DATA].ClassDate) AS NVARCHAR(max)) as date, COUNT(*) AS KPIValue FROM [VISIT DATA] INNER JOIN tblTypeGroup ON [VISIT DATA].TypeGroup = tblTypeGroup.TypeGroupID WHERE (([VISIT DATA].ClassDate between '1/1/2011' and cast(getdate() as date)) OR ([VISIT DATA].RequestDate between '1/1/2011' and cast(getdate() as date)))  AND ([VISIT DATA].Cancelled = 0) AND ([VISIT DATA].Missed = 0) AND ([VISIT DATA].WebScheduler = 1) AND (NOT ([VISIT DATA].TypeGroup IS NULL)) AND (NOT ([VISIT DATA].ClassDate IS NULL)) group by RIGHT('00' + Cast(Datepart(month, [VISIT DATA].ClassDate) AS NVARCHAR(max)), 2) + '/' + Cast(Datepart(year, [VISIT DATA].ClassDate) AS NVARCHAR(max))"

        oyoy = ApiCall(SID, oyoysql)
        x = minidom.parseString(oyoy.read())
        OnlineYear = ''
        for row in x.getElementsByTagName("Row"):
            try:
                if str(row.childNodes[0].childNodes[0].nodeValue
                       ) in Online2011YOY:
                    Online2011YOY[str(
                        row.childNodes[0].childNodes[0].nodeValue)] = str(
                            int(
                                math.floor(
                                    float(row.childNodes[1].childNodes[0].
                                          nodeValue))))
                if str(row.childNodes[0].childNodes[0].nodeValue
                       ) in Online2012YOY:
                    Online2012YOY[str(
                        row.childNodes[0].childNodes[0].nodeValue)] = str(
                            int(
                                math.floor(
                                    float(row.childNodes[1].childNodes[0].
                                          nodeValue))))
                if str(row.childNodes[0].childNodes[0].nodeValue
                       ) in Online2013YOY:
                    Online2013YOY[str(
                        row.childNodes[0].childNodes[0].nodeValue)] = str(
                            int(
                                math.floor(
                                    float(row.childNodes[1].childNodes[0].
                                          nodeValue))))
            except:
                pass
        for e, v in sorted(Online2011YOY.items()):
            online2011 += str(v) + ","
        for e, v in sorted(Online2012YOY.items()):
            online2012 += str(v) + ","
        for e, v in sorted(Online2013YOY.items()):
            online2013 += str(v) + ","

        values['OnlineYear2011'] = online2011[:-1]
        values['OnlineYear2012'] = online2012[:-1]
        values['OnlineYear2013'] = online2013[:-1]

        memcache.set('OnlineYear2011', online2011[:-1])
        memcache.set('OnlineYear2012', online2012[:-1])
        memcache.set('OnlineYear2013', online2013[:-1])

        #Memberships YOY
        MembershipYOY = {
            '03/2012': 'null',
            '04/2012': 'null',
            '05/2012': 'null',
            '06/2012': 'null',
            '07/2012': 'null',
            '08/2012': 'null',
            '09/2012': 'null',
            '10/2012': 'null',
            '11/2012': 'null',
            '12/2012': 'null',
            '01/2013': 'null',
            '02/2013': 'null'
        }

        membership2011 = ''

        memyoysql = "SELECT RIGHT('00' + Cast(Datepart(month, tblAggregateKPI.kpidate) AS NVARCHAR(max)), 2) + '/' + Cast(Datepart(year, tblAggregateKPI.kpidate) AS NVARCHAR(max)) as date, kpivalue FROM tblAggregateKPI INNER JOIN (SELECT MAX(KPIDate) AS kpidate FROM tblAggregateKPI AS tblAggregateKPI_1 WHERE (KPITypeID = 15) AND (RefCatID = 1) GROUP BY CAST(DATEPART(month, KPIDate) AS NVARCHAR(MAX)) + '/' + CAST(DATEPART(year, KPIDate) AS NVARCHAR(MAX))) AS derivedtbl_1 ON  tblAggregateKPI.KPIDate = derivedtbl_1.kpidate WHERE (KPITypeID = 15) AND (RefCatID = 1) and tblAggregateKPI.kpidate between '1/1/2011' and getdate()"

        memyoy = ApiCall(SID, memyoysql)
        x = minidom.parseString(memyoy.read())
        MembershipYear = ''
        for row in x.getElementsByTagName("Row"):
            try:
                if str(row.childNodes[0].childNodes[0].nodeValue
                       ) in MembershipYOY:
                    MembershipYOY[str(
                        row.childNodes[0].childNodes[0].nodeValue)] = str(
                            int(
                                math.floor(
                                    float(row.childNodes[1].childNodes[0].
                                          nodeValue))))
            except:
                pass
        for e, v in sorted(MembershipYOY.items()):
            membership2011 += str(v) + ","

        values['MembershipYOY'] = membership2011[:-1]

        memcache.set('MembershipYOY', membership2011[:-1])

    return values
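The six KPI blocks in GenerateGraphData repeat the same arrow-and-color formatting. A sketch of how that could be factored into one helper; format_change is a hypothetical name, and the special cases mirror the code above (no prior value reads as a 100% increase, equal values read as flat, a drop to zero reads as a 100% decrease):

# -*- coding: utf-8 -*-
import math


def format_change(current, previous):
    # Hypothetical helper condensing the repeated KPI-change blocks above.
    # Returns (label, rgb): an arrow plus a percentage, and the color string
    # used by the dashboard template.
    if current == previous:
        return (u"►0%", '(255, 241, 13)')   # flat: yellow
    if previous == 0:
        return (u"▲100%", '(0, 170, 0)')    # no prior data: treat as +100%
    if current == 0:
        return (u"▼100%", '(255, 0, 0)')    # dropped to zero
    calc = (current - previous) / float(previous) * 100
    pct = format(abs(int(math.floor(calc))), ",d") + "%"
    if calc < 0:
        return (u"▼" + pct, '(255, 0, 0)')
    return (u"▲" + pct, '(0, 170, 0)')

# e.g. values['SalesChange'], values['SalesChangeColor'] = format_change(
#     TotalSalesNum, SalesChangeNum)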
Example #40
0
 def test_memcache(self):
     """Tests memcache"""
     memcache.set('test_key', 'contents')
     self.assertEqual(memcache.get('test_key'), 'contents')
Example #41
0
 def get_by_key_name(key_name):
     return memcache.get(key_name)
Example #42
0
                for classes_name, classes_item in classes:  #@UnusedVariable
                    # If the class declares a URL handler, add it to the list (if it is not already there)
                    if hasattr(classes_item, 'url_handler'):
                        if not (classes_item().url_handler,
                                classes_item) in url_handlers:
                            url_handlers.append(
                                (classes_item().url_handler, classes_item))
        # Error handler for __init__.py and folders that are not modules
        except ImportError:
            pass
    # Return the sorted list (sorted by handler so that / and /.+ come last)
    return sorted(url_handlers, key=lambda (t): t[0], reverse=True)


if __name__ == "__main__":
    """ Запускаем App Engine """

    # Пробуем получить из кэша обработчики URL
    url_handlers = memcache.get("url_handlers")  #@UndefinedVariable
    # Если их там нет, получаем их с нуля и добавляем в кэш
    if not url_handlers:
        url_handlers = get_url_handlers(os.path.curdir)
        memcache.add("url_handlers", pickle.dumps(url_handlers),
                     3600)  #@UndefinedVariable
    # Если есть, загружаем
    else:
        url_handlers = pickle.loads(url_handlers)
    # Запускаем GAE
    application = webapp.WSGIApplication(url_handlers, debug=True)
    run_wsgi_app(application)
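The startup block above keeps the computed handler list in memcache as a pickled blob, so a warm instance only pays for pickle.loads. A generic sketch of that pattern, assuming google.appengine.api.memcache; cached_pickled is a hypothetical name, and it uses an is-None check where the original tests truthiness:

import pickle

from google.appengine.api import memcache


def cached_pickled(key, build_fn, ttl=3600):
    # Hypothetical helper: return the cached value for `key` (unpickled),
    # or build it with build_fn(), cache the pickled form, and return it.
    raw = memcache.get(key)
    if raw is not None:
        return pickle.loads(raw)
    value = build_fn()
    memcache.add(key, pickle.dumps(value), ttl)
    return value

# e.g. url_handlers = cached_pickled("url_handlers",
#                                    lambda: get_url_handlers(os.path.curdir))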
Example #43
0
 def getAnnouncement(self, request):
     """Return Announcement from memcache."""
     return StringMessage(
         data=memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY) or "")
Example #44
0
    def get(self, filename):
        """GET

    Args:
      filename: str, package filename like 'foo.dmg'
    Returns:
      None if a blob is being returned,
      or a response object
    """
        auth_return = auth.DoAnyAuth()
        if hasattr(auth_return, 'email'):
            email = auth_return.email()
            if not auth.IsAdminUser(email) and not auth.IsSupportUser(email):
                raise auth.IsAdminMismatch

        filename = urllib.unquote(filename)
        pkg = models.PackageInfo.MemcacheWrappedGet(filename)

        if pkg is None or not pkg.blobstore_key:
            self.error(404)
            return

        if common.IsPanicModeNoPackages():
            self.error(503)
            return

        # Get the Blobstore BlobInfo for this package; memcache wrapped.
        memcache_key = 'blobinfo_%s' % filename
        blob_info = memcache.get(memcache_key)
        if not blob_info:
            blob_info = blobstore.BlobInfo.get(pkg.blobstore_key)
            if blob_info:
                memcache.set(memcache_key, blob_info,
                             300)  # cache for 5 minutes.
            else:
                logging.critical(
                    'Failure fetching BlobInfo for %s. Verify the blob exists: %s',
                    pkg.filename, pkg.blobstore_key)
                self.error(404)
                return

        header_date_str = self.request.headers.get('If-Modified-Since', '')
        etag_nomatch_str = self.request.headers.get('If-None-Match', 0)
        etag_match_str = self.request.headers.get('If-Match', 0)
        pkg_date = blob_info.creation
        pkg_size_bytes = blob_info.size

        # TODO(user): The below can be simplified once all of our clients
        # have ETag values set on the filesystem for these files.  The
        # parsing of If-Modified-Since could be removed.  Removing it prematurely
        # will cause a re-download of all packages on all clients for 1 iteration
        # until they all have ETag values.

        # Reduce complexity of elif conditional below.
        # If an If-None-Match: ETag is supplied, don't worry about a
        # missing file modification date -- the ETag supplies everything needed.
        if etag_nomatch_str and not header_date_str:
            resource_expired = False
        else:
            resource_expired = handlers.IsClientResourceExpired(
                pkg_date, header_date_str)

        # Client supplied If-Match: etag, but that etag does not match current
        # etag.  return 412.
        if (etag_match_str and pkg.pkgdata_sha256
                and etag_match_str != pkg.pkgdata_sha256):
            self.response.set_status(412)

        # Client supplied no etag or If-No-Match: etag, and the etag did not
        # match, or the client's file is older than the mod time of this package.
        elif ((etag_nomatch_str and pkg.pkgdata_sha256
               and etag_nomatch_str != pkg.pkgdata_sha256)
              or resource_expired):
            self.response.headers['Content-Disposition'] = str(
                'attachment; filename=%s' % filename)
            # header date empty or package has changed, send blob with last-mod date.
            if pkg.pkgdata_sha256:
                self.response.headers['ETag'] = str(pkg.pkgdata_sha256)
            self.response.headers['Last-Modified'] = pkg_date.strftime(
                handlers.HEADER_DATE_FORMAT)
            self.response.headers['X-Download-Size'] = str(pkg_size_bytes)
            self.send_blob(pkg.blobstore_key)
        else:
            # Client doesn't need to do anything, current version is OK based on
            # ETag and/or last modified date.
            if pkg.pkgdata_sha256:
                self.response.headers['ETag'] = str(pkg.pkgdata_sha256)
            self.response.set_status(304)
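The conditional-download branch above boils down to three outcomes: 412 when an If-Match ETag no longer matches, a full 200 re-send when If-None-Match mismatches or the client copy looks stale, and 304 otherwise. A condensed sketch of just that decision (pick_status is not part of the original code):

def pick_status(etag_match, etag_nomatch, pkg_etag, resource_expired):
    # Hypothetical condensation of the branch above: decide which HTTP
    # status the package handler sends, given the client's conditional headers.
    if etag_match and pkg_etag and etag_match != pkg_etag:
        return 412   # If-Match supplied but no longer matches
    if (etag_nomatch and pkg_etag and etag_nomatch != pkg_etag) or resource_expired:
        return 200   # re-send the blob with ETag / Last-Modified headers
    return 304       # client copy is current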
Example #45
0
	def post(self):
		user = users.get_current_user()
		if user is None: 
			self.redirect('/welcome')
			
		else:
			user_id = user.user_id()
			user_ent_key = ndb.Key(Account, user_id)
			
			cur_id = self.request.get('cursor', default_value=None)
			start_cursor = None
			cursor_key = None
			cursor_obj = None

			if cur_id is not None:
				# load more
				cursor_key = ndb.Key('AchievementsCursor', int(cur_id), parent = user_ent_key)
				cursor_obj = memcache.get(user_id+'achievements-cursor')
				if cursor_obj is None:
					cursor_obj = cursor_key.get()

				start_cursor = Cursor(urlsafe = cursor_obj.cursor)	

			
			qry = Entry.query(ancestor = user_ent_key).filter(Entry.isAchievement==True).order(-Entry.date)	
			entries, next_cursor, more = qry.fetch_page(5, start_cursor=start_cursor)
			
			if next_cursor is not None:

				if cur_id is not None:
					cursor_obj.cursor = next_cursor.urlsafe()
					cursor_obj.put()
					memcache.set(user_id+'achievements-cursor', cursor_obj)

				else:	
					all_cursors_query = ndb.gql("SELECT * FROM AchievementsCursor WHERE ANCESTOR IS :1", user_ent_key)						
					saved_cursor = list(all_cursors_query)
					if(saved_cursor): # not empty - update
						saved_cursor[0].cursor = next_cursor.urlsafe()
						cursor_key = saved_cursor[0].put()
						cur_id = cursor_key.id()
						memcache.set(user_id+'achievements-cursor', saved_cursor[0], 1800)
						#logging.error('---------\n------Restored saved cursor--\n--------')
					else:	
						cursor_obj = AchievementsCursor(parent = user_ent_key, cursor = next_cursor.urlsafe())
						cursor_key = cursor_obj.put()
						cur_id = cursor_key.id()		
						memcache.set(user_id+'achievements-cursor', cursor_obj, 1800)

						
			# entries is not json serializable because of date object
			results = []
			logging.error('------\n-------\n'+str(len(entries))+'\n-----------')
			for entry in entries:
				entrydict = make_entry_dict(entry)
				# to conform to the dailybox structure
				entrybox = {'entries':[entrydict]}
				entrybox['date'] = entrydict['date']
				results.append(entrybox) 

			search_results = {"results": results, "more": more, "cursor": cur_id}
					#logging.error('------\n-------\n'+str(search_results)+'\n-----------')

			self.response.out.write(json.dumps(search_results))
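This handler, and the similar ones that follow, repeat one paging pattern: the urlsafe ndb query cursor is stored in a per-user datastore entity and mirrored in memcache for quick "load more" requests. A simplified sketch of that pattern with hypothetical names (SavedCursor, load_cursor, save_cursor); it omits the branch that looks up a previously saved cursor entity by ancestor query:

from google.appengine.api import memcache
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.ext import ndb


class SavedCursor(ndb.Model):
    # Hypothetical stand-in for AchievementsCursor / SearchCursor / etc.
    cursor = ndb.StringProperty()


def load_cursor(user_ent_key, user_id, cur_id, cache_suffix):
    # Rebuild the start cursor for a "load more" request: memcache first,
    # then the datastore entity that stores the urlsafe cursor string.
    if cur_id is None:
        return None, None
    key = ndb.Key(SavedCursor, int(cur_id), parent=user_ent_key)
    obj = memcache.get(user_id + cache_suffix)
    if obj is None:
        obj = key.get()
    return Cursor(urlsafe=obj.cursor), obj


def save_cursor(user_ent_key, user_id, next_cursor, obj, cache_suffix):
    # Persist the next page's cursor and mirror it in memcache for 30 minutes.
    if obj is None:
        obj = SavedCursor(parent=user_ent_key)
    obj.cursor = next_cursor.urlsafe()
    key = obj.put()
    memcache.set(user_id + cache_suffix, obj, 1800)
    return key.id()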
Example #46
0
 def getFeaturedSpeaker(self, request):
     """Return Featured Speaker from memcache."""
     return StringMessage(
         data=memcache.get(MEMCACHE_FEATURED_SPEAKER) or "")
Example #47
0
	def post(self):
		user = users.get_current_user()
		if user is None: 
			self.redirect('/welcome')
			
		else:
			user_id = user.user_id()
			user_ent_key = ndb.Key(Account, user_id)
			tag = self.request.get('tag', default_value=None)
			cur_id = self.request.get('cursor', default_value=None)
			start_cursor = None
			cursor_key = None
			cursor_obj = None

			if cur_id is not None:
				# load more
				cursor_key = ndb.Key('SearchCursor', int(cur_id), parent = user_ent_key)
				cursor_obj = memcache.get(user_id+'tag-cursor')
				if cursor_obj is None:
					cursor_obj = cursor_key.get()
				
				start_cursor = Cursor(urlsafe = cursor_obj.cursor)	

			if tag is None or tag.replace(' ','') == '':
				self.response.out.write(json.dumps({}))

			else:
				tag = tag.lower().replace(' ', '')
				# later------ check if the tag is present in the Tags list
				# Now just search over entries
				qry = Entry.query(ancestor = user_ent_key).filter(Entry.normtags==tag).order(-Entry.date)	
				entries, next_cursor, more = qry.fetch_page(5, start_cursor=start_cursor)

				if next_cursor is not None:

					if cur_id is not None:
						cursor_obj.cursor = next_cursor.urlsafe()
						cursor_obj.put()
						memcache.set(user_id+'tag-cursor', cursor_obj)

					else:	
						all_cursors_query = ndb.gql("SELECT * FROM SearchCursor WHERE ANCESTOR IS :1", user_ent_key)
						saved_cursor = list(all_cursors_query)
						if(saved_cursor): # not empty - update
							saved_cursor[0].cursor = next_cursor.urlsafe()
							cursor_key = saved_cursor[0].put()
							cur_id = cursor_key.id()
							memcache.set(user_id+'tag-cursor', saved_cursor[0], 1800)
							#logging.error('---------\n------Restored saved cursor--\n--------')
						else:	
							cursor_obj = SearchCursor(parent = user_ent_key, cursor = next_cursor.urlsafe())
							cursor_key = cursor_obj.put()
							cur_id = cursor_key.id()		
							memcache.set(user_id+'tag-cursor', cursor_obj, 1800)

						
				# entries is not json serializable because of date object
				results = []
				for entry in entries:
					entrydict = make_entry_dict(entry)
					# to conform to the dailybox structure
					entrybox = {'entries':[entrydict]}
					entrybox['date'] = entrydict['date']
					results.append(entrybox) 

				search_results = {"results": results, "more": more, "cursor": cur_id}
				self.response.out.write(json.dumps(search_results))	
Example #48
0
 def get_multi_async(self, keys, namespace='', time=0):
   return _RPC(result=dict(
     (k, memcache.get(k, namespace=namespace, time=time)) for k in keys))
Example #49
0
    def test_retry(self):
        self.assertEqual([], self.taskqueue_stub.GetTasks('propagate'))

        source = self.sources[0]
        source.domain_urls = ['http://orig']
        source.last_hfeed_refetch = last_hfeed_refetch = \
            testutil.NOW - datetime.timedelta(minutes=1)
        source.put()

        resp = self.responses[0]
        resp.status = 'complete'
        resp.unsent = ['http://unsent']
        resp.sent = ['http://sent']
        resp.error = ['http://error']
        resp.failed = ['http://failed']
        resp.skipped = ['https://skipped']

        # SyndicatedPost with new target URLs
        resp.activities_json = [
            json.dumps({'object': {
                'url': 'https://fa.ke/1'
            }}),
            json.dumps({
                'url': 'https://fa.ke/2',
                'object': {
                    'unused': 'ok'
                }
            }),
            json.dumps({'url': 'https://fa.ke/3'}),
        ]
        resp.put()
        SyndicatedPost.insert(source, 'https://fa.ke/1', 'https://orig/1')
        SyndicatedPost.insert(source, 'https://fa.ke/2', 'http://orig/2')
        SyndicatedPost.insert(source, 'https://fa.ke/3', 'http://orig/3')

        # cached webmention endpoint
        memcache.set('W https skipped /', 'asdf')

        key = resp.key.urlsafe()
        response = app.application.get_response('/retry',
                                                method='POST',
                                                body=urllib.urlencode(
                                                    {'key': key}))
        self.assertEquals(302, response.status_int)
        self.assertEquals(source.bridgy_url(self.handler),
                          response.headers['Location'].split('#')[0])
        params = testutil.get_task_params(
            self.taskqueue_stub.GetTasks('propagate')[0])
        self.assertEqual(key, params['response_key'])

        # status and URLs should be refreshed
        got = resp.key.get()
        self.assertEqual('new', got.status)
        self.assertItemsEqual([
            'http://unsent/', 'http://sent/', 'https://skipped/',
            'http://error/', 'http://failed/', 'https://orig/1',
            'http://orig/2', 'http://orig/3'
        ], got.unsent)
        for field in got.sent, got.skipped, got.error, got.failed:
            self.assertEqual([], field)

        # webmention endpoints for URL domains should be refreshed
        self.assertIsNone(memcache.get('W https skipped /'))

        # shouldn't have refetched h-feed
        self.assertEqual(last_hfeed_refetch,
                         source.key.get().last_hfeed_refetch)
Example #50
0
	def post(self):
		user = users.get_current_user()
		if user is None: 
			self.redirect('/welcome')
			
		else:
			user_id = user.user_id()
			user_ent_key = ndb.Key(Account, user_id)
			pid = int(self.request.get('pid', default_value=None))

			if pid:
				# only if pid is sent, we can search for the project
				projectKey = ndb.Key(Account, user.user_id(), Projects, pid)
				#projectObject = projectKey.get()
				#projectName = projectObject.projectName;
				#isproductive = projectObject.projectProductive
				cur_id = self.request.get('cursor', default_value=None)
				start_cursor = None
				cursor_key = None
				cursor_obj = None

				if cur_id is not None:
					# load more
					cursor_key = ndb.Key('ProjectSearchCursor', int(cur_id), parent = user_ent_key)
					cursor_obj = memcache.get(user_id+'filter-project-cursor')
					if cursor_obj is None:
						cursor_obj = cursor_key.get()
				
					start_cursor = Cursor(urlsafe = cursor_obj.cursor)	

				# Search only if the key exists
				#p = projectKey.get()
				#pname = p.projectName
				#logging.error('------\n-------\n'+pname+'\n-----------')	.filter(Entry.project==projectKey)	
				qry = Entry.query(ancestor = user_ent_key).filter(Entry.project==projectKey).order(-Entry.date)	
				entries, next_cursor, more = qry.fetch_page(5, start_cursor=start_cursor)
				#entries = qry.fetch()
				#next_cursor = None
				#more = False

				#for e in entries:
				#	if e.projectName == pname:
				#		if e.project == projectKey:
				#			logging.info('------\nBoth Matched-------\n'+pname+'\n-----------')	
				#		else:
				#			logging.error('------\nName Matched But not Key-------\n'+pname+'\n'+str(projectKey)+'\n'+str(e.project)+'-----------')		


				if next_cursor is not None:

					if cur_id is not None:
						cursor_obj.cursor = next_cursor.urlsafe()
						cursor_obj.put()
						memcache.set(user_id+'filter-project-cursor', cursor_obj)

					else:	
						all_cursors_query = ndb.gql("SELECT * FROM ProjectSearchCursor WHERE ANCESTOR IS :1", user_ent_key)
						saved_cursor = list(all_cursors_query)
						if(saved_cursor): # not empty - update
							saved_cursor[0].cursor = next_cursor.urlsafe()
							cursor_key = saved_cursor[0].put()
							cur_id = cursor_key.id()
							memcache.set(user_id+'filter-project-cursor', saved_cursor[0], 1800)
							#logging.error('---------\n------Restored saved cursor--\n--------')
						else:	
							cursor_obj = ProjectSearchCursor(parent = user_ent_key, cursor = next_cursor.urlsafe())
							cursor_key = cursor_obj.put()
							cur_id = cursor_key.id()		
							memcache.set(user_id+'filter-project-cursor', cursor_obj, 1800)
						
				# entries is not json serializable because of date object
				results = []
				logging.error('------\n-------\n'+str(len(entries))+'\n-----------')
				for entry in entries:
					entrydict = make_entry_dict(entry)
					# to conform to the dailybox structure
					entrybox = {'entries':[entrydict]}
					entrybox['date'] = entrydict['date']
					results.append(entrybox) 
					
				search_results = {"response":0, "results": results, "more": more, "cursor": cur_id}
				#logging.error('------\n-------\n'+str(search_results)+'\n-----------')

			else:
				logging.error('------\n-------\nNo pid sent\n-----------')
				search_results = {"response":1} # projectId not found	

			self.response.out.write(json.dumps(search_results))		
Example #51
0
 def __getitem__(self, varname):
     try:
         cached = memcache.get(make_key(varname))
     except Exception, exc:
         log.error('Getting %s: %s' % (make_key(varname), exc))
         cached = False
Example #52
0
	def post(self):
		user = users.get_current_user()
		if user is None: 
			self.redirect('/welcome')
			
		else:
			user_id = user.user_id()
			#logging.error('---------\n------'+user_id+'--\n--------')
			user_ent_key = ndb.Key(Account, user_id)

			# this is opaque id of cursor sent from the client
			cursor_id = self.request.get('cursor', default_value=None)
			cursor_obj = None
			cursor_key = None
			start_cursor = None

			if cursor_id:
				cursor_key = ndb.Key('OpaqueCursor', int(cursor_id), parent = user_ent_key)
				cursor_obj = memcache.get(user_id+'ajax-cursor')
				
				if cursor_obj is None:
					#logging.error('---------\n---------Cache Miss------')
					cursor_obj = cursor_key.get()

				start_cursor = Cursor(urlsafe = cursor_obj.cursor)

			#logging.error(self.request.get('cursor', default_value=None))
			qry = Entry.query(ancestor = user_ent_key).order(-Entry.date)	
			entries, next_cursor, more = qry.fetch_page(20, start_cursor=start_cursor)

			if cursor_id and next_cursor:
				cursor_obj.cursor = next_cursor.urlsafe()
				cursor_obj.put()
				memcache.set(user_id+'ajax-cursor', cursor_obj, 1800)
			elif next_cursor:	# refresh
				# check whether any cursor already present
				# otherwise we'll be recreating an OpaqueCursor entity for every refresh
				all_cursors_query = ndb.gql("SELECT * FROM OpaqueCursor WHERE ANCESTOR IS :1", user_ent_key)
				saved_cursor = list(all_cursors_query)
				if(saved_cursor): # not empty - update
					saved_cursor[0].cursor = next_cursor.urlsafe()
					cursor_key = saved_cursor[0].put()
					cursor_id = cursor_key.id()
					#memcache.delete('ajax-cursor')
					memcache.set(user_id+'ajax-cursor', saved_cursor[0], 1800)
					
				else: # first time ever user is saving a cursor -only happens once for a user
					# but may be better to separate from account creation for maintainability
					cursor_key = OpaqueCursor(parent = user_ent_key, cursor = next_cursor.urlsafe()).put()	
					cursor_id = cursor_key.id()

			# Need to restructure the query result by aggregating over the date 
			entries_by_date = [] # top level of json to return
			#t = datetime.date.today() # Sorry! datetime is not json serializable
			#t = t.replace(day = 30, month = 12, year = 1970) # initialization
			t = 'Mon Jun 31 2026' # incorrect date but in correct format
			todays_entries = [] # contains entries of a day
			
			for entry in entries:
				entrydict = make_entry_dict(entry)
				if utils.date_string(entry.date) != t:
					t = utils.date_string(entry.date) # keep track of current entries' date
					todays_entries = []
					daybox = {'date': t, 'entries': todays_entries} # create a new date box and put all relevant entries there
					entries_by_date.append(daybox)

				todays_entries.append(entrydict)
			
			whole_data = {"data": entries_by_date, "cursor": cursor_id, "more": more}
			self.response.out.write(json.dumps(whole_data))	
Example #53
0
def get_example(update=False):
    example = memcache.get(memcache_key)
    if example is None or update:
        example = model.Example.get_all()  # load from the data model
        memcache.set(memcache_key, example)  # update memcache
    return example
Example #54
0
    def get(self):
        user = self.user

        # Try to get rendered output from memcache
        rendered = memcache.get('dashboard-' + user.user_id)
        if rendered and not settings.DEBUG:
            return self.response.out.write(rendered)

        # Fetch following users
        following = user.following_users\
                        .order('name')\
                        .fetch(100)

        user_keys = [user.key()] + [u.key() for u in following]

        # Start async fetch of top recipes
        top_recipes = Recipe.all()\
                            .filter('owner IN', user_keys)\
                            .order('-grade')\
                            .run(limit=15)

        # Get and process interesting events
        interesting_events = UserAction.all()\
                                       .filter('owner IN', user_keys)\
                                       .order('-created')\
                                       .fetch(15)

        object_ids = UserAction.gather_object_ids(interesting_events)
        object_ids['users'] = [
            id for id in object_ids['users']
            if id not in [user.key().id()] + user.following
        ]

        # Start async fetch of relevant recipes
        recipes = db.get_async(
            [Key.from_path('Recipe', id) for id in object_ids['recipes']])

        # Start async fetch of relevant brews
        brews = db.get_async(
            [Key.from_path('Brew', id) for id in object_ids['brews']])

        # Convert iterators to lists of items in memory and set up a map
        # of user id -> user for easy lookups
        following = list(following)
        top_recipes = list(top_recipes)

        user_map = {user.key().id(): user}

        for u in following:
            user_map[u.key().id()] = u

        if object_ids['users']:
            for u in UserPrefs.get_by_id(object_ids['users']):
                user_map[u.key().id()] = u

        # Setup a map of brew id -> brew for easy lookups
        brew_map = {}
        brew_recipe_ids = set()

        for b in brews.get_result():
            brew_recipe_ids.add(b.recipe_key.id())
            brew_map[b.key().id()] = b

        # Async fetch of any recipes brews reference that weren't
        # included in the recipe fetch above...
        brew_recipes = db.get_async([
            Key.from_path('Recipe', id) for id in brew_recipe_ids
            if id not in object_ids['recipes']
        ])

        # Setup a map of recipe id -> recipe for easy lookups
        recipe_map = {}

        for r in recipes.get_result():
            recipe_map[r.key().id()] = r

        for r in brew_recipes.get_result():
            recipe_map[r.key().id()] = r

        # Render and cache for 1 minute
        memcache.set(
            'dashboard-' + user.user_id,
            self.render(
                'dashboard.html', {
                    'following': following,
                    'user_map': user_map,
                    'recipe_map': recipe_map,
                    'brew_map': brew_map,
                    'top_recipes': top_recipes,
                    'interesting_events': interesting_events
                }), self.CACHE_TIME)
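Because the rendered dashboard is cached per user, any action that changes what it shows (following someone, adding a recipe or brew) would normally drop that entry so the next request re-renders it. A minimal sketch of such an invalidation, reusing the cache key from above; the helper name is an assumption:

# Hedged sketch: drop a user's cached dashboard after an action that changes it.
def invalidate_dashboard(user):
    memcache.delete('dashboard-' + user.user_id)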
Example #55
0
def get_data_from_cache(localpath):
  memcache_data = memcache.get(localpath)
  if not memcache_data:
    return None
  logging.debug('content for %s found in memcache' % localpath)
  return json.loads(memcache_data)
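Since the reader above json.loads() whatever it finds under localpath, the write side presumably stores json.dumps()-encoded data under the same key. A minimal sketch of that companion setter; the function name and the one-hour expiry are assumptions:

# Hedged sketch: companion write side for get_data_from_cache() (assumed name and expiry).
def store_data_in_cache(localpath, data, expiry=3600):
  memcache.set(localpath, json.dumps(data), time=expiry)
  logging.debug('content for %s stored in memcache' % localpath)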
Example #56
0
 def getProgress(self):
     return memcache.get(self.report_prog_mckey)
Example #57
0
def parse_item_response(response):
    '''Extract what's needed from the JSON response and use the TF2 item schema to get new data.'''
    # Get item schema from cache
    tf2_item_schema = {
        'items': memcache.get('items'),
        'qualities': memcache.get('qualities'),
        'originNames': memcache.get('origins')
    }

    # Bail out early if an error status code was passed in instead of a response dict
    if response <= 0:
        return response

    # Pull out the pieces we need from the response and the cached schema
    ordered_response = OrderedDict(response)
    items_in_bp = ordered_response['items']['item']
    schema_items = tf2_item_schema['items']
    bp_slots = ordered_response['num_backpack_slots']
    time_written = ctime(
        ordered_response['time_written'])  # Timestamp in ASCII form
    item_qualities = {
        v: k.title()
        for k, v in tf2_item_schema['qualities'].items()
    }  # Reverse dict to make searching easier
    item_origins = {
        each['origin']: each['name']
        for each in tf2_item_schema['originNames']
    }  # Map origin number to name
    req = [
        ('image_url', 'Image'),
        ('name', 'Name'),
        ('level', 'Level'),
        ('defindex', 'Identifier'),
        ('flag_cannot_trade', 'Tradeable?'),
        ('flag_cannot_craft', 'Craftable?'),
        ('quantity', 'Quantity'),
        ('quality', 'Quality'),
        ('origin', 'Origin'),
        ('id', 'ID'),
        ('original_id', 'Original ID'),
        ('custom_name', 'Custom Name'),
        ('custom_desc', 'Custom Description'),
    ]

    # Make a dictionary mapping of item defindex to absolute index in list -> {defindex: index}
    mapping = {
        item['defindex']: index
        for index, item in enumerate(schema_items)
    }

    parsed_items = []  # Item dicts stored here

    # Get required info from each item; use mapping to find each item in JSON response
    for item in items_in_bp:
        current_item = OrderedDict()  # Empty ordered dict for each item

        # Used to find item position in schema through mapping
        current_index = mapping[item['defindex']]

        # Check if each attribute is in either the schema or the item response, and add to current_item if it is
        for pair in req:
            attr = pair[0]
            new_attr = pair[1]
            if attr in item:
                if attr == 'quality':
                    quality = item_qualities[item[attr]]
                    current_item[new_attr] = quality
                elif attr == 'origin':
                    current_item[new_attr] = item_origins[item[attr]]
                elif attr == 'flag_cannot_trade' or attr == 'flag_cannot_craft':
                    current_item[new_attr] = 'No'
                else:
                    current_item[new_attr] = item[attr]
            elif attr in schema_items[current_index]:
                current_item[new_attr] = schema_items[current_index][attr]
            else:
                if attr == 'flag_cannot_trade' or attr == 'flag_cannot_craft':
                    current_item[new_attr] = 'Yes'
                elif attr == 'custom_name' or attr == 'custom_desc':
                    current_item[new_attr] = 'None'

        parsed_items.append(current_item)  # Append each item to parsed_items

    return [parsed_items, bp_slots, time_written]
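parse_item_response() assumes the TF2 item schema is already cached under the 'items', 'qualities', and 'origins' keys. A sketch of that warm-up step; fetch_schema() and the exact layout of the decoded GetSchema payload are assumptions:

# Hedged sketch: populate the memcache keys that parse_item_response() reads.
def cache_item_schema():
    schema = fetch_schema()['result']  # assumed helper returning the decoded GetSchema response
    memcache.set_multi({
        'items': schema['items'],
        'qualities': schema['qualities'],
        'origins': schema['originNames'],
    })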
Example #58
0
 def getDevices(self):
     devices = memcache.get("user_%s_devices" % self.user.user_id())
     if devices is None:
         devices = self.devices.fetch(1000)
         memcache.set("user_%s_devices" % self.user.user_id(), devices)
     return devices
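A per-user device list cached like this goes stale when devices are added or removed, so the write path would normally delete the entry and let the next getDevices() call refetch. A minimal sketch, assuming an addDevice() method on the same class:

 # Hedged sketch: invalidate the cached list when the device set changes (assumed method).
 def addDevice(self, device):
     device.put()
     memcache.delete("user_%s_devices" % self.user.user_id())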
Example #59
0
	def getMyPageLinkMemCache(cls, helper, link_id):
		memcache_key = 'mypagelink?tenant=' + helper._tenant + '&link_id=' + link_id
		return memcache.get(memcache_key)
Example #60
0
    def get(self, type, source_short_name, string_id, *ids):
        source_cls = models.sources.get(source_short_name)
        if not source_cls:
            self.abort(
                400, "Source type '%s' not found. Known sources: %s" %
                (source_short_name, filter(None, models.sources.keys())))

        self.source = source_cls.get_by_id(string_id)
        if not self.source:
            self.abort(
                400, 'Source %s %s not found' % (source_short_name, string_id))
        elif (self.source.status == 'disabled'
              or ('listen' not in self.source.features
                  and 'email' not in self.source.features)):
            self.abort(
                400, 'Source %s is disabled for backfeed' %
                self.source.bridgy_path())

        format = self.request.get('format', 'html')
        if format not in ('html', 'json'):
            self.abort(400,
                       'Invalid format %s, expected html or json' % format)

        for id in ids:
            if not self.VALID_ID.match(id):
                self.abort(404, 'Invalid id %s' % id)

        label = '%s:%s %s %s' % (source_short_name, string_id, type, ids)
        cache_key = 'H ' + label
        obj = memcache.get(cache_key)
        if obj and not appengine_config.DEBUG:
            logging.info('Using cached object for %s', label)
        else:
            logging.info('Fetching %s', label)
            try:
                obj = self.get_item(*ids)
            except models.DisableSource as e:
                self.abort(
                    401,
                    "Bridgy's access to your account has expired. Please visit https://brid.gy/ to refresh it!"
                )
            except ValueError as e:
                self.abort(400,
                           '%s error:\n%s' % (self.source.GR_CLASS.NAME, e))
            except Exception as e:
                # pass through all API HTTP errors if we can identify them
                code, body = util.interpret_http_exception(e)
                # temporary, trying to debug a flaky test failure
                # eg https://circleci.com/gh/snarfed/bridgy/769
                if code:
                    self.response.status_int = int(code)
                    self.response.headers['Content-Type'] = 'text/plain'
                    self.response.write('%s error:\n%s' %
                                        (self.source.GR_CLASS.NAME, body))
                    return
                else:
                    raise
            memcache.set(cache_key, obj, time=CACHE_TIME)

        if not obj:
            self.abort(404, label)

        if self.source.is_blocked(obj):
            self.abort(410, 'That user is currently blocked')

        # use https for profile pictures so we don't cause SSL mixed mode errors
        # when serving over https.
        author = obj.get('author', {})
        image = author.get('image', {})
        url = image.get('url')
        if url:
            image['url'] = util.update_scheme(url, self)

        mf2_json = microformats2.object_to_json(obj, synthesize_content=False)

        # try to include the author's silo profile url
        author = first_props(mf2_json.get('properties', {})).get('author', {})
        author_uid = first_props(author.get('properties', {})).get('uid', '')
        if author_uid:
            parsed = util.parse_tag_uri(author_uid)
            if parsed:
                silo_url = self.source.gr_source.user_url(parsed[1])
                urls = author.get('properties', {}).setdefault('url', [])
                if silo_url not in microformats2.get_string_urls(urls):
                    urls.append(silo_url)

        # write the response!
        self.response.headers['Access-Control-Allow-Origin'] = '*'
        if format == 'html':
            self.response.headers['Content-Type'] = 'text/html; charset=utf-8'
            url = obj.get('url', '')
            self.response.out.write(
                TEMPLATE.substitute({
                    'refresh':
                    (('<meta http-equiv="refresh" content="0;url=%s">' %
                      url) if url else ''),
                    'url':
                    url,
                    'body':
                    microformats2.json_to_html(mf2_json),
                    'title':
                    self.get_title(obj),
                }))
        elif format == 'json':
            self.response.headers[
                'Content-Type'] = 'application/json; charset=utf-8'
            self.response.out.write(json.dumps(mf2_json, indent=2))