def post(self):
    if users.is_current_user_admin():
        site = memcache.get("site")
        if site is None:
            site = Site.all().get()
            memcache.add("site", site)
        if not site:
            site = Site()
        title = self.request.get('title').strip()
        author = self.request.get('author').strip()
        email = self.request.get('email').strip()
        url = self.request.get('url').strip().rstrip("/")
        description = self.request.get('description').strip()
        if (title and author and email and
                re.compile(r"(?:^|\s)[-a-z0-9_.]+@(?:[-a-z0-9]+\.)+[a-z]{2,6}(?:\s|$)",
                           re.IGNORECASE).match(email) and
                url and
                re.compile(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+').match(url)):
            site.title = title
            site.author = author
            site.email = email
            site.url = url
            site.description = description
            site.put()
            self.redirect('/')
        else:
            self.redirect('/config')
    else:
        self.redirect('/config')
def get(self):
    url = self.request.get('url')
    url = urllib.unquote(url)
    # only allow urls that start with prefixes defined in URL_PREFIXES to be used
    if not self.isUrlAllowed(url):
        self.response.out.write("The URL passed can not be proxied due to security reasons.")
        return
    memcacheKey = getMemcacheKey(url)
    # Use memcache to store the request for CACHE_TIME
    proxiedContent = memcache.get(memcacheKey)
    proxiedContentInMemcache = True
    if proxiedContent is None:
        proxiedContentInMemcache = False
        try:
            response = urlfetch.fetch(url)
        except (urlfetch.Error, apiproxy_errors.Error):
            return self.error(404)
        proxiedContent = response.content
    if proxiedContent is None:
        return self.error(404)
    # Add the fetched content to memcache
    if not proxiedContentInMemcache:
        memcache.add(memcacheKey, proxiedContent, CACHE_TIME)
    self.response.out.write(proxiedContent)
def get_all_places(cls):
    """Get them all."""
    all_places = memcache.get(ALL_PLACES_LOCATION_KEY)
    if all_places is None:
        all_places = cls.all()
        memcache.add(ALL_PLACES_LOCATION_KEY, all_places)
    return all_places
def search(self, **searchparams):
    # note from the original (translated from Hungarian): this caching really
    # does not belong here
    results = memcache.get(urlencode(searchparams))
    if results is not None:
        return results
    else:
        searchurl = "http://iwiw.hu/search/pages/user/ajaxsearch.jsp?do=AdvancedSearch&page=0&"
        iwiwsearch = urlfetch.fetch(searchurl + urlencode(searchparams),
                                    headers={'Cookie': self.logincookie}).content
        leves = BeautifulSoup(iwiwsearch)
        cuccok = []
        for kartya in leves.findAll("div", "cardContainer"):
            nev = kartya.findAll("a")[1].string.strip()
            name = nev.split("[")[0]
            try:
                nick = re.search(r"\[(?P<nick>.*)\]", nev).group(1)
            except AttributeError:
                nick = ""
            profile_url = kartya.findAll("a")[1]["href"]
            try:
                pic_popup_url = kartya.find("a", "user_image")["onclick"].split("'")[1]
            except KeyError:
                pic_popup_url = ""
            try:
                pic_thumbnail = kartya.find("a", "user_image").img["src"]
            except KeyError:
                pic_thumbnail = ""
            try:
                city = kartya.find("div", "city").string.strip()
            except AttributeError:
                city = ""
            tutu = {"name": name, "nick": nick, "profile_url": profile_url,
                    "pic_popup_url": pic_popup_url, "pic_thumbnail": pic_thumbnail,
                    "city": city}
            cuccok.append(tutu)
        memcache.add(urlencode(searchparams), cuccok)
        return cuccok
def get_service_account_key_json(self, user_info): """ Get a service account key json to access objects protected by fence implementation: first see if there is a service account for the user id in memcache, if so, return it else, lookup who the user really is in sam and see if there is a service account for the real user in data store if so, put it in memcache under the passed in user and return it else, initiate a lock for the real user, fetch a service account from fence, put it in datastore, put it in memcache :param user_info: :return: fence service account key_json """ key_json = memcache.get(namespace=self.provider_name, key=user_info.id) if key_json is None: real_user_info = self._fetch_real_user_info(user_info) fsa_key = self._fence_service_account_key(real_user_info[SamKeys.USER_ID_KEY]) fence_service_account = fsa_key.get() now = datetime.datetime.now() if fence_service_account is None or \ fence_service_account.expires_at is None or \ fence_service_account.expires_at < now: fence_service_account = self._fetch_service_account(real_user_info, fsa_key) key_json = fence_service_account.key_json seconds_to_expire = (fence_service_account.expires_at - now).total_seconds() memcache.add(namespace=self.provider_name, key=user_info.id, value=key_json, time=seconds_to_expire) return key_json
def get(cls):
    key = 'PetType'
    data = memcache.get(key)
    if data is None:
        data = cls.query().fetch()
        memcache.add(key, data, 3600)
    return [row.to_dict() for row in data]
def resrobot(query):
    json = memcache.get('resrobot_' + query)
    if json is None:
        key = 'UacUcP0MlG9fZ0j82r1k5he6KXQ6koSS'
        url = 'https://api.trafiklab.se/samtrafiken/resrobot/FindLocation.json'
        url += '?key=' + key + '&from=' + urllib2.quote(query) + '&coordSys=RT90&apiVersion=2.1'
        print url
        response = urllib2.urlopen(url)
        the_page = response.read()
        jsondata = loads(the_page)
        stations = jsondata['findlocationresult']['from']['location']
        data = []
        if not isinstance(stations, list):
            stat = {}
            stat['id'] = stations['locationid']
            stat['name'] = stations['displayname']
            data.append(stat)
        else:
            for s in stations:
                stat = {}
                stat['id'] = s['locationid']
                stat['name'] = s['displayname']
                data.append(stat)
        json = loads('{"result":' + JSONEncoder().encode(data) + '}')
        memcache.add('resrobot_' + query, json)
    return json
def __init__(self):
    self.sid = None
    self.key = None
    self.session = None
    string_cookie = os.environ.get('HTTP_COOKIE', '')
    self.cookie = Cookie.SimpleCookie()
    self.cookie.load(string_cookie)
    # check for existing cookie
    if self.cookie.get(COOKIE_NAME):
        self.sid = self.cookie[COOKIE_NAME].value
        self.key = "session-" + self.sid
        self.session = memcache.get(self.key)
        if self.session is None:
            logging.info("Invalidating session " + self.sid)
            self.sid = None
            self.key = None
    if self.session is None:
        self.sid = str(random.random())[5:] + str(random.random())[5:]
        self.key = "session-" + self.sid
        logging.info("Creating session " + self.key)
        self.session = dict()
        memcache.add(self.key, self.session, 3600)
        self.cookie[COOKIE_NAME] = self.sid
        self.cookie[COOKIE_NAME]['path'] = DEFAULT_COOKIE_PATH
    # Send the Cookie header to the browser
    print self.cookie
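# A hedged usage sketch for the cookie-backed session __init__ above. The
# enclosing class name (Session here) is an assumption for illustration; the
# "session-<sid>" memcache key layout is taken from the code.
def example_session_usage():
    session = Session()  # loads an existing session from memcache or creates a new one
    session.session['visits'] = session.session.get('visits', 0) + 1
    # __init__ only memcache.add()s the initial empty dict, so changes must be
    # written back explicitly for other requests to see them.
    memcache.set(session.key, session.session, 3600)
    return session.session['visits']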
def _handle_login(self, username=None, password=None, model=None):
    "generate auth token and register session."
    if (username is None and password is None) and model is None:
        self.api_fail()
        return
    # Generate session token
    teacher_token = uuid.uuid1().bytes.encode('base64')\
        .rstrip('=\n').replace('/', '_').replace('+', "P")
    if model is not None:
        teacher_dict = model.get_dict()
    else:
        # The credential lookup here was redacted in the source ("******");
        # the assumed shape: look the teacher up by username/password and fail
        # the API call if nothing matches.
        teacher = TeacherModel.all()\
            .filter("username =", username)\
            .filter("password =", password)\
            .get()
        if teacher is None:
            self.api_fail()
            return
        teacher_dict = teacher.get_dict()
    # Cache the session under the generated token; the "teachers" namespace is
    # inferred from the redacted fragment.
    memcache.add(teacher_token, teacher_dict, namespace="teachers")
    resp = {'api': 'ok', 'teacher': teacher_dict, 'token': teacher_token}
    self.response.out.write(json.dumps(resp))
def auth_login_with_third_party(self, auth_id=None, login_id=None, remember=False, **kwargs):
    """Called to authenticate the user after a third party confirmed authentication.

    :param login_id:
        Authentication id, generally a combination of service name and user
        identifier for the service, e.g.: 'twitter:john'.
    :param remember:
        True if authentication should be persisted even if user leaves the
        current session (the "remember me" feature).
    :return:
        ``None``. This always authenticates the user.
    """
    # Load user entity.
    user = self.auth_get_user_entity(auth_id=auth_id, login_id=login_id)
    if user:
        # Set current user from datastore.
        self.auth_set_session(user.auth_id, user.session_id, remember)
    else:
        # Simply set a session; user will be created later if required.
        # Put args in memcache.
        data = memcache.get(login_id)
        if data is None:
            memcache.add(login_id, kwargs, 3600)
        else:
            memcache.set(login_id, kwargs, 3600)
        self.auth_set_session(auth_id, remember=remember, login_id=login_id)
    return user
def fetch_trains(place_from, place_to, date):
    key = 'trains_' + place_from + '_' + place_to + '_' + str(date)
    data = memcache.get(key)  #@UndefinedVariable
    if data is not None:
        return data
    params = {'fromName': place_from,
              'toName': place_to,
              'when': utils.date_serialize(date),
              'search_type': 'suburban'}
    url = 'http://m.rasp.yandex.ru/search?' + urllib.urlencode(params)
    response = urlfetch.fetch(url)
    html = response.content
    soup = BeautifulSoup(html)
    list_node = soup.find("ul", {"class": "b-holster b-search-result"})
    if list_node is not None:
        regex = re.compile(r'<.*?>')
        b_nodes = list_node.findAll("b")
        result = []
        for b_node in b_nodes:
            data = regex.split(b_node.renderContents())
            try:
                time = [datetime.datetime.strptime(x, '%H:%M').time() for x in data]
                result.append(TrainTiming(time[0], time[1]))
            except:
                pass
        memcache.add(key, result, 60 * 60)  #@UndefinedVariable
        return result
def get_doc(id): if googleUser.is_current_user_admin(): memcache.delete(id) entry = memcache.get(id) if not entry: client = gdata.docs.client.DocsClient(source='yourCo-yourAppName-v1') client.ssl = True # Force all API requests through HTTPS client.http_client.debug = True # Set to True for debugging HTTP requests client.ClientLogin(settings.DOCS_EMAIL, settings.DOCS_PASS, client.source) entry = client.GetFileContent( '/feeds/download/documents/Export?id=%s&format=html' % id) memcache.add(id, entry) html = BeautifulStoneSoup(entry, convertEntities=BeautifulStoneSoup.HTML_ENTITIES) body = html.body.renderContents() style = html.style.prettify() return { 'entry': entry, 'title': html.head.title.text, 'html': html, 'body': body.replace('http:///', '/'), 'style': style, 'id': id, }
def get(session):
    '''
    Uses memcache or the datastore to fetch a single setting by wave_id,
    wavelet_id and email. Ideally this should fetch session only if needed
    but this wouldn't work with the appengine transaction framework.

    @transaction_safe

    @param session: the parent session object
    @return the setting using the most efficient means possible or None if
            it couldn't be found
    '''
    if not session:
        return None
    key = base64.b64encode(memcacheConfig.PREFIX['SETTINGS'] +
                           session.wave_id +
                           session.wavelet_id +
                           session.email)
    setting = memcache.get(key)
    if setting is not None:
        return setting
    else:
        query = Settings.all()
        query.ancestor(session)
        setting = query.get()
        memcache.add(key, setting, time=memcacheConfig.DEFAULT_EXPIRE_SECS)
        return setting
def get(self): site = memcache.get("site") if site is None: site = Site.all().get() memcache.add("site", site) if not site: self.redirect('/config') else: site.url = site.url.rstrip("/") posts = memcache.get("posts") if posts is None: query = Post.gql('ORDER BY time DESC, __key__ ASC') posts = query.fetch(PAGESIZE) for item in posts: item.content = re.sub(ur'<code(?P<index>.*)>(?P<content>[\s\S]*)</code(?P=index)>', lambda m: '<code>' + cgi.escape(m.group('content')) + '</code>', item.content) item.time += timedelta(hours=+8) template_values = { 'site': site, 'posts': posts } path = os.path.join(os.path.dirname(__file__), 'template/feed.xml') self.response.headers['Content-type'] = 'application/xml;charset=UTF-8' self.response.out.write(template.render(path, template_values))
def get(self): key = self.request.get('key') site = memcache.get("site") if site is None: site = Site.all().get() memcache.add("site", site) if not site: self.redirect('/config') else: if users.is_current_user_admin() and key: try: comment = Comment.all().order("-__key__").filter('__key__ =', db.Key(key)).get() if comment: template_values = { 'site': site, 'comment': comment, 'user': users.get_current_user(), 'admin': True, 'login': users.create_login_url(self.request.uri), 'logout': users.create_logout_url(self.request.uri) } if ismobile(self): template_values['mobile'] = True path = os.path.join(os.path.dirname(__file__), 'template/editcomment.html') self.response.out.write(template.render(path, template_values)) else: self.redirect('/') except: self.redirect('/') else: self.redirect('/')
def get(self): site = memcache.get("site") if site is None: site = Site.all().get() memcache.add("site", site) if not site: self.redirect('/config') else: if users.is_current_user_admin(): template_values = { 'site': site, 'user': users.get_current_user(), 'login': users.create_login_url(self.request.uri), 'logout': users.create_logout_url(self.request.uri), 'admin': True, 'new': True } if ismobile(self): template_values['mobile'] = True path = os.path.join(os.path.dirname(__file__), 'template/newpost.html') self.response.out.write(template.render(path, template_values)) else: self.redirect('/')
def get(self): site = memcache.get("site") if site is None: site = Site.all().get() memcache.add("site", site) if not site: self.redirect('/config') else: template_values = { 'site': site, 'page': 'about', 'recentcomments': getrecentcomments(PAGESIZE) } user = users.get_current_user() if user: template_values['logout'] = users.create_logout_url(self.request.uri) template_values['user'] = users.get_current_user() if users.is_current_user_admin(): template_values['admin'] = True else: template_values['login'] = users.create_login_url(self.request.uri) if ismobile(self): template_values['mobile'] = True path = os.path.join(os.path.dirname(__file__), 'template/about.html') self.response.out.write(template.render(path, template_values))
def get(self): name = self.request.get('nickname') cacheKey = 'interests' + name userinfo = memcache.get(cacheKey) if userinfo is not None: template = jinja_environment.get_template('/viewUser.html') self.response.out.write(template.render(json.loads(userinfo))) return q = AppUser.query(AppUser.nickname==name) logging.info(q) if q.count() > 0: logging.info("q count is larger than 0"); u = q.get() user_info = { 'nickname': u.nickname, 'musics' : u.musics, 'movies' : u.movies, 'games' : u.games, 'sports' : u.sports } info_str = json.dumps(user_info, separators=(',', ':')) memcache.add(cacheKey, info_str, 1800) template = jinja_environment.get_template('/viewUser.html') self.response.out.write(template.render(user_info)) else: self.response.write('error')
def userHandlerGet(name):
    cacheKey = 'interests' + name
    interests = memcache.get(cacheKey)
    retVal = ""
    if interests is not None:
        retVal = interests
    else:
        q = AppUser.query(AppUser.nickname == name)
        if q.count() > 0:
            logging.info("q count is larger than 0")
            u = q.get()
            user_info = {
                "nickname": u.nickname,
                "musics": u.musics,
                "movies": u.movies,
                "games": u.games,
                "sports": u.sports
            }
            info_str = json.dumps(user_info, separators=(',', ':'))
            memcache.add(cacheKey, info_str, 1800)
            retVal = info_str
        else:
            retVal = 'error'
    return retVal
def deurlify(environment, url):
    value = memcache.get(url)
    if value is None:
        value = url.replace("_", " ")
        memcache.add(url, value, 25920000)
        memcache.add(value, url, 25920000)
    return value
def getPicks(event_id):
    picks = memcache.get('picks' + event_id)
    if not picks:
        picks_query = Pick.all().ancestor(event_key(event_id)).order('pick_no')
        picks = picks_query.fetch(25)
        memcache.add('picks' + event_id, picks)
    return picks
def get(self): self.response.headers["Content-Type"] = "application/json; charset=utf-8" cache = memcache.get("dashboard") if cache: self.response.out.write(cache) return webkitTrunk = Branch.get_by_key_name("webkit-trunk") # FIXME: Determine popular branches, platforms, and tests dashboard = { "defaultBranch": "WebKit trunk", "branchToId": {webkitTrunk.name: webkitTrunk.id}, "platformToId": {}, "testToId": {}, } for platform in Platform.all(): dashboard["platformToId"][platform.name] = platform.id for test in Test.all(): dashboard["testToId"][test.name] = test.id result = json.dumps(dashboard) self.response.out.write(result) memcache.add("dashboard", result)
def getConversionActs(self): acts = [] cacheKey = "wrookMemberConversionActs-%s" % self.key() acts = memcache.get(cacheKey) if acts == None: acts = [] """ acts.append(MemberConversionAct( "uploadedProfilePhoto", _("Upload a profile photo"), _("You uploaded a profile photo"), 1, (self.ProfilePhoto != None))) acts.append(MemberConversionAct( "startedReadingABook", _("Start reading a book"), _("You started reading a book"), 1, (len(self.Bookmarks.fetch(1))>0))) acts.append(MemberConversionAct( "wroteAboutHimself", _("Say a few words about yourself in your profile"), _("You said a few things about yourself"), 1, (self.About != None))) acts.append(MemberConversionAct( "inviteSomeone", _("Invite a friend to join Wrook"), _("You invited a friend to join Wrook"), 1, (len(self.SentInvites.fetch(1))>0))) """ memcache.add(cacheKey, acts) return acts
def get(self, username, album_name): feed_photos = self.getPhotoFeed(username,album_name) key_albums = "albums_"+ username try: feed_albums = memcache.get(key_albums) except Exception: feed_albums = None if not feed_albums: gd_client = gdata.photos.service.PhotosService() feed_albums = gd_client.GetUserFeed(user=username) memcache.add(key=key_albums, value=feed_albums, time=3600) album = None for album_ in feed_albums.entry: if album_.name.text == album_name: album = album_ break template_values = { 'photos': feed_photos.entry, 'album': album, 'username':username, 'album_name':album_name, } self.generate('album_view.html',template_values)
def get_form_token(request):
    token = FormToken()
    token.ip = request.META.get('REMOTE_ADDR')
    token.time = time.time()
    key = common_utils.get_random_identifier(12, 'FTok')
    memcache.add(key, token, WEB_EXPIRATION_MILLISECONDS)
    return key
def saveReview(self, username, date, review, movie): f = Review(username = username, date = date, review = review, movie = movie) f.put() #add the username/date/review/movie to the database randomNumber = random.randint(1,100000) cached_reviews_key = username + "_" + str(randomNumber) #create a key for the review cached_reviews_value = [username, date, review, movie] cached_allReviews_dict = memcache.get("allReviews") if cached_allReviews_dict is not None: #if this dictionary exists cached_reviews_dict = cached_allReviews_dict.get(username) #get the cached dictionary of this user's reviews if cached_reviews_dict is not None: #if this dictionary exists cached_reviews_dict[cached_reviews_key] = cached_reviews_value #add the key/value pair to the dictionary cached_allReviews_dict[username] = cached_reviews_dict memcache.set("allReviews", cached_allReviews_dict) self.redirect("/") else: #if this is the user's first review cached_reviews_dict = {} cached_reviews_dict[cached_reviews_key] = cached_reviews_value cached_allReviews_dict[username] = cached_reviews_dict memcache.set("allReviews", cached_allReviews_dict) self.redirect("/") else: #creating cached_allReviews_dict for first time cached_reviews_dict = {} cached_reviews_dict[cached_reviews_key] = cached_reviews_value cached_allReviews_dict = {} cached_allReviews_dict[username] = cached_reviews_dict memcache.add("allReviews", cached_allReviews_dict) self.redirect("/")
def saveFavorite(self, username, favoriteMovie, favoriteGenre): f = Favorite(username = username, movie = favoriteMovie, genre = favoriteGenre) f.put() #add the username/movie/genre to the database randomNumber = random.randint(1,100000) cached_favorite_key = username + "_" + str(randomNumber) #create a key for the favorite cached_favorite_value = [username, favoriteMovie, favoriteGenre] cached_allFavorites_dict = memcache.get("allFavorites") if cached_allFavorites_dict is not None: #if this dictionary exists cached_favorites_dict = cached_allFavorites_dict.get(username) #get the cached dictionary of this user's favorites if cached_favorites_dict is not None: #if this dictionary exists cached_favorites_dict[cached_favorite_key] = cached_favorite_value #add the key/value pair to the dictionary cached_allFavorites_dict[username] = cached_favorites_dict memcache.set("allFavorites", cached_allFavorites_dict) self.redirect("/") else: #if this is the user's first favorite cached_favorites_dict = {} cached_favorites_dict[cached_favorite_key] = cached_favorite_value cached_allFavorites_dict[username] = cached_favorites_dict memcache.set("allFavorites", cached_allFavorites_dict) self.redirect("/") else: #creating cached_allFavorites_dict for first time cached_favorites_dict = {} cached_favorites_dict[cached_favorite_key] = cached_favorite_value cached_allFavorites_dict = {} cached_allFavorites_dict[username] = cached_favorites_dict memcache.add("allFavorites", cached_allFavorites_dict) self.redirect("/")
def fetch_and_cache(self, mirror_url): if url_tools.is_absolute_url(mirror_url): self.record_last_host = url_tools.get_host_from_url(mirror_url) else: mirror_url = url_tools.join(HTTP_PREFIX, self.record_last_host, mirror_url) host_name = url_tools.get_host_from_url(mirror_url) # http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html 8.1.3 Proxy Servers adjusted_headers = dict(self.request.headers) adjusted_headers['Connection'] = 'close' logging.debug("request headers '%s' of url: '%s'", url_tools.dict_to_s(adjusted_headers), mirror_url) try: # fetch the requested url for attempt in range(FETCH_ATTEMPTS): response = urlfetch.fetch(mirror_url, self.request.body, self.method, adjusted_headers) logging.info('url fetch attempt %d for "%s" successful', attempt + 1, mirror_url) break except urlfetch.Error: exception_type = sys.exc_info()[0] logging.error('url fetch exception "%s" for "%s"', str(exception_type), mirror_url) return None transform_response = transform.ResponseTransformer(mirror_url, response) # cache the transformed entity and return mirror_content = MirrorEntity(mirror_url, host_name, transform_response.status_code, transform_response.headers, transform_response.content) memcache.add(mirror_url, mirror_content, config.EXPIRATION_RATE_S) return mirror_content
def get_access_token(scopes, service_account_id=None):
    """OAuth2 access token to act on behalf of the application, cached.

    Generates and caches an OAuth2 access token for the service account for
    the appengine application. Each application has an associated Google
    account. This function returns the OAuth2 access token corresponding to
    the running app. Access tokens are safe to cache and reuse until their
    expiry time as returned. This method will do that using memcache.

    Args:
      scopes: The requested API scope string, or a list of strings.

    Returns:
      Pair, access token (string) and expiration time (seconds since the epoch).
    """
    memcache_key = _MEMCACHE_KEY_PREFIX + str(scopes)
    if service_account_id:
        memcache_key += ',%s' % service_account_id
    memcache_value = memcache.get(memcache_key, namespace=_MEMCACHE_NAMESPACE)
    if memcache_value:
        access_token, expires_at = memcache_value
    else:
        access_token, expires_at = get_access_token_uncached(
            scopes, service_account_id=service_account_id)
        memcache.add(memcache_key, (access_token, expires_at),
                     expires_at - 300, namespace=_MEMCACHE_NAMESPACE)
    return access_token, expires_at
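# A hedged usage sketch for the cached token helper above: the scope URL and
# the way the token is attached to a request are illustrative only, not taken
# from this module.
def example_fetch_with_app_token():
    scope = 'https://www.googleapis.com/auth/devstorage.read_only'
    access_token, expires_at = get_access_token(scope)
    # Pass the app's service-account token as a standard OAuth2 bearer header.
    result = urlfetch.fetch(
        'https://storage.googleapis.com/some-bucket/some-object',
        headers={'Authorization': 'Bearer ' + access_token})
    return result.status_code, expires_at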
def post(self): url = "https://raw.githubusercontent.com/avelino/vim-bootstrap/master/" langs = {"bundle": {}, "vim": {}} for l in self.request.POST.getall('langs'): data = memcache.get('vim-{}'.format(l)) if not data: langs["bundle"][l] = requests.get( "{0}vim_template/langs/{1}/{1}.bundle".format(url, l)).text langs["vim"][l] = requests.get( "{0}vim_template/langs/{1}/{1}.vim".format(url, l)).text memcache.add('vim-{}'.format(l), {'vim': langs['vim'][l], 'bundle': langs['bundle'][l]}, 3600) else: langs["bundle"][l] = data['bundle'] langs["vim"][l] = data['vim'] template = Template( requests.get("{}vim_template/vimrc".format(url)).text) self.response.headers['Content-Type'] = 'application/text' self.response.headers['Content-Disposition'] = "attachment; " self.response.headers['Content-Disposition'] += "filename=.vimrc" self.response.out.write(template.render(**langs))
def Create(self, track, uid1, uid2, name, name2, image, image2, are_they_friend): """ Parameters track - id of the race track uid1 - user id for player 1, could be fbid or uuid uid2 - user id for player 2, could be fbid or uuid """ challenge = None challenges = Challenge.all()\ .filter('uid1 =', uid1) \ .filter('uid2 =', uid2) \ .ancestor(db.Key.from_path('Challenge',config.db['challengedb_name'])) \ .fetch(1) if len(challenges) > 0: challenge = challenges[0] else: challenges = Challenge.all()\ .filter('uid1 =', uid2) \ .filter('uid2 =', uid1) \ .ancestor(db.Key.from_path('Challenge',config.db['challengedb_name'])) \ .fetch(1) if len(challenges) > 0: challenge = challenges[0] if challenge is None: challenge = Challenge(parent=db.Key.from_path( 'Challenge', config.db['challengedb_name'])) challenge.id = Utils.genanyid(self, 'c') if challenge is not None: challenge.track = track challenge.uid1 = uid1 challenge.uid2 = uid2 challenge.state = CHALLENGE_TYPE.OPEN_GAME challenge.data = '{' challenge.data += '"player1":{"player": {"id":"' + uid1 + '"},"name":"' + name + '", "image":"' + image + '"},' challenge.data += '"player2":{"player": {"id":"' + uid2 + '"},"name":"' + name2 + '", "image":"' + image2 + '"},' challenge.data += '"friend":' + str(are_they_friend).lower() + ',' challenge.data += '"result":{"winner":"pending","player1_seen":false,"player2_seen":false}' challenge.data += '}' if challenge.put(): if not memcache.add( config.db['challengedb_name'] + '.' + challenge.id, challenge, config.memcache['holdtime']): logging.warning( 'Challenge - Set memcache for challenge by Id failed!') return challenge
def ManufacturerPidCount(self):
    """Return the number of manufacturer PIDs."""
    manufacturer_pids = memcache.get(memcache_keys.MANUFACTURER_PID_COUNT_KEY)
    if manufacturer_pids is None:
        manufacturer_pids = 0
        for pid in Pid.all():
            if pid.manufacturer.esta_id != self.ESTA_ID:
                manufacturer_pids += 1
        if not memcache.add(memcache_keys.MANUFACTURER_PID_COUNT_KEY,
                            manufacturer_pids):
            logging.error("Memcache set failed.")
    return manufacturer_pids
def post(self):
    self.response.headers['Content-Type'] = 'application/json'
    customer = Customer.get_by_email(users.get_current_user().email())
    success = False
    if customer.verified:
        success = True
    else:
        verification_code = self.request.get('verification_code')
        if customer.verification_hash == verification_code.strip().upper():
            customer.verified = True
            customer.put()
            memcache.add(customer.phone_number, customer, 60 * 60)
            success = True
    to_return = {
        'verified': success,
    }
    self.response.out.write(json.dumps(to_return))
def main():
    # Check if defaults have been installed
    installed_defaults = memcache.get("installed_defaults")
    if installed_defaults is None:
        installed_defaults = Setting.all().filter('name = ', 'installed_defaults').get()
        if installed_defaults is None:
            logging.info("Installing default statuses")
            Status.install_defaults()
        if not memcache.add("installed_defaults", True):
            logging.error("Memcache set failed.")
    application = webapp.WSGIApplication(ROUTES, debug=config.DEBUG)
    wsgiref.handlers.CGIHandler().run(application)
def getTopDiputadosIniciativas(self, limit, tipo):
    key = "top_%s_%d" % (tipo, limit)
    results = memcache.get(key)
    if results is None:
        q = Diputado.all()
        q.order("-%s" % tipo)
        results = q.fetch(limit)
        if not memcache.add(key, results):
            logging.error("Memcache set failed.")
    else:
        logging.info("using cache in gettop")
    return results
def get_data(user):
    """Get data from the datastore only if we don't have it cached."""
    key = user + "_photos"
    data = memcache.get(key)
    if data is not None:
        logging.info("Found in cache")
        return data
    else:
        logging.info("Cache miss")
        ancestor_key = ndb.Key("User", user)
        data = Photo.query_user(ancestor_key).fetch(100)
        if not memcache.add(key, data, 3600):
            logging.info("Memcache failed")
        return data
def xsrf_secret_key():
    """Return the secret key for use for XSRF protection.

    If the Site entity does not have a secret key, this method will also
    create one and persist it.

    Returns:
      The secret key.
    """
    secret = memcache.get(XSRF_MEMCACHE_ID, namespace=oauth2clientpatch_NAMESPACE)
    if not secret:
        # Load the one and only instance of SiteXsrfSecretKey.
        model = SiteXsrfSecretKey.get_or_insert(key_name='site')
        if not model.secret:
            model.secret = _generate_new_xsrf_secret_key()
            model.put()
        secret = model.secret
        memcache.add(XSRF_MEMCACHE_ID, secret,
                     namespace=oauth2clientpatch_NAMESPACE)
    return str(secret)
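# A hedged sketch of how the cached site secret above might be used to mint an
# XSRF token. The HMAC construction and the make_xsrf_token() name are
# illustrative only, not part of the patched oauth2client module.
import hmac
import hashlib
import time

def make_xsrf_token(user_id, action):
    issued = str(int(time.time()))
    msg = '%s:%s:%s' % (user_id, action, issued)
    # Sign user, action and issue time with the site-wide secret.
    sig = hmac.new(xsrf_secret_key(), msg, hashlib.sha256).hexdigest()
    return '%s:%s' % (sig, issued)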
def season(self, request):
    theCache = memcache.get('seasons')
    if theCache is None:
        t = Team()
        seasons = []
        for team in t.getSeasons():
            season = Season(season=team.season)
            if season not in seasons:
                seasons.append(season)
        if not memcache.add('seasons', seasons):
            logging.error('memcache failed to set')
        return SeasonResponse(seasons=seasons)
    else:
        return SeasonResponse(seasons=theCache)
def download_small(request, key, name):
    """Serve the small image from the DB, using the key and file name taken
    from the request URL."""
    file = memcache.get("small_" + key)
    if file is not None:
        current_time = datetime.datetime.utcnow()
        response = HttpResponse()
        last_modified = current_time - datetime.timedelta(days=1)
        response['Content-Type'] = 'image/jpg'
        response['Last-Modified'] = last_modified.strftime('%a, %d %b %Y %H:%M:%S GMT')
        response['Expires'] = current_time + datetime.timedelta(days=30)
        response['Cache-Control'] = 'public, max-age=315360000'
        response['Date'] = current_time
        response.content = file
        return response
    else:
        file = Picture.get(db.Key(key))
        # Cache the small variant, since that is what this view serves.
        memcache.add("small_" + key, file.data_small)
        if file.name != name:
            raise Http404('Could not find file with this name!')
        return HttpResponse(file.data_small, content_type='image/png', mimetype='image/png')
def get_modtime(config, name):
    """Retrieve the value for a given sharded maximum.

    Args:
      name: The name of the sharded maximum.

    Returns:
      Tuple (mtime_sec, mtime_nsec) taken from the maximum of all sharded
      modtimes for the given name.
    """
    mm = memcache.get(name)
    if mm is None:
        all_keys = ShardConfig.all_keys_config(config, name)
        for value in ndb.get_multi(all_keys):
            if value is not None:
                if mm is None:
                    mm = value
                else:
                    mm = modtime_max(mm, value)
        memcache.add(name, mm, 60)
    return (mm.mtime_sec, mm.mtime_nsec)
def render_template_with_csrf(self, view_filename, params=None):
    if not params:
        params = {}
    cookie_law = self.request.cookies.get("cookie_law")
    if cookie_law:
        params["cookies"] = True
    user = users.get_current_user()
    if user:
        if users.is_current_user_admin():
            user.admin = True
        params["user"] = user
        params["logout_url"] = users.create_logout_url('/')
    else:
        params["login_url"] = users.create_login_url('/')
    csrf_token = str(uuid.uuid4())  # convert UUID to string
    memcache.add(key=csrf_token, value=True, time=600)
    params["csrf_token"] = csrf_token
    template = jinja_env.get_template(view_filename)
    return self.response.out.write(template.render(params))
def _GetFromAPI(self, key):
    memcache_key = '____/%s/%s/samples.json' % (self._channel, key)
    cached = memcache.get(memcache_key)
    if cached is not None:
        logging.info('Serving samples of %s from memcache' % key)
        return cached.get('data')
    else:
        logging.info('Fetching samples of %s' % key)
        url = ('https://' + url_constants.SLAVE_SAMPLES_APP_ID + '.appspot.com/' +
               self._channel + '/' + url_constants.SLAVE_SAMPLES_API_BASE_URL + key)
        logging.info('Fetching %s' % url)
        result = urlfetch.fetch(url)
        logging.info('Result code of the fetch: %s' % result.status_code)
        if result.status_code == 200:
            cached = {
                'data': json.loads(result.content),
            }
            memcache.add(key=memcache_key, value=cached, time=60 * 60 * 12)
            return cached.get('data')
        else:
            return []
def get_by_name(cls, product_name):
    """Get review entities by the product name."""
    keys = "product_name|{}".format(product_name)
    # Look the reviews up in memcache first.
    reviews = memcache.get(keys)
    # If they are cached, return them directly.
    if reviews is not None:
        return reviews
    # Otherwise query the datastore, cache the result, and return it.
    else:
        reviews = Review.query(Review.product_name == product_name).fetch(10)
        if not memcache.add(keys, reviews):
            logging.error('Memcache set failed.')
        return reviews
def render_template(self, view_filename, params=None):
    if not params:
        params = {}
    if self.request.cookies.get("sprejel-piskotek") == "DA":
        params["piskotek"] = True
    uporabnik = users.get_current_user()
    if uporabnik:
        # if uporabnik.is_current_user_admin():
        #     uporabnik.admin = True
        # params["uporabnik"] = uporabnik
        params["url_odjave"] = users.create_logout_url("/")
    else:
        params["url_prijave"] = users.create_login_url("/")
    params["uporabnik"] = uporabnik
    params["csrf_zeton"] = str(uuid.uuid4())
    memcache.add(params["csrf_zeton"], True, 60 * 10)
    template = jinja_env.get_template(view_filename)
    return self.response.out.write(template.render(params))
def get(email):
    """Get marketplace model by user email."""
    # Set memcache key
    memcache_key = 'email.marktplace.%s' % email
    # Check whether the user's marketplace is cached
    marketplace_cached = memcache.get(memcache_key)
    # and return the cached marketplace
    if marketplace_cached is not None:
        logging.debug("Marketplace cached for %s user. Return it.", email)
        return marketplace_cached
    # else, query or add a new one...
    # Create user key as parent
    user_key = user.user_key(email)
    # Select user marketplace
    marketplaceModel = MarketplaceModel.query(ancestor=user_key).get()
    # If not present, create a new one
    if marketplaceModel is None:
        marketplaceModel = put(email=email, name='Nova Loja', user_key=user_key)
        logging.debug("Marketplace created successfully for %s user", email)
    # Add to memcache
    memcache.add(key=memcache_key, value=marketplaceModel, time=3600 * 24)
    logging.debug("Marketplace cached for %s user", email)
    # Return marketplace
    return marketplaceModel
def getaccess(): user = UserPrefs.current() breadcrumbs = [{'link': '/', 'text': 'Hem'}] baselink = "/getaccess/" section_title = "Access" breadcrumbs.append({'link': baselink, 'text': section_title}) if request.method == "POST": # anti-spam protection, the user can only ask once. user_request_access_key = "request_access_" + user.getemail() if memcache.get(user_request_access_key) is not None: logging.warning("User is spamming req-access:" + user.getemail()) return "denied", 403 memcache.add(user_request_access_key, True) sgroup = None if len(request.form.get('scoutgroup')) != 0: sgroup = ndb.Key(urlsafe=request.form.get('scoutgroup')).get() if sgroup is not None: groupAdminEmails = UserPrefs.getAllGroupAdminEmails(sgroup.key) if len(groupAdminEmails) > 0: mail.send_mail( sender="*****@*****.**", to=','.join(groupAdminEmails), subject= u"""Användaren: {} vill ha access till närvaroregistrering i Skojjt för scoutkåren {}""".format(user.getemail(), sgroup.getname()), body=u"""Gå till {} för att lägga till {}""".format( request.host_url + "groupaccess/", user.getname())) return redirect('/') else: return render_template('getaccess.html', baselink=baselink, breadcrumbs=breadcrumbs, scoutgroups=ScoutGroup.query().fetch())
def get_sums_month_avg(year, month): n = C_SUMS_MONTH_AVG % (year, month) data = memcache.get(n) if not data: d = datetime.date(year, month, 1) if month == 12: d2 = datetime.date(year + 1, 1, 1) else: d2 = datetime.date(year, month + 1, 1) inds = ['PB', 'PC', 'PBM', 'PS', 'OL', 'LM', 'NP', 'Con'] data = dict([(i, 0) for i in inds]) duplas = 0 wks = 0 for ws in models.WeekSum.all().filter('weekdate >=', d).filter('weekdate <', d2).fetch(100): for i in inds: data[i] += getattr(ws, i) duplas += ws.duplas wks += 1 m = float(duplas) for k, v in data.iteritems(): data[k] /= m data['LI'] = data['OL'] + data['LM'] data['duplas'] = duplas data['weeks'] = wks memcache.add(n, data) return data
def do_set_search(self): memcache_key = "set_search." + self.request.query cached_response = memcache.get(memcache_key) if cached_response is not None: self.response.out.write(cached_response) return query = models.TestSet.all() query.order("-date") # Apply filters. networks = self.request.get("networks_filter") if networks: query.filter("network IN ", [ db.Key.from_path('Network', int(k)) for k in set(networks.split(",")) ]) versions = self.request.get("version_filter") if versions: query.filter("version IN ", [ db.Key.from_path('Version', int(k)) for k in set(versions.split(",")) ]) cpus = self.request.get("cpus_filter") if cpus: query.filter("cpu IN ", [ db.Key.from_path('Cpu', int(k)) for k in set(cpus.split(",")) ]) if self.request.get("set_id"): test_set = models.TestSet.get_by_id(int( self.request.get("set_id"))) results = test_set.summaries results = query.fetch(500) response = json.encode(results) memcache.add(memcache_key, response, 30) # Cache for 30secs self.response.out.write(response)
def writeDefault(self, fromCache = False): inf("Writing default") self.writeHeaders() if not fromCache: newFavicon = favIcon( domain = self.targetDomain, icon = None, useDefault = True, referrer = self.request.headers.get("Referer") ) newFavicon.put() memcache.add("icon-" + self.targetDomain, "DEFAULT", ) counter.ChangeCount("favIconsServedDefault",1) if self.request.get("defaulticon"): if self.request.get("defaulticon") == "none": self.response.set_status(204) elif self.request.get("defaulticon") == "1pxgif": self.response.out.write(open("1px.gif").read()) else: self.redirect(self.request.get("defaulticon")) else: self.response.out.write(open("default.gif").read())
def get_index(request): more_entries = None more_type = None email = None login_url = None valid_user = None mem_key = "entry index" object_list = memcache.get(mem_key) #This filters out any entries listed as private if object_list is None: q = Entry.all() object_list = q.filter('private =', False) object_list = q.order('-publish_date') memcache.add(mem_key, object_list, 3600) status = get_blob_status() q = Country.all() country_list = q.order("title") #This allows the template filter to show any entry valid_user = True return render_to_response("blog/entry_detail.html", { 'object': object_list[0], 'valid_user': valid_user, 'login_url': login_url, 'email': email, 'more_entries': object_list[:5], 'more_type': "Recent Entries", 'country_list': country_list, 'status': status, }, context_instance=RequestContext(request))
def get(self): key = self.request.get('key') site = memcache.get("site") if site is None: site = Site.all().get() memcache.add("site", site) if not site: self.redirect('/config') else: if users.is_current_user_admin() and key: try: post = Post.all().order("-__key__").filter( '__key__ =', db.Key(key)).get() if post: template_values = { 'site': site, 'post': post, 'user': users.get_current_user(), 'login': users.create_login_url(self.request.uri), 'logout': users.create_logout_url(self.request.uri), 'admin': True } if ismobile(self): template_values['mobile'] = True path = os.path.join(os.path.dirname(__file__), 'template/editpost.html') self.response.out.write( template.render(path, template_values)) else: self.redirect('/') except: self.redirect('/') else: self.redirect('/')
def generate(): editor_name = request.POST.get("editor", "vim") editor = editors.get_editor(editor_name) ctx = {"bundle": {}, "vim": {}, "editor": editor} select_lang = request.POST.getall('langs') for l in select_lang: data = memcache.get('vim-{}'.format(l)) if not data: cache = {} for ext in ["bundle", "vim"]: with open("./vim_template/langs/{0}/{0}.{1}".format(l, ext)) as f: cache[ext] = ctx[ext][l] = f.read() memcache.add('vim-{}'.format(l), cache, 3600) else: ctx["bundle"][l] = data['bundle'] ctx["vim"][l] = data['vim'] template = None with open("./vim_template/vimrc") as f: template = Template(f.read().decode('utf-8')) if not template: template = Template("") response.headers['Content-Type'] = 'application/text' response.headers['Content-Disposition'] = 'attachment; \ filename='.format(os.path.basename(editor.name)) ctx['select_lang'] = ",".join(select_lang) def sh_exist(lang): return os.path.isfile("./vim_template/langs/{0}/{0}.sh".format(lang)) ctx['sh_exist'] = sh_exist return template.render(**ctx)
def require_user_info(self, request_state, token_info_fn=_token_info): """Get the user's info from cache or from google if not in cache, throwing unauthorized errors as appropriate :param request_state: self.request_state from a class extending protorpc.remote.Service :param token_info_fn: function to get token info, defaults to google http request, override for testing one parameter, the token string, returns json token info (see https://www.googleapis.com/oauth2/v1/tokeninfo) :return: UserInfo instance """ auth_header = request_state.headers.get('Authorization') if auth_header is None: raise endpoints.UnauthorizedException( message='Request missing Authorization header.') auth_header_parts = auth_header.split() if len(auth_header_parts) != 2 or auth_header_parts[0].lower( ) != 'bearer': raise endpoints.UnauthorizedException( message= 'Malformed Authorization header, must be in the form of "bearer [token]".' ) token = auth_header_parts[1] user_info = memcache.get(key='access_token:' + token) if user_info is None: user_info = self._fetch_user_info(token, token_info_fn) # cache for 10 minutes or until token expires expires_in = min( [user_info.expires_in, self.config.max_token_life]) logging.debug("caching token %s for %s seconds", token, expires_in) memcache.add(key='access_token:' + token, value=user_info, time=expires_in) else: logging.debug("auth token cache hit for token %s", token) return user_info
def getChartData(self): chartData = memcache.get("chartData") if chartData is not None: logging.debug("chartData found in memcache") return chartData else: orderbookQuery = Orderbook.all() orderbook = orderbookQuery.order("-created").get() if orderbook: entries = {"asks": [], "bids": []} for entryType in ["asks", "bids"]: keys = getattr(orderbook, entryType) entries[entryType] = Order.get(keys) accumvolume = 0 for idx, entry in enumerate(entries[entryType]): accumvolume += entry.amount setattr(entries[entryType][idx], "accumvolume", accumvolume) else: logging.error("Couldn't get orderbook from DB") tradeQuery = Trade.all() trades = [] for trade in tradeQuery.order("-executed").run(limit=100): trades.append(trade) if trades: accumvolume = 0 for idx, trade in enumerate(trades): accumvolume += trade.amount setattr(trades[idx], "accumvolume", accumvolume) else: logging.error("Couldn't get trades from DB") chartData = {"orderbook": entries, "trades": trades} memcache.add("chartData", chartData, 300) logging.debug("chartData added to memcache") return chartData
def get(self): (forum, siteroot, tmpldir) = forum_siteroot_tmpldir_from_url(self.request.path_info) if not forum or forum.is_disabled: return self.error(HTTP_NOT_FOUND) cached_feed = memcache.get(rss_memcache_key(forum)) if cached_feed is not None: self.response.headers['Content-Type'] = 'text/xml' self.response.out.write(cached_feed) return feed = feedgenerator.Atom1Feed( title = forum.title or forum.url, link = my_hostname() + siteroot + "rss", description = forum.tagline) topics = Topic.gql("WHERE forum = :1 AND is_deleted = False ORDER BY created_on DESC", forum).fetch(25) for topic in topics: title = topic.subject link = my_hostname() + siteroot + "topic?id=" + str(topic.key().id()) first_post = Post.gql("WHERE topic = :1 ORDER BY created_on", topic).get() msg = first_post.message # TODO: a hack: using a full template to format message body. # There must be a way to do it using straight django APIs name = topic.created_by if name: t = Template("<strong>{{ name }}</strong>: {{ msg|striptags|escape|urlize|linebreaksbr }}") else: t = Template("{{ msg|striptags|escape|urlize|linebreaksbr }}") c = Context({"msg": msg, "name" : name}) description = t.render(c) pubdate = topic.created_on feed.add_item(title=title, link=link, description=description, pubdate=pubdate) feedtxt = feed.writeString('utf-8') self.response.headers['Content-Type'] = 'text/xml' self.response.out.write(feedtxt) memcache.add(rss_memcache_key(forum), feedtxt)
def post(self): logging.info('Received ACK webhook') webhook_id = self.request.headers['X-Webhook-Id'] if memcache.get(webhook_id) != None: logging.error("webhook_id already handled, ignoring duplicate " + webhook_id) return logging.info("Handling webhook_id " + webhook_id) memcache.add(webhook_id, "1") logging.info('body: ' + self.request.body) client = Client(os.environ['TWILIO_SID'], os.environ['TWILIO_TOKEN']) req = json.loads(self.request.body) for msg in req['messages']: to = msg['incident']['title'].split()[0] title = msg['incident']['title'].split(' ', 1)[1] if to and title and to[0] == "+": if (msg['event'] == "incident.acknowledge"): to = msg['incident']['title'].split()[0] title = msg['incident']['title'].split(' ', 1)[1] message = client.messages.create( body='The request "' + title + '" has been acknowledged by on call staff and is being looked into. Please refer to the ticket system for further updates: ' + os.environ['TICKET_SYSTEM_URL'], from_=os.environ['TWILIO_FROM_NUMBER'], to=to ) logging.debug("ACK message = \"" + message.sid + "\"") if (msg['event'] == "incident.resolve"): message = client.messages.create( body='The request "' + title + '" has been marked resolved by on call staff. Please refer to the ticket system for further updates: ' + os.environ['TICKET_SYSTEM_URL'], from_=os.environ['TWILIO_FROM_NUMBER'], to=to ) logging.debug("resolve message = \"" + message.sid + "\"")
def get(self): cachename = 'appenhancedjsview' + os.environ['CURRENT_VERSION_ID'] if app.config.env == 'prod': view = memcache.get(cachename) js_file = 'app-enhanced.min.js' else: view = None js_file = 'app-enhanced.js' if view is None: tvars = {'l': app.locale.l} jq_path = os.path.join(os.path.dirname(__file__), '../js-enhanced/jquery-1.4.4.min.js') view = template.render(jq_path, {}) mm_path = os.path.join(os.path.dirname(__file__), '../js-enhanced/markermanager-1.0.min.js') view += template.render(mm_path, {}) ll_path = os.path.join(os.path.dirname(__file__), '../js-enhanced/latlng.min.js') view += template.render(ll_path, {}) js_path = os.path.join(os.path.dirname(__file__), '../js-enhanced/' + js_file) view += template.render(js_path, tvars) if app.config.env == 'prod': memcache.add(cachename, view) self.response.headers[ 'Cache-Control'] = 'max-age=7776000, must-revalidate' self.response.headers['Content-Type'] = 'text/javascript' self.response.out.write(view)
def get(self, sloth_path): memcached_key = '%s-fetch:%s' % (version, sloth_path) fetched = memcache.get(memcached_key) if fetched is not None: self.response.out.write(fetched) else: sloth = SlothDrawing.get_by_key_name(sloth_path) if sloth == None: self.response.set_status(404) self.response.headers['Content-Type'] = 'text/plain' self.response.out.write( 'booourns no sloths here check your url!') return # smrt ppl wouldn't hard code this # i'm not claiming to be one of them share_url_enc = urllib.quote_plus('http://sloths.arerad.com/' + sloth_path) template = jinja_environment.get_template('fetch.html') data = { 'sloth_path': sloth_path, 'title': sloth_path, 'twitter_share_url': 'https://twitter.com/share', 'facebook_share_url': 'https://www.facebook.com/sharer/sharer.php?u=' + share_url_enc, 'email_share_url': share_url_enc, 'version': version, 'render_date': strftime("%Y-%m-%d %H:%M:%S", gmtime()), 'development': development } fetched = template.render(data) if not memcache.add(memcached_key, fetched, cachetime): logging.error('Memcache set failed.') self.response.out.write(fetched)
def getLot(lotID):
    if isinstance(lotID, basestring):
        lotID = long(lotID)
    if isinstance(lotID, long):
        lotID = ndb.Key(LOT, lotID)
    key = 'lot_' + str(lotID.id())
    cached = memcache.get(key)
    if cached is not None:
        return cached
    container = retrieveLot(lotID)
    if not memcache.add(key, container, 300):
        logging.info("Memcache add failed")
    return container
def open_resource(self, name):
    """Opens a resource from the zoneinfo subdir for reading."""
    name_parts = name.lstrip('/').split('/')
    for part in name_parts:
        if part == os.path.pardir or os.path.sep in part:
            raise ValueError('Bad path segment: %r' % part)
    cache_key = 'pytz.zoneinfo.%s.%s' % (OLSON_VERSION, name)
    try:
        zonedata = memcache.get(cache_key)
    except Exception:
        zonedata = None
    if zonedata is None:
        zonedata = get_zoneinfo().read('zoneinfo/' + name)
        try:
            memcache.add(cache_key, zonedata)
            logging.info('Added timezone to memcache: %s' % cache_key)
        except Exception:
            logging.info('Failed adding timezone to memcache: %s' % cache_key)
    else:
        logging.info('Loaded timezone from memcache: %s' % cache_key)
    return StringIO(zonedata)
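# A hedged usage sketch: assuming open_resource() above is installed as pytz's
# resource loader (e.g. pytz.open_resource = open_resource on App Engine),
# timezone lookups then transparently pull the zoneinfo blobs from memcache.
import pytz
from datetime import datetime

def example_zone_lookup():
    stockholm = pytz.timezone('Europe/Stockholm')  # loads zoneinfo via open_resource
    return stockholm.localize(datetime(2024, 6, 1, 12, 0)).isoformat()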