def load_friends(self):
    """Return this user's friends list, served from memcache when possible.

    Looks up both the friends list and the total-friends count under
    keys derived from the account type and user id. On a cache miss
    (or an empty cached list) the list is re-fetched remotely and both
    entries are cached for USERINFO_CACHE_TIME seconds.
    """
    suffix = self.account_type + ":" + self.user_id
    friends_key = 'friends:' + suffix
    count_key = 'total_friends:' + suffix
    self.friends = memcache.get(friends_key)
    self.total_friends = memcache.get(count_key)
    if not self.friends:
        # Cache miss: refresh from the remote service, then cache both
        # the list and the count (presumably get_friends_by_url() also
        # updates self.total_friends -- confirm in its definition).
        self.friends = self.get_friends_by_url()
        memcache.set(friends_key, self.friends, time=USERINFO_CACHE_TIME)
        memcache.set(count_key, self.total_friends, time=USERINFO_CACHE_TIME)
    return self.friends
def GetPageCount(pagename):
    """Return the page-view count for pagename, preferring memcache.

    Falls back to the datastore (LoadPageCount) on a cache miss and
    repopulates memcache with the loaded value.

    Args:
      pagename: name of the page whose count is wanted.

    Returns:
      The count value (whatever LoadPageCount/memcache store for it).
    """
    logging.debug("pagecount.GetPageCount(pagename='" + pagename + "')")
    memcache_id = KeyName(pagename)
    val = memcache.get(memcache_id)
    if val is not None:  # `is not None`, not `!= None` (PEP 8)
        return val
    val = LoadPageCount(pagename)
    # Fix: reuse the key computed above instead of calling KeyName() again.
    memcache.set(memcache_id, val)
    return val
def GetPageCount(pagename):
    """Return the page-view count for pagename, preferring memcache.

    Falls back to the datastore (LoadPageCount) on a cache miss and
    repopulates memcache with the loaded value.

    Args:
      pagename: name of the page whose count is wanted.

    Returns:
      The count value (whatever LoadPageCount/memcache store for it).
    """
    logging.debug("pagecount.GetPageCount(pagename='" + pagename + "')")
    memcache_id = KeyName(pagename)
    val = memcache.get(memcache_id)
    if val is not None:  # `is not None`, not `!= None` (PEP 8)
        return val
    val = LoadPageCount(pagename)
    # Fix: reuse the key computed above instead of calling KeyName() again.
    memcache.set(memcache_id, val)
    return val
def geocode(addr, usecache=True, retrying=False):
    """convert a human-readable address into a "lat,long" value (string).

    Uses the Google Maps Geocoding API (v3, signed request), caching
    successful results in memcache under "geocode:<normalized address>".

    Args:
      addr: free-form address string.
      usecache: if True, return a cached result when available.
      retrying: if True, skip the fetch (this variant has no fallback).

    Returns:
      "lat,lng,zoom" string on success, "" on failure.
    """
    loc = addr.lower().strip()
    # already geocoded-- just return
    if is_latlongzoom(loc):
        return loc
    if is_latlong(loc):
        # regexp allow missing comma
        # TODO: pick a smart default zoom, depending on population density.
        return loc + ",4"
    # Normalize: strip leading/trailing punctuation, collapse whitespace.
    loc = re.sub(r'^[^0-9a-z]+', r'', loc)
    loc = re.sub(r'[^0-9a-z]+$', r'', loc)
    loc = re.sub(r'\s\s+', r' ', loc)
    memcache_key = "geocode:" + loc
    val = memcache.get(memcache_key)
    if usecache and val:
        logging.info("geocode: cache hit loc=" + loc + " val=" + val)
        return val
    if not retrying:
        params = urllib.urlencode({
            'address': loc.lower(),
            'sensor': 'false',
            'region': 'us',
            'client': private_keys.MAPS_API_CLIENT_ID
        })
        request = "/maps/api/geocode/json?%s" % params
        signature = sign_maps_api_request(request)
        fetchurl = ("http://maps.googleapis.com" + request +
                    "&signature=" + signature)
        logging.info("geocode: cache miss, trying " + fetchurl)
        fetch_result = urlfetch.fetch(fetchurl,
                                      deadline=api.CONST_MAX_FETCH_DEADLINE)
        if fetch_result.status_code != 200:
            # fail and also don't cache
            # Fix: log message typo "gecode" -> "geocode".
            logging.info("geocode: fail %s %s" %
                         (str(fetch_result.status_code), fetch_result.content))
            return ""
        res = fetch_result.content
        respcode, zoom, lat, lng = parse_geo_response(res)
        if respcode == '200':
            logging.info("geocode: success " + loc)
            val = lat + "," + lng + "," + zoom
            rev_geocode_json(lat, lng)
            memcache.set(memcache_key, val)
            return val
    logging.info("geocode: failed " + loc)
    return ""
def geocode(addr, usecache=True, retrying=False):
    """convert a human-readable address into a "lat,long" value (string).

    Uses the Google Maps Geocoding API (v3, signed request), caching
    successful results in memcache under "geocode:<normalized address>".

    Args:
      addr: free-form address string.
      usecache: if True, return a cached result when available.
      retrying: if True, skip the fetch (this variant has no fallback).

    Returns:
      "lat,lng,zoom" string on success, "" on failure.
    """
    loc = addr.lower().strip()
    # already geocoded-- just return
    if is_latlongzoom(loc):
        return loc
    if is_latlong(loc):
        # regexp allow missing comma
        # TODO: pick a smart default zoom, depending on population density.
        return loc + ",4"
    # Normalize: strip leading/trailing punctuation, collapse whitespace.
    loc = re.sub(r'^[^0-9a-z]+', r'', loc)
    loc = re.sub(r'[^0-9a-z]+$', r'', loc)
    loc = re.sub(r'\s\s+', r' ', loc)
    memcache_key = "geocode:" + loc
    val = memcache.get(memcache_key)
    if usecache and val:
        logging.info("geocode: cache hit loc=" + loc + " val=" + val)
        return val
    if not retrying:
        params = urllib.urlencode({
            'address': loc.lower(),
            'sensor': 'false',
            'region': 'us',
            'client': private_keys.MAPS_API_CLIENT_ID
        })
        request = "/maps/api/geocode/json?%s" % params
        signature = sign_maps_api_request(request)
        fetchurl = ("http://maps.googleapis.com" + request +
                    "&signature=" + signature)
        logging.info("geocode: cache miss, trying " + fetchurl)
        fetch_result = urlfetch.fetch(fetchurl,
                                      deadline=api.CONST_MAX_FETCH_DEADLINE)
        if fetch_result.status_code != 200:
            # fail and also don't cache
            # Fix: log message typo "gecode" -> "geocode".
            logging.info("geocode: fail %s %s" %
                         (str(fetch_result.status_code), fetch_result.content))
            return ""
        res = fetch_result.content
        respcode, zoom, lat, lng = parse_geo_response(res)
        if respcode == '200':
            logging.info("geocode: success " + loc)
            val = lat + "," + lng + "," + zoom
            rev_geocode_json(lat, lng)
            memcache.set(memcache_key, val)
            return val
    logging.info("geocode: failed " + loc)
    return ""
def get_user(request):
    """Return the logged-in user for this request, or None.

    Tries each supported account type in turn; the first class whose
    auth cookie is present wins. Constructed user objects are cached
    in memcache keyed on the cookie value for USERINFO_CACHE_TIME.

    Args:
      request: the incoming HTTP request object.

    Returns:
      A user object, or None when no cookie is set or construction fails.
    """
    for cls in (TestUser, FriendConnectUser, FacebookUser):
        cookie = cls.get_cookie()
        if cookie:
            key = 'cookie:' + cookie
            user = memcache.get(key)
            if not user:
                try:
                    user = cls(request)
                    memcache.set(key, user, time=USERINFO_CACHE_TIME)
                # Fix: was a bare "except:"; Exception still hides all
                # client-library errors (as the original intended) but no
                # longer swallows SystemExit/KeyboardInterrupt.
                except Exception:
                    # TODO(doll): Hand back an error message to the user
                    logging.exception(
                        "Facebook or Friend Connect client exception.")
                    return None
            return user
def IncrPageCount(pagename, delta):
    """ increment page count """
    # NOTE(review): this early return disables the function entirely --
    # every statement below is unreachable dead code. Presumably a
    # deliberate kill-switch for page counting; confirm intent before
    # removing either the return or the dead body.
    return
    logging.debug("pagecount.IncrPageCount(pagename='"+pagename+"')")
    memcache_id = KeyName(pagename)
    if memcache.get(memcache_id) == None:
        # initializes memcache if missing
        return GetPageCount(pagename)
    newval = memcache.incr(memcache_id, delta)
    #TODO: rnd seems unused?
    #rnd = random.random() * 1000000.0
    # Probabilistic writeback: persist roughly WRITEBACK_FREQ_PCT percent
    # of increments back to the datastore inside a transaction.
    if (random.random() * 1000000.0 <= WRITEBACK_FREQ_PCT / 100.0 * 1000000.0):
        logging.debug("pagecount.IncrPageCount: writeback: writebacks="+
                      str(pc_writebacks)+" newval="+str(newval))
        db.run_in_transaction(Writeback, pagename, newval)
    return newval
def IncrPageCount(pagename, delta):
    """ increment page count """
    # NOTE(review): this early return disables the function entirely --
    # every statement below is unreachable dead code. Presumably a
    # deliberate kill-switch for page counting; confirm intent before
    # removing either the return or the dead body.
    return
    logging.debug("pagecount.IncrPageCount(pagename='" + pagename + "')")
    memcache_id = KeyName(pagename)
    if memcache.get(memcache_id) == None:
        # initializes memcache if missing
        return GetPageCount(pagename)
    newval = memcache.incr(memcache_id, delta)
    #TODO: rnd seems unused?
    #rnd = random.random() * 1000000.0
    # Probabilistic writeback: persist roughly WRITEBACK_FREQ_PCT percent
    # of increments back to the datastore inside a transaction.
    if (random.random() * 1000000.0 <= WRITEBACK_FREQ_PCT / 100.0 * 1000000.0):
        logging.debug("pagecount.IncrPageCount: writeback: writebacks=" +
                      str(pc_writebacks) + " newval=" + str(newval))
        db.run_in_transaction(Writeback, pagename, newval)
    return newval
def get_value(cls, name):
    """Retrieve the value of a configuration parameter.

    Args:
      name: the name of the parameter whose value we are looking for.

    Returns:
      The value of the parameter or None if the parameter is unknown.
    """
    if cls.local_config_cache is None:
        # Local cache is cold: try the shared memcache layer first.
        loaded = memcache.get(cls.MEMCACHE_ENTRY)
        if loaded is None:
            # Memcache is cold too: rebuild from defaults plus the
            # datastore, then publish with a one-hour expiration.
            loaded = dict(cls.DEFAULT_VALUES)
            for param in Config.all():
                loaded[param.key().name()] = param.value
            memcache.add(cls.MEMCACHE_ENTRY, loaded, 60 * 60)
        cls.local_config_cache = loaded
    # Serve the answer from the (now warm) local cache.
    return cls.local_config_cache.get(name)
def get_value(cls, name):
    """Retrieve the value of a configuration parameter.

    Args:
      name: the name of the parameter whose value we are looking for.

    Returns:
      The value of the parameter or None if the parameter is unknown.
    """
    if cls.local_config_cache is None:
        # Local cache is cold: try the shared memcache layer first.
        loaded = memcache.get(cls.MEMCACHE_ENTRY)
        if loaded is None:
            # Memcache is cold too: rebuild from defaults plus the
            # datastore, then publish with a one-hour expiration.
            loaded = dict(cls.DEFAULT_VALUES)
            for param in Config.all():
                loaded[param.key().name()] = param.value
            memcache.add(cls.MEMCACHE_ENTRY, loaded, 60 * 60)
        cls.local_config_cache = loaded
    # Serve the answer from the (now warm) local cache.
    return cls.local_config_cache.get(name)
def dispatch_gist_it(dispatch, location):
    """Serve a GitHub gist/file as embeddable JavaScript (or test output).

    Parses `location` into a Gist descriptor, fetches and renders its
    content, and writes a JS embed (or JSON/plain text when ?test= is
    given). Rendered output is cached in memcache for 24h when _CACHE_.

    Args:
      dispatch: request handler wrapper exposing request/response/url_for.
      location: URL-quoted repository path identifying the gist.
    """
    location = urllib.unquote(location)
    match = gist_it.Gist.match(location)
    dispatch.response.headers['Content-Type'] = 'text/plain'
    if not match:
        dispatch.response.set_status(404)
        dispatch.response.out.write(dispatch.response.http_status_message(404))
        dispatch.response.out.write("\n")
        return
    else:
        slice_option = dispatch.request.get('slice')
        footer_option = dispatch.request.get('footer')
        gist = gist_it.Gist.parse(location,
                                  slice_option=slice_option,
                                  footer_option=footer_option)
        if not gist:
            dispatch.response.set_status(500)
            dispatch.response.out.write(
                "Unable to parse \"%s\": Not a valid repository path?"
                % (location))
            dispatch.response.out.write("\n")
            return
        # Fix: memcache_key was previously assigned AFTER the flush branch
        # below, so ?flush=1 raised UnboundLocalError instead of flushing.
        memcache_key = gist.raw_url
        if _CACHE_ and dispatch.request.get('flush'):
            dispatch.response.out.write(memcache.delete(memcache_key))
            return
        data = memcache.get(memcache_key)
        if data is None or not _CACHE_:
            base = dispatch.url_for()
            # For below, see: http://stackoverflow.com/questions/2826238/does-google-appengine-cache-external-requests
            response = urlfetch.fetch(gist.raw_url,
                                      headers={'Cache-Control': 'max-age=300'})
            if response.status_code != 200:
                if response.status_code == 403:
                    dispatch.response.set_status(response.status_code)
                elif response.status_code == 404:
                    dispatch.response.set_status(response.status_code)
                else:
                    dispatch.response.set_status(500)
                dispatch.response.out.write(
                    "Unable to fetch \"%s\": (%i)"
                    % (gist.raw_url, response.status_code))
                return
            else:
                gist_content = take_slice(response.content,
                                          gist.start_line, gist.end_line)
                gist_html = str(render_gist_html(
                    base, gist, gist_content, footer=gist.footer)).strip()
                callback = dispatch.request.get('callback')
                if callback != '':
                    result = render_gist_js_callback(callback, gist, gist_html)
                else:
                    result = render_gist_js(base, gist, gist_html)
                result = str(result).strip()
                data = result
                test = dispatch.request.get('test')
                if test:
                    # Debug output modes; none of these are cached.
                    if test == 'json':
                        dispatch.response.headers['Content-Type'] = 'text/plain'
                        dispatch.response.out.write(simplejson.dumps({
                            'gist': gist.value(),
                            'content': gist_content,
                            'html': gist_html,
                        }))
                    elif False and test == 'example':
                        pass
                    else:
                        dispatch.response.headers['Content-Type'] = 'text/plain'
                        dispatch.response.out.write(gist_html)
                    return
            if _CACHE_:
                memcache.add(memcache_key, data, 60 * 60 * 24)
        dispatch.response.headers['Content-Type'] = 'text/javascript'
        dispatch.response.out.write(data)
def geocode(addr, usecache=True, retrying=False):
    """convert a human-readable address into a "lat,long" value (string).

    Tries the Google Maps v2 CSV geocoder first, then falls back to a
    datastore proxy (pipes.appspot.com) on quota errors (code 620),
    retrying once. Successful results are cached in memcache.

    Args:
      addr: free-form address string.
      usecache: if True, return a cached result when available.
      retrying: internal flag for the single fallback retry.

    Returns:
      "lat,lng,zoom" string on success, "" on failure.
    """
    loc = addr.lower().strip()
    # already geocoded-- just return
    if is_latlongzoom(loc):
        return loc
    if is_latlong(loc):
        # regexp allow missing comma
        # TODO: pick a smart default zoom, depending on population density.
        return loc + ",4"
    # Normalize: strip leading/trailing punctuation, collapse whitespace.
    loc = re.sub(r'^[^0-9a-z]+', r'', loc)
    loc = re.sub(r'[^0-9a-z]+$', r'', loc)
    loc = re.sub(r'\s\s+', r' ', loc)
    memcache_key = "geocode:" + loc
    val = memcache.get(memcache_key)
    if usecache and val:
        logging.info("geocode: cache hit loc=" + loc + " val=" + val)
        return val
    if not retrying:
        params = urllib.urlencode({'q': loc.lower(),
                                   'output': 'csv',
                                   'oe': 'utf8',
                                   'sensor': 'false',
                                   'gl': 'us',
                                   'client': private_keys.MAPS_API_CLIENT_ID})
        fetchurl = "http://maps.google.com/maps/geo?%s" % params
        logging.info("geocode: cache miss, trying " + fetchurl)
        fetch_result = urlfetch.fetch(fetchurl,
                                      deadline=api.CONST_MAX_FETCH_DEADLINE)
        if fetch_result.status_code != 200:
            # fail and also don't cache
            # Fix: log message typo "gecode" -> "geocode".
            logging.info("geocode: fail %s %s" %
                         (str(fetch_result.status_code), fetch_result.content))
            return ""
        res = fetch_result.content
        logging.info("geocode: maps responded %s" % res)
        respcode, zoom, lat, lng = parse_geo_response(res)
        if respcode == '200':
            logging.info("geocode: success " + loc)
            val = lat + "," + lng + "," + zoom
            memcache.set(memcache_key, val)
            return val
    if retrying or respcode == '620':
        # Quota error (620) or explicit retry: fall back to the proxy.
        params = urllib.urlencode({'q': loc.lower(), })
        fetchurl = "http://pipes.appspot.com/geo?%s" % params
        fetch_result = urlfetch.fetch(fetchurl,
                                      deadline=api.CONST_MAX_FETCH_DEADLINE)
        res = fetch_result.content
        logging.info("geocode: datastore responded %s" % res)
        respcode, zoom, lat, lng = parse_geo_response(res)
        if respcode == '200':
            val = lat + "," + lng + "," + zoom
            memcache.set(memcache_key, val)
            return val
    if respcode == '620' and not retrying:
        logging.info("geocode: retrying " + loc)
        return geocode(addr, usecache, True)
    logging.info("geocode: failed " + loc)
    return ""
def geocode(addr, usecache=False, retrying=False):
    """convert a human-readable address into a "lat,long" value (string).

    Tries the Google Maps v2 CSV geocoder first, then falls back to a
    datastore proxy (pipes.appspot.com) on quota errors (code 620),
    retrying once. Successful results are cached in memcache.

    Args:
      addr: free-form address string.
      usecache: if True, return a cached result when available.
      retrying: internal flag for the single fallback retry.

    Returns:
      "lat,lng,zoom" string on success, "" on failure.
    """
    loc = addr.lower().strip()
    # already geocoded-- just return
    if is_latlongzoom(loc):
        return loc
    if is_latlong(loc):
        # regexp allow missing comma
        # TODO: pick a smart default zoom, depending on population density.
        return loc + ",4"
    # Normalize: strip leading/trailing punctuation, collapse whitespace.
    loc = re.sub(r'^[^0-9a-z]+', r'', loc)
    loc = re.sub(r'[^0-9a-z]+$', r'', loc)
    loc = re.sub(r'\s\s+', r' ', loc)
    memcache_key = "geocode:" + loc
    val = memcache.get(memcache_key)
    if usecache and val:
        logging.info("geocode: cache hit loc=" + loc + " val=" + val)
        return val
    if not retrying:
        params = urllib.urlencode({
            'q': loc.lower(),
            'output': 'csv',
            'oe': 'utf8',
            'sensor': 'false',
            'gl': 'us',
            'key': 'ABQIAAAAPwa6P0RAONGDnDVWIoz60RS_XVdtR9vJUHoImLNBbcMuXbr6qRRCTJ1XM9Je76qJSqsr_4HKGKJ65A'
        })
        fetchurl = "http://maps.google.com/maps/geo?%s" % params
        logging.info("geocode: cache miss, trying " + fetchurl)
        fetch_result = urlfetch.fetch(fetchurl)
        if fetch_result.status_code != 200:
            # fail and also don't cache
            return ""
        res = fetch_result.content
        logging.info("geocode: maps responded %s" % res)
        respcode, zoom, lat, lng = parse_geo_response(res)
        if respcode == '200':
            logging.info("geocode: success " + loc)
            # Fix: this path cached/returned "respcode,zoom,lat,lng",
            # which contradicts both the docstring ("lat,long") and the
            # fallback path below. Use the consistent lat,lng,zoom order.
            val = lat + "," + lng + "," + zoom
            memcache.set(memcache_key, val)
            return val
    if retrying or respcode == '620':
        # Quota error (620) or explicit retry: fall back to the proxy.
        params = urllib.urlencode({'q': loc.lower(), })
        fetchurl = "http://pipes.appspot.com/geo?%s" % params
        fetch_result = urlfetch.fetch(fetchurl,
                                      deadline=api.CONST_MAX_FETCH_DEADLINE)
        res = fetch_result.content
        logging.info("geocode: datastore responded %s" % res)
        respcode, zoom, lat, lng = parse_geo_response(res)
        if respcode == '200':
            val = lat + "," + lng + "," + zoom
            memcache.set(memcache_key, val)
            return val
    if respcode == '620' and not retrying:
        logging.info("geocode: retrying " + loc)
        return geocode(addr, usecache, True)
    logging.info("geocode: failed " + loc)
    return ""
def dispatch_gist_it(dispatch, location):
    """Serve a GitHub gist/file as embeddable JavaScript (or test output).

    Parses `location` into a Gist descriptor, fetches and renders its
    content, and writes a JS embed (or JSON/plain text when ?test= is
    given). Rendered output is cached in memcache for 24h when _CACHE_.

    Args:
      dispatch: request handler wrapper exposing request/response/url_for.
      location: URL-quoted repository path identifying the gist.
    """
    location = urllib.unquote(location)
    match = gist_it.Gist.match(location)
    dispatch.response.headers['Content-Type'] = 'text/plain'
    if not match:
        dispatch.response.set_status(404)
        dispatch.response.out.write(dispatch.response.http_status_message(404))
        dispatch.response.out.write("\n")
        return
    else:
        slice_option = dispatch.request.get('slice')
        footer_option = dispatch.request.get('footer')
        style_option = dispatch.request.get('style')
        highlight_option = dispatch.request.get('highlight')
        test = dispatch.request.get('test')
        gist = gist_it.Gist.parse(location,
                                  slice_option=slice_option,
                                  footer_option=footer_option,
                                  style_option=style_option,
                                  highlight_option=highlight_option)
        if not gist:
            dispatch.response.set_status(500)
            dispatch.response.out.write(
                "Unable to parse \"%s\": Not a valid repository path?"
                % (location))
            dispatch.response.out.write("\n")
            return
        # Fix: memcache_key was previously assigned AFTER the flush branch
        # below, so ?flush=1 raised UnboundLocalError instead of flushing.
        memcache_key = gist.raw_url
        if _CACHE_ and dispatch.request.get('flush'):
            dispatch.response.out.write(memcache.delete(memcache_key))
            return
        data = memcache.get(memcache_key)
        if data is None or not _CACHE_:
            base = dispatch.url_for()
            # For below, see: http://stackoverflow.com/questions/2826238/does-google-appengine-cache-external-requests
            response = urlfetch.fetch(gist.raw_url,
                                      headers={'Cache-Control': 'max-age=300'})
            if response.status_code != 200:
                if response.status_code == 403:
                    dispatch.response.set_status(response.status_code)
                elif response.status_code == 404:
                    dispatch.response.set_status(response.status_code)
                else:
                    dispatch.response.set_status(500)
                dispatch.response.out.write(
                    "Unable to fetch \"%s\": (%i)"
                    % (gist.raw_url, response.status_code))
                return
            else:
                # I believe GitHub always returns a utf-8 encoding, so this should be safe
                response_content = response.content.decode('utf-8')
                gist_content = take_slice(response_content,
                                          gist.start_line, gist.end_line)
                gist_html = str(render_gist_html(base, gist, gist_content)).strip()
                callback = dispatch.request.get('callback')
                if callback != '':
                    result = render_gist_js_callback(callback, gist, gist_html)
                else:
                    result = render_gist_js(base, gist, gist_html)
                result = str(result).strip()
                data = result
                if test:
                    # Debug output modes; none of these are cached.
                    if test == 'json':
                        dispatch.response.headers[
                            'Content-Type'] = 'application/json'
                        dispatch.response.out.write(
                            simplejson.dumps({
                                'gist': gist.value(),
                                'content': gist_content,
                                'html': gist_html,
                            }))
                    elif False and test == 'example':
                        pass
                    else:
                        dispatch.response.headers[
                            'Content-Type'] = 'text/plain'
                        dispatch.response.out.write(gist_html)
                    return
            if _CACHE_:
                memcache.add(memcache_key, data, 60 * 60 * 24)
        dispatch.response.headers['Content-Type'] = 'text/javascript'
        dispatch.response.out.write(data)