Example #1
0
    def process_data(self, controller):
        """Apply validated form data to the content entity and redirect.

        Copies every validated field onto the entity, re-derives image
        metadata (content type, width/height, filename) when a new body
        was uploaded, fills in default name/title values, saves, and
        redirects to the parent's list view.
        """
        from google.appengine.api import memcache
        # Drop all cached views so the edit becomes visible immediately.
        memcache.flush_all()

        obj = controller.content
        v = controller.form.validate_result
        replace_body = True
        for k in v:
            # An empty 'body' field means "keep the existing upload";
            # every other validated value is copied onto the entity.
            if k == 'body' and not v[k]:
                replace_body = False
            else:
                setattr(obj, k, v[k])
        if replace_body:
            # A new file was uploaded: derive image metadata from it.
            f = controller.request.body_file.vars["body"]
            v['content_type'] = detect_imagetype(v['body'])
            i = images.Image(v['body'])
            v['width'], v['height'] = i.width, i.height
            v['filename'] = f.filename
        # Fall back to the uploaded filename for 'name', and to the
        # name for 'title', when those fields were left blank.
        if not v.get('name', None) and v.get('filename'):
            v['name'] = v['filename']
        if not v.get('title', None):
            v['title'] = v['name']
        obj.modified_at = datetime.now()
        obj.put()
        obj.sync_path(obj.get_path())

        controller.set_state(FC.INITIAL)
        controller.redirect(obj.get_parent().get_path()+'/list')
  def test_delete_db_ndb_mixed(self):
    """Verify db- and ndb-backed Storage share one credentials slot.

    A credential stored through the db model must be deletable through
    the ndb model and vice versa, since both map to the same entity.
    """
    # Start empty
    storage_ndb = StorageByKeyName(
      CredentialsNDBModel, 'foo', 'credentials')
    storage = StorageByKeyName(
      CredentialsModel, 'foo', 'credentials')

    # First DB, then NDB
    self.assertEqual(None, storage.get())
    storage.put(self.credentials)
    self.assertNotEqual(None, storage.get())

    # Deleting via the ndb storage must remove the db-visible entity.
    storage_ndb.delete()
    self.assertEqual(None, storage.get())

    # First NDB, then DB
    self.assertEqual(None, storage_ndb.get())
    storage_ndb.put(self.credentials)

    storage.delete()
    self.assertNotEqual(None, storage_ndb.get())
    # NDB uses memcache and an instance cache (Context)
    ndb.get_context().clear_cache()
    memcache.flush_all()
    self.assertEqual(None, storage_ndb.get())
Example #3
0
def snippet_edit(request, snippet_id, profile_callback=None):
    """Display and process the edit form for an existing snippet.

    GET pre-fills the form from the stored snippet; POST validates and
    applies the edit, flushes memcache, pings, and redirects to the
    updated snippet. Other methods fall through to rendering.

    Raises:
        Http404: if no snippet with the given id exists.
    """
    snippet = Snippet.get_by_id(int(snippet_id))
    if not snippet:
        raise Http404

    # Default so 'form' is always bound even for methods other than
    # GET/POST (the original raised NameError for e.g. HEAD).
    form = None
    if request.method == "GET":
        tags = ""
        if snippet.tags:
            tags = ",".join([t.name for t in snippet.get_tags() if t])
        form = SnippetForm(
            {
                "title": snippet.title,
                "language": snippet.language,
                "code": snippet.code,
                "url": snippet.url,
                "comment": snippet.comment,
                "private": snippet.private,
                "tag": tags,
            }
        )
    elif request.method == "POST":
        form = SnippetForm(request.POST)
        logging.getLogger().debug(form)
        if form.is_valid():
            updated_snippet = form.edit(int(snippet_id), profile_callback=profile_callback)
            # The edit may affect any cached listing page.
            memcache.flush_all()
            ping()
            return HttpResponseRedirect(updated_snippet.get_absolute_url())

    return render_to_response(
        "snippet/snippet_edit.html", {"form": form, "snippet": snippet}, context_instance=RequestContext(request)
    )
Example #4
0
def setup_for_testing(require_indexes=True):
  """Sets up the stubs for testing.

  Args:
    require_indexes: True if indexes should be required for all indexes.
  """
  from google.appengine.api import apiproxy_stub_map
  from google.appengine.api import memcache
  from google.appengine.tools import dev_appserver
  from google.appengine.tools import dev_appserver_index
  import urlfetch_test_stub
  # Silence logging while the stubs start up; level 100 is above every
  # standard logging level, so nothing is emitted.
  before_level = logging.getLogger().getEffectiveLevel()
  try:
    logging.getLogger().setLevel(100)
    root_path = os.path.realpath(os.path.dirname(__file__))
    dev_appserver.SetupStubs(
        TEST_APP_ID,
        root_path=root_path,
        login_url='',
        datastore_path=tempfile.mktemp(suffix='datastore_stub'),
        history_path=tempfile.mktemp(suffix='datastore_history'),
        blobstore_path=tempfile.mktemp(suffix='blobstore_stub'),
        require_indexes=require_indexes,
        clear_datastore=False)
    dev_appserver_index.SetupIndexes(TEST_APP_ID, root_path)
    # Swap the real urlfetch stub for the test double.
    apiproxy_stub_map.apiproxy._APIProxyStubMap__stub_map['urlfetch'] = \
        urlfetch_test_stub.instance
    # Actually need to flush, even though we've reallocated. Maybe because the
    # memcache stub's cache is at the module level, not the API stub?
    memcache.flush_all()
  finally:
    logging.getLogger().setLevel(before_level)
Example #5
0
 def get(self):
     """Index RSS entries for the date given in the request.

     Recognizes two sentinel values: `backwards_date` walks back in
     time (stopping before Sep 2013) and `yesterday_date` indexes
     yesterday. Any other value is parsed as 'dd.mm.yyyy', falling
     back to today on a malformed date. Redirects to /admin_ when done.
     """
     date = self.request.get('date')
     if date == backwards_date:
         date = get_prev_back_date()
         #don't index too much
         if date.year < 2013 and date.month < 9:
             return
     elif date == yesterday_date:
         #get the yesterday's date
         #this is for ensuring that we indexed everything from yesterday
         logging.info('today is %s' % datetime.datetime.now())
         date = datetime.datetime.now() - datetime.timedelta(days=1)
         logging.info('indexing yesterday %s' % date)
     else:
         try:
             date = datetime.datetime.strptime(date, '%d.%m.%Y')
         except ValueError:
             # Only a parse failure should fall back to today; the
             # original bare except also swallowed unrelated errors.
             logging.warning('bad date in request, assuming current')
             date = day_date(datetime.datetime.now())
     datestr = "%s.%s.%s" % (date.day, date.month, date.year)
     self.fetch(datestr)
     # Drop all cached pages so the newly indexed entries show up.
     memcache.flush_all()
     keyname = str(date)
     models.IndexedDate.get_or_insert(keyname, date=date)
     self.redirect('/admin_')
Example #6
0
def main():
    """Fetch timetable data from the configured source and save it as a
    new GsqlDataSource version, then flush memcache.

    NOTE(review): when MAX_GROUPS is set, the counter below admits
    max_groups + 1 groups before breaking -- confirm whether that is
    intended.
    """
    site_ds = getattr(settings, 'IMPORT_SOURCE', None)
    logging.info(site_ds)
    if not site_ds:
        site_ds = CURRENT_SOURCE()
    max_groups = getattr(settings, 'MAX_GROUPS', 0)
    collected = []
    c = 0
    for (gid, gname) in site_ds.group_ids():
            gdata = site_ds.group_data(gid)
            logging.info("%s: %s" %
                (gid, gdata.week(UPPER_WEEK)[0][1].list()))
            # Skip groups with no data; note group_data() is fetched a
            # second time for every kept group.
            if not empty_data(gdata):
                collected.append(site_ds.group_data(gid))
                if max_groups:
                    if c < max_groups:
                        c += 1
                    else:
                        break

    new_v = GsqlDataSource.save_new_version(site_ds.groups(),
                                            site_ds.faculties(),
                                            site_ds.years(),
                                            collected)
    memcache.flush_all()
    print 'Fetched new version %s' % new_v
Example #7
0
 def post(self):
     """Create a new project from the request, record a story about it,
     and redirect to the project's page."""
     self.check_user()
     new_project = get_project_from_request(self.request)
     new_project.put()
     # Invalidate every cached page that might list projects.
     memcache.flush_all()
     new_story(self, "created the project", project=new_project)
     self.redirect("/projects/%s" % new_project.key())
Example #8
0
File: hypr.py Project: m6tt/hypr
 def get(self, action):
   """Admin GET: 'flush' empties memcache and returns to the front
   page; any other action is a 404."""
   if action != "flush":
     self.error(404)
     return
   self.response.out.write('Flushing...')
   memcache.flush_all()
   self.redirect('/')
    def post(self):
        """Admin action: flush memcache (entirely, per webcast, or by a
        single key) and either redirect back or render the stats page.

        Fixes: string comparisons used `is not ""` (identity), which
        only worked by the accident of small-string interning; they now
        use `!=` (equality).
        """
        self._require_admin()
        flushed = list()

        if self.request.get("all_keys") == "all_keys":
            memcache.flush_all()
            flushed.append("all memcache values")

        if self.request.get("webcast_keys") == "webcast_keys":
            flushed.append(MemcacheWebcastFlusher.flush())

        # Equality, not identity: `is not ""` tests object identity.
        if self.request.get('memcache_key') != "":
            memcache.delete(self.request.get("memcache_key"))
            flushed.append(self.request.get("memcache_key"))

        if self.request.get('return_url') != "":
            self.redirect("{}?flushed={}".format(self.request.get('return_url'), flushed))
            return

        self.template_values.update({
            "flushed": flushed,
            "memcache_stats": memcache.get_stats(),
        })

        path = os.path.join(os.path.dirname(__file__), '../../templates/admin/memcache_index.html')
        self.response.out.write(template.render(path, self.template_values))
Example #10
0
 def test_get_week_numbers_with_populated_cache(self):
     """get_weeks must return all 13 week numbers for 2012 and 2013
     when memcache is pre-populated, and reject an invalid year."""
     logging.info("load get_weeks populated cache...")
     # Start from a clean cache, then load the known week fixtures.
     memcache.flush_all()
     self.__load_memcache_with_weeks()
     self.__get_weeks_test(2012,[1,2,3,4,5,6,7,8,9,10,11,12,13])
     self.__get_weeks_test(2013,[1,2,3,4,5,6,7,8,9,10,11,12,13])
     self.__get_weeks_invalid_year_test()
Example #11
0
 def get(self):
     """Flush memcache when ?flush=true is passed; otherwise respond
     with error message 202."""
     flush_requested = self.request.get("flush") == "true"
     if not flush_requested:
         self.response.out.write(errorMsg(202))
         return
     memcache.flush_all() #@UndefinedVariable
     self.response.out.write(json.dumps({"result": 0}))
Example #12
0
 def test_invalid_weeks_with_populated_cache(self):
     """Week queries outside the valid range (or for an unknown year)
     must fail even when the cache holds every week."""
     logging.info("invalid week populated cache testing...")
     # Clean slate, then load all known weeks into memcache.
     memcache.flush_all()
     self.__load_memcache_with_all_weeks()
     self.__test_invalid_week_query(year=2012,week_number=14)
     self.__test_invalid_week_query(year=2012,week_number=0)
     self.__test_invalid_week_query(year=1776,week_number=1)
Example #13
0
	def DELETE(self, request):
		"""Delete cached entries.

		A trailing path component of '__ALL__' clears everything: with
		'memcache' in the query string only memcache is flushed;
		otherwise up to `limit` Cache entities are deleted in batches.
		Any other path deletes the single matching entry from both the
		datastore and memcache. Returns a short status string.
		"""
		# Only whitelisted addresses or signed-in admins may flush.
		if not web.ctx.env['REMOTE_ADDR'] in self.allowFlushFrom and not users.is_current_user_admin():
			raise web.Forbidden()
		if request.split('/').pop() == '__ALL__':
			if 'memcache' in web.ctx.query:
				memcache.flush_all()
				return 'memcache flushed.\n'
			# entity selection is limited by 1000 but often timeout
			limit = 800
			batch = []
			for entity in Cache.all(keys_only=True).order('-expires').fetch(limit):
				batch.append(entity)
			n = len(batch)
			if n == 0:
				return 'No entries.\n'
			# batch deletion is limited by 500 but it timeouts above ~200
			step = 200
			if step > n:
				step = n
			for i in range(0, limit, step):
				db.delete(batch[i:i+step])
			return '%s entries flushed\n' % n
		# Key names include the query string unless configured otherwise.
		if self.ignoreQueryString is False:
			request += web.ctx.query
		cache = self.cache.get_by_key_name(request)
		if cache:
			cache.delete()
		memcache.delete(request)
		return 'ok\n'
Example #14
0
    def post(self, args, format):
      """Update an entity of the model class named in the URL path.

      `args` looks like "<type>/<key>[?...]"; request parameters are
      mapped onto the model's properties (field names may carry a
      "prefix-" which is stripped before matching).
      """
      type = args.split("/")[0]
      key = args.split("/")[1].split("?")[0]
      # Resolve the model class by its capitalized name in this module.
      if type.capitalize() in globals():
        cls = globals()[type.capitalize()]
        if cls:
          values = {}
          values["key"] = key

          for k in self.request.arguments():
            logging.info(k)
            value = self.request.get_all(k)
            logging.info(value)
            if k.split('-')[-1] in cls().properties().keys():
              if ".ListProperty" in cls().properties()[k.split('-')[-1]].__class__.__str__(""):
                if k.split("-")[-1] == "permissions":
                  values[k.split('-')[-1]] = self.request.get_all(k)
                else:
                  # NOTE(review): get_all() returns a list, which has no
                  # .split(); this branch looks like it expects a single
                  # comma-separated string -- confirm before relying on it.
                  values[k.split('-')[-1]] = [x.lstrip().rstrip() for x in value.split(",")]
              else:
                values[k.split('-')[-1]] = value
            values[k] = self.request.get_all(k)
          result = cls.update(values)
          if result:
            # Entity changed: drop every cached page that may show it.
            memcache.flush_all()
            if format == 'html':
              self.redirect(self.request.get("return_url"))
            elif format == 'json':
              self.json_out(result)
          else:
            self.response.out.write("Failed to update")
        else:
          self.response.out.write(self.request.get("return_url"))
Example #15
0
 def flush_cache():
   """Flushes the memcache key used by this datamodel.

   NOTE(review): despite the summary, flush_all() wipes the ENTIRE
   memcache, not just this model's key; the delete() that follows is
   then redundant but harmless. Defined without 'self' although
   indented like a method -- presumably used as a staticmethod;
   confirm at the call site.
   """
   memcache.flush_all()
   memcache.delete(IS_STOPPED_MEMCACHE_KEY)
   # Wait for a second before accessing the memcache again (in case there is a
   # propagation delay).
   time.sleep(1)
Example #16
0
 def post(self, type, format):
   """Create an entity of the model class named by `type` from the
   request parameters, then flush memcache and respond per `format`.

   Fixes: the list-property branch was a separate `if`, so its `else`
   clause overwrote the value set by the preceding permissions branch;
   it is now part of one `elif` chain.
   """
   if type.capitalize() in globals():
     cls = globals()[type.capitalize()]
     if cls:
       values = {}
       for k in self.request.arguments():
         value = self.request.get_all(k)
         if k.split('-')[-1] == "permissions":
           # Permissions become a single comma-separated string.
           values[k.split('-')[-1]] = ",".join(self.request.get_all("page.permissions"))
         elif k.split('-')[-1] in cls().properties() and "List" in cls().properties()[k.split('-')[-1]].__class__().__str__():
           # List properties: strip whitespace from each element.
           values[k.split('-')[-1]] = [x.lstrip().rstrip() for x in value]
         else:
           values[k.split('-')[-1]] = value
         values[k] = self.request.get(k)
       result = cls.create(values)
       if result:
         memcache.flush_all()
         if format == 'html':
           self.redirect(self.request.get("return_url"))
         elif format == 'json':
           self.json_out(result)
       else:
         self.response.out.write("Failed to update")
     else:
       self.response.out.write(self.request.get("return_url"))
Example #17
0
 def post(self):
   """Apply the posted edits to an existing Radar owned by the current
   user, then flush memcache and redirect to /myradars."""
   user = users.GetCurrentUser()
   if (not user):
     self.respondWithTemplate('please-sign-in.html', {'action': 'edit Radars'})
   else:
     id = self.request.get("id")
     radar = Radar.get_by_id(int(id))
     if not radar:
       self.respondWithText('Invalid Radar id')
     elif radar.user != user:
       self.respondWithText('Only the owner of a Radar can edit it')
     else:
       radar.title = self.request.get("title")
       radar.number = self.request.get("number")
       # Numeric copy of the radar number for sorting/filtering.
       radar.number_intvalue = int(self.request.get("number"))
       radar.status = self.request.get("status")
       radar.description = self.request.get("description")
       radar.resolved = self.request.get("resolved")
       radar.product = self.request.get("product")
       radar.classification = self.request.get("classification")
       radar.reproducible = self.request.get("reproducible")
       radar.product_version = self.request.get("product_version")
       radar.originated = self.request.get("originated")
       radar.modified = datetime.datetime.now()
       radar.put()
       # Cached radar listings are now stale.
       memcache.flush_all()
       self.redirect("/myradars")
Example #18
0
  def post(self):
    """Adds a new builder status message."""

    # Check if this is a delete builder request.
    delete_builder = self.request.get('delete')
    if delete_builder:
      BuilderStatus.delete_builder(delete_builder)
    else:
      message = self.request.get('message')
      builder_name = self.request.get('builder_name')
      # If an entry with the builder_name already exists then delete it first.
      BuilderStatus.delete_builder(builder_name)

      # Add a new BuilderStatus entry.
      BuilderStatus(username=self.user.email(),
                    builder_name=builder_name,
                    message=message).put()

    # Flush the cache.
    memcache.flush_all()
    # flush_all() already emptied everything; this targeted delete is
    # redundant but harmless.
    memcache.delete('builder_statuses')
    # Wait for a second before accessing the memcache again (in case there is a
    # propagation delay).
    time.sleep(1)
    # Set template values and load the template.
    self._handle()
Example #19
0
 def do_index(self):
     """Run the requested day's RSSEntry items through the content
     filters, record bad entries, and mark the date as indexed.
     """
     date = self.request.get('date')
     if date == backwards_date:
         date = get_prev_back_date()
         #don't index too much
         if date.year < 2013 and date.month < 9:
             return
     else:
         try:
             date = datetime.datetime.strptime(date, '%d.%m.%Y')
         except ValueError:
             # Only a parse failure should fall back to today; the
             # original bare except also hid unrelated errors.
             logging.warning('bad date in request, assuming current')
             date = day_date(datetime.datetime.now())
     logging.info('INDEXING WITH DATE %s' % date)
     end = date + datetime.timedelta(days=1)
     query = db.GqlQuery(
         'SELECT * FROM RSSEntry WHERE date >= :1 AND date <= :2',
             date, end)
     entries = list(query)
     logging.info('start indexing %d entries' % len(entries))
     for entry in entries:
         b = filters.scan(entry.desc)
         if b:
             # Filter hit: persist the entry in the bad-entries table.
             logging.info('bad : %s' % b)
             keyname = rss.keyname_from_link(entry.url)
             bad = models.RSSBadEntry.get_or_insert(keyname, 
                 url=entry.url, desc=entry.desc, bad=b, date=entry.date)
     # Cached feed pages may now be stale.
     memcache.flush_all()
     keyname = str(date)
     models.IndexedDate.get_or_insert(keyname, date=date)
Example #20
0
 def post(self):
     """Run the requested admin action, then report memcache stats."""
     requested = self.request.get("admin_action")
     if requested == 'flush_cache':
         memcache.flush_all()
     stats = memcache.get_stats()
     self.response.out.write(url_tools.dict_to_s(stats))
Example #21
0
    def get(self):
        """Flush the entire memcache, then send the user back to the
        blog front page."""
        memcache.flush_all()
        self.redirect('/')
Example #22
0
 def SnowFallGraphCreaterProcess(self):
   """Task entry that (re)builds the snow-fall graph and flushes
   memcache; the actual graph build is currently disabled."""
   logging.info('Running the SnowFallGraphCreaterProcess.')
   ## TODO(wferrell): commented out to stop the error messages.
   #CreateSnowFallGraph()
   logging.info('SUCCESS: Running the SnowFallGraphCreaterProcess.')
   memcache.flush_all()
   logging.info('memcache.flush_all() run.')
Example #23
0
    def copy_get(self, url, config):
        """Proxy a GET for `url` against config.SITE, applying the
        configured content rewrites and caching non-POST responses.

        Fixes: the extension check used rsplit('\\.', 1), which splits
        on a literal backslash-dot that never occurs in a URL, so image
        responses were always run through the text rewriters; it now
        splits on '.'.
        """
        if url == 'flushcache':
            memcache.flush_all()
            self.response.out.write('cache clear')
            return
       
        # Build the upstream URL, preserving the query string.
        if hasattr(config, 'NOTURL') and url in config.NOTURL:
            gourl = config.SITE
        else:
            gourl = '%s/%s' % (config.SITE, url)
            if self.request.query_string:
                gourl = '%s?%s' % (gourl, self.request.query_string)

        origPostData = self.request.body
        page = None#memcache.get(gourl)
        if page is None or origPostData is not None:
            page = self.fetchurl(gourl, origPostData)
            # str.rsplit takes a literal separator, not a regex; split
            # on '.' to obtain the real file extension.
            ext = url.rsplit('.', 1)[-1].lower()
            if ext not in ['gif', 'jpg', 'png', 'bmp']:
                for k,v in config.REPLACE:
                    page.content = page.content.replace(k,v)
                for func in config.funcs:
                    if callable(func):
                        page.content = func(page.content)
            # Only cache small pages, and never cache POST responses.
            if len(page.content) < 500000 and origPostData is None:
                memcache.set(gourl, page, 864000)
        self.set_status(page.status_code)
        
        self.setHeader(page, config)
            
        self.response.out.write(page.content)
Example #24
0
	def get(self):
		"""Wipe every cached entity (Cache, Locality, Clause, Georef)
		from the datastore and flush memcache."""
		from geomancer.model import Cache, Locality, Clause, Georef
		from google.appengine.ext import ndb
		from google.appengine.api import memcache
		for model_cls in [Cache, Locality, Clause, Georef]:
			ndb.delete_multi(model_cls.query().iter(keys_only=True))
		memcache.flush_all()
Example #25
0
 def post(self):
   """Create a new Radar from the posted fields for the signed-in user
   and respond with a JSON summary.

   Fixes: `resolved` was passed to the Radar constructor but never
   read from the request, raising NameError on every call.
   """
   user = users.GetCurrentUser()
   if (not user):
     self.respondWithDictionaryAsJSON({"error":"you must authenticate to add radars"})
   else:
     title = self.request.get("title")
     number = self.request.get("number")
     status = self.request.get("status")
     description = self.request.get("description")
     # Previously missing, which made the constructor below fail with
     # NameError on 'resolved'.
     resolved = self.request.get("resolved")
     product = self.request.get("product")
     classification = self.request.get("classification")
     reproducible = int(self.request.get("reproducible"))
     product_version = self.request.get("product_version")
     originated = self.request.get("originated")
     radar = Radar(title=title,
                   number=number,
                   user=user,
                   status=status,
                   description=description,
                   resolved=resolved,
                   product=product,
                   classification=classification,
                   reproducible=reproducible,
                   product_version=product_version,
                   originated=originated,
                   created=datetime.datetime.now(),
                   modified=datetime.datetime.now())
     radar.put()
     memcache.flush_all()
     response = {"result":
                  {"title":title, 
                   "number":number, 
                   "status":status, 
                   "description":description}}
     self.respondWithDictionaryAsJSON(response)
Example #26
0
def recordLink(l, t, user_account):
	"""Persist a new link submission and refresh the link cache.

	Args:
		l: the submitted URL.
		t: the link title.
		user_account: account object providing getUserID()/getUsername().

	Returns:
		The string "OK".
	"""
	u = models.LinkModel(link = l, title = t, score = 0, submitter_id = user_account.getUserID(), submitter_username= user_account.getUsername())
	u.put()
	logging.info("Recorded the link to the database, now flush and updating cache")
	memcache.flush_all()
	queryLinks() # calling query links automatically updates the cache.
	return "OK"
Example #27
0
    def post(self):
        """Create a new Buffr endpoint for the signed-in user from the
        submitted API details, store it, and queue a URL check task."""
        user = users.get_current_user()
        if not user or 'user_id' not in dir(user):
            self.redirect(users.create_login_url('/addbuffr'))
        apiAddress = self.request.get('apiAddress')
        # Echoed back to the template for client-side debugging.
        to_console = {}
        to_console["apiAddress"] = apiAddress
        to_console["is_valid_url(apiAddress)"] = (is_valid_url(apiAddress) != None)

        buffr_instance = Buffr()
        buffr_instance.apiName = self.request.get('apiName')
        buffr_instance.apiAddress = apiAddress
        APIUnstable = self.request.get('APIUnstable')
        # NOTE(review): request.get() returns a string, so membership in
        # [True, False] is always false and APIUnstable is always forced
        # to False -- confirm intended behaviour.
        if APIUnstable not in [True, False]:
            buffr_instance.APIUnstable = False
        else:
            buffr_instance.APIUnstable = APIUnstable
        buffr_instance.user_id = user.user_id()
        buffr_instance.user_email = user.email()
        buffr_instance.update_interval = int(self.request.get('updateInterval'))
        # Map the numeric interval to its human-readable label.
        for possibility in user_readable_convertion_table:
            logging.info(str((possibility[0], buffr_instance.update_interval)))
            if int(possibility[0]) == buffr_instance.update_interval:
                buffr_instance.user_readable_update_interval = possibility[2]
        # Endpoint id is a stable hash of (user id, api address).
        buffr_instance.end_point = hashlib.md5('%s:%s' % (user.user_id(), apiAddress)).hexdigest()
        buffr_instance.last_known_data = None
        buffr_instance.buffr_version = current_api_version
        buffr_instance.put()
        memcache.flush_all()
        logging.info('Added new Buffr to datastore')
        taskqueue.add(url='/confirm_working_url', params={'key': buffr_instance.key()})
        render(self, 'addbuffer.html', {'to_console': to_console,
                                        'submitted': True,
                                        'apiAddress': apiAddress})
Example #28
0
def CmdInitUserFromDbDo(from_name,msg):
	"""Rebuild the user caches from the DBUser table (admin-only).

	Splits all users into an online list plus online/offline message
	preference lists, flushes memcache, and re-seeds the cache entries.
	Returns a status string for the issuing admin.
	"""
	if not funcs.isAdmin(from_name):
		return 'you are not administrator'
	users_data = {}
	# NOTE(review): fetch(1000) caps the rebuild at 1000 users --
	# confirm the user base stays under this limit.
	all_users = DBUser.all().fetch(1000)
	
	users_online_list = []
	users_onlinemsgon_list = []
	users_onlinemsgoff_list = []
	users_offlinemsgon_list = []
	users_offlinemsgoff_list = []
	for user in all_users:
		users_data[user.gtalk] = user
		if xmpp_api.isOnline(user.gtalk):
			users_online_list.append(user.gtalk)

		if user.recvOnlineMsg:
			users_onlinemsgon_list.append(user.gtalk)
		else:
			users_onlinemsgoff_list.append(user.gtalk)
		if user.recvOfflineMsg:
			users_offlinemsgon_list.append(user.gtalk)
		else:
			users_offlinemsgoff_list.append(user.gtalk)

	# Wipe everything, then repopulate; user dict kept for 12 hours.
	memcache.flush_all()
	memcache.set('cache_all_user_list',users_data,43200)
	
	funcs.setConfigValueWithCache('cache_online_user_list',users_online_list)
	
	funcs.setConfigValueWithCache('cache_online_msgon_user_list',users_onlinemsgon_list)
	funcs.setConfigValueWithCache('cache_online_msgoff_user_list',users_onlinemsgoff_list)
	funcs.setConfigValueWithCache('cache_offline_msgon_user_list',users_offlinemsgon_list)
	funcs.setConfigValueWithCache('cache_offline_msgoff_user_list',users_offlinemsgoff_list)
	return 'init cache from db ok'
Example #29
0
 def post(self, key, type):
     """Record a review-status change ('prod', 'eng', or 'mktg') on an
     Entry, post a story about it, and flush memcache.

     NOTE(review): unlike the prod/eng branches, the mktg branch never
     calls entry.put() -- confirm whether that omission is intentional.
     """
     self.check_user()
     entry = models.Entry.get(key)
     status = decode(self.request.get("status"))
     text = decode(self.request.get("text"))
     if type == "prod":
         if entry.prod_design_review != status:
             entry.prod_design_review = status
             new_story(
                 self,
                 "changed product design review status to <em>%s</em>" % 
                 status,
                 entry=entry)
             entry.put()
     elif type == "eng":
         if entry.eng_design_review != status:
             entry.eng_design_review = status
             new_story(
                 self,
                 "changed eng design review status to <em>%s</em>" %
                 status, 
                 entry=entry)
             entry.put()
     elif type == "mktg":
         if entry.mktg_review != status:
             entry.mktg_review = status
             new_story(
                 self,
                 "changed marketing review status to <em>%s</em>" %
                 status,
                 entry=entry)
     # Optional free-text comment rides along as its own story.
     if text:
         new_story(self, text, entry=entry, type="comment")
     memcache.flush_all()
     self.redirect("/entries/" + key)
Example #30
0
def sync_datastore():
    """One-way sync of Dropbox /pics into the Picture datastore.

    Queues an upload task for every new or changed remote image,
    deletes Picture rows whose file vanished from Dropbox, and flushes
    memcache. Returns True.
    """
    dropbox = dropbox_api()

    # Map of stored picture path -> revision we already have.
    dstore = {}
    for p in models.Picture.all():
        dstore[p.path] = p.rev

    remote_pics = dropbox.request_meta('/pics')
    accepted = ['jpeg', 'jpg', 'png', 'gif', 'bmp']
    for remote in remote_pics.get('contents', []):
        # File extension: text after the final dot, lower-cased.
        file_type = re.search(r'[^\.]*$', remote['path']).group(0).lower()
        if file_type in accepted:
            dstore_rev = dstore.get(remote['path'], "")
            if dstore_rev != remote['rev']:
                # New or updated image: hand the download to a task.
                taskqueue.add(url='/sync/upload',
                    params={
                        'path': remote['path'],
                        'rev': remote['rev'],
                    })
            if remote['path'] in dstore:
                dstore.pop(remote['path'])

    # Anything left in dstore no longer exists remotely; remove it.
    for deleted in dstore:
        rec = models.Picture.all().filter('path =', deleted).get()
        rec.delete()
    memcache.flush_all()
    return True
Example #31
0
    def post(self):
        """Wipe all isolate content: ContentEntry rows, the configured
        GS bucket, and memcache."""
        logging.info('Deleting ContentEntry')
        incremental_delete(model.ContentEntry.query().iter(keys_only=True),
                           ndb.delete_multi_async)

        gs_bucket = config.settings().gs_bucket
        logging.info('Deleting GS bucket %s', gs_bucket)
        incremental_delete(
            (i[0] for i in gcs.list_files(gs_bucket)),
            lambda filenames: gcs.delete_files(gs_bucket, filenames))

        logging.info('Flushing memcache')
        # High priority (.isolated files) are cached explicitly. Make sure ghosts
        # are zapped too.
        memcache.flush_all()
        logging.info('Finally done!')
Example #32
0
  def post(self, project_id):
    """ Handle modifying actions and redirect to a GET page.

    Args:
      project_id: A string specifying the project ID.
    """
    self.ensure_user_has_admin(project_id)

    if self.request.get('action:flush_memcache'):
      # flush_all() returns False when the flush could not be started.
      if memcache.flush_all():
        message = 'Cache flushed, all keys dropped.'
      else:
        message = 'Flushing the cache failed. Please try again.'
      self.redirect(self._construct_url(remove=['action:flush_memcache'],
                                        add={'message': message}))
    elif self.request.get('action:delete_entities'):
      ds_access = DatastoreDistributed(project_id, DATASTORE_LOCATION,
                                       trusted=True)

      entity_keys = [datastore.Key(key)
                     for key in self.request.params.getall('entity_key')]
      _delete_entities(ds_access, entity_keys)
      self.redirect(self._construct_url(
          remove=['action:delete_entities'],
          add={'message': '%d entities deleted' % len(entity_keys)}))
    else:
      self.error(404)
Example #33
0
def flushcache():
    """Flush the entire memcache; log and return whether it worked."""
    ok = bool(memcache.flush_all())
    if ok:
        logging.info('Cache has been flushed')
    else:
        logging.info('Error flushing cache')
    return ok
Example #34
0
    def setUp(self):  # pylint: disable-msg=g-bad-name
        """Reset caches and deploy settings so each test starts clean."""
        super(TestBase, self).setUp()

        # Clear memcache and the in-process application-context cache.
        memcache.flush_all()
        sites.ApplicationContext.clear_per_process_cache()

        # Remember the auto-deploy default and override it per test.
        self.auto_deploy = sites.ApplicationContext.AUTO_DEPLOY_DEFAULT_COURSE
        sites.ApplicationContext.AUTO_DEPLOY_DEFAULT_COURSE = (
            self.get_auto_deploy())

        self.supports_editing = False
        self.assert_default_namespace()
        self.namespace = ''
        self.base = '/'
        # Reload all properties now to flush the values modified in other tests.
        config.Registry.get_overrides(True)
Example #35
0
  def testTokenSaved(self):
    """A saved access token must survive new client instances and a
    full memcache/context-cache flush (i.e. it persists durably)."""
    retry_params = api_utils.RetryParams(save_access_token=True)
    api = rest_api._RestApi('scope', retry_params=retry_params)
    t1 = api.get_token()
    self.assertNotEqual(None, t1)

    # A fresh client must see the same saved token.
    api = rest_api._RestApi('scope', retry_params=retry_params)
    t2 = api.get_token()
    self.assertEqual(t2, t1)

    memcache.flush_all()
    ndb.get_context().clear_cache()

    # Even with every cache emptied, the token is still returned.
    api = rest_api._RestApi('scope', retry_params=retry_params)
    t3 = api.get_token()
    self.assertEqual(t3, t1)
Example #36
0
def savePost(post):
    """Save a blog post: ensure a Tag entity exists for each of its
    tags, derive a slug if missing, persist, and flush memcache.

    NOTE(review): an existing tag's postCount is never incremented
    (the else branch is a no-op) -- confirm whether counts are meant
    to track reuse.
    """
    for tagName in post.tags:
        tag = model.blog.Tag.get_by_key_name(ctrl.makeSlug(tagName))
        if not tag:
            # First use of this tag: create it with a count of one.
            tag = model.blog.Tag(key_name=ctrl.makeSlug(tagName))
            tag.name = tagName
            tag.postCount = 1
            tag.put()
        else:
            pass

    if not post.slug:
        # Derive a URL slug from the title.
        post.slug = ctrl.makeSlug(post.title)

    post.put()
    memcache.flush_all()
Example #37
0
def perm_cached_class(func, flush=False):
    """Decorator for zero-argument methods that caches the result in a
    decoration-local dict backed by memcache.

    The cache key is "<self.__name__>/<func.__name__>", so `self` is
    expected to be a class (or anything exposing __name__).

    NOTE(review): the `flush` branch re-clears the dict created one
    line earlier and flushes ALL of memcache -- both look like
    leftovers; confirm intent before relying on flush=True.
    """
    # Per-decoration in-process cache, consulted before memcache.
    class_perm_cache = {}
    if flush:
        class_perm_cache = {}
        memcache.flush_all()
    def cached_func(self):
        cache_key = self.__name__ + "/" + func.__name__
        if cache_key in class_perm_cache:
            return class_perm_cache[cache_key]
        data = memcache.get(cache_key)
        if data is None:
            # Miss in both caches: compute and store in memcache.
            data = func(self)
            memcache.add(cache_key, data)
        class_perm_cache[cache_key] = data
        return data
    return cached_func
Example #38
0
 def GET(self):
     """Admin console actions selected by the 'action' query param:
     API quota checks, auth token display, memcache stats/flush,
     deferred normalization, and printed-question deletion."""
     result = {}
     action = web.input(action = None)['action']
     
     if action=='quota': 
         results = urlfetch.fetch('https://api.stackexchange.com/2.0/info?site=stackoverflow&key=%s' % api_key, headers = {'User-Agent': 'StackPrinter'}, deadline = 10)
         response = simplejson.loads(results.content)
         result['result'] = response
     if action=='quotaauth': 
         results = urlfetch.fetch('https://api.stackexchange.com/2.0/info?site=stackoverflow&key=%s&access_token=%s' % (api_key, TokenManager.get_auth_token()), headers = {'User-Agent': 'StackPrinter'}, deadline = 10)
         response = simplejson.loads(results.content)
         result['result'] = response
     if action=='authkey': 
         result['result'] = TokenManager.get_auth_token()
     elif action =='memcachestats':
         # Replaces the result dict wholesale with memcache statistics.
         result = memcache.get_stats()        
     elif action =='memcacheflush':
         result['result'] = memcache.flush_all()
     elif action =='normalize':
         deferred.defer(worker.deferred_normalize_printed_question)    
         result['result'] = True
     elif action =='delete':
         service = web.input(service = None)['service']
         question_id = web.input(question_id = None)['question_id']
         result['printed_question_deletion'] = dbquestion.delete_printed_question(question_id,service)
         result['question_deletion'] = dbquestion.delete_question(question_id,service)
         result['answers_deletion'] = dbquestion.delete_answers(question_id,service)
     return render.admin(result)
Example #39
0
def flush_mc(keys=None):
    """Delete memcache entries with retries.

    Args:
        keys: a single key (str), a list of keys, or None to flush the
            entire cache.

    Returns:
        True once the operation succeeds, False if it still fails
        after the retry budget is exhausted.

    Raises:
        TypeError: if keys is not a str, a list, or None.
    """
    # isinstance replaces the non-idiomatic `type(x) is T` checks.
    if not (isinstance(keys, (str, list)) or keys is None):
        raise TypeError

    max_retries = 50

    if isinstance(keys, str):
        for _ in xrange(max_retries):
            if mc.delete(keys):
                break
        else:
            return False
    elif isinstance(keys, list):
        for _ in xrange(max_retries):
            if mc.delete_multi(keys):
                break
        else:
            return False
    else:
        for _ in xrange(max_retries):
            if mc.flush_all():
                break
        else:
            return False

    return True
Example #40
0
def update_from_tepco():
    """Fetch the latest TEPCO power-usage feed and persist it.

    Skips all work when the upstream feed has not changed since the stored
    Last-Modified value.  Always returns '' (used as the response body).
    """
    stored = Config.get_by_key_name('last-modified')
    stored = stored and stored.value
    payload = tepco.from_web(stored)
    if not payload:
        return ''  # not yet updated
    Config(key_name='last-modified', value=payload['lastmodstr']).put()

    if 'quick' in payload:
        Config(key_name='quick.txt', value=payload['quick']).put()

    # The image is updated hourly just after the hour, so anchor the JST
    # timestamp to the previous whole hour.
    base = jst_from_utc(payload['usage-updated']) - datetime.timedelta(hours=1)
    base = base.replace(minute=0, second=0, microsecond=0)

    for hour, (usage, saving, forecast) in payload['usage'].items():
        entryfor = utc_from_jst(base.replace(hour=hour))
        record = Usage.all().filter('entryfor =', entryfor).get()
        if record:
            # Refresh the existing row, touching the *_updated stamps only
            # when the underlying value actually changed.
            record.saving = saving
            record.forecast = forecast
            if record.usage != usage:
                record.usage = usage
                record.usage_updated = payload['usage-updated']
            if record.capacity != payload['capacity']:
                record.capacity = payload['capacity']
                record.capacity_updated = payload['capacity-updated']
        else:
            record = Usage(entryfor=entryfor,
                           year=entryfor.year,
                           month=base.month,
                           day=base.day,
                           hour=hour,
                           saving=saving,
                           forecast=forecast,
                           usage=usage,
                           usage_updated=payload['usage-updated'],
                           capacity=payload['capacity'],
                           capacity_updated=payload['capacity-updated'])

        # Peak-period metadata applies to every row in this batch.
        record.capacity_peak_period = payload['capacity-peak-period']
        record.forecast_peak_usage = payload['forecast-peak-usage']
        record.forecast_peak_period = payload['forecast-peak-period']
        record.forecast_peak_updated = payload['forecast-peak-updated']
        record.put()
    memcache.flush_all()
    return ''
Example #41
0
    def setUp(self):
        """Bring up a full App Engine testbed: env vars, datastore, memcache
        and taskqueue stubs, plus a custom queue.yaml parser.

        The stub initialization order matters: env vars must be in place
        before the testbed is activated.
        """
        super(TestSetupMixin, self).setUp()

        from google.appengine.api import apiproxy_stub_map
        from google.appengine.api import memcache
        from google.appengine.api import queueinfo
        from google.appengine.datastore import datastore_stub_util
        from google.appengine.ext import testbed
        # NOTE(review): TASKQUEUE_SERVICE_NAME appears unused below — confirm.
        from google.appengine.ext.testbed import TASKQUEUE_SERVICE_NAME

        # Remember the log level so it can be restored after the noisy setup.
        before_level = logging.getLogger().getEffectiveLevel()

        # Fake out the request environment the SDK stubs read.
        os.environ['APPLICATION_ID'] = self.TEST_APP_ID
        os.environ['CURRENT_VERSION_ID'] = self.TEST_VERSION_ID
        os.environ['HTTP_HOST'] = '%s.appspot.com' % self.TEST_APP_ID
        os.environ['DEFAULT_VERSION_HOSTNAME'] = os.environ['HTTP_HOST']
        os.environ['CURRENT_MODULE_ID'] = 'foo-module'

        try:
            # Level 100 is above CRITICAL: silence all logging during setup.
            logging.getLogger().setLevel(100)

            self.testbed = testbed.Testbed()
            self.testbed.activate()
            self.testbed.setup_env(app_id=self.TEST_APP_ID, overwrite=True)
            self.testbed.init_memcache_stub()

            # probability=1 makes the HR datastore fully consistent, so
            # queries immediately see writes.
            hr_policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
                probability=1)
            self.testbed.init_datastore_v3_stub(consistency_policy=hr_policy)

            self.testbed.init_taskqueue_stub()

            # NOTE(review): root_path is computed but never used here.
            root_path = os.path.realpath(os.path.dirname(__file__))

            # Actually need to flush, even though we've reallocated. Maybe because the
            # memcache stub's cache is at the module level, not the API stub?
            memcache.flush_all()
        finally:
            logging.getLogger().setLevel(before_level)

        # Make the stub expose a 'default' queue plus every extra queue name
        # by synthesizing a queue.yaml on the fly.
        define_queues = ['other']
        taskqueue_stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
        taskqueue_stub.queue_yaml_parser = (
            lambda x: queueinfo.LoadSingleQueue(
                'queue:\n- name: default\n  rate: 1/s\n' + '\n'.join(
                    '- name: %s\n  rate: 1/s' % name
                    for name in define_queues)))
Example #42
0
def cache_it(dbkey = 'joke'):
    """Return cached content plus the seconds elapsed since it was queried.

    Args:
        dbkey: 'joke' for the full Content listing, or a numeric id string
            for a single Content entity.

    Returns:
        (contents, time_elapsed) where time_elapsed is whole seconds since
        the last datastore query for this process.
    """
    global query_time, time_elapsed
    contents = memcache.get(dbkey)
    if not contents:
        query_time = datetime.now()
        # BUG FIX: was `dbkey is not 'joke'` — an identity comparison on a
        # string literal, which is implementation-dependent and unreliable.
        if dbkey != 'joke':
            memcache.flush_all()
            contents = Content.get_by_id(int(dbkey))
            memcache.set(dbkey, contents)
        else:
            contents = db.GqlQuery("select * from Content order by created desc")
            contents = list(contents)
            memcache.set(dbkey, contents)
    if 'query_time' not in globals():
        # Robustness: on a warm cache hit before any miss in this process,
        # query_time was never set and the subtraction raised NameError.
        query_time = datetime.now()
    this_moment = datetime.now()
    time_elapsed = int((this_moment - query_time).total_seconds())
    return contents, time_elapsed
Example #43
0
    def testMemcacheClear(self):
        """LoadInfo must survive a total memcache wipe: after flush_all the
        next UpdateLoadInfo call rebuilds state (presumably from the
        datastore — confirm against LoadInfo's implementation)."""
        LoadInfo.InitializeTable()
        LoadInfo.AddInstance('test-instance')
        LoadInfo.RegisterInstanceIpAddress('test-instance', '1.2.3.4')
        LoadInfo.UpdateLoadInfo('test-instance', 55)
        # Simulate loss of all data in Memcache.
        memcache.flush_all()
        LoadInfo.UpdateLoadInfo('test-instance', 38)

        # The pre-flush load (55) is gone; only the post-flush update (38)
        # and the registered IP address must be visible.
        self.assertEqual(
            {
                'test-instance': {
                    'ip_address': '1.2.3.4',
                    'load': 38,
                    'force': False
                }
            }, LoadInfo.GetAll())
Example #44
0
    def signin(cls, match_id, code):
        """Sign the current user in to a match.

        Args:
            match_id: id of the match being signed in to.
            code: the sign-in code the user entered.

        Returns:
            A dict with "status" (bool) and, on failure, "reason" and
            "code" (-1 too early, 1 too late, 0 already signed in).
        """
        match = Match.getone(match_id)

        def _record_signin():
            # Shared sign-in bookkeeping (previously duplicated verbatim in
            # the too-late and normal branches): mark the user on the match,
            # create or update their Play row, then drop all cached views.
            logging.debug("Sign in user {}".format(current_user.key_id))
            match.signin(current_user.key_id)

            play = Play.getbyMatchPeople(match_id, current_user.key_id)

            if play is None:
                # User never signed up; register them as a late addition.
                logging.debug("this guy didn't sign up, but is sign-in now")
                match.signup(current_user.key_id)
                Play.create(current_user.key_id, match_id, missing=True)
            else:
                play.signinTime = datetime.now()
                play.put()
            memcache.flush_all()

        if datetime.now() < match.signinEarliest:
            logging.debug("You are too early for sign in")
            return {"status": False, "reason": "You are too early for sign in", "code": -1}
        if datetime.now() > match.signinLatest:
            # NOTE(review): a late user is still recorded as signed in even
            # though the response reports failure — behavior preserved as-is.
            logging.debug("You are too late for sign in")
            _record_signin()
            return {"status": False, "reason": "You are too late for sign in", "code": 1}

        people = People.getone(current_user.key_id)
        if people and people.key in match.participatedPeople:
            logging.debug("You have already signed in")
            return {"status": False, "reason": "You have already signed in", "code": 0}

        if match and code == match.signinCode:
            _record_signin()
            return {"status": True}
        return {"status": False}
Example #45
0
    def post(self):
        """Flush all of memcache, log the outcome, and bounce to /admin/."""
        if memcache.flush_all():
            logging.info("Cache cleared")
        else:
            logging.error("Problem clearing cache")

        self.redirect("/admin/")
Example #46
0
    def post(self):
        """Create a blog post from the submitted form, or re-render the
        form with an error when a field is missing."""
        subject = self.request.get("subject")
        content = self.request.get("content")

        if not (subject and content):
            # Guard clause: incomplete form, show it again with an error.
            self.render("newpost.html",
                        subject=subject,
                        content=content,
                        error="subject and content, please!",
                        s=self)
            return

        new_post = Post(parent=blog_key(), subject=subject, content=content)
        new_post.put()
        get_posts(True)       # refresh the cached post listing
        memcache.flush_all()
        self.redirect('/blog/%s' % str(new_post.key().id()))
Example #47
0
def page_delete_user():
    """Admin page: delete the user named by ?user=<urlsafe key>, then list
    the remaining users as links.

    Returns:
        An HTML fragment with one link per remaining User.
    """
    user_id = request.args.get("user")
    if user_id:
        # BUG FIX: ndb entities have no .delete() method — deletion goes
        # through the Key.  This also tolerates an already-deleted entity
        # (key.get() returning None used to crash on .delete()).
        ndb.key.Key(urlsafe=user_id).delete()
        app.logger.info("User deleted")
        memcache.flush_all()

    # NOTE(review): the generated links use a `feed=` query parameter while
    # this handler reads `user=` — confirm which one is intended.
    rows = []
    for user in User.query().fetch():
        rows.append(u"""<a href="{url}?feed={user}">{uid} {email}</a>""".format(
            url=url_for('.page_delete_user'),
            user=user.key.urlsafe(),
            uid=user.key.id(),
            email=user.email))

    return u"<br>".join(rows)
Example #48
0
    def get(self):
        """Render the index page, creating a User row on first visit."""
        template = JINJA_ENVIRONMENT.get_template('index.html')

        gae_user = users.get_current_user()
        matches = User.query(User.email == gae_user.email()).fetch()
        if not matches:
            # First visit: persist the account and drop any cached listings.
            record = User()
            record.email = gae_user.email()
            record.put()
            memcache.flush_all()

        context = {
            "users": User().query().fetch(),
            "items": Item().query().fetch(),
        }

        self.response.write(template.render(context))
  def post(self):
    """Handle modifying actions and/or redirect to GET page."""
    super(MemcacheViewerRequestHandler, self).post()
    # Query parameters carried through the redirect back to the GET view
    # (status message, key to display/edit).
    next_param = {}

    # Exactly one action:* form button is expected per submission.
    if self.request.get('action:flush'):
      if memcache.flush_all():
        next_param['message'] = 'Cache flushed, all keys dropped.'
      else:
        next_param['message'] = 'Flushing the cache failed.  Please try again.'

    elif self.request.get('action:display'):
      next_param['key'] = self.request.get('key')

    elif self.request.get('action:edit'):
      next_param['edit'] = self.request.get('key')

    elif self.request.get('action:delete'):
      # memcache.delete returns a status constant, not a bool; map each
      # outcome to a human-readable message.
      key = self.request.get('key')
      result = memcache.delete(key)
      if result == memcache.DELETE_NETWORK_FAILURE:
        next_param['message'] = ('ERROR: Network failure, key "%s" not deleted.'
                                 % key)
      elif result == memcache.DELETE_ITEM_MISSING:
        next_param['message'] = 'Key "%s" not in cache.' % key
      elif result == memcache.DELETE_SUCCESSFUL:
        next_param['message'] = 'Key "%s" deleted.' % key
      else:
        next_param['message'] = ('Unknown return value.  Key "%s" might still '
                                 'exist.' % key)

    elif self.request.get('action:save'):
      key = self.request.get('key')
      value = self.request.get('value')
      type_ = self.request.get('type')
      next_param['key'] = key

      # Convert the user-supplied string to the declared memcache type
      # before storing; conversion failures become an error message rather
      # than a 500.
      converter = self.FRIENDLY_TYPE_NAME_TO_CONVERTER[type_]
      try:
        memcache_value = converter.to_cache(value)
      except ValueError as e:
        next_param['message'] = 'ERROR: Failed to save key "%s": %s.' % (key, e)
      else:
        if self._set_memcache_value(key,
                                    memcache_value,
                                    converter.memcache_type):
          next_param['message'] = 'Key "%s" saved.' % key
        else:
          next_param['message'] = 'ERROR: Failed to save key "%s".' % key
    elif self.request.get('action:cancel'):
      next_param['key'] = self.request.get('key')
    else:
      next_param['message'] = 'Unknown action.'

    # Redirect back to the GET page with the outcome in the query string.
    next = self.request.path_url
    if next_param:
      next = '%s?%s' % (next, self._urlencode(next_param))
    self.redirect(next)
Example #50
0
def clear(*args, **kwargs):
    """Removes all entities of every known model from the datastore.

    On a datastore timeout, redirects back so the operation can be re-run;
    otherwise responds with 'Done'.  Flushes memcache after deletion.
    """
    # TODO(dbentley): If there are more than 1000 instances of any model,
    # this method will not clear all instances.  Instead, it should continually
    # call .all(), delete all those, and loop until .all() is empty.
    queries = [
        Survey.all(),
        SurveyRecord.all(),
        GCIOrganization.all(),
        GSoCTimeline.all(),
        GCITimeline.all(),
        GSoCProgram.all(),
        GSoCProject.all(),
        GSoCProposal.all(),
        GCIProgram.all(),
        GCIScore.all(),
        GSoCStudentInfo.all(),
        GCIStudentInfo.all(),
        GCITask.all(),
        Sponsor.all(),
        Site.all(),
        Document.all(),
        # The below models are all subclasses of ndb.Model and therefore must
        # use .query() to return all instances instead of .all().
        soc_org_model.SOCOrganization.query(),
        profile_model.Profile.query(),
        soc_profile.SOCStudentData.query(),
        user.User.query(),
        address.Address.query(),
        contact.Contact.query(),
    ]

    try:
        for entity in itertools.chain(*queries):
            # ndb entities delete through their key; old db entities directly.
            if isinstance(entity, ndb.Model):
                entity.key.delete()
            else:
                entity.delete()
    except db.Timeout:
        return http.HttpResponseRedirect('#')
    memcache.flush_all()

    return http.HttpResponse('Done')
    def post(self):
        """Admin endpoint: flush all of memcache, a named key, or the cached
        pages for one event, then render stats (or redirect if asked).

        Reads form fields: all_keys, webcast_keys, memcache_key, event_key,
        return_url.
        """
        self._require_admin()
        flushed = []

        if self.request.get("all_keys") == "all_keys":
            memcache.flush_all()
            flushed.append("all memcache values")

        if self.request.get("webcast_keys") == "webcast_keys":
            flushed.append(MemcacheWebcastFlusher.flush())

        # BUG FIX: these three tests used `is not ""` — an identity
        # comparison whose result depends on string interning.  Compare by
        # value instead.
        if self.request.get('memcache_key') != "":
            memcache.delete(self.request.get("memcache_key"))
            flushed.append(self.request.get("memcache_key"))

        if self.request.get('event_key') != "":
            # Clear event page and event teams pages
            event_key = self.request.get('event_key')
            memcache_keys_to_delete = [
                EventDetail().cache_key.format(event_key)
            ]

            event = Event.get_by_id(event_key)
            for team in event.teams:
                memcache_keys_to_delete.append(TeamDetail().cache_key.format(
                    team.key.id(), event.year))
                memcache_keys_to_delete.append(
                    TeamCanonical().cache_key.format(team.key.id()))

            memcache.delete_multi(memcache_keys_to_delete)
            flushed += memcache_keys_to_delete

        if self.request.get('return_url') != "":
            self.redirect("{}?flushed={}".format(
                self.request.get('return_url'), flushed))
            return

        self.template_values.update({
            "flushed": flushed,
            "memcache_stats": memcache.get_stats(),
        })

        path = os.path.join(os.path.dirname(__file__),
                            '../../templates/admin/memcache_index.html')
        self.response.out.write(template.render(path, self.template_values))
Example #52
0
def store_bot_config(content):
    """Stores a new version of bot_config.py."""
    stored = VersionedFile(content=content).store('bot_config.py')
    # The cached version value just changed.  Flushing *all* of memcache is
    # super aggressive, but it is the only safe way to never serve old code
    # by accident; keep retrying until the flush is confirmed.
    flushed = memcache.flush_all()
    while not flushed:
        flushed = memcache.flush_all()
    return stored
Example #53
0
    def _save(self, cd, project):
        """Persist edited project fields from cleaned form data *cd*,
        rejecting a name that another project already uses."""
        desired_name = cd['name'].strip()

        # TODO(joeo): Big race condition here.
        #
        existing = models.Project.get_project_for_name(desired_name)
        if existing and project.key() != existing.key():
            self.errors['name'] = ['Name is already in use']

        if self.is_valid():
            project.name = desired_name
            project.comment = cd['comment']
            (project.owners_users,
             project.owners_groups) = fields.UserGroupField.get_user_and_group_keys(
                cd['owners'])
            approval_rights = map(_field_to_approval_right, cd['code_reviews'])
            project.set_code_reviews(_keys_for(approval_rights))
            project.put()
            # Project metadata feeds many cached pages; drop them all.
            memcache.flush_all()
Example #54
0
    def test_atom_format(self):
        """Each service's activity must render to the expected Atom feed.

        Iterates the per-service test modules, resetting all state (mocks
        and memcache) between services so one run cannot leak into the next.
        """
        for test_module in test_facebook, test_instagram, test_twitter:
            self.reset()
            memcache.flush_all()
            # Stub the actor lookup so the handler does not hit the network.
            self.mox.StubOutWithMock(FakeSource, 'get_actor')
            FakeSource.get_actor(None).AndReturn(test_module.ACTOR)
            self.activities = [copy.deepcopy(test_module.ACTIVITY)]

            # include access_token param to check that it gets stripped
            resp = self.get_response('/fake?format=atom&access_token=foo&a=b')
            self.assertEquals(200, resp.status_int)
            # The expected Atom template still contains the (escaped) request
            # URL including access_token — only the fetch strips it.
            self.assert_multiline_equals(
                test_module.ATOM % {
                    'request_url':
                    'http://localhost/fake?format=atom&amp;access_token=foo&amp;a=b',
                    'host_url': 'http://fa.ke/',
                    'base_url': 'http://fa.ke/',
                }, resp.body)
Example #55
0
    def get(self, table):
        """Re-put every entity of *table*, temporarily disabling auto_now.

        Presumably a one-off migration endpoint: re-saving each entity
        rewrites it under the current schema without clobbering its
        timestamp fields.  Responds with a plain-text summary.

        Args:
            table: a model class name; must be an attribute of kloombaDb
                (silently does nothing otherwise — no response is written).
        """
        if table in dir(kloombaDb):
            i = 0
            request = GqlQuery('SELECT * FROM ' + table).run()
            for item in request:
                i += 1
                # HACK: mutates the model *class* property's auto_now flag in
                # place so put() below preserves the stored timestamps.
                # NOTE(review): the flag is never restored afterwards, and
                # this affects every later put() in this process — confirm
                # this is intended.
                if 'timestamp' in dir(item):
                    kloombaDb.__dict__[table].__dict__['timestamp'].__dict__[
                        'auto_now'] = False
                if 'lastActiveTimestamp' in dir(item):
                    kloombaDb.__dict__[table].__dict__[
                        'lastActiveTimestamp'].__dict__['auto_now'] = False
                item.put()
            memcache.flush_all()

            self.response.headers['Content-Type'] = 'text/plain'
            self.response.out.write("Updated table: %s\nUpdated entitys: %d" %
                                    (table, i))
Example #56
0
    def get(self):
        """Expand a shortened URL (?url=...) as plain text or JSON(P),
        caching responses in memcache and enforcing a daily per-IP quota."""

        # enforce rate limit
        if ratelimit(os.environ['REMOTE_ADDR']) is False:
            # logging.warning("ERROR: Daily request quota exceeded for %s" % os.environ['REMOTE_ADDR'])
            self.response.set_status(403)
            self.response.out.write(
                "%s daily requests quota exceeded. If you're interested in a higher quota, contact [email protected]\n"
                % CYCLE_REQ_QUOTA)
            return

        cache_key = "response-%s" % self.request.url  # GAE takes care of version namespacing internally

        url = 'http://' + self.request.get('url').replace(
            'http://',
            '')  # handle 'tinyurl.com' as well as 'http://tinyurl.com'
        callback = self.request.get('callback')
        format = self.request.get('format')
        flush = self.request.get('flushcache')

        # Undocumented escape hatch: ?flushcache=now wipes the whole cache.
        if flush == 'now':
            memcache.flush_all()

        # Empty ?url= parameter reduces to the bare scheme prefix.
        if url == 'http://':
            self.homepage()
        else:
            try:
                # Serve from cache when possible; otherwise resolve and cache.
                out = memcache.get(cache_key)
                if out is None:
                    if format == 'json':
                        out = self.json(url, callback)
                    else:
                        out = self.plain(url)

                    memcache.add(cache_key, out)

                # CORS header so browser clients on other origins can call us.
                self.response.headers.add_header('Access-Control-Allow-Origin',
                                                 '*')
                self.response.out.write(out)

            # Python 2 except syntax; any resolution failure becomes a 404.
            except Exception, err_msg:
                logging.warning("Exception in get(): %s" % err_msg)
                self.response.set_status(404)
                self.response.out.write('error')
def updateItem(client_id, cart_id, item_id):
    """Rename an item in a client's cart and return the updated record.

    Args:
        client_id, cart_id, item_id: datastore ids for the three entities.

    Returns:
        A 200 JSON response with the client email, cart name, and the
        updated item dict.  Aborts with 404 when any id is malformed or
        refers to a missing entity.
    """
    try:
        client_key = ndb.Key(Clients, client_id)
        cart_key = ndb.Key(Carts, cart_id)
        item_key = ndb.Key(Items, item_id)
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # system-exiting exceptions like KeyboardInterrupt.
        abort(404)
    # Fetch everything before mutating, so a missing entity cannot leave a
    # partially-applied update behind (previously item.put() ran before
    # client/cart were even looked up, and None entities crashed with
    # AttributeError instead of returning 404).
    item = item_key.get()
    client = client_key.get()
    cart = cart_key.get()
    if item is None or client is None or cart is None:
        abort(404)
    item.name = request.json.get('name', item.name)
    item.put()
    memcache.flush_all()
    return make_response(
        jsonify({
            'Client_Email': client.email,
            'Cart_name': cart.name,
            'updated': item.to_dict()
        }), 200)
Example #58
0
    def get(self):
        """When the user grants access, they are redirected back to this
      handler where their authorized request token is exchanged for a
      long-lived access token."""

        client = gdata.apps.client.AppsClient(domain="wandoujia.com")
        client.ssl = True

        current_user = users.get_current_user()

        # Remember the token that we stashed? Let's get it back from
        # datastore now and adds information to allow it to become an
        # access token.
        request_token_key = 'request_token_%s' % current_user.email()
        request_token = gdata.gauth.ae_load(request_token_key)
        # authorize_request_token reads the oauth_verifier from the
        # callback URI (self.request.uri).
        gdata.gauth.authorize_request_token(request_token, self.request.uri)

        # We can now upgrade our authorized token to a long-lived
        # access token by associating it with gdocs client, and
        # calling the get_access_token method.
        client.auth_token = client.get_access_token(request_token)

        # Note that we want to keep the access token around, as it
        # will be valid for all API calls in the future until a user
        # revokes our access. For example, it could be populated later
        # from reading from the datastore or some other persistence
        # mechanism.
        access_token_key = 'access_token_%s' % current_user.email()
        # NOTE(review): this saves `request_token`, not `client.auth_token`,
        # under the access-token key — confirm gdata upgrades the token in
        # place; otherwise the stored token is still the request token.
        gdata.gauth.ae_save(request_token, access_token_key)

        # Write user info into database

        # Key the Person entity by the local part of the email address.
        user_name = current_user.email().split('@')[0]
        logging.info(user_name)
        person = models.Person().get_by_key_name(user_name)
        if not person:
            person = models.Person(gaia=current_user,
                                   email=current_user.email(),
                                   key_name=user_name)
        person.is_setup = True
        person.put()
        # Drop cached pages that may show pre-setup state.
        memcache.flush_all()
        self.redirect('/')
Example #59
0
def setup_for_testing(require_indexes=True, define_queues=None):
  """Sets up the stubs for testing.

  Args:
    require_indexes: True if indexes should be required for all indexes.
    define_queues: Additional queues that should be available (list of
        names); defaults to none.
  """
  from google.appengine.api import apiproxy_stub_map
  from google.appengine.api import memcache
  from google.appengine.api import queueinfo
  from google.appengine.datastore import datastore_stub_util
  from google.appengine.tools import old_dev_appserver
  from google.appengine.tools import dev_appserver_index
  # BUG FIX: the default used to be a shared mutable list ([]); use None
  # as the sentinel and create a fresh list per call.
  if define_queues is None:
    define_queues = []
  before_level = logging.getLogger().getEffectiveLevel()
  try:
    # Level 100 is above CRITICAL: silence logging during the noisy setup.
    logging.getLogger().setLevel(100)
    root_path = os.path.realpath(os.path.dirname(__file__))
    old_dev_appserver.SetupStubs(
        TEST_APP_ID,
        root_path=root_path,
        login_url='',
        datastore_path=tempfile.mktemp(suffix='datastore_stub'),
        history_path=tempfile.mktemp(suffix='datastore_history'),
        blobstore_path=tempfile.mktemp(suffix='blobstore_stub'),
        require_indexes=require_indexes,
        clear_datastore=False)
    dev_appserver_index.SetupIndexes(TEST_APP_ID, root_path)
    # Actually need to flush, even though we've reallocated. Maybe because the
    # memcache stub's cache is at the module level, not the API stub?
    memcache.flush_all()
  finally:
    logging.getLogger().setLevel(before_level)

  # probability=1 makes the HR datastore fully consistent so queries
  # immediately see writes.
  datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
  hr_policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1)
  datastore_stub.SetConsistencyPolicy(hr_policy)

  # Synthesize a queue.yaml exposing 'default' plus the requested queues.
  taskqueue_stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
  taskqueue_stub.queue_yaml_parser = (
      lambda x: queueinfo.LoadSingleQueue(
          'queue:\n- name: default\n  rate: 1/s\n' +
          '\n'.join('- name: %s\n  rate: 1/s' % name
                    for name in define_queues)))
    def setUpClass(cls):
        # Configuring AppEngine Testbed
        cls.testbed = testbed.Testbed()
        cls.testbed.activate()
        cls.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
            probability=1)
        cls.testbed.init_datastore_v3_stub(consistency_policy=cls.policy)
        cls.testbed.init_memcache_stub()

        # Setting Ndb Cache
        ndb.get_context().set_cache_policy(False)
        ndb.get_context().clear_cache()

        # Flushing Memcache
        memcache.flush_all()

        # Creating Creating test objects
        cls.counter_name = 'test-counter'
        cls.counter_name_list = ['test-1', 'test-2', 'test-3']