Пример #1
0
 def delete(self, **kwargs):
     """Delete this plugin, its blobs, and every cache entry derived from it.

     Removes the icon blob, the APK blob and each screenshot blob before
     deleting the entity itself, then decrements the global plugin counter
     and evicts the related memcache entries.
     """
     # Icon blob: imageid appears to be "<blobkey>.<suffix>", so the
     # extension is stripped before the BlobInfo lookup.
     if self.imageid:
         b = BlobInfo.get(self.imageid.split('.')[0])
         if b:
             b.delete()
     # APK blob (stored under its raw blob key).
     if self.apkkey:
         b = BlobInfo.get(self.apkkey)
         if b:
             b.delete()
     # Screenshot blobs, same "<blobkey>.<suffix>" convention as the icon.
     for imguri in self.imagelist:
         b = BlobInfo.get(imguri.split('.')[0])
         if b:
             b.delete()
     super(Plugin, self).delete(**kwargs)
     # Keep the global plugin counter in sync.
     pluginCount = PluginCount.get_or_insert('plugin_count')
     pluginCount.num -= 1
     pluginCount.put()
     # Evict every cached view that could still show this plugin.
     memcache.delete('allplugincount')
     memcache.delete('appnamelist')
     memcache.delete('pluginid%s' % self.key().id())
     memcache.delete('user_applist_%s' % (self.username))
     # Evict the paginated "applist" caches.
     # NOTE(review): `pluginCount.num % 30` looks like it was meant to be
     # `pluginCount.num // 30` (number of 30-item pages) -- confirm.
     l = []
     for i in range(0, pluginCount.num % 30):
         l.append('applist__%s' % i)
     l.append('applist__%s' % len(l))
     memcache.delete_multi(l)
Пример #2
0
 def delete(self):
     """Delete this session, its data rows, and their memcache copies.

     On any failure the hard delete is abandoned and the session is instead
     flagged as deleted -- in memcache if a cached copy exists, otherwise in
     a freshly cached copy of the datastore row -- so readers treat it as
     gone.
     """
     try:
         # Remove every data row tied to this session, then the session.
         query = _AppEngineUtilities_SessionData.all()
         query.filter(u"session_key = ", self.session_key)
         results = query.fetch(1000)
         db.delete(results)
         db.delete(self)
         memcache.delete_multi(
             [
                 u"_AppEngineUtilities_Session_" + unicode(self.session_key),
                 u"_AppEngineUtilities_SessionData_" + unicode(self.session_key),
             ]
         )
     except Exception:
         # Narrowed from a bare `except:`; best-effort fallback below.
         mc = memcache.get(u"_AppEngineUtilities_Session_" + unicode(self.session_key))
         if mc:
             mc.deleted = True
         else:
             # not in the memcache, check to see if it should be
             query = _AppEngineUtilities_Session.all()
             query.filter(u"sid = ", self.sid)
             results = query.fetch(1)
             if len(results) > 0:
                 results[0].deleted = True
                 # BUG FIX: was `unicode(session_key)` -- an undefined name
                 # that raised NameError; use the instance attribute like
                 # everywhere else in this method.
                 memcache.set(u"_AppEngineUtilities_Session_" + unicode(self.session_key), results[0])
Пример #3
0
    def delete(self):
        """
        Remove this session and all of its associated data rows from the
        datastore and memcache, falling back to a soft-delete flag when the
        hard delete fails.

        Returns True
        """
        try:
            data_query = _AppEngineUtilities_SessionData.all()
            data_query.filter(u"session = ", self)
            db.delete(data_query.fetch(1000))
            db.delete(self)
            memcache.delete_multi([
                u"_AppEngineUtilities_Session_%s" % (str(self.key())),
                u"_AppEngineUtilities_SessionData_%s" % (str(self.key())),
            ])
        except:
            cached = memcache.get(
                u"_AppEngineUtilities_Session_%s" % (str(self.key())))
            if cached:
                cached.deleted = True
            else:
                # not in the memcache, check to see if it should be
                sess_query = _AppEngineUtilities_Session.all()
                sess_query.filter(u"sid = ", self.sid)
                matches = sess_query.fetch(1)
                if len(matches) > 0:
                    matches[0].deleted = True
                    memcache.set(
                        u"_AppEngineUtilities_Session_%s" % (unicode(self.key())),
                        matches[0])
        return True
  def testGet(self):
    """End-to-end checks of sharded_cache Set/Get under shard eviction.

    Covers: a small (inline) value, a large multi-shard value, eviction of
    the shard map, eviction of a single content shard (which must purge the
    survivors), and eviction of all content shards.
    """
    # Small value round-trips intact.
    sharded_cache.Set('foo', SMALL_CONTENT)
    data = sharded_cache.Get('foo')
    self.assertEqual(len(SMALL_CONTENT), len(data))
    self.assertEqual(SMALL_CONTENT, data)

    # No shards have been evicted.
    sharded_cache.Set('foo', LARGE_CONTENT)
    data = sharded_cache.Get('foo')
    self.assertEqual(len(LARGE_CONTENT), len(data))
    self.assertEqual(LARGE_CONTENT, data)

    # Shard map was evicted.
    sharded_cache.Set('foo', LARGE_CONTENT)
    memcache.delete(sharded_cache.MEMCACHE_PREFIX + 'foo')
    self.assertEqual(None, sharded_cache.Get('foo'))

    # 1 content shard was evicted.
    sharded_cache.Set('foo', LARGE_CONTENT)
    memcache.delete(sharded_cache.MEMCACHE_PREFIX + 'foo1')
    self.assertEqual(None, sharded_cache.Get('foo'))
    # The shard map and unevicted shards should be deleted.
    cache_keys = ['foo', 'foo0', 'foo1', 'foo2', 'foo3']
    memcache_keys = [sharded_cache.MEMCACHE_PREFIX + key for key in cache_keys]
    content = memcache.get_multi(memcache_keys)
    self.assertFalse(any(content))

    # All content shards were evicted.
    sharded_cache.Set('foo', LARGE_CONTENT)
    cache_keys = ['foo0', 'foo1', 'foo2']
    memcache_keys = [sharded_cache.MEMCACHE_PREFIX + key for key in cache_keys]
    memcache.delete_multi(memcache_keys)
    self.assertEqual(None, sharded_cache.Get('foo'))
Пример #5
0
 def clear_properties_cache(cls, name):
     """Evict every memcached recent-crash property entry for `name`."""
     property_names = ['date_time', 'state', 'labels', 'issue', 'argv']
     cache_keys = [
         CrashReport.recent_crash_property_key(name, prop)
         for prop in property_names
     ]
     memcache.delete_multi(keys=cache_keys)
Пример #6
0
def user_activate(request, level=1):
	"""Handle activation/deactivation of member orders at `level`.

	POST with 'activate': mark the order activated; once its source user has
	three activated orders at that level, bump the user's level, evict the
	cached per-level counters, and notify the user by mail.
	POST with 'deactivate': delete the order.
	Otherwise (or after a POST) return the template context of pending orders.
	"""
	if request.method == "POST":
		try:
			order = Messages_Orders.get_by_id(int(request.POST['activate']))
			if order.destination != request.user:
				return HttpResponseRedirect('/user/activate/%d' % int(level))
			order.activated = True
			order.save()
			activated_count = order.source.Orders_out \
				.filter('source =', order.source) \
				.filter('activated =', True) \
				.filter('level =', order.level).count()
			if activated_count >= 3:
				order.source.level += 1
				order.source.save()
				# Level-dependent counters are cached; evict the stale ones.
				# (The two branches were mutually exclusive ifs; elif is
				# equivalent and clearer.)
				if order.source.level == 1:
					memcache.delete_multi([m.count1level])
				elif order.source.level > 1:
					memcache.delete_multi([m.count1level, m.count2level])
				mail.send_mail(sender="*****@*****.**",
				               to="%s <%s>" % (order.source.first_name, order.source.email),
				               subject='You has level up',
				               body='You has level up, your new level is %s' % order.source.level
				)
			return HttpResponseRedirect('/user/activate/%d' % int(level))
		except Exception:
			# Narrowed from bare `except:`. Missing 'activate' field
			# (KeyError) or a failed lookup falls through to the
			# deactivate branch below.
			pass
		try:
			order = Messages_Orders.get_by_id(int(request.POST['deactivate']))
			if order.destination != request.user:
				return HttpResponseRedirect('/user/activate/%d' % int(level))
			order.delete()
			return HttpResponseRedirect('/user/activate/%d' % int(level))
		except Exception:
			# Narrowed from bare `except:`; same fallback response.
			return HttpResponse('ert')
	return {
		'orders': request.user.Orders_in.filter('level =', int(level)).filter('activated =', False),
		'level': level}
 def invalidate(instances):
     '''Removes all cached copies of the given GraphShard instances.
        @param instances: A collection of GraphShard instances
     '''
     # Poison the in-process cache first, then purge memcache in one call.
     for shard in instances:
         GraphShard._graph_cache[shard._parsed_memcache_key()] = GraphShard._not_found
     memcache.delete_multi(
         [shard._parsed_memcache_key() for shard in instances])
 def memcacheFlush(self, event_key):
     """Delete the ten per-event render cache entries; returns the keys."""
     keys = []
     for shard_index in range(10):
         raw_key = self.CACHE_KEY_FORMAT.format(event_key, shard_index)
         keys.append(self._render_cache_key(raw_key))
     memcache.delete_multi(keys)
     return keys
Пример #9
0
def invalidate_keys(keys):
    """
    Removes the given entities from memcache.
    """

    # Count the invalidations, then drop every key under the RTC prefix.
    memcache.incr(INVALIDATIONS, delta=len(keys), initial_value=0)
    urlsafe_keys = []
    for key in keys:
        urlsafe_keys.append(key.urlsafe())
    memcache.delete_multi(urlsafe_keys, key_prefix=RTC)
Пример #10
0
 def CleanPageCache():
     """Purge every tracked page-cache entry and its bookkeeping rows."""
     tracked_keys = set()
     for stat in PageCacheStat.all():
         tracked_keys.add(stat.cachekey)
     memcache.delete_multi(list(tracked_keys))
     db.delete(PageCacheStat.all())
Пример #11
0
    def _post_put_hook(self, future):
        """Clear cached entries matching these keywords after a put.

        Evicts the keyword-prefixed full-text-search cache entries and this
        entity's own cached key, then defers to the superclass hook.
        """

        memcache.delete_multi(self.keywords, key_prefix=FTS)
        utils.invalidate_keys([self.key])

        return super(FullTextMixin, self)._post_put_hook(future)
Пример #12
0
def create_post(form):
    """Create and save a new post from the submitted `form` mapping.

    The new post is cached under its datastore key, the aggregate
    post/draft list caches are invalidated, the sitemap is refreshed, and
    the post is announced on Twitter.

    Args:
        form: mapping with 'title', 'slug', 'tags', 'content', 'desc' and
            optionally 'draft' (presence marks the post as a draft).

    Returns:
        The saved Post entity.
    """
    # Fall back to a title-derived slug when none was supplied.
    slug = slugify(form["slug"]) if (len(form["slug"]) > 0) else slugify(form["title"])
    tags = __strip_tags(form["tags"].split(",")) if (len(form["tags"]) > 0) else []
    striped = strip_html_code(form["content"])
    desc = form["desc"]
    as_draft = form.has_key("draft")  # dict.has_key: Python 2 only
    html = bbcode_to_html(striped)
    post = Post(
        title=form["title"],
        slug=slug,
        tags=tags,
        desc=desc,
        author=users.get_current_user(),
        coded_content=striped,
        html_content=html,
        as_draft=as_draft,
    )
    post.put()
    # Cache the new post by key and invalidate the aggregate list caches.
    memcache.set(str(post.key()), post)
    memcache.delete_multi(["all_posts_10", "all_drafts_10"])
    update_sitemap()
    twit_post(str(post.key()))
    return post
Пример #13
0
	def delete(self, *args, **kwargs):
		"""Purge this entity's cache entries, then delete the entity."""
		cache_keys = [self.key_name]
		cache_keys.extend('%s:%s' % (prefix, self.key_name)
		                  for prefix in self._memo_prefixes)
		memcache.delete_multi(cache_keys)

		return super(ModelCaching, self).delete(*args, **kwargs)
Пример #14
0
def flush(id):
  """Flushes all ACL's for the specified account.
  """

  # pylint: disable=E1101
  memcache.delete_multi(RIGHTS, key_prefix='%s.' % id)
Пример #15
0
 def invalidate(instances):
     """Removes all cached copies of the given GraphShard instances.
        @param instances: A collection of GraphShard instances
     """
     missing = GraphShard._not_found
     local_cache = GraphShard._graph_cache
     for inst in instances:
         local_cache[inst._parsed_memcache_key()] = missing
     memcache.delete_multi(
         [inst._parsed_memcache_key() for inst in instances])
Пример #16
0
 def CleanPageCache():
     """Delete all cached pages recorded in PageCacheStat, then the stats."""
     unique_keys = {stat.cachekey for stat in PageCacheStat.all()}
     memcache.delete_multi(list(unique_keys))
     db.delete(PageCacheStat.all())
Пример #17
0
 def post(self):
     """Evict the cached stock-group entries named in `needdelgroupid`."""
     # Delete cached entries for the stock groups that need re-syncing.
     group_ids = self.request.get('needdelgroupid', '')
     if group_ids:
         memcache.delete_multi(group_ids.split(','))
     return
Пример #18
0
 def delete_photos_cache(albumname, photonames, types=["photo","thumb"]):
     """Evict the cached photo/thumbnail blobs for the named photos.

     NOTE(review): the mutable default argument is risky in general, but
     `types` is never mutated here, so behavior is unaffected.
     """
     cache_keys = [
         "photo_cache_%s_%s_%s" % (kind, albumname, name)
         for kind in types
         for name in photonames
     ]
     memcache.delete_multi(cache_keys)
Пример #19
0
def Get(key):
    """Get a memcache entry, or None.

    Large values are stored as a shard map plus numbered content shards;
    small values are pickled directly into the shard map. Returns None when
    the map or any content shard has been evicted (cleaning up the
    leftovers in that case).
    """
    key = MEMCACHE_PREFIX + key
    shard_map = memcache.get(key)
    if not shard_map:
        # The shard_map was evicted or never set.
        return

    # If zero shards, the content was small enough and stored in the shard_map.
    num_shards = shard_map['num_shards']
    if num_shards == 0:
        return pickle.loads(shard_map['content'])

    # Shard keys are the map key with a numeric suffix: "<key>0", "<key>1", ...
    keys = ['%s%d' % (key, i) for i in range(num_shards)]
    shards = memcache.get_multi(keys)
    if len(shards) != num_shards:
        # One or more content shards were evicted, delete map and content shards.
        memcache.delete_multi([key] + keys)
        return

    # All shards present, stitch contents back together and unpickle.
    # ('%s' * n) % tuple concatenates the n string chunks in shard order.
    # (Note: the comprehension variable `key` shadows the outer `key`.)
    shards = tuple([shards[key] for key in keys])
    value = '%s' * shard_map['num_shards']
    value = pickle.loads(value % shards)
    return value
Пример #20
0
 def write(self, key, iterable):
     """Write `iterable`'s chunks under `key`, creating missing parent slots.

     If the content fits within CACHE_BYTES_LIMIT it is cached eagerly
     (prefixed with 'F' -- presumably "full"; confirm) and the datastore
     write is deferred; otherwise the slot is written synchronously.
     """
     super(DataStoreRepository, self).write(key, iterable)
     # NOTE(review): parent_cache_keys is never populated, so the two
     # delete_multi() calls below are no-ops. This looks like a bug (each
     # created parent's cache key was probably meant to be appended inside
     # the loop) -- confirm before changing.
     parent_cache_keys = []
     for i in xrange(1, len(key)):
         parent_key = key[:i]
         if not self.exists(parent_key):
             Slot(depth=len(parent_key),
                  key=make_db_key(parent_key),
                  blob=None).put()
     delete_multi(parent_cache_keys, namespace='slot')
     delete_multi(parent_cache_keys, namespace='list')
     size = 0
     cache_value_buffer = []
     # Pull chunks until the cache size limit is reached.
     for chunk in iterable:
         size += len(chunk)
         cache_value_buffer.append(chunk)
         if size >= CACHE_BYTES_LIMIT:
             break
     else:
         # Everything fit: cache the full value and defer the slot write.
         cache_key = make_cache_key(key)
         cache_value = ''.join(itertools.chain('F', cache_value_buffer))
         put(cache_key, cache_value, namespace='slot')
         defer(put_slot, key, cache_value_buffer)
         return
     # Too large to cache: replay buffered chunks plus the remainder.
     iterable = itertools.chain(cache_value_buffer, iterable)
     put_slot(key, iterable)
Пример #21
0
 def _delete_tasklet(self, todo):
   """Tasklet: delete a batch of (future, key, options) work items.

   Memcache copies are purged up front (with seconds=_LOCK_TIME), then the
   keys are deleted from the datastore grouped by their options, and every
   waiting future is resolved with None.
   """
   assert todo
   grouped = {}
   stale_cache_keys = []  # For memcache.delete_multi()
   for fut, key, options in todo:
     if self._use_memcache(key, options):
       stale_cache_keys.append(key.urlsafe())
     if options not in grouped:
       grouped[options] = ([], [])
     futures, keys = grouped[options]
     futures.append(fut)
     keys.append(key)
   if stale_cache_keys:  # Pre-emptively delete from memcache.
     memcache.delete_multi(stale_cache_keys, seconds=_LOCK_TIME,
                           key_prefix=self._memcache_prefix)
   for options, (futures, keys) in grouped.iteritems():
     datastore_keys = [k for k in keys if self._use_datastore(k, options)]
     if datastore_keys:
       yield self._conn.async_delete(options, datastore_keys)
     for fut in futures:
       fut.set_result(None)
Пример #22
0
    def _post_put_hook(self, future):
        """Clear the cache of entries matching these keywords.

        Post-put hook: evicts the keyword-prefixed full-text-search cache
        entries and this entity's own cached key before chaining to the
        superclass hook.
        """

        memcache.delete_multi(self.keywords, key_prefix=FTS)
        utils.invalidate_keys([self.key])

        return super(FullTextMixin, self)._post_put_hook(future)
Пример #23
0
	def get_api(self):
		"""Fetch every auction page from the API, storing images and items.

		Progress (current page / total pages) is checkpointed in memcache
		before each fetch, so an interrupted run resumes where it left off;
		the checkpoint is cleared once all pages have been processed.
		Returns the list of pending async put RPCs.
		"""
		memcache_key_prefix = 'refresh_item_api_'
		memcache_keys = ['page', 'pages']

		# Resume from a previous checkpoint if one exists.
		page_info = memcache.get_multi(memcache_keys, key_prefix = memcache_key_prefix)
		if 'page' not in page_info or 'pages' not in page_info:
			page = pages = 1
		else:
			page, pages = page_info['page'], page_info['pages']

		put_rpcs = []
		while page <= pages:
			# Checkpoint before fetching so a crash does not lose progress.
			memcache.set_multi({'page' : page, 'pages' : pages}, key_prefix = memcache_key_prefix)
			logging.info('Getting page = %d / %d ...' % (page, pages))
			method = '/simple/auctions.list'
			content = fetch_api(method, {'defs' : 1, 'per_page' : 1000, 'page' : page})
			auctions_info = parse_json(content, None)
			page = auctions_info['page'] + 1
			pages = auctions_info['pages']
			items_info = auctions_info['items']
			for item_info in items_info.itervalues():
				item_def = item_info['item_def']
				image = Image(key_name = item_def['class_tsid'], url = item_def['iconic_url'],
						width = 40, height = 40)
				put_rpcs.append(put_async(image))
				item = Item(key_name = item_def['class_tsid'], category = item_def['category'],
						name = item_def['name_single'], image = image)
				put_rpcs.append(put_async(item))
		# Done: remove the checkpoint.
		memcache.delete_multi(memcache_keys, key_prefix = memcache_key_prefix)
		return put_rpcs
Пример #24
0
def flush(id):
  """Flushes all ACL's for the specified account.
  """

  prefix = '{0}.'.format(id)
  # pylint: disable=E1101
  memcache.delete_multi(RIGHTS, key_prefix=prefix)
Пример #25
0
 def _post_put_hook(self, future):
     """Invalidate every cached query that could include this match."""
     match = future.get_result().get()
     stale_keys = ['[MatchAll]', '[BestBet]True', '[BestBet]False']
     stale_keys.append('[MatchId]' + str(match.matchid))
     stale_keys.append('[StageName]' + match.stage)
     stale_keys.append('[TeamName]' + match.team_a)
     stale_keys.append('[TeamName]' + match.team_b)
     memcache.delete_multi(stale_keys)
Пример #26
0
 def _post_put_hook(self, future):
     """Invalidate every cached query that could include this bet."""
     bet = future.get_result().get()
     match_id = str(bet.bet_match_id)
     memcache.delete_multi([
         '[BetAll]',
         '[BestBet]True',
         '[BestBet]False',
         '[BetMatchid]' + match_id,
         '[BetUserId]' + bet.userid,
         '[BetMatchIdUserId]' + match_id + ':' + bet.userid,
     ])
Пример #27
0
 def delete(cls, data):
     """Drop the cached pages/feeds touching `data`, then delete it."""
     stale = ['a/' + data.slug, 'a$ten', 'a$archive',
              'xml$atom', 'xml$rss', 'xml$sitemap']
     stale.extend('a$keyword/' + tag for tag in data.keyword.split())
     memcache.delete_multi(stale)
     db.delete(data)
     return data
Пример #28
0
 def write(self, key, iterable):
     """Write `iterable`'s chunks under `key`, creating missing parent slots.

     Content within CACHE_BYTES_LIMIT is cached eagerly (prefixed 'F' --
     presumably "full"; confirm) with a deferred datastore write; larger
     content is written synchronously.
     """
     super(DataStoreRepository, self).write(key, iterable)
     # NOTE(review): parent_cache_keys stays empty, making the two
     # delete_multi() calls below no-ops -- likely a bug (the created
     # parents' cache keys were probably meant to be collected in the
     # loop). Confirm before changing.
     parent_cache_keys = []
     for i in xrange(1, len(key)):
         parent_key = key[:i]
         if not self.exists(parent_key):
             Slot(
                 depth=len(parent_key),
                 key=make_db_key(parent_key),
                 blob=None
             ).put()
     delete_multi(parent_cache_keys, namespace='slot')
     delete_multi(parent_cache_keys, namespace='list')
     size = 0
     cache_value_buffer = []
     # Buffer chunks until the cache size limit is reached.
     for chunk in iterable:
         size += len(chunk)
         cache_value_buffer.append(chunk)
         if size >= CACHE_BYTES_LIMIT:
             break
     else:
         # Everything fit: cache the full value and defer the slot write.
         cache_key = make_cache_key(key)
         cache_value = ''.join(itertools.chain('F', cache_value_buffer))
         put(cache_key, cache_value, namespace='slot')
         defer(put_slot, key, cache_value_buffer)
         return
     # Too large to cache: replay buffered chunks plus the remainder.
     iterable = itertools.chain(cache_value_buffer, iterable)
     put_slot(key, iterable)
Пример #29
0
    def after_put(self, *args, **kwargs):
        """Post-save hook: persist the related tasklist and invalidate the
        cached properties/query results of related project cohorts.

        FIX: the local cache-key dict previously shadowed the `**kwargs`
        parameter; renamed to `query_kwargs` (no behavior change).
        """
        if self.tasklist:
            # Tasklist might not always be present; it is if created via
            # create(), but not if fetched from the datastore.
            self.tasklist.put()

        # Reset memcache for cached properties of related objects.
        # This relationship is "down" so there may be many keys to clear so
        # don't try to actually refresh the cached values, just set up a cache
        # miss for their next read and they'll recover.
        to_delete = []

        for pc in model.ProjectCohort.get(n=float('inf'), project_id=self.uid):
            # These keys are for individual project cohort entities.
            to_delete.append(util.cached_properties_key(pc.uid))
            # These are for caches of whole query results.
            query_kwargs = {
                'program_label': pc.program_label,
                'cohort_label': pc.cohort_label
            }
            to_delete.append(
                util.cached_query_key('SuperDashboard', **query_kwargs))
            taskqueue.add(
                url='/task/cache_dashboard',
                headers={'Content-Type': 'application/json; charset=utf-8'},
                payload=json.dumps(query_kwargs),
                countdown=config.task_consistency_countdown,
            )
        # Also clear the dashboard's organization query.
        to_delete.append(
            util.cached_query_key('SuperDashboard',
                                  organization_id=self.organization_id))

        memcache.delete_multi(to_delete)
def SetPublicEndpointStatus(api_query, status=None):
  """Change the public endpoint status of an API Query.

  Args:
    api_query: The API Query to change
    status: The status to change the API Query to. If status=None then the
            status of the API Query will be toggled.

  Returns:
    True if status change was successful, False otherwise.
  """
  if api_query and status in (None, True, False):
    # BUG FIX: `if not status:` also matched status=False, so explicitly
    # disabling a query toggled it instead. Only toggle when no status is
    # given, as documented above.
    if status is None:
      api_query.is_active = not api_query.is_active
    else:
      api_query.is_active = status

    # A disabled endpoint must not stay on the refresh schedule.
    if api_query.is_active is False:
      api_query.is_scheduled = False

    try:
      api_query.put()
      # Evict the cached query object and every cached response format.
      memcache.delete_multi(['api_query'] + co.SUPPORTED_FORMATS.keys(),
                            key_prefix=str(api_query.key()))
      return True
    except db.TransactionFailedError:
      return False
  return False
Пример #31
0
def Get(key):
  """Get a memcache entry, or None.

  Small values live inline in the shard map; large values are stitched
  back together from their numbered content shards.
  """
  map_key = MEMCACHE_PREFIX + key
  shard_map = memcache.get(map_key)
  if not shard_map:
    # Evicted or never set.
    return

  num_shards = shard_map['num_shards']
  if num_shards == 0:
    # Content was small enough to store directly in the shard map.
    return pickle.loads(shard_map['content'])

  shard_keys = ['%s%d' % (map_key, i) for i in range(num_shards)]
  shards = memcache.get_multi(shard_keys)
  if len(shards) != num_shards:
    # At least one content shard was evicted; drop the map and the rest.
    memcache.delete_multi([map_key] + shard_keys)
    return

  # All shards present: concatenate the chunks in order and unpickle.
  pickled = ''.join(shards[shard_key] for shard_key in shard_keys)
  return pickle.loads(pickled)
Пример #32
0
 def delete(self):
     """Remove this melody's cache entries and its datastore row."""
     stale = ['melody_' + self.label, str(self.key())]
     if self.label == 'demo':
         stale.append('melody_demo_%s' % self.url)
     memcache.delete_multi(stale)
     db.delete(self)
     return self
Пример #33
0
def main(request):
    user = users.get_current_user()
    if not user:
        return direct_to_template(request, template = 'homepage.html', extra_context = {'login_url' : users.create_login_url(request.get_full_path())})

    if request.method == 'POST':
        return_msg = {}
        try:
            date = datetime.date.fromtimestamp(time.mktime(time.strptime(request.POST['date'], '%Y-%m-%d')))
            tracker = WeightTracker.all().filter('user = '******'date = ', date).get()
            if tracker:
                tracker.weight = float(request.POST['weight'])
                tracker.put()
            else:
                tracker = WeightTracker(weight = float(request.POST['weight']), date = date)
                tracker.put()
            return_msg['error'] = 0
            return_msg['msg'] = 'Data was saved succesfully'
            return_msg['weight'] = str(tracker.weight)
        except ValueError, e:
            return_msg['error'] = 1
            return_msg['msg'] = 'The date was not in the correct format'
        
        #Delete all cache data whenever a new entry is made
        memcache.delete_multi([CHART_DATA_CACHE_KEY(), BMI_CACHE_KEY()])
        if request.is_ajax():
            return HttpResponse(simplejson.dumps(return_msg), mimetype = 'application/json')
        else:
            return HttpResponseRedirect('/')
Пример #34
0
def delete(key):
    """Delete a chunked memcache value: its chunk keys plus the index key.

    Returns False when no chunk index exists for `key`, True otherwise.
    """
    chunk_keys = memcache.get(key)
    if chunk_keys is None:
        return False
    memcache.delete_multi(chunk_keys + [key])
    return True
Пример #35
0
def SetPublicEndpointStatus(api_query, status=None):
    """Change the public endpoint status of an API Query.

    Args:
      api_query: The API Query to change
      status: The status to change the API Query to. If status=None then the
              status of the API Query will be toggled.

    Returns:
      True if status change was successful, False otherwise.
    """
    if api_query and status in (None, True, False):
        # BUG FIX: `if not status:` also matched status=False, so explicitly
        # disabling a query toggled it instead. Only toggle when no status is
        # given, as documented above.
        if status is None:
            api_query.is_active = not api_query.is_active
        else:
            api_query.is_active = status

        # A disabled endpoint must not stay on the refresh schedule.
        if api_query.is_active is False:
            api_query.is_scheduled = False

        try:
            api_query.put()
            # Evict the cached query object and every cached response format.
            memcache.delete_multi(['api_query'] + co.SUPPORTED_FORMATS.keys(),
                                  key_prefix=str(api_query.key()))
            return True
        except db.TransactionFailedError:
            return False
    return False
Пример #36
0
  def post(self):
    """Profile update handler: changes the user's nickname and/or avatar.

    Requires a registered user. Clears the cached page fragments that embed
    nicknames/photos, then applies the requested changes and redirects back
    to the profile page (also on resize failure or no-op submissions).
    """
    if not requires_registered(self):
      return

    if not self.request.get('nickname') and not self.request.get('avatar'):
      self.redirect('/profile')
      return

    # clear cached parts where nicknames and photos may appear
    memcache.delete_multi([ "ranks", "matches_home_admin", "matches_home", "comments" ])

    # update nickname
    if self.request.get('nickname'):
      models.update_nickname(users.get_current_user(), self.request.get('nickname'))

    # upload image if relevant
    if self.request.get('avatar'):
      try:
        # Normalize to an 80x80 avatar; bail out to the profile page if the
        # uploaded data cannot be processed as an image.
        avatar = images.resize(self.request.get("avatar"), 80, 80)
      except images.Error:
        self.redirect('/profile')
        return

      models.update_avatar(users.get_current_user(), avatar)

    self.redirect('/profile')
Пример #37
0
 def _delete_tasklet(self, todo):
     """Tasklet: delete a batch of (future, key, options) work items.

     Memcache copies are purged first (with seconds=_LOCK_TIME), then the
     keys are deleted from the datastore grouped by options, and every
     waiting future is resolved with None.
     """
     assert todo
     batches = {}
     stale_memcache_keys = []  # For memcache.delete_multi()
     for fut, key, options in todo:
         if self._use_memcache(key, options):
             stale_memcache_keys.append(key.urlsafe())
         bucket = batches.setdefault(options, ([], []))
         bucket[0].append(fut)
         bucket[1].append(key)
     if stale_memcache_keys:  # Pre-emptively delete from memcache.
         memcache.delete_multi(stale_memcache_keys,
                               seconds=_LOCK_TIME,
                               key_prefix=self._memcache_prefix)
     for options, (futures, keys) in batches.iteritems():
         datastore_keys = [key for key in keys
                           if self._use_datastore(key, options)]
         if datastore_keys:
             yield self._conn.async_delete(options, datastore_keys)
         for fut in futures:
             fut.set_result(None)
Пример #38
0
def clear_sponsorships_cache():
	"""Evict every cached sponsorship list for the current tournament."""
	t = tournament.get_tournament()
	levels = ['all', 'Double Eagle', 'Hole in One', 'Eagle', 'Birdie', 'Angel']
	memcache.delete_multi(['%s/%s' % (t.name, level) for level in levels])
Пример #39
0
def get_multi_test():
    """Time a single memcache.get_multi over the benchmark key set."""
    prefix = 'memcache_get_multi_test'
    memcache.set_multi(DATA, 30, key_prefix=prefix)
    started = time()
    memcache.get_multi(MULTI_KEYS, key_prefix=prefix)
    elapsed = time() - started
    memcache.delete_multi(MULTI_KEYS, key_prefix=prefix)
    return elapsed
Пример #40
0
 def save(self):
     """Persist this entity and invalidate the rendered-output caches."""
     self.last_modified = datetime.datetime.now()
     self.put()
     memcache.delete_multi([
         'archive', 'all_articles', 'archive_output', 'feed_output',
         'index', 'index_output', 'writer_articles', 'writer_urls',
     ])
Пример #41
0
def del_titles():
    """Evict the cached title lists for recently seen emails (best effort).

    Always returns None; failures are swallowed because this is a pure
    cache-invalidation helper.
    """
    try:
        emails = get_recent_emails()
        keys = ['model\ttitles\t%s' % email
                for email in emails + ['None']]
        memcache.delete_multi(keys)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return None
Пример #42
0
    def _clear_relevant_memcache(cls, urls):
        """Drop the memcache entry for each given URL (no-op when empty)."""
        if not urls:
            return

        stale_keys = [cls._generate_memcache_key(u) for u in urls]
        for stale_key in stale_keys:
            logging.info("Dropped memcache key: " + stale_key)
        memcache.delete_multi(stale_keys)
Пример #43
0
    def _clear_relevant_memcache(cls, urls):
        """Remove the cached entries for `urls` from memcache."""
        if not urls:
            return

        doomed = [cls._generate_memcache_key(url) for url in urls]
        for key in doomed:
            logging.info("Dropped memcache key: " + key)
        memcache.delete_multi(doomed)
Пример #44
0
 def memcacheFlush(self):
     """Delete this year's two cached entries; returns their keys."""
     current_year = datetime.datetime.now().year
     keys = [self.CACHE_KEY_FORMAT.format(current_year, flag)
             for flag in (True, False)]
     memcache.delete_multi(keys)
     return keys
Пример #45
0
def del_titles():
    """Evict cached title lists for recently seen emails (best effort).

    Always returns None; failures are swallowed because this is a pure
    cache-invalidation helper.
    """
    try:
        emails = get_recent_emails()
        keys = ['model\ttitles\t%s' % email
                for email in emails + ['None']]
        memcache.delete_multi(keys)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return None
Пример #46
0
 def delete(self):
     """Remove this article plus every cache entry that references it."""
     stale = ['a_atom', 'a_rss', 'a_sitemap', 'a_all', 'a_show',
              'a_kw_%s' % self.keyword,
              'a_slug_%s' % self.slug,
              str(self.key())]
     memcache.delete_multi(stale)
     db.delete(self)
     return self
Пример #47
0
    def post(self, author_slug):
        """Create or update a blog author from the submitted form.

        When renaming an existing author the entity is re-created under the
        new slug inside a transaction that also repoints all referencing
        posts and comments. Afterwards the blog's cache keys are evicted and
        the user is redirected to the admin page.
        """

        blog = self.blog
        author = None
        if author_slug:
            author = model.BlogAuthor.get_by_id(author_slug, parent=blog.key)
            if not author:
                return self.renderError(404)

        form_data, errors, valid_data = self.validate()

        if errors:
            return self.redisplay(
                form_data, errors,
                self.blog_url + '/admin/author/' + author_slug)

        name = valid_data["name"]
        slug = model.makeSlug(name, blog, model.BlogAuthor, author)
        if author:
            # if the name is different, remake the entity since the key name needs to change
            if name != author.name:
                # get lists of other entities to update (must be outside of transaction since they're a non-ancestor query)
                posts = list(author.posts)
                comments = list(author.comments)

                # update all the posts and comments referencing the author at the same time that a new author object is created
                def author_transaction(author, slug, blog, ref_lists):
                    # re-create the author object
                    author = model.makeNew(
                        author,
                        id=slug,
                        parent=blog.key,
                        use_transaction=False)  # no nested transactions
                    # update the others to reference the new object
                    for ref_list in ref_lists:
                        new_objects = []
                        for ref_object in ref_list:
                            ref_object.author = author
                            new_objects.append(ref_object)
                        model.db.put(new_objects)
                    return author

                author = model.db.run_in_transaction(author_transaction,
                                                     author, slug, blog,
                                                     [posts, comments])
            author.populate(**valid_data)
        else:
            author = model.BlogAuthor(id=slug, parent=blog.key, **valid_data)

        author.put()

        # Author data appears across the blog's cached pages; evict them all.
        memcache.delete_multi(getCacheKeys(blog))

        if blog.authors.count() > 1:
            self.redirect(self.blog_url + '/admin/authors')

        else:
            self.redirect(self.blog_url + '/admin')
    def _Dynamic_Commit(self, transaction, transaction_response):
        """Intercept transaction commits to evict cached entities.

        Deletes from memcache every entity that this transaction modified or
        deleted (recorded per transaction handle), then forwards the commit
        to the wrapped datastore stub.
        """
        # We delete from cache before we commit otherwise we have a race condition.
        to_delete = self.local.to_delete[transaction.handle()]
        if to_delete:
            memcache.delete_multi(to_delete)
        del self.local.to_delete[transaction.handle()]

        self.CallWrappedStub('Commit', transaction, transaction_response)
Пример #49
0
def gaeDelete(key):
    """Delete a chunked memcache value: the index entry plus all chunks.

    Returns False when there is no index entry for `key`, True otherwise.
    """
    chunk_keys = gaeMemcache.get(key)
    if chunk_keys is None:
        return False
    if isinstance(chunk_keys, _STR_BASE):
        # The index entry holds a plain value rather than a chunk list.
        chunk_keys = []
    gaeMemcache.delete_multi(chunk_keys + [key])
    return True
 def delete_multi(cls, names):
   '''Delete multiple counters at once.

      Removes each counter's ndb entity and its memcache entry.

      Args:
        names: list of counter names to be deleted
   '''
   counter_id_list = cls._get_multi_memcache_ids(names)
   ndb.delete_multi([ndb.Key(cls, cid) for cid in counter_id_list])
   memcache.delete_multi(counter_id_list)
Пример #51
0
def clear_article_memcache(id=None, url=None):
	"""Evict cached article pages/fragments for the given id and/or url."""
	stale = ['get_articles_for_feed', 'get_articles_for_homepage']
	if id:
		stale.append('get_article_by_id:%s' % id)
		tenjin.helpers.fragment_cache.store.delete('article:%s' % id)
	if url:
		stale.append('get_article_by_url:%s' % hash(url))
	memcache.delete_multi(stale)
	yui.flush_all_server_cache()
Пример #52
0
 def delete(cls, *mkeys):
     """
     Deletes the memcache keys both from the local cache and from
     memcache. Use this instead of calling memcache.delete() directly.
     """
     for mkey in mkeys:
         # `mkey in dict` replaces dict.has_key(), which is deprecated in
         # Python 2 and removed in Python 3; behavior is identical.
         if mkey in cls.fetched_data:
             del cls.fetched_data[mkey]
     memcache.delete_multi(mkeys)
Пример #53
0
def clear_blog_entries_cache():
    """Evict the aggregate blog caches plus every per-page entry cache.

    NOTE(review): the `/` below relies on Python 2 integer floor division
    (entry count / page size); under Python 3 it would yield a float and
    break range() -- confirm the runtime before porting.
    """
    keys = [C_BLOG_ENTRIES_KEYS, C_BLOG_COUNT, C_BLOG_TOP]

    # add one key per page for get_blog_entries_page and get_blog_entries_keys_page
    for p in range(1,
                   get_blog_count() / models.BlogEntry.ENTRIES_PER_PAGE + 2):
        keys.extend([C_BLOG_ENTRIES_PAGE % p, C_BLOG_ENTRIES_KEYS_PAGE % p])

    memcache.delete_multi(keys)
Пример #54
0
def ExecuteApiQueryTask(api_query):
  """Executes a refresh of an API Query from the task queue.

    Attempts to fetch and update an API Query and will also log any errors.
    Schedules the API Query for next execution.

  Args:
    api_query: The API Query to refresh.

  Returns:
    A boolean. True if the API refresh was a success and False if the API
    Query is not valid or an error was logged.
  """
  if api_query:
    query_id = str(api_query.key())
    # The task is being handled now, so it is no longer "in the queue".
    api_query.in_queue = False

    api_response_content = FetchApiQueryResponse(api_query)

    if not api_response_content or api_response_content.get('error'):
      InsertApiQueryError(api_query, api_response_content)

      # Too many consecutive errors: stop scheduling this query.
      if api_query.is_error_limit_reached:
        api_query.is_scheduled = False

      SaveApiQuery(api_query)

      # Since it failed, execute the query again unless the refresh interval of
      # query is less than the random countdown, then schedule it normally.
      if api_query.refresh_interval < co.MAX_RANDOM_COUNTDOWN:
        schedule_helper.ScheduleApiQuery(api_query)  # Run at normal interval.
      else:
        schedule_helper.ScheduleApiQuery(api_query, randomize=True, countdown=0)
      return False

    else:
      SaveApiQueryResponse(api_query, api_response_content)

      # Check that public  endpoint wasn't disabled after task added to queue.
      if api_query.is_active:
        # Cache the query and its default-format response for the endpoint.
        memcache.set_multi({'api_query': api_query,
                            co.DEFAULT_FORMAT: api_response_content},
                           key_prefix=query_id,
                           time=api_query.refresh_interval)
        # Delete the transformed content in memcache since it will be updated
        # at the next request.
        delete_keys = set(co.SUPPORTED_FORMATS) - set([co.DEFAULT_FORMAT])
        memcache.delete_multi(list(delete_keys), key_prefix=query_id)

        SaveApiQuery(api_query)
        schedule_helper.ScheduleApiQuery(api_query)
        return True

      # Save the query state just in case the user disabled it
      # while it was in the task queue.
      SaveApiQuery(api_query)
  return False
Пример #55
0
 def _pre_put_hook(self):
     """Before saving, evict the cached route lists and image variants."""
     if not hasattr(self, 'key'):
         return
     origins_key = origins_cache_key(self.__class__, self.destination)
     destinations_key = destinations_cache_key(self.__class__,
                                               self.origin)
     memcache.delete_multi([
         origins_key,
         destinations_key,
         IMG_CACHE_PREFIX + origins_key,
         IMG_CACHE_PREFIX + destinations_key,
     ])
Пример #56
0
 def _delete_tasklet(self, todo):
   """Tasklet: delete the keys in `todo` and resolve their futures.

   `todo` is a collection of (future, key) pairs; duplicate keys are
   deleted once. Memcache copies are purged after the datastore delete
   completes.
   """
   assert todo
   keys = set(key for (_, key) in todo)
   yield self._conn.async_delete(None, keys)
   for fut, _ in todo:
     fut.set_result(None)
   # Now update memcache.
   memkeys = [key.urlsafe() for key in keys if self.should_memcache(key)]
   if memkeys:
     memcache.delete_multi(memkeys)