Example #1
    def test_repair_missing_markers_on_non_default_namespace(self):
        self.i3 = TestModel.objects.using("ns1").create(id=self.i1.pk, name="name1", counter1=1, counter2=1)
        self.i4 = TestModel.objects.using("ns1").create(id=self.i2.pk, name="name3", counter1=1, counter2=2)
        NS1_NAMESPACE = settings.DATABASES["ns1"]["NAMESPACE"]

        instance_key = datastore.Key.from_path(TestModel._meta.db_table, self.i2.pk, namespace=DEFAULT_NAMESPACE)
        instance_key_ns1 = datastore.Key.from_path(TestModel._meta.db_table, self.i2.pk, namespace=NS1_NAMESPACE)
        marker = "{}|name:{}".format(TestModel._meta.db_table, md5(self.i2.name).hexdigest())
        marker_key_default = datastore.Key.from_path(UniqueMarker.kind(), marker, namespace=DEFAULT_NAMESPACE)
        marker_key_ns1 = datastore.Key.from_path(UniqueMarker.kind(), marker, namespace=NS1_NAMESPACE)
        datastore.Delete(marker_key_ns1)
        datastore.Delete(marker_key_default)

        UniqueAction.objects.create(action_type="repair", model=encode_model(TestModel), db="ns1")
        process_task_queues()

        a = UniqueAction.objects.get()
        self.assertEqual(a.status, "done")

        # Is the missing marker for the default namespace left alone?
        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get, marker_key_default)
        # Is the missing marker restored?
        marker = datastore.Get(marker_key_ns1)
        self.assertTrue(marker)
        self.assertTrue(isinstance(marker["instance"], datastore.Key))
        self.assertEqual(instance_key_ns1, marker["instance"])
        self.assertTrue(marker["created"])
Example #2
    def execute(self):
        table = self.table
        query = datastore.Query(table,
                                keys_only=True,
                                namespace=self.namespace)
        while query.Count():
            datastore.Delete(query.Run())

        # Delete the markers we need to
        from djangae.db.constraints import UniqueMarker
        query = datastore.Query(UniqueMarker.kind(),
                                keys_only=True,
                                namespace=self.namespace)
        query["__key__ >="] = datastore.Key.from_path(UniqueMarker.kind(),
                                                      self.table,
                                                      namespace=self.namespace)
        query["__key__ <"] = datastore.Key.from_path(UniqueMarker.kind(),
                                                     u"{}{}".format(
                                                         self.table,
                                                         u'\ufffd'),
                                                     namespace=self.namespace)
        while query.Count():
            datastore.Delete(query.Run())

        # TODO: ideally we would only clear the cached objects for the table that was flushed, but
        # we have no way of doing that
        memcache.flush_all()
        caching.get_context().reset()
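The two __key__ inequalities above amount to a key-name prefix scan: every UniqueMarker whose key name starts with the table name sorts between the table name and the table name followed by u'\ufffd'. A minimal standalone sketch of the same pattern, assuming nothing beyond the public datastore API (the kind and prefix passed in are illustrative, not part of the example above):

from google.appengine.api import datastore

def keys_with_name_prefix(kind, prefix, namespace=None):
    """Return keys of `kind` whose key name starts with `prefix`."""
    query = datastore.Query(kind, keys_only=True, namespace=namespace)
    # Lower bound: the prefix itself; upper bound: the prefix followed by
    # u'\ufffd', the same sentinel used in the flush code above.
    query["__key__ >="] = datastore.Key.from_path(kind, prefix, namespace=namespace)
    query["__key__ <"] = datastore.Key.from_path(kind, prefix + u"\ufffd", namespace=namespace)
    return list(query.Run())
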
Example #3
    def map(entity, model, *args, **kwargs):
        """ The Clean mapper maps over all UniqueMarker instances. """

        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE", "")

        model = decode_model(model)
        if not entity.key().id_or_name().startswith(model._meta.db_table + "|"):
            # Only include markers which are for this model
            return

        assert namespace == entity.namespace()
        with disable_cache():
            # At this point, the entity is a unique marker that is linked to an instance of 'model', now we should see if that instance exists!
            instance_id = entity["instance"].id_or_name()
            try:
                instance = model.objects.using(alias).get(pk=instance_id)
            except model.DoesNotExist:
                logger.info("Deleting unique marker %s because the associated instance no longer exists", entity.key().id_or_name())
                datastore.Delete(entity)
                return

            # Get the possible unique markers for the entity, if this one doesn't exist in that list then delete it
            instance_entity, _ = django_instance_to_entities(connections[alias], instance._meta.fields, raw=True, instance=instance, check_null=False)
            identifiers = unique_identifiers_from_entity(model, instance_entity, ignore_pk=True)
            identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=entity["instance"].namespace()) for i in identifiers]
            if entity.key() not in identifier_keys:
                logger.info("Deleting unique marker %s because the it no longer represents the associated instance state", entity.key().id_or_name())
                datastore.Delete(entity)
Example #4
    def DeleteBlob(self, blob_key):
        """Delete blob data from the datastore.

    Args:
      blob_key: Blob-key of existing blob to delete.
    Raises:
      ApplicationError: When the blob is not found or cannot be read.
    """
        blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                                str(blob_key),
                                                namespace='')
        try:
            blob_info = datastore.Get(blob_info_key)
        except datastore_errors.EntityNotFoundError:
            raise apiproxy_errors.ApplicationError(
                blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)

        block_count = blob_info["size"] / blobstore.MAX_BLOB_FETCH_SIZE
        block_set = []
        try:
            while block_count >= 0:
                entity = datastore.Entity(_BLOB_CHUNK_KIND_,
                                          name=str(blob_key) + "__" +
                                          str(block_count),
                                          namespace='')
                block_set.append(entity)
                block_count -= 1
            datastore.Delete(block_set)
            datastore.Delete(blob_info_key)
        except:
            raise apiproxy_errors.ApplicationError(
                blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
Example #5
    def DeleteBlob(cls, blobkey, storage):
        """Delete a blob.

    Args:
      blobkey: blobkey in str.
      storage: blobstore storage stub.
    """
        datastore.Delete(cls.ToDatastoreBlobKey(blobkey))

        blobinfo = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
                                                 blobkey,
                                                 namespace='')
        datastore.Delete(blobinfo)
        storage.DeleteBlob(blobkey)
Example #6
    def execute(self):
        self.select.execute()

        # This is a little bit more inefficient than just doing a keys_only query and
        # sending it to delete, but I think this is the sacrifice to make for the unique caching layer
        keys = []

        def spawn_query(kind, key):
            qry = Query(
                kind, namespace=key.namespace() or None
            )  # TODO: is the namespace necessary if we're passing the key?
            qry["__key__ ="] = key
            return qry

        queries = [
            spawn_query(x.key().kind(), x.key()) for x in self.select.results
        ]
        if not queries:
            return

        for entity in QueryByKeys(self.model, queries, [],
                                  self.namespace).Run():
            keys.append(entity.key())

            # Delete constraints if that's enabled
            if constraints.constraint_checks_enabled(self.model):
                constraints.release(self.model, entity)

        caching.remove_entities_from_cache_by_key(keys, self.namespace)
        datastore.Delete(keys)
Example #7
    def testDatastoreTypes(self):
        """Puts and gets different basic datastore types."""

        entity = datastore.Entity('TestKind')

        entity.update({
            'rating':
            datastore_types.Rating(1),
            'category':
            datastore_types.Category('bugs'),
            'key':
            datastore_types.Key.from_path('foo', 'bar'),
            'user':
            users.User('*****@*****.**'),
            'text':
            datastore_types.Text('some text'),
            'blob':
            datastore_types.Blob('data'),
            'bytestring':
            datastore_types.ByteString('data'),
            'im':
            datastore_types.IM('http://example.com/', 'Larry97'),
            'geopt':
            datastore_types.GeoPt(1.1234, -1.1234),
            'email':
            datastore_types.Email('*****@*****.**'),
            'blobkey':
            datastore_types.BlobKey('27f5a7'),
        })

        datastore.Put(entity)
        e = datastore.Get(entity)
        datastore.Delete(entity)
Example #8
    def test_clean_removes_markers_with_different_values_on_non_default_namespace(self):
        self.i3 = TestModel.objects.using("ns1").create(id=self.i1.pk, name="name1", counter1=1, counter2=1)
        self.i4 = TestModel.objects.using("ns1").create(id=self.i2.pk, name="name3", counter1=1, counter2=2)

        NS1_NAMESPACE = settings.DATABASES["ns1"]["NAMESPACE"]

        marker1 = "{}|name:{}".format(TestModel._meta.db_table, md5(self.i3.name).hexdigest())
        marker_key = datastore.Key.from_path(UniqueMarker.kind(), marker1, namespace=NS1_NAMESPACE)
        default_key = datastore.Key.from_path(UniqueMarker.kind(), marker1, namespace=DEFAULT_NAMESPACE)
        original_marker = datastore.Get(marker_key)
        default_marker = datastore.Get(default_key)

        marker2 = "{}|name:{}".format(TestModel._meta.db_table, md5("bananas").hexdigest())
        new_marker = datastore.Entity(UniqueMarker.kind(), name=marker2, namespace=NS1_NAMESPACE)
        new_marker.update(original_marker)
        datastore.Put(new_marker)

        # This allows us to test: 1) namespaced markers are checked against models in their own namespace (not all of them)
        self.i1.delete()
        #... 2) the mapper only cleans the desired namespace
        datastore.Put(default_marker)

        UniqueAction.objects.create(action_type="clean", model=encode_model(TestModel), db="ns1")
        process_task_queues()

        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get, new_marker.key())
        self.assertTrue(datastore.Get(default_marker.key()))
        self.assertTrue(datastore.Get(marker_key))
        datastore.Delete(default_marker)
Example #9
    def post(self):
        kind = self.request.get('kind')

        keys = []
        index = 0
        num_keys = int(self.request.get('numkeys'))
        for i in xrange(1, num_keys + 1):
            key = self.request.get('key%d' % i)
            if key:
                keys.append(key)

        if self.request.get('action') == 'Delete':
            num_deleted = 0

            for key in keys:
                datastore.Delete(datastore.Key(key))
                num_deleted = num_deleted + 1
            message = '%d entit%s deleted.' % (num_deleted,
                                               ('ies', 'y')[num_deleted == 1])
            self.redirect(
                '%s&msg=%s' %
                (self.request.get('next'), urllib.quote_plus(message)))
            return

        self.error(404)
Example #10
 def flush(self):
     """Helper function to remove the current datastore and re-open the stubs"""
     if self.remote:
         import random, string
         code = ''.join([random.choice(string.ascii_letters) for x in range(4)])
         print '\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
         print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
         print "Warning! You're about to delete the *production* datastore!"
         print 'Only models defined in your INSTALLED_APPS can be removed!'
         print 'If you want to clear the whole datastore you have to use the ' \
               'datastore viewer in the dashboard. Also, in order to delete all '\
               'unneeded indexes you have to run appcfg.py vacuum_indexes.'
         print 'In order to proceed you have to enter the following code:'
         print code
         response = raw_input('Repeat: ')
         if code == response:
             print 'Deleting...'
             from django.db import models
             from google.appengine.api import datastore as ds
             for model in models.get_models():
                 print 'Deleting %s...' % model._meta.db_table
                 while True:
                     data = ds.Query(model._meta.db_table, keys_only=True).Get(200)
                     if not data:
                         break
                     ds.Delete(data)
             print "Datastore flushed! Please check your dashboard's " \
                   'datastore viewer for any remaining entities and remove ' \
                   'all unneeded indexes with manage.py vacuum_indexes.'
         else:
             print 'Aborting'
             exit()
     else:
         destroy_datastore(*self._get_paths())
     self._setup_stubs()
Example #11
    def execute(self):
        table = self.table
        query = datastore.Query(table, keys_only=True)
        while query.Count():
            datastore.Delete(query.Run())

        # Delete the markers we need to
        from djangae.db.constraints import UniqueMarker
        query = datastore.Query(UniqueMarker.kind(), keys_only=True)
        query["__key__ >="] = datastore.Key.from_path(UniqueMarker.kind(), self.table)
        query["__key__ <"] = datastore.Key.from_path(UniqueMarker.kind(), u"{}{}".format(self.table, u'\ufffd'))
        while query.Count():
            datastore.Delete(query.Run())

        cache.clear()
        clear_context_cache()
Example #12
  def __Increment(self, nodes_with_children, score_entities,
                  score_entities_to_delete):
    """Changes child counts for given nodes.

    This method will create nodes as needed.

    Args:
      nodes_with_children: A dict of (node_key, child) tuples to deltas
      score_entities: Additional score entities to persist as part of
        this transaction
      score_entities_to_delete: Score entities to delete as part of this
        transaction
    Returns:
      None
    """
    keys = list(set(key for ((key, _), delta) in nodes_with_children.iteritems()
                    if delta != 0))
    if not keys:
      return  # Nothing to do
    nodes = datastore.Get(keys)

    node_dict = {}
    for (key, node) in zip(keys, nodes):
      if not node:
        node = datastore.Entity("ranker_node", parent=self.rootkey,
                                name=key.name())
        node["child_counts"] = [0] * self.branching_factor
      node_dict[key] = node
    for ((key, child), amount) in nodes_with_children.iteritems():
      if amount != 0:
        node = node_dict[key]
        node["child_counts"][child] += amount
        assert node["child_counts"][child] >= 0
    datastore.Put(node_dict.values() + score_entities)
    if score_entities_to_delete:
      datastore.Delete(score_entities_to_delete)
Example #13
    def execute(self):
        table = self.table
        query = datastore.Query(table, keys_only=True)
        while query.Count():
            datastore.Delete(query.Run())

        cache.clear()
Example #14
File: db.py Project: Xeon2003/server
def Delete(keys, **kwargs):
    """
		Deletes one or more entities from the data store.

		:warning: Permanently deletes entities, use with care!

		Deletes the given entity or entities from the data store. You can only delete
		entities from your app. If there is an error, the function raises a
		subclass of :exc:`datastore_errors.Error`.

		:param keys: Key, str or list of keys or strings to be deleted.
		:type keys: Key | str | list of Key | list of str

		:param config: Optional configuration to use for this request. This must be specified\
		as a keyword argument.
		:type config: dict

		:raises: :exc:`TransactionFailedError`, if the deletion could not be committed.
	"""
    if conf["viur.db.caching"] > 0:
        if isinstance(keys, datastore_types.Key) or isinstance(
                keys, basestring):  #Just one:
            memcache.delete(str(keys),
                            namespace=__CacheKeyPrefix__,
                            seconds=__cacheLockTime__)
        elif isinstance(keys, list):
            for key in keys:
                assert isinstance(key, datastore_types.Key) or isinstance(
                    key, basestring)
                memcache.delete(str(key),
                                namespace=__CacheKeyPrefix__,
                                seconds=__cacheLockTime__)
    return (datastore.Delete(keys, **kwargs))
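A hedged usage sketch for the wrapper above. The import path and the "Person" keys are illustrative; only the accepted argument forms (a Key, a str, or a list of either) come from the docstring:

from server import db  # assumed import path for the db.py shown above
from google.appengine.api import datastore

alice = datastore.Key.from_path("Person", "alice")
bob = datastore.Key.from_path("Person", "bob")

db.Delete(alice)         # a single Key
db.Delete(str(alice))    # the str form is accepted too
db.Delete([alice, bob])  # or a list of keys / strings
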
Example #15
    def execute(self):
        self.select.execute()

        # This is a little bit more inefficient than just doing a keys_only query and
        # sending it to delete, but I think this is the sacrifice to make for the unique caching layer
        keys = []

        def spawn_query(kind, key):
            qry = Query(kind)
            qry["__key__ ="] = key
            return qry

        queries = [spawn_query(self.select.db_table, x.key()) for x in self.select.results]
        if not queries:
            return

        for entity in QueryByKeys(self.select.model, queries, []).Run():
            keys.append(entity.key())

            # Delete constraints if that's enabled
            if constraints.constraint_checks_enabled(self.select.model):
                constraints.release(self.select.model, entity)

            caching.remove_entity_from_cache_by_key(entity.key())
        datastore.Delete(keys)
Example #16
    def post(self):
        kind = self.request.get('kind')
        entity_key = self.request.get('key')
        if entity_key:
            if self.request.get('action') == 'Delete':
                datastore.Delete(datastore.Key(entity_key))
                self.redirect(self.request.get('next'))
                return
            entity = datastore.Get(datastore.Key(entity_key))
        else:
            entity = datastore.Entity(kind)

        args = self.request.arguments()
        for arg in args:
            bar = arg.find('|')
            if bar > 0:
                data_type_name = arg[:bar]
                field_name = arg[bar + 1:]
                form_value = self.request.get(arg)
                data_type = DataType.get_by_name(data_type_name)
                if entity and entity.has_key(field_name):
                    old_formatted_value = data_type.format(entity[field_name])
                    if old_formatted_value == ustr(form_value):
                        continue

                if len(form_value) > 0:
                    value = data_type.parse(form_value)
                    entity[field_name] = value
                elif entity.has_key(field_name):
                    del entity[field_name]

        datastore.Put(entity)

        self.redirect(self.request.get('next'))
Example #17
    def run(self, context):
        # This code builds leaf categories for selection with complete names, 3.8k of them.
        if not isinstance(self.cfg, dict):
            self.cfg = {}
        update_file_path = self.cfg.get('file', None)
        debug_environment = self.cfg.get('debug_environment', False)
        if not update_file_path:
            raise orm.TerminateAction()
        Category = context.models['24']
        gets = datastore.Query('24', namespace=None, keys_only=True).Run()
        keys = list(gets)
        datastore.Delete(keys)
        categories = []
        put_entities = []
        structure = {}
        with file(update_file_path) as f:
            for line in f:
                if not line.startswith('#'):
                    item = line.replace('\n', '')
                    categories.append(item)
                    full_path = item.split(' > ')
                    current_structure = structure
                    for xi, path in enumerate(full_path):
                        if path not in current_structure:
                            current_structure[path] = {}
                        current_structure = current_structure[path]

        for i, item in enumerate(categories):
            full_path = item.split(' > ')
            path_map = structure
            current = full_path
            parent = current[:-1]
            category = {}
            category['id'] = hashlib.md5(''.join(current)).hexdigest()
            if parent:
                category['parent_record'] = Category.build_key(
                    hashlib.md5(''.join(parent)).hexdigest())
            else:
                category['parent_record'] = None
            category['name'] = ' / '.join(current)
            category['state'] = ['indexable']
            leaf = False
            for path in full_path:
                if path in path_map:
                    path_map = path_map[path]
                if not len(path_map):
                    leaf = True
            if leaf:
                category['state'].append(
                    'visible')  # marks the category as leaf
            category = Category(**category)
            category._use_rule_engine = False
            category._use_record_engine = False
            category._use_memcache = False
            category._use_cache = False
            put_entities.append(category)
        tools.log.debug('Writing %s categories' % len(put_entities))
        orm.put_multi(put_entities)
Example #18
        def Dispatch(self, request, outfile, base_env_dict=None):
            """Handle post dispatch.

      This dispatcher will handle all uploaded files in the POST request, store
      the results in the blob-storage, close the upload session and transform
      the original request in to one where the uploaded files have external
      bodies.

      Returns:
        New AppServerRequest indicating request forward to upload success
        handler.
      """
            if base_env_dict['REQUEST_METHOD'] != 'POST':
                outfile.write('Status: 400\n\n')
                return

            upload_key = re.match(UPLOAD_URL_PATTERN,
                                  request.relative_url).group(1)
            try:
                upload_session = datastore.Get(upload_key)
            except datastore_errors.EntityNotFoundError:
                upload_session = None

            if upload_session:
                success_path = upload_session['success_path']

                upload_form = cgi.FieldStorage(fp=request.infile,
                                               headers=request.headers,
                                               environ=base_env_dict)

                try:
                    mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
                        upload_form)
                    datastore.Delete(upload_session)
                    self.current_session = upload_session

                    header_end = mime_message_string.find('\n\n') + 1
                    content_start = header_end + 1
                    header_text = mime_message_string[:header_end]
                    content_text = mime_message_string[content_start:]

                    complete_headers = ('%s'
                                        'Content-Length: %d\n'
                                        '\n') % (header_text,
                                                 len(content_text))

                    return dev_appserver.AppServerRequest(
                        success_path,
                        None,
                        mimetools.Message(
                            cStringIO.StringIO(complete_headers)),
                        cStringIO.StringIO(content_text),
                        force_admin=True)
                except dev_appserver_upload.InvalidMIMETypeFormatError:
                    outfile.write('Status: 400\n\n')
            else:
                logging.error('Could not find session for %s', upload_key)
                outfile.write('Status: 404\n\n')
Example #19
    def DeleteMultiple(self, node_indexes):
        """Delete multiple nodes at once.

    Args:
      node_indexes: [node index, ...]  # where node_index is an integer
    """
        db_nodes = datastore.Delete([
            self._RankerNodeKey(node_index) for node_index in set(node_indexes)
        ])
Example #20
 def delete_all(self, kind, batch_size=1000):
     from google.appengine.api import datastore
     count = 0
     query = datastore.Query(kind=kind, keys_only=True)
     results = query.Get(batch_size)
     while results:
         datastore.Delete(results)
         count += len(results)
         results = query.Get(batch_size)
     return count
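For completeness, a hedged example of driving the batched helper above from a test; the receiver object and the kind name are purely illustrative:

# Illustrative: wipe any leftover "TestKind" entities between tests.
removed = fixture.delete_all("TestKind", batch_size=500)
print("deleted %d entities" % removed)
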
Example #21
    def syncDeletedEntity(self, key):
        """Delete entity.

        :param string key: The remote key.
        """

        sync_info = SyncInfo.get_by_key_name(key)
        datastore.Delete([sync_info.target_key(), sync_info.key()])

        return {"status": ENTITY_DELETED}
Example #22
    def execute(self):
        table = self.table
        query = datastore.Query(table, keys_only=True)
        while query.Count():
            datastore.Delete(query.Run())

        cache.clear()

        from .caching import clear_context_cache
        clear_context_cache()
Example #23
  def removeAssociation(self, server_url, handle):
    """
    This method removes the matching association if it's found, and returns
    whether the association was removed or not.
    """
    query = datastore.Query('Association',
                            {'url =': server_url, 'handle =': handle})

    results = query.Get(1)
    if results:
      datastore.Delete(results[0].key())
      return True
    return False
Example #24
    def DeleteUrlBase(self, request, response):
        """Trivial implementation of ImagesService::DeleteUrlBase.

    Args:
      request: ImagesDeleteUrlBaseRequest, contains a blobkey to an image.
      response: ImagesDeleteUrlBaseResonse - currently unused.
    """
        key = datastore.Key.from_path(BLOB_SERVING_URL_KIND,
                                      request.blob_key(),
                                      namespace="")
        datastore.Delete(key)
Example #25
    def tearDown(self):
        """Clean up."""

        query = blobstore.BlobInfo.all()
        cursor = query.fetch(10)

        for b in cursor:
            key = datastore_types.Key.from_path('__BlobInfo__', str(b.key()))
            datastore.Delete(key)

        os.unlink(self.datastore_path)
Example #26
 def respond(self):
   models = io.Engine.get_schema()
   kinds = ['0', '6', '83', '5', '35', '36', '62', '61', '39', '38', '60', '8', '57', '77', '10', '15', '16', '17', '18', '19', '49', '47']
   namespaces = metadata.get_namespaces()
   indexes = []
   keys_to_delete = []
   if self.request.get('kinds'):
     kinds = self.request.get('kinds').split(',')
   
   util.log('DELETE KINDS %s' % kinds)
   
   ignore = ['15', '16', '17', '18', '19']
   @orm.tasklet
   def wipe(kind):
     util.log(kind)
     @orm.tasklet
     def generator():
       model = models.get(kind)
       if model and not kind.startswith('__'):
         keys = yield model.query().fetch_async(keys_only=True)
         keys_to_delete.extend(keys)
         indexes.append(search.Index(name=kind))
         for namespace in namespaces:
           util.log(namespace)
           keys = yield model.query(namespace=namespace).fetch_async(keys_only=True)
           keys_to_delete.extend(keys)
           indexes.append(search.Index(name=kind, namespace=namespace))
     yield generator()
   if self.request.get('delete'):
     futures = []
     for kind in kinds:
       if kind not in ignore:
         futures.append(wipe(kind))
     orm.Future.wait_all(futures)
   if self.request.get('and_system'):
     futures = []
     for kind in kinds:
       if kind in ignore:
         futures.append(wipe(kind))
     orm.Future.wait_all(futures)
   if keys_to_delete:
     datastore.Delete([key.to_old_key() for key in keys_to_delete])
   indexes.append(search.Index(name='catalogs'))
   # empty catalog index!
   for index in indexes:
     while True:
       document_ids = [document.doc_id for document in index.get_range(ids_only=True)]
       if not document_ids:
         break
       try:
         index.delete(document_ids)
       except:
         pass
   mem.flush_all()
Example #27
    def DeleteUrlBase(self, request, unused_response):
        """Trivial implementation of an API call.

    Args:
      request: ImagesDeleteUrlBaseRequest - Contains a blobkey to an image.
      unused_response: ImagesDeleteUrlBaseResponse - Unused.
    """
        key = datastore.Key.from_path(BLOB_SERVING_URL_KIND,
                                      request.blob_key(),
                                      namespace='')
        datastore.Delete(key)
Example #28
def deleteUserData(id):
    # memcache.delete(key="MakerSecret-"+id)
    # memcache.delete(key="USER-"+id)
    # dashid=memcache.get(key="User-dash-"+id)
    # memcache.delete(key = "User-dash-"+id)
    # memcache.delete(key = "Dash-user-"+str(dashid))

    try:
        key = Key.from_path('UserData', id)
        entity = datastore.Delete(key)
    except:
        logging.debug(id + u"は登録されていません。")
Example #29
    def _Dynamic_DeleteBlob(self, request, response):
        """Delete a blob by its blob-key.

        Delete a blob from the blobstore using its blob-key.  Deleting blobs
        that do not exist is a no-op.

        Args:
            request: A fully initialized DeleteBlobRequest instance.
            response: Not used but should be a VoidProto.
        """
        for blob_key in request.blob_key_list():
            key = datastore_types.Key.from_path('__BlobInfo__', str(blob_key))
            self.__storage.DeleteBlob(key)
            datastore.Delete(key)
Example #30
def flush_task_markers():
    """ Delete all ShardedTaskMarker objects from the DB.
        Useful to call in setUp(), as Django doesn't wipe this kind because there's
        no model for it.
    """
    namespaces = set()
    namespaces.add(settings.DATABASES['default'].get('NAMESPACE', ''))
    namespaces.add(settings.DATABASES.get('ns1', {}).get('NAMESPACE', ''))

    for namespace in namespaces:
        query = datastore.Query(ShardedTaskMarker.KIND,
                                namespace=namespace,
                                keys_only=True).Run()
        datastore.Delete([x for x in query])
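A minimal sketch of the intended use per the docstring above; the test-case class name is illustrative:

from django.test import TestCase  # or the project's own TestCase base class

class MapperTaskTests(TestCase):  # illustrative name
    def setUp(self):
        super(MapperTaskTests, self).setUp()
        # Django's flush does not touch ShardedTaskMarker entities (there is
        # no model for them), so clear them out explicitly before each test.
        flush_task_markers()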