def testKindsRendering(self):
    """Test list of kinds renders in list_actions with supplied list."""
    entity_kinds = ['test1', 'test2']
    # Store one entity per kind so the kinds actually exist in the datastore.
    for kind in entity_kinds:
        datastore.Put(datastore.Entity(kind))

    self.handler.request.set('kind', entity_kinds)
    self.handler.get()

    # The handler should surface exactly the requested kinds, in order,
    # through the 'kind_stats' template parameter.
    # NOTE: the original also read self.handler.response.out.getvalue() into
    # an unused local; that dead read has been removed.
    kind_list = [stat['kind_name'] for stat in self.params['kind_stats']]
    self.assertListEqual(kind_list, entity_kinds)
def post(self, project_id, entity_key_string=None):
    """Handles mutations to a given entity.

    Supports deletion (via the 'action:delete' form field), editing an
    existing entity (when a key is supplied) and creating a new one.

    Args:
        project_id: A string specifying the project ID.
        entity_key_string: A string specifying the entity key.
    """
    self.ensure_user_has_admin(project_id)
    ds_access = DatastoreDistributed(project_id, DATASTORE_LOCATION,
                                     trusted=True)
    if self.request.get('action:delete'):
        if entity_key_string:
            _delete_entities(ds_access, [datastore.Key(entity_key_string)])
            redirect_url = self.request.get(
                'next', '/datastore_viewer/{}'.format(project_id))
            self.redirect(str(redirect_url))
        else:
            # Delete requested without a key: nothing to delete.
            self.response.set_status(400)
        return

    if entity_key_string:
        entity = _get_entity_by_key(ds_access,
                                    datastore.Key(entity_key_string))
    else:
        kind = self.request.get('kind')
        namespace = self.request.get('namespace', None)
        entity = datastore.Entity(kind, _namespace=namespace, _app=project_id)

    for arg_name in self.request.arguments():
        # Arguments are in <property_type>|<property_name>=<value> format.
        if '|' not in arg_name:
            continue
        data_type_name, property_name = arg_name.split('|')
        form_value = self.request.get(arg_name)
        data_type = DataType.get_by_name(data_type_name)
        if (entity and property_name in entity and
                data_type.format(entity[property_name]) == form_value):
            # If the property is unchanged then don't update it. This will
            # prevent empty form values from causing the property to be
            # deleted if the property was already empty.
            continue

        # TODO: Handle parse exceptions.
        entity[property_name] = data_type.parse(form_value)

    _put_entity(ds_access, entity)
    redirect_url = self.request.get(
        'next', '/datastore_viewer/{}'.format(project_id))
    self.redirect(str(redirect_url))
def setUp(self):
    """Builds two ancestor chains of TestKind entities plus their index keys."""
    datastore_batch = appscale_datastore_batch.DatastoreFactory.getDatastore(DB)
    self.app_datastore = datastore_server.DatastoreDistributed(datastore_batch)

    # First batch: a three-entity ancestor chain in namespace 'e'.
    self.entities = []
    prev = None
    for ii in range(0, 3):
        entity = datastore.Entity("TestKind", _app="test", name=str(ii),
                                  parent=prev, namespace='e')
        prev = entity
        # Properties have different values but the same property names.
        entity.update({'aaa': "1111_" + str(ii), 'bbb': "2222"})
        self.entities.append(entity.ToPb())

    # Second batch: identical key structure, different property values.
    self.entities2 = []
    prev = None
    for ii in range(0, 3):
        entity = datastore.Entity("TestKind", _app="test", name=str(ii),
                                  parent=prev, namespace='e')
        prev = entity
        # Properties have different values but the same property names.
        entity.update({'aaa': "x111_" + str(ii), 'bbb': "x222"})
        self.entities2.append(entity.ToPb())

    tuples = sorted((self.app_datastore.GetTablePrefix(x), x)
                    for x in self.entities)
    # Keys should be the same for entities and entities2.
    self.keys = self.app_datastore.GetIndexKVFromTuple(tuples, reverse=False)
    self.keys = [x[0] for x in self.keys]

    tuples = sorted((self.app_datastore.GetTablePrefix(x), x)
                    for x in self.entities2)
    # Keys should be the same for entities and entities2.
    self.keys2 = self.app_datastore.GetIndexKVFromTuple(tuples, reverse=False)
    self.keys2 = [x[0] for x in self.keys2]
def Query(self, entities, query, filters, orders):
    """Perform a query on this pseudo-kind.

    Args:
        entities: all the app's entities.
        query: the original datastore_pb.Query.
        filters: the filters from query.
        orders: the orders from query.

    Returns:
        (results, remaining_filters, remaining_orders)
        results is a list of entity_pb.EntityProto
        remaining_filters and remaining_orders are the filters and orders
        that should be applied in memory
    """
    namespace_range = datastore_stub_util.ParseNamespaceQuery(
        query, filters, orders)
    app_str = query.app()

    # Collect the distinct namespaces of this app that fall within the
    # requested range.
    namespaces = set()
    for app_namespace, _ in entities:
        (app_id, namespace) = datastore_types.DecodeAppIdNamespace(
            app_namespace)
        if app_id == app_str and namespace_range.Contains(namespace):
            namespaces.add(namespace)

    # Materialize one pseudo-entity per namespace. The empty (default)
    # namespace cannot be used as a key name, so it gets a reserved id.
    namespace_entities = []
    for namespace in namespaces:
        if namespace:
            namespace_e = datastore.Entity(self.name, name=namespace,
                                           _app=query.app())
        else:
            namespace_e = datastore.Entity(
                self.name, id=datastore_types._EMPTY_NAMESPACE_ID,
                _app=query.app())
        namespace_entities.append(namespace_e._ToPb())

    # All filtering/ordering was consumed by ParseNamespaceQuery; nothing
    # remains to apply in memory.
    return (namespace_entities, [], [])
def __ComputeScoreDeltas(self, scores):
    """Compute which scores have to be incremented and decremented.

    Args:
        scores: A dict mapping entity names to scores

    Returns:
        A tuple (score_deltas, score_entities, score_entities_to_delete).

        'score_deltas' is a dict, mapping scores (represented as tuples)
        to integers. 'score_deltas[s]' represents how many times the score
        's' has to be incremented (or decremented).

        'score_entities' is a list of 'ranker_score' entities that have to
        be updated in the same transaction as modifying the ranker nodes.
        The entities already contain the updated score.

        Similarly, 'score_entities_to_delete' is a list of entities that
        have to be deleted in the same transaction as modifying the ranker
        nodes.
    """
    score_keys = [self.__KeyForScore(score) for score in scores]

    # Fetch the previously stored scores; Get returns None for missing keys.
    old_scores = {}
    for old_score in datastore.Get(score_keys):
        if old_score:
            old_scores[old_score.key().name()] = old_score

    score_deltas = {}
    # Score entities to update
    score_ents = []
    score_ents_del = []
    for score_name, score_value in scores.iteritems():
        if score_name in old_scores:
            score_ent = old_scores[score_name]
            if score_ent["value"] == score_value:
                continue  # No change in score => nothing to do
            # Retract the old score before recording the new one.
            old_score_key = tuple(score_ent["value"])
            score_deltas.setdefault(old_score_key, 0)
            score_deltas[old_score_key] -= 1
        else:
            score_ent = datastore.Entity("ranker_score",
                                         parent=self.rootkey,
                                         name=score_name)
        if score_value:
            score_key = tuple(score_value)
            score_deltas.setdefault(score_key, 0)
            score_deltas[score_key] += 1
            score_ent["value"] = score_value
            score_ents.append(score_ent)
        else:
            # Do we have to delete an old score entity?
            if score_name in old_scores:
                score_ents_del.append(old_scores[score_name])

    return (score_deltas, score_ents, score_ents_del)
def PopulateEntities(self):
    """Insert entities for metadata queries into the datastore."""
    for namespace in MetadataTest.NAMESPACES:
        namespace_manager.set_namespace(namespace)

        foo = datastore.Entity('Foo')
        foo['num'] = 1
        foo['data'] = None
        datastore.Put(foo)

        bar = datastore.Entity('Bar', unindexed_properties=['data'])
        bar['str'] = 'yeah'
        bar['data'] = 'gasp!'
        datastore.Put(bar)

        first_baz = datastore.Entity('Baz')
        first_baz['fun'] = True
        datastore.Put(first_baz)

        second_baz = datastore.Entity('Baz')
        second_baz['fun'] = 'string'
        datastore.Put(second_baz)

    # Restore the default namespace so later operations are unaffected.
    namespace_manager.set_namespace('')
def map(instance, *args, **kwargs):
    """
    Figure out what markers the instance should use and verify they're
    attached to this instance. Log any weirdness and in repair mode -
    recreate missing markers.
    """
    action_id = kwargs.get("action_pk")
    repair = kwargs.get("repair")

    # Rebuild the raw datastore entity for this Django instance so we can
    # derive the unique-marker key names from its current field values.
    entity = django_instance_to_entity(
        connection, type(instance), instance._meta.fields,
        raw=True, instance=instance, check_null=False)
    identifiers = unique_identifiers_from_entity(type(instance), entity,
                                                 ignore_pk=True)
    identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i)
                       for i in identifiers]
    markers = datastore.Get(identifier_keys)
    instance_key = str(entity.key())

    markers_to_save = []

    for i, m in zip(identifier_keys, markers):
        marker_key = str(i)
        if m is None:
            # Missing marker
            if repair:
                new_marker = datastore.Entity(UniqueMarker.kind(),
                                              name=i.name())
                new_marker['instance'] = entity.key()
                new_marker['created'] = datetime.datetime.now()
                markers_to_save.append(new_marker)
            else:
                log(action_id, "missing_marker", instance_key, marker_key)
        elif 'instance' not in m or not m['instance']:
            # Marker with missing instance attribute
            if repair:
                m['instance'] = entity.key()
                markers_to_save.append(m)
            else:
                log(action_id, "missing_instance", instance_key, marker_key)
        elif m['instance'] != entity.key():
            # Legacy markers stored the instance as a string key; normalize
            # it to a datastore.Key before comparing again below.
            if isinstance(m['instance'], basestring):
                m['instance'] = datastore.Key(m['instance'])
                if repair:
                    markers_to_save.append(m)
                else:
                    log(action_id, "old_instance_key", instance_key,
                        marker_key)
            if m['instance'] != entity.key():
                # Marker already assigned to a different instance
                log(action_id, "already_assigned", instance_key, marker_key)
                # Also log in repair mode as repairing would break the other
                # instance.

    if markers_to_save:
        datastore.Put(markers_to_save)
def testDefaultClass(self):
    """Tests that the root class is used when missing discriminator."""
    class MyRoot(polymodel.PolyModel):
        pass

    # Write a raw entity with no class-discriminator property at all.
    raw_entity = datastore.Entity('MyRoot')
    datastore.Put(raw_entity)

    # Both query APIs must fall back to instantiating the root class.
    for query in (MyRoot.all(), MyRoot.gql('')):
        (fetched,) = list(query)
        self.assertIsInstance(fetched, MyRoot)
def CreateStatEntity(self, kind, kind_name=None, property_type=None,
                     property_name=None, subject_namespace=None,
                     composite_index_id=None, has_entity_bytes=None,
                     has_builtin_index_stats=None,
                     has_composite_index_stats=None):
    """Create a single Statistic datastore entity.

    Args:
        kind: The name of the kind to store.
        kind_name: The value of the 'kind_name' property to set on the entity.
        property_type: The value of the 'property_type' property to set on
            the entity.
        property_name: The value of the 'property_name' property to set on
            the entity.
        subject_namespace: The namespace for NamespaceStat entities.
        composite_index_id: The index id of composite index.
        has_entity_bytes: The stat has the entity_bytes property.
        has_builtin_index_stats: The stat entity has builtin_index_bytes and
            builtin_index_count.
        has_composite_index_stats: The stat entity has composite_index_bytes
            and composite_index_count.
    """
    stat = datastore.Entity(kind)

    # Fixed baseline values present on every stat entity.
    stat['bytes'] = 4
    stat['count'] = 2
    stat['timestamp'] = datetime.datetime.utcfromtimestamp(40)

    if has_entity_bytes:
        stat['entity_bytes'] = 2
    if has_builtin_index_stats:
        stat['builtin_index_count'] = 3
        stat['builtin_index_bytes'] = 1
    if has_composite_index_stats:
        stat['composite_index_count'] = 2
        stat['composite_index_bytes'] = 1

    # Optional properties: only set those that were explicitly supplied.
    optional_props = (
        ('kind_name', kind_name),
        ('property_type', property_type),
        ('property_name', property_name),
        ('subject_namespace', subject_namespace),
        ('index_id', composite_index_id),
    )
    for prop, value in optional_props:
        if value is not None:
            stat[prop] = value

    datastore.Put(stat)
def setUp(self):
    """Stores two ancestor chains (ATestKind / BTestKind) in namespace 'f'."""
    datastore_batch = appscale_datastore_batch.DatastoreFactory.getDatastore(DB)
    self.app_datastore = datastore_server.DatastoreDistributed(datastore_batch)

    self.entities = []
    self.keys = []
    self.entities2 = []
    self.keys2 = []

    # First batch: three-entity ancestor chain of ATestKind.
    prev = None
    for ii in range(0, 3):
        entity = datastore.Entity(kind="ATestKind", _app="test", name=str(ii),
                                  parent=prev, namespace='f')
        prev = entity
        # Properties have different values but the same property names.
        entity.update({'aaa': "1111_" + str(ii), 'bbb': "2222"})
        self.entities.append(entity.ToPb())
    self.keys = [e.key() for e in self.entities]
    self.app_datastore.PutEntities(self.entities)

    # Second batch: same layout but under a different kind, BTestKind.
    prev = None
    for ii in range(0, 3):
        entity = datastore.Entity(kind="BTestKind", _app="test", name=str(ii),
                                  parent=prev, namespace='f')
        prev = entity
        # Properties have different values but the same property names.
        entity.update({'aaa': "1111_" + str(ii), 'bbb': "2222"})
        self.entities2.append(entity.ToPb())
    self.keys2 = [e.key() for e in self.entities2]
    self.app_datastore.PutEntities(self.entities2)
def __create_instance(self, input_dict, bulkload_state):
    """Return a model instance or entity from an input_dict.

    Args:
        input_dict: Neutral input dictionary describing a single input
            record.
        bulkload_state: bulkload_state object describing the global state.

    Returns:
        Entity or model instance, or collection of entity or model
        instances, to be uploaded.
    """
    key = None
    if self._create_key:
        # Derive the key from the configured transform.
        key = self.__dict_to_prop(self._create_key, input_dict,
                                  bulkload_state)
        # A bare integer is interpreted as a numeric id for this kind.
        if isinstance(key, int):
            key = datastore.Key.from_path(self._transformer_spec.kind, key)
        if self._transformer_spec.model:
            # db.Model accepts either a full Key or a key_name string.
            if isinstance(key, datastore.Key):
                return self._transformer_spec.model(key=key)
            else:
                return self._transformer_spec.model(key_name=key)
        else:
            if isinstance(key, datastore.Key):
                # Rebuild the entity from the key's parent plus either its
                # numeric id or its name — Entity cannot take a Key directly.
                parent = key.parent()
                if key.name() is None:
                    return datastore.Entity(self._transformer_spec.kind,
                                            parent=parent, id=key.id())
                else:
                    return datastore.Entity(self._transformer_spec.kind,
                                            parent=parent, name=key.name())
    elif self._transformer_spec.model:
        # No key configured: let the model auto-assign one.
        return self._transformer_spec.model()
    # Fallback: plain entity; key here is a name string or None (auto id).
    return datastore.Entity(self._transformer_spec.kind, name=key)
def save_queues_to_db(self):
    """Persists the file-based queue configuration to the datastore.

    Raises:
        ValueError: If queue info has not been set.
    """
    if not self._queue_info_file:
        raise ValueError("Queue info must be set before saving the queues")

    serialized_queues = json.dumps(self._queue_info_file)
    queue_entity = datastore.Entity(
        self.QUEUE_KIND, name=self._app_id, _app=self.APPSCALE_QUEUES)
    queue_entity[self.QUEUE_INFO] = datastore_types.Blob(serialized_queues)
    queue_entity[self.APP_NAME] = datastore_types.ByteString(self._app_id)
    datastore.Put(queue_entity)
def testIncludeNotInStats(self):
    """Test kinds which are not present in stats."""
    entity_kinds = ['test1', 'test2']
    stat_test_kinds = ['test1', 'test3']

    # Real entities exist only for test1/test2.
    for kind in entity_kinds:
        datastore.Put(datastore.Entity(kind))

    # Stat entities exist only for test1/test3.
    self.CreateStatEntity(stats.GlobalStat.STORED_KIND_NAME)
    for kind in stat_test_kinds:
        self.CreateStatEntity(stats.KindStat.STORED_KIND_NAME, kind)

    self.handler.request.set('kind', ['test1', 'test2', 'test3'])
    self.handler.get()

    # Only kinds backed by actual entities should be listed, even when
    # stats exist for other kinds (test3).
    kind_list = [stat['kind_name'] for stat in self.params['kind_stats']]
    self.assertListEqual(entity_kinds, kind_list)
def StoreBlob(self, form_item, creation):
    """Store form-item to blob storage.

    Args:
        form_item: FieldStorage instance that represents a specific form
            field. This instance should have a non-empty filename
            attribute, meaning that it is an uploaded blob rather than a
            normal form field.
        creation: Timestamp to associate with new blobs creation time.
            This parameter is provided so that all blobs in the same
            upload form can have the same creation date.

    Returns:
        datastore.Entity('__BlobInfo__') associated with the upload.
    """
    main_type, sub_type = _SplitMIMEType(form_item.type)

    blob_key = self.__generate_blob_key()

    # Transparently decode base64-encoded uploads before storing.
    blob_file = form_item.file
    if 'Content-Transfer-Encoding' in form_item.headers:
        if form_item.headers['Content-Transfer-Encoding'] == 'base64':
            blob_file = cStringIO.StringIO(
                base64.urlsafe_b64decode(blob_file.read()))

    self.__blob_storage.StoreBlob(blob_key, blob_file)

    content_type_formatter = base.MIMEBase(main_type, sub_type,
                                           **form_item.type_options)

    blob_entity = datastore.Entity('__BlobInfo__',
                                   name=str(blob_key),
                                   namespace='')
    blob_entity['content_type'] = (
        content_type_formatter['content-type'].decode('utf-8'))
    blob_entity['creation'] = creation
    blob_entity['filename'] = form_item.filename.decode('utf-8')

    # Hash the upload in 1 MB chunks to bound memory use.
    blob_file.seek(0)
    digester = hashlib.md5()
    while True:
        block = blob_file.read(1 << 20)
        if not block:
            break
        digester.update(block)

    blob_entity['md5_hash'] = digester.hexdigest()
    # After the hashing loop the file position equals the total size.
    blob_entity['size'] = blob_file.tell()

    blob_file.seek(0)

    datastore.Put(blob_entity)
    return blob_entity
def __entity_for_mongo_document(self, document):
    """Converts a Mongo document dict into an EntityProto."""
    key = self.__key_for_id(document.pop("_id"))
    entity = datastore.Entity(kind=key.kind(),
                              parent=key.parent(),
                              name=key.name())

    for field in document.keys():
        entity[field] = self.__create_value_for_mongo_value(document[field])

    pb = entity._ToPb()
    # no decent way to initialize an Entity w/ an existing key...
    if not key.name():
        pb.key().path().element_list()[-1].set_id(key.id())

    return pb
def test_clean_removes_markers_with_different_values_on_non_default_namespace(
        self):
    """Clean on one namespace removes only that namespace's stale markers."""
    # Duplicate i1/i2 into the "ns1" namespace (same pks, one field differs).
    self.i3 = TestModel.objects.using("ns1").create(
        id=self.i1.pk, name="name1", counter1=1, counter2=1)
    self.i4 = TestModel.objects.using("ns1").create(
        id=self.i2.pk, name="name3", counter1=1, counter2=2)

    NS1_NAMESPACE = settings.DATABASES["ns1"]["NAMESPACE"]

    marker1 = "{}|name:{}".format(TestModel._meta.db_table,
                                  md5(self.i3.name).hexdigest())
    marker_key = datastore.Key.from_path(UniqueMarker.kind(), marker1,
                                         namespace=NS1_NAMESPACE)
    default_key = datastore.Key.from_path(UniqueMarker.kind(), marker1,
                                          namespace=DEFAULT_NAMESPACE)
    original_marker = datastore.Get(marker_key)
    default_marker = datastore.Get(default_key)

    # Stale marker: hashed value corresponds to no instance's name.
    marker2 = "{}|name:{}".format(TestModel._meta.db_table,
                                  md5("bananas").hexdigest())

    new_marker = datastore.Entity(UniqueMarker.kind(), name=marker2,
                                  namespace=NS1_NAMESPACE)
    new_marker.update(original_marker)
    datastore.Put(new_marker)

    # This allows us to test: 1) namespaced markers will check against their
    # namespace models (not all of them)
    self.i1.delete()

    # ... 2) the mapper only cleans the desired namespace
    datastore.Put(default_marker)

    UniqueAction.objects.create(action_type="clean",
                                model=encode_model(TestModel), db="ns1")
    process_task_queues()

    # The stale ns1 marker is gone; the default-namespace marker and the
    # valid ns1 marker survive.
    self.assertRaises(datastore_errors.EntityNotFoundError,
                      datastore.Get, new_marker.key())
    self.assertTrue(datastore.Get(default_marker.key()))
    self.assertTrue(datastore.Get(marker_key))
    datastore.Delete(default_marker)
def test_clean_removes_markers_with_different_values(self):
    """Clean should delete markers whose hashed value matches no instance."""
    table = TestModel._meta.db_table

    # Marker corresponding to the existing instance's current name.
    valid_marker_id = "{}|name:{}".format(table,
                                          md5(self.i1.name).hexdigest())
    valid_key = datastore.Key.from_path(UniqueMarker.kind(), valid_marker_id,
                                        namespace=DEFAULT_NAMESPACE)
    valid_marker = datastore.Get(valid_key)

    # Copy it under an id whose hash matches no instance value.
    stale_marker_id = "{}|name:{}".format(table, md5("bananas").hexdigest())
    stale_marker = datastore.Entity(UniqueMarker.kind(), name=stale_marker_id,
                                    namespace=DEFAULT_NAMESPACE)
    stale_marker.update(valid_marker)
    datastore.Put(stale_marker)

    UniqueAction.objects.create(action_type="clean",
                                model=encode_model(TestModel))
    process_task_queues()

    # The stale marker is removed; the valid one survives.
    self.assertRaises(datastore_errors.EntityNotFoundError,
                      datastore.Get, stale_marker.key())
    self.assertTrue(datastore.Get(valid_key))
def setUp(self):
    """Builds one ancestor chain of TestKind entities and an expected key."""
    datastore_batch = appscale_datastore_batch.DatastoreFactory.getDatastore(DB)
    self.app_datastore = datastore_server.DatastoreDistributed(datastore_batch)

    self.entities = []
    prev = None
    for ii in range(0, 3):
        entity = datastore.Entity("TestKind", _app="test", name=str(ii),
                                  parent=prev, namespace='d')
        prev = entity
        # Properties have different values but the same property names.
        entity.update({'aaa': "1111_" + str(ii), 'bbb': "2222"})
        self.entities.append(entity.ToPb())

    # Expected raw index key for the 'aaa' property of the middle entity.
    self.keys = ['test/d/TestKind/aaa/1111_1\x00/TestKind:0!TestKind:1!']
def test_invalid_data_in_datastore_doesnt_throw_an_error(self):
    """
    If invalid data is found while reading the entity data, then we
    should silently ignore the error and just return the data as-is
    rather than converting to list/dict.

    The reason is that if we blow up on load, then there's no way to load
    the entity (in Django) to repair the data. This is also consistent
    with the behaviour of Django when (for example) you load a NULL from
    the database into a field that is non-nullable. The field value will
    still be None when read.
    """
    # Write a raw entity whose json_field holds a bare string, not JSON.
    corrupt_entity = datastore.Entity(
        JSONFieldModel._meta.db_table, id=1,
        namespace=settings.DATABASES["default"]["NAMESPACE"])
    corrupt_entity["json_field"] = "bananas"
    datastore.Put(corrupt_entity)

    # Loading must not raise; the raw value comes back unchanged.
    instance = JSONFieldModel.objects.get(pk=1)
    self.assertEqual(instance.json_field, "bananas")
def test_empty_request_and_populated_datastore(self):
    """A kind-less request redirects to the first kind in the datastore."""
    entity = datastore.Entity('Kind1', id=123, _app=self.app_id)
    entity['intprop'] = 1
    entity['listprop'] = [7, 8, 9]
    datastore.Put(entity)

    request = webapp2.Request.blank('/datastore')
    response = webapp2.Response()
    handler = datastore_viewer.DatastoreRequestHandler(request, response)

    self.mox.ReplayAll()
    handler.get()
    self.mox.VerifyAll()

    # No 'kind' parameter: the handler redirects to the only stored kind.
    self.assertEqual(302, response.status_int)
    self.assertEqual('http://localhost/datastore?kind=Kind1',
                     response.location)
def _end_creation(self, token, _upload_filename):
    """End object upload.

    Args:
        token: upload token returned by post_start_creation.

    Returns:
        _AE_GCSFileInfo Entity for this file.

    Raises:
        ValueError: if token is invalid. Or file is corrupted during upload.

    Save file content to blobstore. Save blobinfo and _AE_GCSFileInfo.
    """
    gcs_file = _AE_GCSFileInfo_.get_by_key_name(token)
    if not gcs_file:
        raise ValueError('Invalid token')
    if gcs_file.finalized:
        # Already finalized: finishing again is a no-op.
        return gcs_file

    error_msg, content = self._get_content(gcs_file)
    if error_msg:
        raise ValueError(error_msg)

    gcs_file.etag = hashlib.md5(content).hexdigest()
    gcs_file.creation = datetime.datetime.utcnow()
    gcs_file.size = len(content)

    # Mirror the object as a __BlobInfo__ entity (empty namespace) so the
    # blobstore API can reference it.
    blob_info = datastore.Entity('__BlobInfo__', name=str(token),
                                 namespace='')
    blob_info['content_type'] = gcs_file.content_type
    blob_info['creation'] = gcs_file.creation
    blob_info['filename'] = _upload_filename
    blob_info['md5_hash'] = gcs_file.etag
    blob_info['size'] = gcs_file.size
    datastore.Put(blob_info)

    self.blob_storage.StoreBlob(token, StringIO.StringIO(content))

    gcs_file.finalized = True
    # NOTE(review): next_offset = -1 appears to mark the upload as complete
    # so further writes are rejected — confirm against the offset checks.
    gcs_file.next_offset = -1
    gcs_file.put()
    return gcs_file
def CreateBlob(self, blob_key, content):
    """Create new blob and put in storage and Datastore.

    This is useful in testing where you have access to the stub.

    Args:
        blob_key: String blob-key of new blob.
        content: Content of new blob as a string.

    Returns:
        New Datastore entity without blob meta-data fields.
    """
    blob_info = datastore.Entity(blobstore.BLOB_INFO_KIND,
                                 name=blob_key,
                                 namespace='')
    blob_info['size'] = len(content)
    datastore.Put(blob_info)
    self.storage.CreateBlob(blob_key, content)
    return blob_info
def finalize(self, filename):
    """Marks file as finalized."""
    upload = self.uploads[filename]
    self.finalized.add(filename)

    # Flush the buffered content into blob storage from the beginning.
    upload.buf.seek(0)
    self.blob_storage.StoreBlob(self.get_blob_key(upload.key), upload.buf)
    # No further sequential writes are expected for this file.
    del self.sequence_keys[filename]

    # Record the object's metadata so the blobstore API can address it via
    # its encoded GS key.
    encoded_key = blobstore.create_gs_key(upload.key)
    file_info = datastore.Entity(GS_INFO_KIND, name=encoded_key,
                                 namespace='')
    file_info['creation'] = _now_function()
    file_info['filename'] = upload.key
    file_info['size'] = upload.buf.len
    file_info['content_type'] = upload.content_type
    file_info['storage_key'] = self.get_blob_key(upload.key)
    datastore.Put(file_info)
def finalize(self):
    """Finalize a file.

    Copies temp file data to the blobstore.
    """
    self.file_storage.finalize(self.filename)

    blob_key = dev_appserver_upload.GenerateBlobKey()
    self.file_storage.register_blob_key(self.ticket, blob_key)
    size = self.file_storage.save_blob(self.filename, blob_key)

    blob_entity = datastore.Entity('__BlobInfo__', name=str(blob_key),
                                   namespace='')
    blob_entity.update({
        'content_type': self.mime_content_type,
        'creation': _now_function(),
        'filename': self.ticket,
        'size': size,
    })
    datastore.Put(blob_entity)
def save(self):
    """Creates or edits this page in the datastore.

    New pages get 'name' and 'created' set; every save refreshes
    'content' and 'modified', and records the current user (or removes
    the attribution when saved anonymously).
    """
    now = datetime.datetime.now()
    if self.entity:
        entity = self.entity
    else:
        entity = datastore.Entity('Page')
        entity['name'] = self.name
        entity['created'] = now
    entity['content'] = datastore_types.Text(self.content)
    entity['modified'] = now

    # Fetch the user once so the check and the assignment cannot disagree.
    current_user = users.get_current_user()
    if current_user:
        entity['user'] = current_user
    elif 'user' in entity:
        # Saved anonymously: drop any previous author attribution.
        # ('in' replaces the deprecated dict.has_key(), removed in Python 3.)
        del entity['user']

    datastore.Put(entity)
def create_blob(self):
    """Create a blob in the datastore and on disk.

    Returns:
        BlobKey of new blob.
    """
    contents = 'a blob'
    blob_key = blobstore.BlobKey('blob-key-1')
    self.blob_storage.StoreBlob(blob_key, cStringIO.StringIO(contents))

    blob_info = datastore.Entity(blobstore.BLOB_INFO_KIND,
                                 name=str(blob_key), namespace='')
    blob_info.update({
        'content_type': 'image/png',
        'creation': datetime.datetime(1999, 10, 10, 8, 42, 0),
        'filename': 'largeblob.png',
        'size': len(contents),
    })
    datastore.Put(blob_info)

    return blob_key
def CreateEntity(self, values, key_name=None):
    """Creates an entity from a list of property values.

    Args:
        values: list/tuple of str
        key_name: if provided, the name for the (single) resulting Entity

    Returns:
        list of datastore.Entity

        The returned entities are populated with the property values from
        the argument, converted to native types using the properties map
        given in the constructor, and passed through HandleEntity. They're
        ready to be inserted.

    Raises:
        AssertionError if the number of values doesn't match the number of
        properties in the properties map.
    """
    Validate(values, (list, tuple))
    assert len(values) == len(self.__properties), (
        'Expected %d CSV columns, found %d.' %
        (len(self.__properties), len(values)))

    entity = datastore.Entity(self.__kind, name=key_name)
    for (name, converter), val in zip(self.__properties, values):
        # Accept the common textual spellings of "false" for bool columns;
        # bool('0') would otherwise be True.
        if converter is bool and val.lower() in ('0', 'false', 'no'):
            val = False
        entity[name] = converter(val)

    # Give subclasses a chance to rewrite, expand or drop the entity.
    entities = self.HandleEntity(entity)

    if entities is not None:
        if not isinstance(entities, (list, tuple)):
            entities = [entities]

        for entity in entities:
            if not isinstance(entity, datastore.Entity):
                raise TypeError(
                    'Expected a datastore.Entity, received %s (a %s).' %
                    (entity, entity.__class__))

    return entities
def store_login(self, oidrequest, kind):
    """Stores the details of an OpenID login in the datastore.

    Args:
        oidrequest: OpenIDRequest
        kind: string 'remembered', 'confirmed', or 'declined'
    """
    assert kind in ['remembered', 'confirmed', 'declined']
    user = users.get_current_user()
    assert user

    login = datastore.Entity('Login')
    login.update({
        'relying_party': oidrequest.trust_root,
        'time': datetime.datetime.now(),
        'kind': kind,
        'user': user,
    })
    datastore.Put(login)
def post(self, entity_key_string=None):
    """Applies a create/update/delete form submission to the datastore."""
    super(DatastoreEditRequestHandler, self).post(entity_key_string)
    if self.request.get('action:delete'):
        if entity_key_string:
            datastore.Delete(datastore.Key(entity_key_string))
            self.redirect(str(self.request.get('next', '/datastore')))
        else:
            # Delete requested without a key: nothing to delete.
            self.response.set_status(400)
        return

    if entity_key_string:
        entity = datastore.Get(datastore.Key(entity_key_string))
    else:
        kind = self.request.get('kind')
        namespace = self.request.get('namespace', None)
        entity = datastore.Entity(kind, _namespace=namespace)

    for arg_name in self.request.arguments():
        # Arguments are in <property_type>|<property_name>=<value> format.
        if '|' not in arg_name:
            continue
        data_type_name, property_name = arg_name.split('|')
        form_value = self.request.get(arg_name)
        data_type = DataType.get_by_name(data_type_name)
        if (entity and property_name in entity and
                data_type.format(entity[property_name]) == form_value):
            # If the property is unchanged then don't update it. This will
            # prevent empty form values from causing the property to be
            # deleted if the property was already empty.
            continue

        if form_value:
            # TODO: Handle parse exceptions.
            entity[property_name] = data_type.parse(form_value)
        elif property_name in entity:
            # TODO: Treating empty input as deletion is a not a good
            # interface.
            del entity[property_name]

    datastore.Put(entity)
    self.redirect(str(self.request.get('next', '/datastore')))
def match(document,
          topic=None,
          result_key=None,
          result_relative_url='/_ah/prospective_search',
          result_task_queue='default',
          result_batch_size=DEFAULT_RESULT_BATCH_SIZE,
          result_return_document=True):
    """Match document with all subscribed queries on specified topic."""
    topic = _get_document_topic(document.__class__, topic)
    # Convert document to datastore.Entity before handing off.
    entity = datastore.Entity('temp-kind').FromPb(document._to_pb())
    return prospective_search.match(
        entity,
        topic=topic,
        result_key=result_key,
        result_relative_url=result_relative_url,
        result_task_queue=result_task_queue,
        result_batch_size=result_batch_size,
        result_return_document=result_return_document)