Example #1
    def test_run_operation_creates_and_updates_task_marker(self):
        """ If we run one of our custom operations, then it should create the task marker in the DB
            and defer a task, then set the marker to 'is_finished' when done.
        """
        TestModel.objects.create()

        operation = operations.AddFieldData(
            "testmodel", "new_field",
            models.CharField(max_length=100, default="squirrel"))
        self.start_operation(operation)

        # Now check that the task marker has been created.
        # Usefully, calling database_forwards() on the operation will have caused it to set the
        # `identifier` attribute on itself, meaning we can now look up its task marker directly.
        task_marker = datastore.Get([
            ShardedTaskMarker.get_key(operation.identifier,
                                      operation.namespace)
        ])[0]
        if task_marker is None:
            self.fail("Migration operation did not create its task marker")

        self.assertFalse(task_marker.get("is_finished"))
        self.assertNumTasksEquals(1)
        self.process_task_queues()

        # Now check that the task marker has been marked as finished
        task_marker = datastore.Get([
            ShardedTaskMarker.get_key(operation.identifier,
                                      operation.namespace)
        ])[0]
        self.assertTrue(task_marker["is_finished"])
        self.assertNumTasksEquals(0)
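
A minimal sketch (not part of the test suite) of checking an operation's progress the same way, assuming ShardedTaskMarker.get_key(identifier, namespace) returns a datastore key whose entity carries the "is_finished" flag used above:

    from google.appengine.api import datastore, datastore_errors

    def migration_is_finished(operation):
        try:
            marker = datastore.Get(
                ShardedTaskMarker.get_key(operation.identifier, operation.namespace))
        except datastore_errors.EntityNotFoundError:
            return False  # No marker yet, so the operation never started
        return bool(marker.get("is_finished"))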
Example #2
    def test_clean_removes_markers_with_different_values(self):
        marker1 = "{}|name:{}".format(TestModel._meta.db_table,
                                      md5(self.i1.name).hexdigest())
        marker_key = datastore.Key.from_path(UniqueMarker.kind(),
                                             marker1,
                                             namespace=DEFAULT_NAMESPACE)

        original_marker = datastore.Get(marker_key)

        marker2 = "{}|name:{}".format(TestModel._meta.db_table,
                                      md5("bananas").hexdigest())

        new_marker = datastore.Entity(UniqueMarker.kind(),
                                      name=marker2,
                                      namespace=DEFAULT_NAMESPACE)
        new_marker.update(original_marker)
        datastore.Put(new_marker)

        UniqueAction.objects.create(action_type="clean",
                                    model=encode_model(TestModel))
        process_task_queues()

        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get,
                          new_marker.key())
        self.assertTrue(datastore.Get(marker_key))
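
For reference, the marker names used above follow a "<table>|<field>:<md5 of value>" convention. A minimal helper sketch (hypothetical, with Python 2 byte-string semantics as in these tests):

    from hashlib import md5

    def unique_marker_name(table, field, value):
        # e.g. "app_testmodel|name:<32-hex-char digest>"
        return "{}|{}:{}".format(table, field, md5(value).hexdigest())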
Example #3
    def test_clean_removes_markers_with_different_values_on_non_default_namespace(self):
        self.i3 = TestModel.objects.using("ns1").create(id=self.i1.pk, name="name1", counter1=1, counter2=1)
        self.i4 = TestModel.objects.using("ns1").create(id=self.i2.pk, name="name3", counter1=1, counter2=2)

        NS1_NAMESPACE = settings.DATABASES["ns1"]["NAMESPACE"]

        marker1 = "{}|name:{}".format(TestModel._meta.db_table, md5(self.i3.name).hexdigest())
        marker_key = datastore.Key.from_path(UniqueMarker.kind(), marker1, namespace=NS1_NAMESPACE)
        default_key = datastore.Key.from_path(UniqueMarker.kind(), marker1, namespace=DEFAULT_NAMESPACE)
        original_marker = datastore.Get(marker_key)
        default_marker = datastore.Get(default_key)

        marker2 = "{}|name:{}".format(TestModel._meta.db_table, md5("bananas").hexdigest())
        new_marker = datastore.Entity(UniqueMarker.kind(), name=marker2, namespace=NS1_NAMESPACE)
        new_marker.update(original_marker)
        datastore.Put(new_marker)

        # This allows us to test that: 1) namespaced markers are checked against
        # instances in their own namespace only...
        self.i1.delete()
        # ...and 2) the mapper only cleans the desired namespace
        datastore.Put(default_marker)

        UniqueAction.objects.create(action_type="clean", model=encode_model(TestModel), db="ns1")
        process_task_queues()

        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get, new_marker.key())
        self.assertTrue(datastore.Get(default_marker.key()))
        self.assertTrue(datastore.Get(marker_key))
        datastore.Delete(default_marker)
Example #4
    def test_running_finished_operation_does_not_trigger_new_task(self):
        """ If we re-trigger an operation which has already been run and finished, it should simply
            return without starting a new task or updating the task marker.
        """
        TestModel.objects.create()

        operation = operations.AddFieldData(
            "testmodel", "new_field", models.CharField(max_length=100, default="squirrel")
        )
        # Run the operation and check that it finishes
        with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
            self.start_operation(operation)
            self.assertTrue(start.called)
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertFalse(task_marker["is_finished"])
        self.assertNumTasksEquals(1)
        self.process_task_queues()
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertTrue(task_marker["is_finished"])

        # Run the operation again.  It should see that it's finished and just return immediately.
        self.assertNumTasksEquals(0)
        with sleuth.watch("djangae.db.migrations.operations.AddFieldData._start_task") as start:
            self.start_operation(operation, detonate=False)
            self.assertFalse(start.called)
        self.assertNumTasksEquals(0)
        task_marker = datastore.Get(
            ShardedTaskMarker.get_key(operation.identifier, operation.namespace)
        )
        self.assertTrue(task_marker["is_finished"])
Example #5
 def expect_datatore_lookup(self, blob_key, expected_result):
     """Setup a mox expectation to datastore.Get."""
     self.mox.StubOutWithMock(datastore, 'Get')
     blob_url = datastore.Entity('__BlobServingUrl__', name=blob_key)
     if expected_result:
         datastore.Get(blob_url.key()).AndReturn(True)
     else:
         datastore.Get(blob_url.key()).AndRaise(
             datastore_errors.EntityNotFoundError)
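
A hedged sketch of how a test might consume this expectation, assuming the usual mox replay/verify cycle (the code under test is hypothetical):

    self.expect_datatore_lookup('my-blob-key', expected_result=True)
    self.mox.ReplayAll()
    handler.serve_blob('my-blob-key')  # hypothetical code path that calls datastore.Get
    self.mox.VerifyAll()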
Example #6
def get_blob_key(create_file_name):
    """Get a blob key for finalized blobstore file.

  Args:
    create_file_name: Writable blobstore filename as obtained from create()
    function. The file should be finalized.

  Returns:
    An instance of apphosting.ext.blobstore.BlobKey for corresponding blob
    or None if the blob referred to by the file name is not finalized.

  Raises:
    google.appengine.api.files.InvalidFileNameError if the file name is not
    a valid nonfinalized blob file name.
  """
    if not create_file_name:
        raise files.InvalidArgumentError('Empty file name')
    if not isinstance(create_file_name, six.string_types):
        raise files.InvalidArgumentError('Expected string for file name')
    if not create_file_name.startswith(_BLOBSTORE_DIRECTORY):
        raise files.InvalidFileNameError(
            'Filename %s passed to get_blob_key doesn\'t have prefix %s' %
            (create_file_name, _BLOBSTORE_DIRECTORY))
    ticket = create_file_name[len(_BLOBSTORE_DIRECTORY):]

    if not ticket.startswith(files._CREATION_HANDLE_PREFIX):
        # The ticket is already a blob key rather than a creation handle.
        return blobstore.BlobKey(ticket)

    blob_file_index = datastore.Get([
        datastore.Key.from_path(_BLOB_FILE_INDEX_KIND,
                                _get_blob_file_index_key_name(ticket),
                                namespace='')
    ])[0]
    if blob_file_index:
        blob_key_str = blob_file_index[_BLOB_KEY_PROPERTY_NAME]

        # Check that the referenced blob info actually exists.
        results = datastore.Get([
            datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                    blob_key_str,
                                    namespace='')
        ])
        if results[0] is None:
            return None
    elif len(ticket) >= _DATASTORE_MAX_PROPERTY_SIZE:
        return None
    else:
        # Fall back to querying for the blob info by its creation handle.
        query = datastore.Query(blobstore.BLOB_INFO_KIND,
                                {'creation_handle =': ticket},
                                keys_only=True,
                                namespace='')
        results = query.Get(1)
        if not results:
            return None
        blob_key_str = results[0].name()
    return blobstore.BlobKey(blob_key_str)
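
A hedged usage sketch, assuming the deprecated Files API calls that this function is written against:

    from google.appengine.api import files

    file_name = files.blobstore.create(mime_type='application/octet-stream')
    with files.open(file_name, 'a') as f:
        f.write('payload')
    files.finalize(file_name)

    blob_key = get_blob_key(file_name)  # None if the blob is not yet finalized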
Example #7
 def Run(self, **kwargs):
     try:
         try:
             return iter([datastore.Get(self['key ='])])
         except KeyError:
             return iter([datastore.Get(self['key =='])])
     except datastore_errors.EntityNotFoundError:
         return iter([None])
     except KeyError:
         return super(Query, self).Run(**kwargs)
Example #8
    def test_clean_after_instance_deleted(self):
        marker1 = "{}|name:{}".format(TestModel._meta.db_table, md5(self.i1.name).hexdigest())
        marker_key = datastore.Key.from_path(UniqueMarker.kind(), marker1, namespace=DEFAULT_NAMESPACE)

        self.assertTrue(datastore.Get(marker_key))

        datastore.Delete(datastore.Key.from_path(TestModel._meta.db_table, self.i1.pk, namespace=DEFAULT_NAMESPACE)) # Delete the first instance

        self.assertTrue(datastore.Get(marker_key))

        UniqueAction.objects.create(action_type="clean", model=encode_model(TestModel))
        process_task_queues()

        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get, marker_key)
Example #9
  def _OpenBlob(self, blob_key):
    key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
                                        blob_key,
                                        namespace='')
    try:
      datastore.Get(key)
    except datastore_errors.Error:
      # The key is invalid or the blob info entity does not exist.
      logging.exception('Blob with key %r does not exist', blob_key)
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR)

    blobstore_stub = apiproxy_stub_map.apiproxy.GetStub("blobstore")


    try:
      blob_file = blobstore_stub.storage.OpenBlob(blob_key)
    except IOError:
      logging.exception('Could not get file for blob_key %r', blob_key)

      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)

    try:
      return Image.open(blob_file)
    except IOError:
      logging.exception('Could not open image %r for blob_key %r',
                        blob_file, blob_key)

      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
Example #10
  def get(cls, blob_keys):
    """Retrieves a `BlobInfo` by key or by a list of keys.

    Args:
      blob_keys: A key or a list of keys. Keys can be in string, Unicode, or
          `BlobKey` format.

    Returns:
      A `BlobInfo` instance that is associated with the provided key or a list
      of `BlobInfo` instances if a list of keys was provided. Keys that are not
      found in Blobstore return `None`.
    """
    blob_keys = cls.__normalize_and_convert_keys(blob_keys)
    try:
      entities = datastore.Get(blob_keys)
    except datastore_errors.EntityNotFoundError:
      return None
    if isinstance(entities, datastore.Entity):
      return BlobInfo(entities)
    else:
      references = []
      for entity in entities:
        if entity is not None:
          references.append(BlobInfo(entity))
        else:
          references.append(None)
      return references
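
Usage follows the single-versus-list contract described in the docstring; a minimal sketch with hypothetical keys:

    info = BlobInfo.get('some-blob-key')           # a BlobInfo, or None
    infos = BlobInfo.get(['key-one', 'key-two'])   # a list, with None for misses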
Example #11
    def _update_entity(self, key):
        caching.remove_entity_from_context_cache_by_key(key)

        result = datastore.Get(key)
        original = copy.deepcopy(result)

        for field, param, value in self.values:
            result[field.column] = get_prepared_db_value(self.connection, MockInstance(field, value), field)

            # Add special indexed fields
            for index in special_indexes_for_column(self.model, field.column):
                indexer = REQUIRES_SPECIAL_INDEXES[index]
                result[indexer.indexed_column_name(field.column)] = indexer.prep_value_for_database(value)

        to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

        # Acquire first, because if that fails then we don't want to alter what's already there
        constraints.acquire_identifiers(to_acquire, result.key())
        try:
            datastore.Put(result)
            caching.add_entity_to_context_cache(self.model, result)
        except:
            constraints.release_identifiers(to_acquire)
            raise
        else:
            # Now we release the ones we don't want anymore
            constraints.release_identifiers(to_release)
Example #12
    def Run(self, limit, offset):
        opts = self._gae_query._Query__query_options
        if opts.keys_only or opts.projection:
            return self._gae_query.Run(limit=limit, offset=offset)

        ret = caching.get_from_cache(self._identifier)
        if ret is not None and not utils.entity_matches_query(
                ret, self._gae_query):
            ret = None

        if ret is None:
            # We do a fast keys_only query to get the result
            keys_query = Query(self._gae_query._Query__kind, keys_only=True)
            keys_query.update(self._gae_query)
            keys = keys_query.Run(limit=limit, offset=offset)

            # Do a consistent get so we don't cache stale data, and recheck the result matches the query
            ret = [
                x for x in datastore.Get(keys)
                if utils.entity_matches_query(x, self._gae_query)
            ]
            if len(ret) == 1:
                caching.add_entity_to_cache(
                    self._model, ret[0],
                    caching.CachingSituation.DATASTORE_GET)
            return iter(ret)

        return iter([ret])
Example #13
    def _OpenBlob(self, blob_key):
        """Create an Image from the blob data read from blob_key."""

        try:
            _ = datastore.Get(
                blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(
                    blob_key))
        except datastore_errors.Error:
            # The key is invalid or the blob info entity does not exist.
            logging.exception("Blob with key %r does not exist", blob_key)
            raise apiproxy_errors.ApplicationError(
                images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR)

        blobstore_storage = apiproxy_stub_map.apiproxy.GetStub("blobstore")

        try:
            blob_file = blobstore_storage.storage.OpenBlob(blob_key)
        except IOError:
            logging.exception("Could not get file for blob_key %r", blob_key)

            raise apiproxy_errors.ApplicationError(
                images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)

        try:
            return Image.open(blob_file)
        except IOError:
            logging.exception("Could not open image %r for blob_key %r",
                              blob_file, blob_key)

            raise apiproxy_errors.ApplicationError(
                images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
Example #14
    def test_check_old_style_marker(self):
        instance_key = datastore.Key.from_path(TestModel._meta.db_table,
                                               self.i2.pk,
                                               namespace=DEFAULT_NAMESPACE)

        marker1 = "{}|name:{}".format(TestModel._meta.db_table,
                                      md5(self.i2.name).hexdigest())
        marker_key = datastore.Key.from_path(UniqueMarker.kind(),
                                             marker1,
                                             namespace=DEFAULT_NAMESPACE)
        marker = datastore.Get(marker_key)
        marker['instance'] = str(instance_key)  # Store the instance key as a string (old-style marker)
        datastore.Put(marker)

        UniqueAction.objects.create(action_type="check",
                                    model=encode_model(TestModel))
        process_task_queues()

        a = UniqueAction.objects.get()
        self.assertEqual(a.status, "done")
        self.assertEqual(1, a.actionlog_set.count())
        error = a.actionlog_set.all()[0]

        self.assertEqual(error.log_type, "old_instance_key")
        self.assertEqual(error.instance_key, str(instance_key))
        self.assertEqual(error.marker_key, str(marker_key))
Example #15
        def txn():
            try:
                marker = datastore.Get(self.key())
                marker.__class__ = ShardedTaskMarker

                queued_shards = marker[ShardedTaskMarker.QUEUED_KEY]
                processing_shards = marker[ShardedTaskMarker.RUNNING_KEY]
                queued_count = len(queued_shards)

                for j in xrange(min(BATCH_SIZE, queued_count)):
                    pickled_shard = queued_shards.pop()
                    processing_shards.append(pickled_shard)
                    shard = cPickle.loads(str(pickled_shard))
                    deferred.defer(
                        self.run_shard,
                        query,
                        shard,
                        operation,
                        operation_method,
                        entities_per_task=entities_per_task,
                        # Defer this task onto the correct queue with `_queue`, passing the `queue`
                        # parameter back to the function again so that it can do the same next time
                        queue=queue,
                        _queue=queue,
                        _transactional=True,
                    )

                marker.put()
            except datastore_errors.EntityNotFoundError:
                logging.error(
                    "Unable to start task %s as marker is missing",
                    self.key().id_or_name()
                )
                return
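
A minimal sketch of driving this closure, assuming App Engine's datastore.RunInTransaction; the deferred.defer calls above pass _transactional=True, so the shard tasks only enqueue if the transaction commits:

    from google.appengine.api import datastore

    datastore.RunInTransaction(txn)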
Example #16
    def map(self, key):
        """Copy data map handler.

    Args:
      key: Datastore entity key or entity itself to copy.

    Yields:
      A db operation to store the entity in the target app.
      An operation which updates max used ID if necessary.
      A counter operation incrementing the count for the entity kind.
    """

        mapper_params = get_mapper_params()
        target_app = mapper_params['target_app']

        if isinstance(key, datastore.Entity):
            # The mapper may hand us a full entity rather than a key.
            entity = key
            key = entity.key()
        else:
            entity = datastore.Get(key)
        entity_proto = entity._ToPb()
        utils.FixKeys(entity_proto, target_app)
        target_entity = datastore.Entity._FromPb(entity_proto)

        yield operation.db.Put(target_entity)
        yield utils.AllocateMaxId(key, target_app)
        yield operation.counters.Increment(KindPathFromKey(key))
Example #17
    def get(cls, keys):
        """Get one or more synchronization info entities.

        :param key|list keys: One or a list of `datastore_types.Key` instances.
        """

        if isinstance(keys, datastore_types.Key):
            keys_ = [keys]
        elif isinstance(keys, list):
            keys_ = keys
        else:
            raise TypeError("SyncInfo.get(keys) takes a key or list of keys")

        results = []

        for key in keys_:
            try:
                results.append(cls(datastore.Get(key)))
            except datastore_errors.EntityNotFoundError:
                results.append(None)

        if isinstance(keys, datastore_types.Key):
            return results[0]
        elif isinstance(keys, list):
            return results
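
A hedged usage sketch mirroring the key-or-list contract documented above (the key names are hypothetical):

    info = SyncInfo.get(some_key)          # a SyncInfo instance, or None
    infos = SyncInfo.get([key_a, key_b])   # a list aligned with the input keys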
Example #18
        def delete_batch(key_slice):
            entities = datastore.Get(key_slice)

            # FIXME: We need to make sure the entity still matches the query!
            # entities = (x for x in entities if utils.entity_matches_query(x, self.select.gae_query))

            to_delete = []
            to_update = []
            updated_keys = []

            # Go through the entities
            for entity in entities:
                if entity is None:
                    continue

                wipe_polymodel_from_entity(entity, self.table_to_delete)
                if not entity.get('class'):
                    to_delete.append(entity)
                    constraints.release(self.model, entity)
                else:
                    to_update.append(entity)
                updated_keys.append(entity.key())

            datastore.DeleteAsync([x.key() for x in to_delete])
            datastore.PutAsync(to_update)

            caching.remove_entities_from_cache_by_key(updated_keys,
                                                      self.namespace)

            return len(updated_keys)
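
A hedged sketch of driving delete_batch over a key list in slices; the batch size is an assumption, not something the snippet above prescribes:

    BATCH_SIZE = 100  # hypothetical; keep within datastore batch limits

    updated = sum(
        delete_batch(keys[i:i + BATCH_SIZE])
        for i in range(0, len(keys), BATCH_SIZE))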
Example #19
    def fetch(self, qset, limit, offset):
        limit = datastore.MAXIMUM_RESULTS if limit == -1 else limit
        orderings = []
        try:
            name, how = qset.order
            how = Query.ASCENDING if how == 'ASC' else Query.DESCENDING
            orderings = [(name, how)]
        except Exception:
            # No (or malformed) ordering on the query set; leave results unordered.
            pass

        keys = self._keys(qset)
        result = []

        if keys:  # if only key filter
            result = [e for e in datastore.Get(keys) if e]
        else:  # else build query, the results should be ANDed
            query_set = self._build_query_set(qset, orderings)
            result_set = [[e for e in q.Get(limit, offset) if e]
                          for q in query_set]
            keys = [set([e.key() for e in result]) for result in result_set]
            keys = reduce(lambda a, b: a & b, keys)

            result = {}
            for e in chain(*tuple(result_set)):
                if e.key() in keys:
                    result.setdefault(e.key(), e)
            result = result.values()

        for e in sort_result(result, orderings)[:limit]:
            yield dict(e, key=str(e.key()), _payload=e)
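
A hypothetical call site: fetch() is a generator that yields plain dicts with the original entity attached under "_payload":

    for row in backend.fetch(qset, limit=10, offset=0):  # backend is hypothetical
        key_str, entity = row['key'], row['_payload']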
Example #20
    def post(self):
        kind = self.request.get('kind')
        entity_key = self.request.get('key')
        if entity_key:
            if self.request.get('action') == 'Delete':
                datastore.Delete(datastore.Key(entity_key))
                self.redirect(self.request.get('next'))
                return
            entity = datastore.Get(datastore.Key(entity_key))
        else:
            entity = datastore.Entity(kind)

        args = self.request.arguments()
        for arg in args:
            bar = arg.find('|')
            if bar > 0:
                data_type_name = arg[:bar]
                field_name = arg[bar + 1:]
                form_value = self.request.get(arg)
                data_type = DataType.get_by_name(data_type_name)
                if entity and entity.has_key(field_name):
                    old_formatted_value = data_type.format(entity[field_name])
                    if old_formatted_value == ustr(form_value):
                        continue

                if len(form_value) > 0:
                    value = data_type.parse(form_value)
                    entity[field_name] = value
                elif entity.has_key(field_name):
                    del entity[field_name]

        datastore.Put(entity)

        self.redirect(self.request.get('next'))
Example #21
 def txn():
     pickled_shard = cPickle.dumps(shard)
     marker = datastore.Get(self.key())
     marker.__class__ = ShardedTaskMarker
     marker[ShardedTaskMarker.RUNNING_KEY].remove(pickled_shard)
     marker[ShardedTaskMarker.FINISHED_KEY].append(pickled_shard)
     marker.put()
Example #22
    def test_post_entity_key_string(self):
        request = webapp2.Request.blank(
            '/datastore/edit/%s' % self.entity4.key(),
            POST={
                'overflowdatetime|dateprop': str(2**60),
                'int|intprop': '123',
                'string|stringprop': '',
                'next': 'http://redirect/'
            })
        response = webapp2.Response()
        handler = datastore_viewer.DatastoreEditRequestHandler(
            request, response)
        admin_request_handler.AdminRequestHandler(handler).post(
            str(self.entity4.key()))

        self.mox.ReplayAll()
        handler.post(str(self.entity4.key()))
        self.mox.VerifyAll()

        self.assertEqual(302, response.status_int)
        self.assertEqual('http://redirect/', response.location)

        # Check that the entity was updated.
        entity = datastore.Get(self.entity4.key())
        self.assertEqual(2**60, entity['dateprop'])
        self.assertEqual(123, entity['intprop'])
        self.assertEqual([10, 11], entity['listprop'])
        self.assertNotIn('stringprop', entity)
Example #23
    def txn(shards):
        marker_key = ShardedTaskMarker.get_key(identifier, query._Query__namespace)
        try:
            datastore.Get(marker_key)

            # If the marker already exists, don't do anything - just return
            return
        except datastore_errors.EntityNotFoundError:
            pass

        marker = ShardedTaskMarker(identifier, query, namespace=query._Query__namespace)

        if shards:
            for shard in shards:
                marker["shards_queued"].append(cPickle.dumps(shard))
        else:
            # No shards, then there is nothing to do!
            marker["is_finished"] = True
        marker["time_started"] = datetime.utcnow()
        marker.put()
        if not marker["is_finished"]:
            deferred.defer(
                marker.begin_processing, operation, operation_method, entities_per_task, queue,
                _transactional=True, _queue=queue
            )

        return marker_key
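
A minimal sketch of starting the operation, assuming App Engine's datastore.RunInTransaction; txn() returns early (None) when a marker for the operation already exists:

    from google.appengine.api import datastore

    marker_key = datastore.RunInTransaction(txn, shards)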
Example #24
  def __Increment(self, nodes_with_children, score_entities,
                  score_entities_to_delete):
    """Changes child counts for given nodes.

    This method will create nodes as needed.

    Args:
      nodes_with_children: A dict mapping (node_key, child) tuples to deltas.
      score_entities: Additional score entities to persist as part of
        this transaction.
      score_entities_to_delete: Score entities to delete as part of this
        transaction.

    Returns:
      None
    """
    keys = list(set(key for ((key, _), delta) in nodes_with_children.iteritems()
                    if delta != 0))
    if not keys:
      return  # Nothing to do
    nodes = datastore.Get(keys)

    node_dict = {}
    for (key, node) in zip(keys, nodes):
      if not node:
        node = datastore.Entity("ranker_node", parent=self.rootkey,
                                name=key.name())
        node["child_counts"] = [0] * self.branching_factor
      node_dict[key] = node
    for ((key, child), amount) in nodes_with_children.iteritems():
      if amount != 0:
        node = node_dict[key]
        node["child_counts"][child] += amount
        assert node["child_counts"][child] >= 0
    datastore.Put(node_dict.values() + score_entities)
    if score_entities_to_delete:
      datastore.Delete(score_entities_to_delete)
Example #25
    def get(cls, blob_keys):
        """Retrieve BlobInfo by key or list of keys.

    Args:
      blob_keys: A key or a list of keys.  Keys may be instances of str,
      unicode and BlobKey.

    Returns:
      A BlobInfo instance associated with provided key or a list of BlobInfo
      instances if a list of keys was provided.  Keys that are not found in
      Blobstore return None as their values.
    """
        blob_keys = cls.__normalize_and_convert_keys(blob_keys)
        try:
            entities = datastore.Get(blob_keys)
        except datastore_errors.EntityNotFoundError:
            return None
        if isinstance(entities, datastore.Entity):
            return BlobInfo(entities)
        else:
            references = []
            for entity in entities:
                if entity is not None:
                    references.append(BlobInfo(entity))
                else:
                    references.append(None)
            return references
Example #26
    def serve_image(self, environ, start_response):
        """Dynamically serve an image from blobstore."""
        blobkey, options = self._parse_path(environ['PATH_INFO'])
        # Make sure that the blob URL has been registered by
        # calling get_serving_url
        key = datastore.Key.from_path(_BLOB_SERVING_URL_KIND,
                                      blobkey,
                                      namespace='')
        try:
            datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            logging.error(
                'The blobkey %s has not been registered for image '
                'serving. Please ensure get_serving_url is '
                'called before attempting to serve blobs.', blobkey)
            start_response('404 %s' % http.client.responses[404], [])
            return []

        resize, crop = self._parse_options(options)

        if resize is None and not crop:
            return self.serve_unresized_image(blobkey, environ, start_response)
        elif not _HAS_WORKING_IMAGES_STUB:
            logging.warning(
                'Serving resized images requires a working Python "PIL" '
                'module. The image is served without resizing.')
            return self.serve_unresized_image(blobkey, environ, start_response)
        else:
            # Use Images service to transform blob.
            image, mime_type = self._transform_image(blobkey, resize, crop)
            start_response(
                '200 OK',
                [('Content-Type', mime_type),
                 ('Cache-Control', 'public, max-age=600, no-transform')])
            return [image]
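
Serving depends on a prior registration entity; a hedged sketch of what get_serving_url presumably writes, with the kind name taken from the __BlobServingUrl__ lookups elsewhere in these examples:

    from google.appengine.api import datastore

    entity = datastore.Entity('__BlobServingUrl__', name=str(blob_key), namespace='')
    datastore.Put(entity)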
Example #27
    def get(self, entity_key_string=None):
        if entity_key_string:
            entity_key = datastore.Key(entity_key_string)
            entity_key_name = entity_key.name()
            entity_key_id = entity_key.id()
            namespace = entity_key.namespace()
            kind = entity_key.kind()
            entities = [datastore.Get(entity_key)]
            parent_key = entity_key.parent()
            if parent_key:
                parent_key_string = _format_datastore_key(parent_key)
            else:
                parent_key_string = None
        else:
            entity_key = None
            entity_key_string = None
            entity_key_name = None
            entity_key_id = None
            namespace = self.request.get('namespace')
            kind = self.request.get('kind')
            entities, _ = _get_entities(kind,
                                        namespace,
                                        order=None,
                                        start=0,
                                        count=20)
            parent_key = None
            parent_key_string = None

            if not entities:
                self.redirect('/datastore?%s' % (urllib.parse.urlencode([
                    ('kind', kind),
                    ('message',
                     'Cannot create the kind "%s" in the "%s" namespace because '
                     'no template entity exists.' % (kind, namespace)),
                    ('namespace', namespace)
                ])))
                return

        property_name_to_values = _property_name_to_values(entities)
        fields = []
        for property_name, values in sorted(property_name_to_values.items()):
            data_type = DataType.get(values[0])
            field = data_type.input_field(
                '%s|%s' % (data_type.name(), property_name),
                values[0] if entity_key else None, values, self.request.uri)
            fields.append((property_name, data_type.name(), field))

        self.response.write(
            self.render(
                'datastore_edit.html', {
                    'fields': fields,
                    'key': entity_key_string,
                    'key_id': entity_key_id,
                    'key_name': entity_key_name,
                    'kind': kind,
                    'namespace': namespace,
                    'next': self.request.get('next', '/datastore'),
                    'parent_key': parent_key,
                    'parent_key_string': parent_key_string
                }))
Example #28
    def map(self, key):
        """Copy data map handler.

        Args:
          key: Datastore entity key to copy.

        Yields:
          A db operation to store the entity in the target app.
          A counter operation incrementing the count for the entity kind.
        """
        if not self.remote_api_stub_initialized:
            self.setup_stub()

        mapper_params = get_mapper_params()
        target_app = mapper_params['target_app']

        if isinstance(key, datastore.Entity):
            # The mapper may hand us a full entity rather than a key.
            entity = key
            key = entity.key()
        else:
            entity = datastore.Get(key)
        entity_proto = entity._ToPb()
        FixKeys(entity_proto, target_app)
        target_entity = datastore.Entity._FromPb(entity_proto)

        yield operation.db.Put(target_entity)
        yield AllocateMaxId(key, target_app)
        yield operation.counters.Increment(KindPathFromKey(key))
Example #29
def GenerateBlobKey(time_func=time.time, random_func=random.random):
    """Generate a unique BlobKey.

    The BlobKey is generated using the current timestamp combined with a
    random number. The two values are subject to an md5 digest and base64
    url-safe encoding. The new key is checked against the possibility of
    existence within the datastore, and the random number is regenerated
    until there is no match.

    Args:
      time_func: Function used for generating the timestamp. Used for
        dependency injection. Allows for predictable results during tests.
        Must return a floating point UTC timestamp.
      random_func: Function used for generating the random number. Used for
        dependency injection. Allows for predictable results during tests.

    Returns:
      String version of a BlobKey that is unique within the BlobInfo
      datastore. None if there are too many name conflicts.
    """
    timestamp = str(time_func())
    tries = 0
    while tries < 10:
        number = str(random_func())
        digester = md5.md5()
        digester.update(timestamp)
        digester.update(number)
        blob_key = base64.urlsafe_b64encode(digester.digest())
        datastore_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                                blob_key)
        try:
            datastore.Get(datastore_key)
            tries += 1
        except datastore_errors.EntityNotFoundError:
            return blob_key
    return None
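
A hedged test sketch using the dependency-injection hooks described in the docstring (the injected values are arbitrary):

    blob_key = GenerateBlobKey(time_func=lambda: 1234567890.0,
                               random_func=lambda: 0.42)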
Example #30
    def Run(self, limit=None, offset=None):
        assert not self.query._Query__ancestor_pb  # FIXME: We don't handle this yet

        opts = self.query._Query__query_options

        results = None

        # If we have a single key lookup going on, just hit the cache
        if len(self.keys) == 1:
            ret = caching.get_from_cache_by_key(self.keys[0])
            if ret is not None:
                results = [ret]

        # If there was nothing in the cache, or we had more than one key, then use Get()
        if results is None:
            results = sorted((x for x in datastore.Get(self.keys) if x is not None), cmp=partial(utils.django_ordering_comparison, self.ordering))

        results = [
            _convert_entity_based_on_query_options(x, opts)
            for x in results if utils.entity_matches_query(x, self.query)
        ]

        if offset:
            results = results[offset:]

        if limit is not None:
            results = results[:limit]

        return iter(results)