Example #1
    def dispatch(self, request, response):
        """Main routing method called by every request."""

        # TODO(user): memoize this and override match, so that we don't
        # have to rematch again in the super default_dispatcher call.
        route, unused_args, unused_kwargs = self.match(request)

        # SSL redirect handling. Must come before auth handling.
        require_secure = getattr(route, 'require_secure', False)
        if require_secure and request.scheme != 'https':
            redirect_url = 'https://{}{}'.format(request.server_name,
                                                 request.path_qs)
            raise webob_exceptions.HTTPMovedPermanently(location=redirect_url)

        # Maybe redirect to login or raise 403 Forbidden for non-admins.
        require_login = getattr(route, 'require_login', False)
        require_admin = getattr(route, 'require_admin', False)
        if require_login or require_admin:
            user = users.get_current_user(oauth_scopes=users.OAUTH_SCOPES)
            if not user:
                login_url = users.create_login_url(dest_url=request.url)
                raise webob_exceptions.HTTPFound(location=login_url)
            elif require_admin and not user.is_admin:
                raise webob_exceptions.HTTPForbidden()

        return super(_RouterWithMiddleware,
                     self).default_dispatcher(request, response)
Example #2
  def dispatch(self, request, response):
    """Main routing method called by every request."""

    # TODO(user): memoize this and override match, so that we don't
    # have to rematch again in the super default_dispatcher call.
    route, unused_args, unused_kwargs = self.match(request)

    # SSL redirect handling. Must come before auth handling.
    require_secure = getattr(route, 'require_secure', False)
    if require_secure and request.scheme != 'https':
      redirect_url = 'https://{}{}'.format(
          request.server_name, request.path_qs)
      raise webob_exceptions.HTTPMovedPermanently(location=redirect_url)

    # Maybe redirect to login or raise 403 Forbidden for non-admins.
    require_login = getattr(route, 'require_login', False)
    require_admin = getattr(route, 'require_admin', False)
    if require_login or require_admin:
      user = users.get_current_user(oauth_scopes=users.OAUTH_SCOPES)
      if not user:
        login_url = users.create_login_url(dest_url=request.url)
        raise webob_exceptions.HTTPFound(location=login_url)
      elif require_admin and not user.is_admin:
        raise webob_exceptions.HTTPForbidden()

    return super(_RouterWithMiddleware, self).default_dispatcher(
        request, response)
Example #3
 def post(self):
     user = users.get_current_user()
     message = self.request.get('message')
     broadcast_channel = channel.BroadcastChannel(
         key=_GetBroadcastChannelKey())
     message = '%s: %s' % (user.email, cgi.escape(message, quote=True))
     broadcast_channel.send_message(message)
Example #4
 def get(self):
     # This is the low-level App Engine Channel API client_id, and should
     # be unique for each individual JavaScript client.
     # https://developers.google.com/appengine/docs/python/channel/functions
     user = users.get_current_user()
     # Make each client unique based on the current user email and request ID.
     client_id = hashlib.md5(user.email + os.environ["REQUEST_ID_HASH"])
     client_id = client_id.hexdigest()
     token = channel.create_channel(client_id)
     context = {
         "client_id": client_id,
         "broadcast_channel_key": _GetBroadcastChannelKey(),
         "token": token,
     }
     template = jinja_environment.get_template("templates/index.html")
     self.response.out.write(template.render(context))
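
Once a JavaScript client has opened a channel with the token rendered above, the server can push a message to that one client by its client_id. A minimal sketch; channel.send_message is the standard Channel API call, while the helper wrapper and its name are illustrative:

from google.appengine.api import channel

def _NotifyClient(client_id, message):
    # Pushes a message over the channel that was opened with the token
    # created for this client_id in the get() handler above.
    channel.send_message(client_id, message)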
Example #5
def log(key, user=None, meta=None):
    """Logs a file based activity."""
    if user is None:
        user = users.get_current_user()
    activity = Activity(key, user=user, meta=meta)
    activity_logger = FileActivityLogger(activity)
    activity_logger.store()

    # If inside of a task then process now instead of waiting.
    if 'HTTP_X_APPENGINE_TASKNAME' in os.environ:
        process_activity_loggers()

    return activity
Example #6
def log(key, user=None, meta=None):
  """Logs a file based activity."""
  if user is None:
    user = users.get_current_user()
  activity = Activity(key, user=user, meta=meta)
  activity_logger = FileActivityLogger(activity)
  activity_logger.store()

  # If inside of a task then process now instead of waiting.
  if 'HTTP_X_APPENGINE_TASKNAME' in os.environ:
    process_activity_loggers()

  return activity
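
A minimal usage sketch of the helper above; the activity key and meta values are illustrative:

# Records a file activity for the current user. It is processed
# immediately when already running inside a task, and otherwise left
# for a later call to process_activity_loggers().
activity = log('file.write', meta={'path': '/foo/bar.html'})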
Example #7
  def get(self):
    template = jinja_environment.get_template('templates/index.html')

    # This is the low-level App Engine Channel API client_id, and should
    # be unique for each individual JavaScript client.
    # https://developers.google.com/appengine/docs/python/channel/functions
    user = users.get_current_user()
    client_id = hashlib.md5(user.email + os.environ['REQUEST_ID_HASH'])
    client_id = client_id.hexdigest()
    token = channel.create_channel(client_id)
    context = {
        'client_id': client_id,
        'token': token,
    }
    self.response.out.write(template.render(context))
Example #8
    def get(self):
        template = jinja_environment.get_template('templates/index.html')

        # This is the low-level App Engine Channel API client_id, and should
        # be unique for each individual JavaScript client.
        # https://developers.google.com/appengine/docs/python/channel/functions
        user = users.get_current_user()
        client_id = hashlib.md5(user.email + os.environ['REQUEST_ID_HASH'])
        client_id = client_id.hexdigest()
        token = channel.create_channel(client_id)
        context = {
            'client_id': client_id,
            'token': token,
        }
        self.response.out.write(template.render(context))
Example #9
  def delete(self, **kwargs):
    """Delete method. See superclass docstring."""
    kwargs['_delete_old_blob'] = False
    file_kwargs = self._original_kwargs.copy()
    file_kwargs.update({'path': self.path})

    # Defer microversion task.
    user = users.get_current_user()
    data = {
        'file_kwargs': file_kwargs,
        'method_kwargs': kwargs,
        'email': user.email if user else None,
        'action': _Actions.DELETE,
        'time': time.time(),
    }
    task = taskqueue.Task(method='PULL', payload=pickle.dumps(data))
    task.add(queue_name=TASKQUEUE_NAME)

    return super(MicroversioningMixin, self).delete(**kwargs)
Example #10
  def write(self, **kwargs):
    """Write method. See superclass docstring."""
    # If content is big enough, write the content to blobstore earlier and pass
    # the blob to both the superclass and to the pull task. This allows large
    # files to be microversioned AND to share the same exact blob between the
    # root file and the microversioned file.
    #
    # Duplicate some method calls from files.File.write:
    # If given unicode, encode it as UTF-8 and flag it for future decoding.
    kwargs['content'], kwargs['encoding'] = self._maybe_encode_content(
        kwargs['content'], kwargs['encoding'])
    # If big enough, store content in blobstore. Must come after encoding.
    kwargs['content'], kwargs['blob'] = self._maybe_write_to_blobstore(
        kwargs['content'], kwargs['blob'])

    kwargs['_delete_old_blob'] = False
    file_kwargs = self._original_kwargs.copy()
    file_kwargs.update({'path': self.path})

    # Defer microversion task.
    user = users.get_current_user()
    data = {
        'file_kwargs': file_kwargs,
        'method_kwargs': kwargs,
        'email': user.email if user else None,
        'action': _Actions.WRITE,
        'time': time.time(),
    }
    try:
      task = taskqueue.Task(method='PULL', payload=pickle.dumps(data))
    except taskqueue.TaskTooLargeError:
      # Different objects pickle to different sizes, so it is difficult to
      # know ahead of time if the content will bloat pickling or not.
      # If it does, force the file to save to blobstore and recreate the task.
      kwargs['content'], kwargs['blob'] = self._maybe_write_to_blobstore(
          kwargs['content'], kwargs['blob'], force_blobstore=True)
      task = taskqueue.Task(method='PULL', payload=pickle.dumps(data))
    task.add(queue_name=TASKQUEUE_NAME)

    return super(MicroversioningMixin, self).write(**kwargs)
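
Both delete() and write() above enqueue PULL tasks, so a consumer must lease and process them elsewhere. A sketch of what that worker could look like, reusing TASKQUEUE_NAME from the snippets; lease_tasks and delete_tasks are real taskqueue APIs, while _apply_microversion stands in for the application-specific processing:

import pickle
from google.appengine.api import taskqueue

def process_microversion_tasks():
    queue = taskqueue.Queue(TASKQUEUE_NAME)
    # Lease a batch of pending microversion tasks for one minute.
    tasks = queue.lease_tasks(lease_seconds=60, max_tasks=100)
    for task in tasks:
        data = pickle.loads(task.payload)
        # data['action'] is _Actions.WRITE or _Actions.DELETE, as queued above.
        _apply_microversion(data)  # Assumed application-specific helper.
    if tasks:
        queue.delete_tasks(tasks)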
Example #11
def _GetBroadcastChannelKey():
    user = users.get_current_user()
    return hashlib.md5(user.email).hexdigest()
Example #12
 def post(self):
     user = users.get_current_user()
     message = self.request.get("message")
     broadcast_channel = channel.BroadcastChannel(key=_GetBroadcastChannelKey())
     message = "%s: %s" % (user.email, cgi.escape(message, quote=True))
     broadcast_channel.send_message(message)
Example #13
def _GetBroadcastChannelKey():
    user = users.get_current_user()
    return hashlib.md5(user.email).hexdigest()
Example #14
  def write(self, content=None, blob=None, mime_type=None, meta=None,
            encoding=None, created=None, modified=None, created_by=None,
            modified_by=None, _delete_old_blob=True):
    """Write or update a File.

    Updates: if the File already exists, write will accept any of the given args
    and only perform an update of the given data, without affecting other data.

    Args:
      content: File contents, either as a str or unicode object.
          This will handle content greater than the 1MB limit by storing it in
          blobstore. However, this technique should only be used for relatively
          small files; it is much less efficient than directly uploading
          to blobstore and passing the resulting BlobKeys to the blobs argument.
      blob: If content is not provided, a BlobKey pointing to the file.
      mime_type: Content type of the file; will be guessed if not given.
      meta: A dictionary of properties to be added to the file.
      encoding: The optional encoding of the content if given a bytestring.
          The encoding will be automatically determined if "content" is passed
          a unicode string.
      created: Optional datetime.datetime to override the created property.
      modified: Optional datetime.datetime to override the modified property.
      created_by: Optional TitanUser to override the created_by property.
      modified_by: Optional TitanUser to override the modified_by property.
      _delete_old_blob: Whether or not to delete the old blob if it changed.
    Raises:
      TypeError: For missing arguments.
      ValueError: For invalid arguments.
      BadFileError: If updating meta information on a non-existent file.
    Returns:
      Self-reference.
    """
    logging.info('Writing Titan file: %s', self.real_path)

    # Argument sanity checks.
    _TitanFile.validate_meta_properties(meta)
    is_content_update = content is not None or blob is not None
    is_meta_update = (mime_type is not None or meta is not None
                      or created is not None or modified is not None
                      or created_by is not None or modified_by is not None)
    if not is_content_update and not is_meta_update:
      raise TypeError('Arguments expected, but none given.')
    if not self.exists and is_meta_update and not is_content_update:
      raise BadFileError('File does not exist: %s' % self.real_path)
    if created is not None and not hasattr(created, 'timetuple'):
      raise ValueError('"created" must be a datetime.datetime instance.')
    if modified is not None and not hasattr(modified, 'timetuple'):
      raise ValueError('"modified" must be a datetime.datetime instance.')
    if created_by is not None and not isinstance(created_by, users.TitanUser):
      raise ValueError('"created_by" must be a users.TitanUser instance.')
    if modified_by is not None and not isinstance(modified_by, users.TitanUser):
      raise ValueError('"modified_by" must be a users.TitanUser instance.')
    if encoding is not None and content is None and blob is None:
      raise TypeError(
          '"content" or "blob" must be passed if "encoding" is passed.')

    # If given unicode, encode it as UTF-8 and flag it for future decoding.
    content, encoding = self._maybe_encode_content(content, encoding)

    # If big enough, store content in blobstore. Must come after encoding.
    content, blob = self._maybe_write_to_blobstore(content, blob)

    now = datetime.datetime.now()
    override_created_by = created_by is not None
    created_by = created_by or users.get_current_user()
    modified_by = modified_by or users.get_current_user()
    if not self.exists:
      # Create new _File entity.
      # Guess the MIME type if not given.
      if not mime_type:
        mime_type = utils.guess_mime_type(self.real_path)

      # Create a new _File.
      paths = utils.split_path(self.real_path)
      file_ent = _TitanFile(
          # NDB args:
          id=self.real_path,
          namespace=self.namespace,
          # Model:
          name=os.path.basename(self.real_path),
          dir_path=paths[-1],
          paths=paths,
          # Root files are at depth 0.
          depth=len(paths) - 1,
          mime_type=mime_type,
          encoding=encoding,
          created=created or now,
          modified=modified or now,
          content=content,
          blob=blob,
          # Backwards-compatibility with deprecated "blobs" property:
          blobs=[],
          created_by=created_by,
          modified_by=modified_by,
          md5_hash=None if blob else hashlib.md5(content).hexdigest(),
      )
      # Add meta attributes.
      if meta:
        for key, value in meta.iteritems():
          setattr(file_ent, key, value)
      self._file_ent = file_ent
      self._file_ent.put()
      return self

    # Updating an existing _File.
    file_ent = self._file

    blob_to_delete = None

    if override_created_by:
      file_ent.created_by = created_by
    file_ent.modified_by = modified_by

    if mime_type and file_ent.mime_type != mime_type:
      file_ent.mime_type = mime_type

    if created:
      file_ent.created = created
    file_ent.modified = modified or now

    # Auto-migrate entities from old "blobs" to new "blob" property on write:
    if file_ent.blobs:
      file_ent.blob = file_ent.blobs[0]
      file_ent.blobs = []

    if content is not None and file_ent.content != content:
      file_ent.content = content
      file_ent.md5_hash = hashlib.md5(content).hexdigest()
      if file_ent.blob and _delete_old_blob:
        blob_to_delete = self.blob
      # Clear the current blob association for this file.
      file_ent.blob = None

    if blob is not None and file_ent.blob != blob:
      if file_ent.blob and _delete_old_blob:
        blob_to_delete = self.blob
      # Associate the new blob to this file.
      file_ent.blob = blob
      file_ent.md5_hash = None
      file_ent.content = None

    if encoding != file_ent.encoding:
      file_ent.encoding = encoding

    # Update meta attributes.
    if meta is not None:
      for key, value in meta.iteritems():
        if not hasattr(file_ent, key) or getattr(file_ent, key) != value:
          setattr(file_ent, key, value)
    self._file_ent = file_ent
    self._file_ent.put()

    if blob_to_delete and _delete_old_blob:
      # Delete the actual blobstore data after the file write to avoid
      # orphaned files.
      _delete_blobs(blobs=[blob_to_delete], file_paths=[self.real_path])

    return self
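
A minimal usage sketch of the write() surface documented above, assuming the enclosing class is files.File as referenced in Example #10; the path, content, and meta values are illustrative:

# Create (or overwrite) a file with content plus arbitrary meta properties.
titan_file = files.File('/foo/bar.html')
titan_file.write(content='<h1>Hello</h1>', meta={'color': 'blue'})

# Meta-only update of an existing file; the content is left untouched.
titan_file.write(meta={'color': 'red'})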