Example #1
    def sign(cls, string):
        """Returns the signature of a string.

        This signature is generated using the singleton private key, then
        base64-encoded. It's of the form expected by Google Cloud Storage query
        string authentication. See
        https://developers.google.com/storage/docs/accesscontrol#Signed-URLs.
        """

        # All Google API keys have "notasecret" as their passphrase
        value = cls.get_oauth()
        if value is None: raise Exception("Private key has not been set.")
        if handlers.is_production():
            # TODO(nweiz): This currently doesn't work on the development server
            # without adding 'AESCipher', 'blockalgo', and '_AES' to the
            # __CRYPTO_CIPHER_ALLOWED_MODULES constant in
            # google/appengine/tools/dev_appserver_import_hook.py. However, it
            # does work in production, so to make it work locally, we just do a
            # dumb hash of the private key and the string.
            #
            # See http://code.google.com/p/googleappengine/issues/detail?id=8188
            key = RSA.importKey(value, passphrase='notasecret')
            return base64.b64encode(
                PKCS1_v1_5.new(key).sign(SHA256.new(string)))
        else:
            m = hashlib.md5()
            m.update(value)
            m.update(string)
            return base64.b64encode(m.digest())
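For reference, the production branch can be exercised on its own. The sketch below is an illustration rather than project code; it assumes a PEM-encoded service-account key string and the same PyCrypto modules the method already uses (Crypto.PublicKey.RSA, Crypto.Signature.PKCS1_v1_5, Crypto.Hash.SHA256).

import base64
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5

def sign_blob(pem_key, blob):
    # Sign `blob` with the RSA key; Google-issued service-account keys use
    # the fixed passphrase "notasecret".
    key = RSA.importKey(pem_key, passphrase='notasecret')
    return base64.b64encode(PKCS1_v1_5.new(key).sign(SHA256.new(blob)))

def verify_blob(pem_key, blob, signature):
    # Check a signature produced by sign_blob (PyCrypto's verify returns a
    # boolean rather than raising on mismatch).
    key = RSA.importKey(pem_key, passphrase='notasecret')
    return PKCS1_v1_5.new(key).verify(SHA256.new(blob),
                                      base64.b64decode(signature))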
Example #2
    def sign(cls, string):
        """Returns the signature of a string.

        This signature is generated using the singleton private key, then
        base64-encoded. It's of the form expected by Google Cloud Storage query
        string authentication. See
        https://developers.google.com/storage/docs/accesscontrol#Signed-URLs.
        """

        # All Google API keys have "notasecret" as their passphrase
        value = cls.get_oauth()
        if value is None: raise Exception("Private key has not been set.")
        if handlers.is_production():
            # TODO(nweiz): This currently doesn't work on the development server
            # without adding 'AESCipher', 'blockalgo', and '_AES' to the
            # __CRYPTO_CIPHER_ALLOWED_MODULES constant in
            # google/appengine/tools/dev_appserver_import_hook.py. However, it
            # does work in production, so to make it work locally, we just do a
            # dumb hash of the private key and the string.
            #
            # See http://code.google.com/p/googleappengine/issues/detail?id=8188
            key = RSA.importKey(value, passphrase='notasecret')
            return base64.b64encode(PKCS1_v1_5.new(key).sign(SHA256.new(string)))
        else:
            m = hashlib.md5()
            m.update(value)
            m.update(string)
            return base64.b64encode(m.digest())
Example #3
    def upload(self,
               file,
               key,
               acl=None,
               policy=None,
               signature=None,
               success_action_redirect=None,
               **kwargs):
        """A development-only action for uploading a package archive.

        In production, package archives are uploaded directly to cloud storage,
        using a signed form for authentication. The signed form doesn't work for
        the development server, since it uses a local database in place of cloud
        storage, so this action emulates it by manually saving the file to the
        development database.
        """

        if handlers.is_production(): raise handlers.http_error(404)
        if PrivateKey.sign(policy) != signature: raise handlers.http_error(403)

        write_path = files.gs.create('/gs/' + key, acl=acl)
        with files.open(write_path, 'a') as f:
            f.write(file.file.read())
        files.finalize(write_path)

        if success_action_redirect:
            raise cherrypy.HTTPRedirect(success_action_redirect)
        cherrypy.response.status = 204
        return ""
Example #4
    def upload(
        self, package_id, file, key, acl=None, policy=None, signature=None, success_action_redirect=None, **kwargs
    ):
        """A development-only action for uploading a package archive.

        In production, package archives are uploaded directly to cloud storage,
        using a signed form for authentication. The signed form doesn't work for
        the development server, since it uses a local database in place of cloud
        storage, so this action emulates it by manually saving the file to the
        development database.
        """

        if handlers.is_production():
            raise handlers.http_error(404)
        if PrivateKey.sign(policy) != signature:
            raise handlers.http_error(403)

        write_path = files.gs.create("/gs/" + key, acl=acl)
        with files.open(write_path, "a") as f:
            f.write(file.file.read())
        files.finalize(write_path)

        if success_action_redirect:
            raise cherrypy.HTTPRedirect(success_action_redirect)
        cherrypy.response.status = 204
        return ""
Example #5
File: root.py Project: a14n/pub-dartlang
    def serve(self, filename):
        """Serves a cloud storage file for the development server."""

        if handlers.is_production(): return handlers.http_error(404)

        cherrypy.response.headers['Content-Type'] = 'application/octet-stream'
        cherrypy.response.headers['Content-Disposition'] = \
            'attachment; filename=%s' % os.path.basename(filename)

        try:
            with cloud_storage.open(filename) as f: return f.read()
        except (KeyError, ExistenceError):
            handlers.http_error(404)
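For example, the Content-Disposition header built above reduces a full object path to a bare attachment filename (the path below is illustrative):

import os

filename = 'pub/packages/foo-1.0.0.tar.gz'  # illustrative path only
disposition = 'attachment; filename=%s' % os.path.basename(filename)
# disposition == 'attachment; filename=foo-1.0.0.tar.gz'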
Example #6
    def serve(self, filename):
        """Serves a cloud storage file for the development server."""

        if handlers.is_production(): return handlers.http_error(404)

        cherrypy.response.headers['Content-Type'] = 'application/octet-stream'
        cherrypy.response.headers['Content-Disposition'] = \
            'attachment; filename=%s' % os.path.basename(filename)

        try:
            with cloud_storage.open(filename) as f: return f.read()
        except (KeyError, ExistenceError):
            handlers.http_error(404)
Example #7
File: root.py Project: a14n/pub-dartlang
    def admin(self):
        """Retrieve a page for performing administrative tasks."""

        if not users.get_current_user():
            raise cherrypy.HTTPRedirect(users.create_login_url(cherrypy.url()))
        elif not users.is_current_user_admin():
            raise handlers.http_error(403)

        reload_status = PackageVersion.get_reload_status()
        if reload_status is not None:
            reload_status['percentage'] = '%d%%' % (
                100.0 * reload_status['count'] / reload_status['total'])

        return handlers.render('admin',
               reload_status=reload_status,
               private_keys_set=PrivateKey.get_oauth() is not None,
               production=handlers.is_production(),
               layout={'title': 'Admin Console'})
Example #8
    def admin(self):
        """Retrieve a page for performing administrative tasks."""

        if not users.get_current_user():
            raise cherrypy.HTTPRedirect(users.create_login_url(cherrypy.url()))
        elif not users.is_current_user_admin():
            raise handlers.http_error(403)

        reload_status = PackageVersion.get_reload_status()
        if reload_status is not None:
            reload_status['percentage'] = '%d%%' % (
                100.0 * reload_status['count'] / reload_status['total'])

        return handlers.render('admin',
               reload_status=reload_status,
               private_keys_set=PrivateKey.get_oauth() is not None,
               production=handlers.is_production(),
               layout={'title': 'Admin Console'})
Example #9
    def admin(self):
        """Retrieve a page for performing administrative tasks."""

        if not users.get_current_user():
            raise cherrypy.HTTPRedirect(users.create_login_url(cherrypy.url()))
        elif not users.is_current_user_admin():
            raise handlers.http_error(403)

        reload_status = PackageVersion.get_reload_status()
        if reload_status is not None:
            reload_status["percentage"] = "%d%%" % (100.0 * reload_status["count"] / reload_status["total"])

        return handlers.render(
            "admin",
            reload_status=reload_status,
            private_key_set=PrivateKey.get() is not None,
            production=handlers.is_production(),
            layout={"title": "Admin Console"},
        )
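The percentage string is plain integer formatting over the reload counters; for example, with 30 of 120 package versions reloaded (illustrative numbers):

reload_status = {'count': 30, 'total': 120}  # illustrative values
percentage = '%d%%' % (100.0 * reload_status['count'] / reload_status['total'])
# percentage == '25%'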
Example #10
def modify_object(obj,
                  content_encoding=None,
                  content_type=None,
                  content_disposition=None,
                  acl=None,
                  copy_source=None,
                  copy_source_if_match=None,
                  copy_source_if_none_match=None,
                  copy_source_if_modified_since=None,
                  copy_source_if_unmodified_since=None,
                  copy_metadata=True,
                  metadata={}):
    """Modifies or copies a cloud storage object.

    Most arguments are identical to the form fields listed in
    https://developers.google.com/storage/docs/reference-methods#putobject, but
    there are a few differences:

    * The copy_metadata argument can be True, indicating that the metadata
      should be copied, or False, indicating that it should be replaced.
    * The metadata argument is a dictionary of metadata header names to values.
      Each one is transformed into an x-goog-meta- field. The keys should not
      include "x-goog-meta-". Null values are ignored.
    """

    if not handlers.is_production():
        # The only way to modify an existing object using only the Python API
        # seems to be to copy it over itself. It's not a big deal since this is
        # only for development.
        if copy_source is None: copy_source = obj
        contents = None
        with files.open(_appengine_object_path(copy_source), 'r') as f:
            contents = f.read()

        if content_type is None: content_type = 'application/octet-stream'
        write_path = files.gs.create(_appengine_object_path(obj),
                                     mime_type=content_type,
                                     acl=acl,
                                     content_encoding=content_encoding,
                                     content_disposition=content_disposition,
                                     user_metadata=metadata)
        with files.open(write_path, 'a') as f: f.write(contents)
        files.finalize(write_path)
        return

    auth = "OAuth " + app_identity.get_access_token(_FULL_CONTROL_SCOPE)[0]
    headers = {
        "Authorization": auth,
        "Content-Encoding": content_encoding,
        "Content-Type": content_type,
        "Content-Disposition": content_disposition,
        "x-goog-api-version": "2",
        "x-goog-acl": acl,
        "x-goog-copy-source": _object_path(copy_source),
        "x-goog-copy-source-if-match": copy_source_if_match,
        "x-goog-copy-source-if-none-match": copy_source_if_none_match,
        "x-goog-copy-source-if-modified-since": copy_source_if_modified_since,
        "x-goog-copy-source-if-unmodified-since":
            copy_source_if_unmodified_since,
        "x-goog-copy-metadata-directive":
            "COPY" if copy_metadata else "REPLACE"
    }
    for (key, value) in metadata.iteritems():
        headers["x-goog-meta-" + key] = value
    headers = {key: value for key, value in headers.iteritems()
               if value is not None}

    return urlfetch.fetch("https://storage.googleapis.com/" +
                            urllib.quote(_object_path(obj)),
                          method="PUT", headers=headers)
Example #11
    def __init__(self,
                 obj,
                 lifetime=10 * 60,
                 acl=None,
                 cache_control=None,
                 content_disposition=None,
                 content_encoding=None,
                 content_type=None,
                 expires=None,
                 success_redirect=None,
                 success_status=None,
                 size_range=None,
                 metadata={}):
        """Create a new Upload.

        Most arguments are identical to the form fields listed in
        https://developers.google.com/storage/docs/reference-methods#postobject, but
        there are a few differences:

        * The expires argument takes a number of seconds since the epoch.
        * The key argument only specifies the key name, not the bucket.
        * The metadata argument is a dictionary of metadata header names to values.
          Each one is transformed into an x-goog-meta- field. The keys should not
          include "x-goog-meta-". Null values are ignored.
        * The policy document is automatically created and signed. It ensures that
          all fields have the assigned values when they're submitted to Cloud
          Storage.

        The lifetime argument specifies how long the form is valid. It defaults to
        ten minutes.

        The size_range argument should be a tuple indicating the lower and upper
        bounds on the size of the uploaded file, in bytes.
        """

        obj = _object_path(obj)

        metadata = {
            'x-goog-meta-' + key: value
            for key, value in metadata.iteritems()
        }
        if expires is not None: expires = _iso8601(expires)

        policy = {}
        policy['expiration'] = _iso8601(time.time() + lifetime)
        policy['conditions'] = [{'key': obj}]

        def _try_add_condition(name, value):
            if value is not None: policy['conditions'].append({name: value})

        _try_add_condition('acl', acl)
        _try_add_condition('cache-control', cache_control)
        _try_add_condition('content-disposition', content_disposition)
        _try_add_condition('content-encoding', content_encoding)
        _try_add_condition('content-type', content_type)
        _try_add_condition('expires', expires)
        _try_add_condition('success_action_redirect', success_redirect)
        _try_add_condition('success_action_status', success_status)
        for key, value in metadata.items():
            _try_add_condition(key, value)
        if size_range is not None:
            policy['conditions'].append(
                ['content-length-range', size_range[0], size_range[1]])
        policy = b64encode(json.dumps(policy))
        signature = PrivateKey.sign(policy)

        self._fields = {
            'key': obj,
            'acl': acl,
            'Cache-Control': cache_control,
            'Content-Disposition': content_disposition,
            'Content-Encoding': content_encoding,
            'Content-Type': content_type,
            'expires': expires,
            'GoogleAccessId': _ACCESS_KEY,
            'policy': policy,
            'signature': signature,
            'success_action_redirect': success_redirect,
            'success_action_status': success_status
        }
        self._fields.update(metadata)

        if handlers.is_production():
            self._url = "https://storage.googleapis.com"
        else:
            self._url = routes.url_for(controller="api.versions",
                                       action="upload",
                                       qualified=True)
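The policy document assembled here is just JSON with an expiration and a list of conditions, base64-encoded before signing. Below is a standalone sketch under those assumptions, using an illustrative key name and a simpler ISO 8601 helper than the project's _iso8601.

import json
import time
from base64 import b64encode

def build_policy(key, lifetime=10 * 60, content_type=None, size_range=None):
    # Expiration is an ISO 8601 timestamp `lifetime` seconds from now.
    expiration = time.strftime('%Y-%m-%dT%H:%M:%SZ',
                               time.gmtime(time.time() + lifetime))
    conditions = [{'key': key}]
    if content_type is not None:
        conditions.append({'content-type': content_type})
    if size_range is not None:
        conditions.append(['content-length-range', size_range[0], size_range[1]])
    return b64encode(json.dumps({'expiration': expiration,
                                 'conditions': conditions}))

# policy = build_policy('pub.dartlang.org/packages/foo-1.0.0.tar.gz',
#                       content_type='application/octet-stream',
#                       size_range=(0, 10 * 1024 * 1024))
# signature = PrivateKey.sign(policy)   # as in the constructor above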
Example #12
def object_url(obj):
    """Returns the URL for an object in cloud storage."""
    if handlers.is_production():
        return 'https://commondatastorage.googleapis.com/' + _object_path(obj)
    else:
        return '/gs_/' + urllib.quote(obj)
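As a standalone illustration, assuming _object_path simply prefixes the object key with the storage bucket (the bucket name below is made up) and that the /gs_/ path is presumably routed to the serve() action shown in Example #5:

import urllib

_BUCKET = 'example-bucket'  # illustrative; the real bucket name is project config

def object_url_sketch(obj, production=True):
    # Mirrors object_url: a public GCS URL in production, a local proxy
    # path on the development server.
    if production:
        return 'https://commondatastorage.googleapis.com/%s/%s' % (_BUCKET, obj)
    return '/gs_/' + urllib.quote(obj)

# object_url_sketch('packages/foo-1.0.0.tar.gz')
# => 'https://commondatastorage.googleapis.com/example-bucket/packages/foo-1.0.0.tar.gz'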
Example #13
def modify_object(obj,
                  content_encoding=None,
                  content_type=None,
                  content_disposition=None,
                  acl=None,
                  copy_source=None,
                  copy_source_if_match=None,
                  copy_source_if_none_match=None,
                  copy_source_if_modified_since=None,
                  copy_source_if_unmodified_since=None,
                  copy_metadata=True,
                  metadata={}):
    """Modifies or copies a cloud storage object.

    Most arguments are identical to the form fields listed in
    https://developers.google.com/storage/docs/reference-methods#putobject, but
    there are a few differences:

    * The copy_metadata argument can be True, indicating that the metadata
      should be copied, or False, indicating that it should be replaced.
    * The metadata argument is a dictionary of metadata header names to values.
      Each one is transformed into an x-goog-meta- field. The keys should not
      include "x-goog-meta-". Null values are ignored.
    """

    if not handlers.is_production():
        # The only way to modify an existing object using only the Python API
        # seems to be to copy it over itself. It's not a big deal since this is
        # only for development.
        if copy_source is None: copy_source = obj
        contents = None
        with files.open(_appengine_object_path(copy_source), 'r') as f:
            contents = f.read()

        if content_type is None: content_type = 'application/octet-stream'
        write_path = files.gs.create(_appengine_object_path(obj),
                                     mime_type=content_type,
                                     acl=acl,
                                     content_encoding=content_encoding,
                                     content_disposition=content_disposition,
                                     user_metadata=metadata)
        with files.open(write_path, 'a') as f:
            f.write(contents)
        files.finalize(write_path)
        return

    auth = "OAuth " + app_identity.get_access_token(_FULL_CONTROL_SCOPE)[0]
    headers = {
        "Authorization": auth,
        "Content-Encoding": content_encoding,
        "Content-Type": content_type,
        "Content-Disposition": content_disposition,
        "x-goog-api-version": "2",
        "x-goog-acl": acl,
        "x-goog-copy-source": _object_path(copy_source),
        "x-goog-copy-source-if-match": copy_source_if_match,
        "x-goog-copy-source-if-none-match": copy_source_if_none_match,
        "x-goog-copy-source-if-modified-since": copy_source_if_modified_since,
        "x-goog-copy-source-if-unmodified-since":
        copy_source_if_unmodified_since,
        "x-goog-copy-metadata-directive":
        "COPY" if copy_metadata else "REPLACE"
    }
    for (key, value) in metadata.iteritems():
        headers["x-goog-meta-" + key] = value
    headers = {
        key: value
        for key, value in headers.iteritems() if value is not None
    }

    response = urlfetch.fetch("https://storage.googleapis.com/" +
                              urllib.quote(_object_path(obj)),
                              method="PUT",
                              headers=headers,
                              follow_redirects=True)
    if response.status_code == 200: return

    xml = ElementTree.XML(response.content)
    raise handlers.http_error(
        500, "Cloud storage %s error: %s\n%s" %
        (response.status_code, xml.find('Code').text,
         xml.find('Message').text))
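The error branch parses Cloud Storage's XML error body; here is a standalone sketch with a sample response body (the Code and Message element names match what the code above reads, and the 404/NoSuchKey values are illustrative):

from xml.etree import ElementTree

sample_body = ('<Error><Code>NoSuchKey</Code>'
               '<Message>The specified key does not exist.</Message></Error>')
xml = ElementTree.XML(sample_body)
error = 'Cloud storage 404 error: %s\n%s' % (xml.find('Code').text,
                                             xml.find('Message').text)
# error starts with 'Cloud storage 404 error: NoSuchKey'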
Example #14
    def test_in_production_is_false_in_tests(self):
        # A little sanity check to make sure the tests don't run against
        # production.
        self.assertFalse(handlers.is_production())
Example #15
def object_url(obj):
    """Returns the URL for an object in cloud storage."""
    if handlers.is_production():
        return 'http://commondatastorage.googleapis.com/' + _object_path(obj)
    else:
        return '/gs_/' + urllib.quote(obj)
Example #16
    def __init__(self, obj, lifetime=10*60, acl=None, cache_control=None,
                 content_disposition=None, content_encoding=None,
                 content_type=None, expires=None, success_redirect=None,
                 success_status=None, size_range=None, metadata={}):
        """Create a new Upload.

        Most arguments are identical to the form fields listed in
        https://developers.google.com/storage/docs/reference-methods#postobject, but
        there are a few differences:

        * The expires argument takes a number of seconds since the epoch.
        * The key argument only specifies the key name, not the bucket.
        * The metadata argument is a dictionary of metadata header names to values.
          Each one is transformed into an x-goog-meta- field. The keys should not
          include "x-goog-meta-". Null values are ignored.
        * The policy document is automatically created and signed. It ensures that
          all fields have the assigned values when they're submitted to Cloud
          Storage.

        The lifetime argument specifies how long the form is valid. It defaults to
        ten minutes.

        The size_range argument should be a tuple indicating the lower and upper
        bounds on the size of the uploaded file, in bytes.
        """

        obj = _object_path(obj)

        metadata = {'x-goog-meta-' + key: value for key, value
                    in metadata.iteritems()}
        if expires is not None: expires = _iso8601(expires)

        policy = {}
        policy['expiration'] = _iso8601(time.time() + lifetime)
        policy['conditions'] = [{'key': obj}]
        def _try_add_condition(name, value):
            if value is not None: policy['conditions'].append({name: value})
        _try_add_condition('acl', acl)
        _try_add_condition('cache-control', cache_control)
        _try_add_condition('content-disposition', content_disposition)
        _try_add_condition('content-encoding', content_encoding)
        _try_add_condition('content-type', content_type)
        _try_add_condition('expires', expires)
        _try_add_condition('success_action_redirect', success_redirect)
        _try_add_condition('success_action_status', success_status)
        for key, value in metadata.items(): _try_add_condition(key, value)
        if size_range is not None:
            policy['conditions'].append(
                ['content-length-range', size_range[0], size_range[1]])
        policy = b64encode(json.dumps(policy))
        signature = PrivateKey.sign(policy)

        self._fields = {'key': obj,
                       'acl': acl,
                       'Cache-Control': cache_control,
                       'Content-Disposition': content_disposition,
                       'Content-Encoding': content_encoding,
                       'Content-Type': content_type,
                       'expires': expires,
                       'GoogleAccessId': _ACCESS_KEY,
                       'policy': policy,
                       'signature': signature,
                       'success_action_redirect': success_redirect,
                       'success_action_status': success_status}
        self._fields.update(metadata)

        if handlers.is_production():
            self._url = "https://storage.googleapis.com"
        else:
            self._url = routes.url_for(controller="versions",
                                       action="upload",
                                       package_id=None,
                                       qualified=True)
Example #17
    def test_in_production_is_false_in_tests(self):
        # A little sanity check to make sure the tests don't run against
        # production.
        self.assertFalse(handlers.is_production())