def upload(self, file, key, acl=None, policy=None, signature=None,
           success_action_redirect=None, **kwargs):
    """A development-only action for uploading a package archive.

    In production, package archives are uploaded directly to cloud storage,
    using a signed form for authentication. The signed form doesn't work for
    the development server, since it uses a local database in place of cloud
    storage, so this action emulates it by manually saving the file to the
    development database.
    """
    # This emulation endpoint must never be reachable in production.
    if handlers.is_production():
        raise handlers.http_error(404)
    # Re-sign the policy document and demand an exact match, mirroring the
    # authentication check cloud storage itself would perform.
    if PrivateKey.sign(policy) != signature:
        raise handlers.http_error(403)

    destination = files.gs.create('/gs/' + key, acl=acl)
    with files.open(destination, 'a') as handle:
        handle.write(file.file.read())
    files.finalize(destination)

    # Emulate cloud storage's post-upload behavior: redirect when the form
    # asked for one, otherwise reply 204 No Content.
    if success_action_redirect:
        raise cherrypy.HTTPRedirect(success_action_redirect)
    cherrypy.response.status = 204
    return ""
def upload(self, package_id, file, key, acl=None, policy=None,
           signature=None, success_action_redirect=None, **kwargs):
    """A development-only action for uploading a package archive.

    In production, package archives are uploaded directly to cloud storage,
    using a signed form for authentication. The signed form doesn't work for
    the development server, since it uses a local database in place of cloud
    storage, so this action emulates it by manually saving the file to the
    development database.

    Arguments:
      package_id: The package id from the route (accepted but unused here).
      file: The uploaded file object; its contents are read from file.file.
      key: The object key under which to store the archive.
      acl: Optional canned ACL for the created object.
      policy: The base64-encoded policy document from the signed form.
      signature: The signature of the policy document.
      success_action_redirect: Optional URL to redirect to on success.

    Raises an HTTP 404 in production and 403 if the signature check fails.
    """
    if handlers.is_production():
        raise handlers.http_error(404)
    # Reject requests that omit the policy or signature outright rather
    # than letting PrivateKey.sign blow up on a None policy (which would
    # surface as a 500 instead of the intended 403).
    if policy is None or signature is None or \
            PrivateKey.sign(policy) != signature:
        raise handlers.http_error(403)

    write_path = files.gs.create("/gs/" + key, acl=acl)
    with files.open(write_path, "a") as f:
        f.write(file.file.read())
    files.finalize(write_path)

    # Mirror cloud storage's post-upload behavior: honor the requested
    # redirect, otherwise answer 204 No Content.
    if success_action_redirect:
        raise cherrypy.HTTPRedirect(success_action_redirect)
    cherrypy.response.status = 204
    return ""
def __init__(self, obj, lifetime=10*60, acl=None, cache_control=None,
             content_disposition=None, content_encoding=None,
             content_type=None, expires=None, success_redirect=None,
             success_status=None, size_range=None, metadata=None):
    """Create a new Upload.

    Most arguments are identical to the form fields listed in
    https://developers.google.com/storage/docs/reference-methods#postobject,
    but there are a few differences:

    * The expires argument takes a number of seconds since the epoch.
    * The key argument only specifies the key name, not the bucket.
    * The metadata argument is a dictionary of metadata header names to
      values. Each one is transformed into an x-goog-meta- field. The keys
      should not include "x-goog-meta-". Null values are ignored.
    * The policy document is automatically created and signed. It ensures
      that all fields have the assigned values when they're submitted to
      Cloud Storage.

    The lifetime argument specifies how long the form is valid. It defaults
    to ten minutes.

    The size_range argument should be a tuple indicating the lower and upper
    bounds on the size of the uploaded file, in bytes.
    """
    obj = _object_path(obj)
    # Default to a fresh dict per call rather than a shared mutable default
    # argument ({} evaluated once at def time).
    if metadata is None:
        metadata = {}
    metadata = {'x-goog-meta-' + key: value
                for key, value in metadata.iteritems()}
    if expires is not None:
        expires = _iso8601(expires)

    policy = {}
    policy['expiration'] = _iso8601(time.time() + lifetime)
    policy['conditions'] = [{'key': obj}]

    def _try_add_condition(name, value):
        # Only constrain fields that were actually supplied; None means
        # "leave this field unconstrained".
        if value is not None:
            policy['conditions'].append({name: value})

    _try_add_condition('acl', acl)
    _try_add_condition('cache-control', cache_control)
    _try_add_condition('content-disposition', content_disposition)
    _try_add_condition('content-encoding', content_encoding)
    _try_add_condition('content-type', content_type)
    _try_add_condition('expires', expires)
    _try_add_condition('success_action_redirect', success_redirect)
    _try_add_condition('success_action_status', success_status)
    for key, value in metadata.items():
        _try_add_condition(key, value)

    if size_range is not None:
        policy['conditions'].append(
            ['content-length-range', size_range[0], size_range[1]])

    # The signed policy authorizes the browser's POST to cloud storage.
    policy = b64encode(json.dumps(policy))
    signature = PrivateKey.sign(policy)

    self._fields = {'key': obj, 'acl': acl, 'Cache-Control': cache_control,
                    'Content-Disposition': content_disposition,
                    'Content-Encoding': content_encoding,
                    'Content-Type': content_type, 'expires': expires,
                    'GoogleAccessId': _ACCESS_KEY, 'policy': policy,
                    'signature': signature,
                    'success_action_redirect': success_redirect,
                    'success_action_status': success_status}
    self._fields.update(metadata)

    # In development there is no real cloud storage bucket, so the form
    # posts back to the local upload emulation action instead.
    if handlers.is_production():
        self._url = "https://storage.googleapis.com"
    else:
        self._url = routes.url_for(controller="versions", action="upload",
                                   package_id=None, qualified=True)
def __init__(self, obj, lifetime=10 * 60, acl=None, cache_control=None,
             content_disposition=None, content_encoding=None,
             content_type=None, expires=None, success_redirect=None,
             success_status=None, size_range=None, metadata={}):
    """Create a new Upload.

    Most arguments are identical to the form fields listed in
    https://developers.google.com/storage/docs/reference-methods#postobject,
    but there are a few differences:

    * The expires argument takes a number of seconds since the epoch.
    * The key argument only specifies the key name, not the bucket.
    * The metadata argument is a dictionary of metadata header names to
      values. Each one is transformed into an x-goog-meta- field. The keys
      should not include "x-goog-meta-". Null values are ignored.
    * The policy document is automatically created and signed. It ensures
      that all fields have the assigned values when they're submitted to
      Cloud Storage.

    The lifetime argument specifies how long the form is valid. It defaults
    to ten minutes.

    The size_range argument should be a tuple indicating the lower and upper
    bounds on the size of the uploaded file, in bytes.
    """
    obj = _object_path(obj)
    metadata = dict(('x-goog-meta-' + name, value)
                    for name, value in metadata.iteritems())
    if expires is not None:
        expires = _iso8601(expires)

    conditions = [{'key': obj}]
    policy = {'expiration': _iso8601(time.time() + lifetime),
              'conditions': conditions}

    # Constrain each supplied form field; a None value means the field is
    # left unconstrained. Order matters: the signed policy bytes depend on
    # the order in which conditions are appended.
    candidates = [('acl', acl),
                  ('cache-control', cache_control),
                  ('content-disposition', content_disposition),
                  ('content-encoding', content_encoding),
                  ('content-type', content_type),
                  ('expires', expires),
                  ('success_action_redirect', success_redirect),
                  ('success_action_status', success_status)]
    candidates.extend(metadata.items())
    for name, value in candidates:
        if value is not None:
            conditions.append({name: value})

    if size_range is not None:
        lower, upper = size_range
        conditions.append(['content-length-range', lower, upper])

    # Serialize, base64-encode, and sign the policy that authorizes the
    # browser's POST to cloud storage.
    policy = b64encode(json.dumps(policy))
    signature = PrivateKey.sign(policy)

    self._fields = {
        'key': obj,
        'acl': acl,
        'Cache-Control': cache_control,
        'Content-Disposition': content_disposition,
        'Content-Encoding': content_encoding,
        'Content-Type': content_type,
        'expires': expires,
        'GoogleAccessId': _ACCESS_KEY,
        'policy': policy,
        'signature': signature,
        'success_action_redirect': success_redirect,
        'success_action_status': success_status,
    }
    self._fields.update(metadata)

    # In development the form posts back to the local upload emulation
    # action rather than to a real cloud storage bucket.
    if handlers.is_production():
        self._url = "https://storage.googleapis.com"
    else:
        self._url = routes.url_for(controller="api.versions",
                                   action="upload", qualified=True)