def object_put(auth, path, data, mimetype='application/octet-stream'):
    """Upload a file-like object to Cloud Storage using a resumable chunked upload.

    Args:
        auth: credential scheme handed through to API_Storage.
        path: 'bucket:filename' string, split on the first ':'.
        data: readable file-like object, wrapped in MediaIoBaseUpload.
        mimetype: content type recorded on the uploaded object.

    Raises:
        HttpError: immediately for any status below 500 (client errors are
            not retried).
        The last transport/server error once more than RETRIES consecutive
        chunk failures occur.
    """
    bucket, filename = path.split(':', 1)
    media = MediaIoBaseUpload(data, mimetype=mimetype, chunksize=CHUNKSIZE, resumable=True)
    # execute(run=False) presumably returns the prepared request object
    # rather than running it -- TODO confirm against the API_Storage wrapper.
    request = API_Storage(auth).objects().insert(
        bucket=bucket, name=filename, media_body=media).execute(run=False)
    response = None
    errors = 0
    # next_chunk() returns a non-None response only when the upload finishes.
    while response is None:
        error = None
        try:
            status, response = request.next_chunk()
            if project.verbose and status:
                print('Uploaded %d%%.' % int(status.progress() * 100))
        except HttpError as e:
            # Below 500 is a client error: retrying cannot help, fail fast.
            if e.resp.status < 500:
                raise
            error = e
        except (httplib2.HttpLib2Error, IOError) as e:
            error = e
        # Consecutive-error counter: any successful chunk resets it to zero.
        errors = (errors + 1) if error else 0
        if errors > RETRIES:
            raise error
    if project.verbose:
        print('Uploaded 100%.')
def object_list(auth, path, raw=False, files_only=False):
    """Yield the objects found under a 'bucket:prefix' path.

    Each listing entry is yielded either as the raw API item (raw=True) or
    as a 'bucket:name' string. With files_only set, entries whose names end
    in '/' (folder placeholders) are skipped.
    """
    bucket, prefix = path.split(':', 1)
    listing = API_Storage(auth, iterate=True).objects().list(
        bucket=bucket, prefix=prefix).execute()
    for entry in listing:
        name = entry['name']
        if files_only and name.endswith('/'):
            continue
        if raw:
            yield entry
        else:
            yield '%s:%s' % (bucket, name)
def object_exists(config, auth, path):
    """Return True if the object at 'bucket:filename' exists, else False.

    An HttpError from the get call (typically a 404) is treated as "does
    not exist". The previous bare `except:` swallowed every exception,
    including KeyboardInterrupt and programming errors; non-HTTP failures
    now propagate to the caller.
    """
    bucket, filename = path.split(':', 1)
    try:
        API_Storage(config, auth).objects().get(
            bucket=bucket, object=filename).execute()
        return True
    except HttpError:
        return False
def bucket_get(auth, name):
    """Fetch bucket metadata, or None when the bucket does not exist (404).

    Transient statuses (403, 500, 503) are retried with a 5s pause, up to
    RETRIES attempts. Previously a transient error slept once and then fell
    through, implicitly returning None -- indistinguishable from "bucket
    missing" for callers such as bucket_create.

    Raises:
        HttpError: for non-retryable statuses, or once retries are exhausted.
    """
    attempt = 0
    while True:
        try:
            return API_Storage(auth).buckets().get(bucket=name).execute()
        except HttpError as e:
            if e.resp.status == 404:
                return None
            if e.resp.status in (403, 500, 503) and attempt < RETRIES:
                attempt += 1
                sleep(5)
            else:
                raise
def object_copy(auth, path_from, path_to):
    """Copy an object between 'bucket:filename' locations via objects.rewrite.

    Returns the rewrite API response.
    """
    source_bucket, source_name = path_from.split(':', 1)
    dest_bucket, dest_name = path_to.split(':', 1)
    destination = {
        'kind': 'storage#object',
        'bucket': dest_bucket,
        'name': dest_name,
        'storageClass': 'REGIONAL',
    }
    return API_Storage(auth).objects().rewrite(
        sourceBucket=source_bucket,
        sourceObject=source_name,
        destinationBucket=dest_bucket,
        destinationObject=dest_name,
        body=destination,
    ).execute()
def bucket_create(config, auth, project, name, location='us-west1'):
    """Create a bucket if it does not already exist.

    Returns the insert response on success; returns None when the bucket
    already exists, already-exists is raced (409), or a transient error
    (403/500/503) occurs.

    Raises:
        HttpError: for any other failure.
    """
    # NOTE(review): bucket_get is defined as bucket_get(auth, name); the
    # previous call passed config as an extra leading argument (TypeError).
    if bucket_get(auth, name) is None:
        body = {
            'kind': 'storage#bucket',
            'name': name,
            'storageClass': 'REGIONAL',
            'location': location,
        }
        try:
            # An unreachable sleep(1) after this return has been removed.
            return API_Storage(config, auth).buckets().insert(
                project=project, body=body).execute()
        except HttpError as e:
            if e.resp.status in [403, 500, 503]:
                # Transient: pause, preserving the original best-effort
                # behavior of returning None rather than retrying.
                sleep(5)
            elif json.loads(e.content.decode())['error']['code'] == 409:
                pass  # already exists ( ignore )
            else:
                raise
def bucket_access(config, auth, project, name, role, emails=[], groups=[], services=[], domains=[]):
    """Grant a role on a bucket to users, groups, service accounts, and domains.

    Builds one bucketAccessControl entity string per principal ('user-',
    'group-', 'domain-' prefixes; service accounts use 'user-') and inserts
    an ACL entry for each.

    Fixes a Python 3 TypeError: `map(...) + map(...)` is invalid because
    map returns an iterator; the entity lists are now built with list
    comprehensions. The mutable default arguments are only iterated, never
    mutated, so they are kept for interface compatibility.
    """
    entities = (
        ['user-%s' % e for e in emails]
        + ['group-%s' % e for e in groups]
        + ['user-%s' % e for e in services]
        + ['domain-%s' % e for e in domains]
    )
    for entity in entities:
        body = {
            'kind': 'storage#bucketAccessControl',
            'bucket': name,
            'entity': entity,
            'role': role
        }
        API_Storage(config, auth).bucketAccessControls().insert(
            bucket=name, body=body).execute()
def bucket_delete(auth, name):
    """Delete the named bucket and return the API response."""
    service = API_Storage(auth)
    return service.buckets().delete(bucket=name).execute()
def object_delete(auth, path):
    """Delete the object at 'bucket:filename' and return the API response."""
    bucket, object_name = path.split(':', 1)
    request = API_Storage(auth).objects().delete(bucket=bucket, object=object_name)
    return request.execute()
def object_get_chunks(auth, path, chunksize=CHUNKSIZE, encoding=None):
    """Stream the contents of 'bucket:filename' in chunks.

    Builds the media-download request and delegates chunked iteration to
    media_download. The previous `data = BytesIO()` local was dead code --
    never written to or read -- and has been removed.

    Args:
        auth: credential scheme handed through to API_Storage.
        path: 'bucket:filename' string.
        chunksize: bytes per downloaded chunk.
        encoding: forwarded to media_download (presumably None yields raw
            bytes -- confirm against media_download).

    Yields:
        Chunks produced by media_download.
    """
    bucket, filename = path.split(':', 1)
    request = API_Storage(auth).objects().get_media(
        bucket=bucket, object=filename).execute(run=False)
    yield from media_download(request, chunksize, encoding)
def object_get(auth, path):
    """Download and return the full contents of the object at 'bucket:filename'."""
    bucket, object_name = path.split(':', 1)
    media = API_Storage(auth).objects().get_media(bucket=bucket, object=object_name)
    return media.execute()