def get_key(env):
    bucket_name, key_path = get_bucket_name_and_key_path(env)
    try:
        bucket = bucketstore.get(bucket_name)
    except ValueError:
        if click.confirm("The bucket {} does not exist, would you like to create it?".format(bucket_name)):
            try:
                bucket = bucketstore.get(bucket_name, create=True)
            except botocore.exceptions.ClientError as e:
                if e.response["Error"]["Code"] == "BucketAlreadyExists":
                    raise click.ClickException(
                        "The bucket {} already exists.\n"
                        "If you created this bucket, your current credentials do not have "
                        "access to it; otherwise, try a different bucket name.".format(bucket_name)
                    )
                else:
                    raise e
        else:
            return

    try:
        key = bucket.key(key_path)
        # Accessing the metadata raises a ClientError if the key does not exist.
        key.meta
    except botocore.exceptions.ClientError as e:
        if e.response["Error"]["Code"] == "NoSuchKey":
            # Initialise the key with an empty JSON object, then fetch it again.
            bucket.set(key_path, "{}")
            key = bucket.key(key_path)
        else:
            raise e
    return key
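A minimal usage sketch for get_key() above. The "staging" environment name, the JSON handling, and the "last_deploy" field are illustrative assumptions, not part of the original code; the sketch only shows that the returned bucketstore key starts out as "{}" and can be read and rewritten in place.

import json

key = get_key("staging")              # "staging" is a hypothetical environment name
if key is not None:                   # get_key() returns None when bucket creation is declined
    data = json.loads(key.get())      # "{}" on first use, per get_key() above
    data["last_deploy"] = "v1"        # illustrative field only
    key.set(json.dumps(data))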
def __init__(self, bucket, mount_point, other_dirs=None, **kwargs):
    super(S3FS, self).__init__()
    self.bucket_name = bucket
    self.bucket = bucketstore.get(self.bucket_name, **kwargs)
    self.mount_point = mount_point
    self.patcher = S3Patcher(filesystem=self)

    if not other_dirs:
        other_dirs = ['.']
    for _dir in other_dirs:
        _dir = os.path.abspath(_dir)
        self.add_real_directory(
            source_path=_dir,
            read_only=True,
            lazy_read=True,
        )

    self.keys = []

    # Create the mount point.
    self.CreateDirectory(mount_point)
    self.refresh()
def test_buckets_can_be_created():
    bucket = bucketstore.get("test-bucket", create=True)

    assert bucket.name == "test-bucket"
    assert not bucket.is_public  # Buckets are private, by default.
    assert not bucket.all()      # Buckets are empty, by default.
    assert "<S3Bucket" in repr(bucket)
def validate_state(state):
    bucket = bucketstore.get('rightright.state')
    state_valid = False
    if state in bucket:
        state_valid = True
        del bucket[state]
    return state_valid
def bucket() -> Generator:
    """fixture that provides a bucketstore bucket."""
    with mock_s3():
        yield bucketstore.get("bucketstore-playground", create=True)
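A sketch of how the generator above could be registered and consumed as a pytest fixture. The @pytest.fixture decorator, the moto import, and the test_roundtrip test are assumptions added for illustration; only the fixture body itself comes from the original snippet.

from typing import Generator

import bucketstore
import pytest
from moto import mock_s3


@pytest.fixture
def bucket() -> Generator:
    """Fixture from above, assumed to be registered with @pytest.fixture and backed by moto's fake S3."""
    with mock_s3():
        yield bucketstore.get("bucketstore-playground", create=True)


def test_roundtrip(bucket):
    # Values written through the dict-style API show up in the key listing.
    bucket["greeting"] = "hello"
    assert "greeting" in bucket.list()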
def __init__(self, bucket_name, create=False, *args, **kwargs):
    self.bucket = bucketstore.get(bucket_name, create=create)
    super(AnguisS3, self).__init__()
S3 as the latest release if given no arguments.

This script expects the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment
variables to be set, as well as all dependencies to be installed (via 'pipenv install').
"""
import sys

import bucketstore
import crayons
from parse import parse
from docopt import docopt

# S3 Bucket.
bucket = bucketstore.get('lang-common', create=False)
prefix = 'buildpack-stdlib/'


def do_list():
    """Prints uploaded versions to the console."""
    print(crayons.yellow('Versions of buildpack standard library available on Amazon S3:'))
    for version in iter_versions():
        print(' - {0}'.format(version))


def iter_versions():
    """Yields uploaded versions."""
# Support for gomix's 'front-end' and 'back-end' UI.
app = Flask(__name__, static_folder='public', template_folder='views')
app.debug = config('DEBUG', default=True, cast=bool)
# app.wsgi_app = SaferProxyFix(app.wsgi_app)

# Set the app secret key from the secret environment variables.
app.secret_key = config('SECRET')

# Flask plugins.
caster = FlaskCaster(app)
FlaskUUID(app)
sentry = Sentry(app, dsn=config('SENTRY_DSN'))

# The S3 Key/Value store.
store = bucketstore.get('typy', create=True)
store_total = len(store.list())


@app.after_request
def apply_kr_hello(response):
    """Adds some headers to all responses."""
    # Made by Kenneth Reitz.
    if 'MADE_BY' in os.environ:
        response.headers["X-Was-Here"] = os.environ.get('MADE_BY')

    # Powered by Flask.
    response.headers["X-Powered-By"] = os.environ.get('POWERED_BY')

    return response
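A small, illustrative route showing how the S3-backed `store` above could serve as a key/value endpoint. The /kv/<name> URL and the handler are assumptions added here, not part of the original application; they rely on the `app` and `store` objects defined above.

from flask import abort, request


@app.route('/kv/<name>', methods=['GET', 'POST'])
def kv(name):
    """Hypothetical key/value endpoint backed by the `store` bucket above."""
    if request.method == 'POST':
        # bucketstore buckets support dict-style assignment.
        store[name] = request.get_data(as_text=True)
    elif name not in store.list():
        abort(404)
    return store[name]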
def test_buckets_are_not_created_automatically():
    with pytest.raises(ValueError):
        bucketstore.get("non-existent-bucket")
def save_state(state):
    bucket = bucketstore.get('rightright.state')
    bucket[state] = "valid"
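Taken together, save_state() and validate_state() above implement a one-time token: a state value validates exactly once and is deleted on first success. A minimal round-trip sketch, assuming moto's fake S3 and an illustrative token value; the functions themselves come from the snippets above.

from moto import mock_s3

import bucketstore


with mock_s3():
    bucketstore.get('rightright.state', create=True)  # the bucket must exist first

    save_state('abc123')                  # 'abc123' is an illustrative token
    assert validate_state('abc123')       # first check succeeds and consumes the token
    assert not validate_state('abc123')   # second check fails: the token was deleted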