Example #1
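All of the snippets below appear to come from the same module: they rely on module-level imports (os, shutil, io, json, botocore, a log logger) and a project helper aws_resource() that returns a boto3 service resource. A minimal, assumed preamble for running them could look like this; the stand-in aws_resource() is a sketch, and the real helper may add caching or credential handling:

import io
import json
import logging
import os
import shutil

import boto3
import botocore.exceptions

log = logging.getLogger(__name__)  # assumed; the original module's logger setup is not shown


def aws_resource(service_name):
    # Assumed stand-in for the project helper used throughout these examples.
    return boto3.resource(service_name)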
def file_exists(bucket, key):
    # HEAD the object: a 404 means it does not exist, any other error is re-raised.
    try:
        aws_resource('s3').meta.client.head_object(Bucket=bucket, Key=key)
        return True
    except botocore.exceptions.ClientError as exception:
        if exception.response['Error']['Code'] == "404":
            return False
        raise
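A typical guard built on file_exists; the bucket and key below are hypothetical:

if not file_exists('my-bucket', 'collections/births/collection.json'):
    raise FileNotFoundError('collection config missing in bucket')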
def sync_collection_config_file(local_collections_path, collection_id, bucket):
    # Ensure the local collection directory exists.
    local_collection_path = get_collection_path(
        collection_id, collections_path=local_collections_path)
    os.makedirs(local_collection_path, exist_ok=True)

    local_collection_config_filepath = get_collection_config_filepath(
        collection_id, collections_path=local_collections_path)
    bucket_collection_config_filepath = get_collection_config_filepath(
        collection_id)

    # Download the collection config file from the bucket.
    aws_resource('s3').Bucket(bucket).download_file(
        bucket_collection_config_filepath, local_collection_config_filepath)
Example #3
def download_all(bucket, path, local_path):
    # Walk every object under the prefix and mirror it locally, skipping
    # files that already exist with a matching size.
    paginator = aws_resource('s3').meta.client.get_paginator('list_objects')
    for result in paginator.paginate(Bucket=bucket, Prefix=_prefix(path)):
        for obj in result.get('Contents', []):
            object_key = obj['Key']
            if object_key.endswith('/'):
                # Skip "directory" placeholder keys.
                continue

            dest_filepath = os.path.join(local_path, _remove_prefix(object_key, path))
            if os.path.isfile(dest_filepath) and os.path.getsize(dest_filepath) == obj['Size']:
                log.debug('Skipping downloading s3://%s/%s to %s', bucket, object_key, dest_filepath)
            else:
                log.debug('Downloading s3://%s/%s to %s', bucket, object_key, dest_filepath)
                os.makedirs(os.path.dirname(dest_filepath), exist_ok=True)
                aws_resource('s3').Bucket(bucket).download_file(object_key, dest_filepath)
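Note that _prefix() and _remove_prefix() are module helpers not shown here (presumably normalizing the S3 prefix and stripping it from object keys). Assuming those, a hypothetical call mirrors a whole prefix to disk:

download_all('my-bucket', 'datasets/2021', '/tmp/datasets/2021')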
def sync_collection_config(local_collections_path, collection_id, bucket):
    # Start from a clean local directory for this collection.
    local_collection_path = get_collection_path(
        collection_id, collections_path=local_collections_path)
    shutil.rmtree(local_collection_path, ignore_errors=True)
    os.makedirs(local_collection_path)

    local_collection_config_filepath = get_collection_config_filepath(
        collection_id, collections_path=local_collections_path)
    bucket_collection_config_filepath = get_collection_config_filepath(
        collection_id)

    # Collection config file (must exist).
    aws_resource('s3').Bucket(bucket).download_file(
        bucket_collection_config_filepath, local_collection_config_filepath)
    # Lock file (optional: a 404 just means the collection is not locked).
    try:
        local_lock_filepath = get_lock_file(
            collection_id, collections_path=local_collections_path)
        bucket_lock_filepath = get_lock_file(collection_id)
        aws_resource('s3').Bucket(bucket).download_file(
            bucket_lock_filepath, local_lock_filepath)
    except botocore.exceptions.ClientError as error:
        if error.response['Error']['Code'] != "404":
            raise
def sync_changesets(local_collections_path,
                    collection_id,
                    bucket,
                    clean=False):
    # Optionally wipe the local changesets directory, then download any
    # changeset files that are not already present locally.
    local_changesets_path = get_changesets_path(
        collection_id, collections_path=local_collections_path)
    if clean:
        shutil.rmtree(local_changesets_path, ignore_errors=True)
    os.makedirs(local_changesets_path, exist_ok=True)

    bucket_changesets_path = get_changesets_path(collection_id)
    paginator = aws_resource('s3').meta.client.get_paginator('list_objects_v2')
    for result in paginator.paginate(
            Bucket=bucket,
            Delimiter='/',
            Prefix='{}/'.format(bucket_changesets_path)):
        for content in result.get('Contents', []):
            key = content['Key']
            changeset_filename = os.path.basename(key)
            local_changeset_filepath = '{}/{}'.format(local_changesets_path,
                                                      changeset_filename)
            if not os.path.exists(local_changeset_filepath):
                aws_resource('s3').Bucket(bucket).download_file(
                    key, local_changeset_filepath)
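The sync functions above lean on path helpers (get_collection_path, get_collection_config_filepath, get_lock_file, get_changesets_path) that are not shown. Assuming those, a full refresh of one collection might be:

sync_collection_config('/var/data/collections', 'births', 'my-bucket')
sync_changesets('/var/data/collections', 'births', 'my-bucket', clean=True)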
Example #6
def list_keys(bucket, path):
    # List immediate "subdirectories" under the prefix via CommonPrefixes and
    # yield the second path segment of each one.
    paginator = aws_resource('s3').meta.client.get_paginator('list_objects_v2')
    for result in paginator.paginate(Bucket=bucket, Delimiter='/', Prefix=_prefix(path)):
        for prefix in result.get('CommonPrefixes', []):
            yield prefix.get('Prefix').split('/')[1]
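Because of the Delimiter='/' and CommonPrefixes combination, this yields one name per "folder" directly under the prefix, with split('/')[1] keeping only the second path segment. Hypothetical usage:

for name in list_keys('my-bucket', 'collections'):
    print(name)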
Example #7
def download_json(bucket, filepath):
    # Download the object into memory and parse it as UTF-8 JSON.
    file = io.BytesIO()
    aws_resource('s3').Bucket(bucket).download_fileobj(filepath, file)
    return json.loads(file.getvalue().decode('utf-8'))
Example #8
def upload_json(bucket, filepath, obj):
    # Serialize to pretty-printed JSON and upload under the given key.
    body = json.dumps(obj, indent=2).encode('utf-8')
    aws_resource('s3').Bucket(bucket).put_object(Body=body, Key=filepath)
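Together, the two JSON helpers form a simple round trip; the bucket and key here are hypothetical:

upload_json('my-bucket', 'state/run.json', {'status': 'done', 'count': 3})
assert download_json('my-bucket', 'state/run.json')['status'] == 'done'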
Example #9
def download_file(bucket, filepath, local_filepath):
    # Thin wrapper around boto3's Bucket.download_file.
    aws_resource('s3').Bucket(bucket).download_file(filepath, local_filepath)