Example #1
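The snippets on this page are taken from a larger module and rely on several names that are not shown here. A rough sketch of the imports and helpers they appear to assume (bytes2human, run_command, ProgressPercentage and parse_arguments are defined elsewhere in the original project):

import logging
import os
from os.path import split

import boto3
from boto3.s3.transfer import S3Transfer, TransferConfig

LOG = logging.getLogger(__name__)
# bytes2human(n)      - helper rendering a byte count as a human-readable string
# run_command(cmd)    - helper running a shell command and returning its exit code
# ProgressPercentage  - callback class that reports upload progress
# parse_arguments()   - argparse wrapper returning the parsed command-line arguments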
def retrieve_files(args):
    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3')

    bucket = s3.Bucket(args.bucket)
    size = 0
    for key in bucket.objects.all():
        size += key.size
        LOG.info('{0}, {1}, {2}, {3}'.format(key.key, bytes2human(key.size), size, bytes2human(size)))

    LOG.info('Size = {0}'.format(bytes2human(size)))
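
# bucket.objects.all() pages through the bucket listing automatically, so the loop
# above also works for buckets holding more than 1,000 objects. A hypothetical
# direct call (the real code passes the result of parse_arguments() instead):
from argparse import Namespace

retrieve_files(Namespace(bucket='example-bucket'))  # placeholder bucket name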

def copy_measurement_set(measurement_set, directory_out, bucket_name):
    LOG.info('measurement_set: {0}, bucket_name: {1}'.format(measurement_set, bucket_name))

    (measurement_set_directory, measurement_set_filename) = split(measurement_set)
    key = 'observation_data/{0}.tar'.format(measurement_set_filename)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)

    bucket = s3.Bucket(bucket_name)
    # Skip the upload if an object with exactly this key already exists in the bucket
    objs = list(bucket.objects.filter(Prefix=key))
    if len(objs) > 0 and objs[0].key == key:
        LOG.info('The measurement set {0} exists in {1}'.format(key, bucket_name))
    else:
        # Tar up the measurement set before uploading it
        tar_filename = os.path.join(directory_out, '{0}.tar'.format(measurement_set_filename))
        bash = 'cd {0} && tar -cvf {1} {2}'.format(measurement_set_directory, tar_filename, measurement_set_filename)
        return_code = run_command(bash)
        path_exists = os.path.exists(tar_filename)
        if return_code != 0 or not path_exists:
            LOG.error('tar return_code: {0}, exists: {1}'.format(return_code, path_exists))
        else:
            tar_size = float(os.path.getsize(tar_filename))
            # Aim for at most ~10,000 parts, the limit for an S3 multipart upload
            chunk_size = int(tar_size / 9999)
            LOG.info(
                'tar_filename: {0}, bucket: {1}, key: {2}, size: {3}, chunk_size: {4}'.format(
                    tar_filename,
                    bucket_name,
                    key,
                    bytes2human(tar_size),
                    chunk_size
                )
            )
            # Upload with the boto3 transfer manager, reporting progress and using
            # the reduced-redundancy storage class
            transfer_config = TransferConfig(
                multipart_chunksize=chunk_size
            )
            s3_client = s3.meta.client
            transfer = S3Transfer(s3_client, transfer_config)
            transfer.upload_file(
                tar_filename,
                bucket_name,
                key,
                callback=ProgressPercentage(
                    key,
                    tar_size
                ),
                extra_args={
                    'StorageClass': 'REDUCED_REDUNDANCY',
                }
            )

        # Clean up
        if path_exists:
            os.remove(tar_filename)
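A note on the chunk-size calculation above: dividing the tar size by 9999 is meant to keep the upload within S3's limit of 10,000 parts per multipart upload, but int() truncates, which for some sizes pushes the part count just past that limit and, for small archives, yields a chunk size below S3's 5 MiB minimum part size. A more defensive variant of those two lines might look like this (a sketch, not the original code):

import math

# Round up so tar_size / chunk_size never exceeds 9,999 parts, and never drop
# below S3's 5 MiB minimum part size
chunk_size = max(int(math.ceil(tar_size / 9999.0)), 5 * 1024 * 1024)
transfer_config = TransferConfig(multipart_chunksize=chunk_size)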
Example #3
def main():
    arguments = parse_arguments()
    logging.basicConfig(level=logging.INFO)
    if arguments.verbosity == 0:
        # Quieten chatty third-party loggers unless extra verbosity was requested
        logging.getLogger('boto3').setLevel(level=logging.WARN)
        logging.getLogger('botocore').setLevel(level=logging.WARN)
        logging.getLogger('nose').setLevel(level=logging.WARN)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)

    bucket = s3.Bucket(arguments.bucket)
    size = 0
    count = 0
    for key in bucket.objects.all():
        size += key.size
        count += 1

        if count % 100 == 0:
            LOG.info('Count: {0}, size: {1}'.format(count, bytes2human(size)))

    LOG.info('Size: {0}'.format(bytes2human(size)))
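parse_arguments() itself is not shown in these examples; judging by the attributes used above (bucket and verbosity), it is presumably a thin argparse wrapper along these lines (hypothetical reconstruction, not the original code):

import argparse


def parse_arguments():
    # Hypothetical sketch; the real implementation lives elsewhere in the project
    parser = argparse.ArgumentParser(description='Report the total size of an S3 bucket')
    parser.add_argument('bucket', help='name of the S3 bucket to scan')
    parser.add_argument('-v', '--verbosity', action='count', default=0,
                        help='increase logging verbosity')
    return parser.parse_args()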
Example #4
def main():
    arguments = parse_arguments()
    logging.basicConfig(level=logging.INFO)
    if arguments.verbosity == 0:
        logging.getLogger('boto3').setLevel(level=logging.WARN)
        logging.getLogger('botocore').setLevel(level=logging.WARN)
        logging.getLogger('nose').setLevel(level=logging.WARN)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)

    bucket = s3.Bucket(arguments.bucket)
    size = 0
    count = 0
    for key in bucket.objects.all():
        size += key.size
        count += 1

        if count % 100 == 0:
            LOG.info('Count: {0}, size: {1}'.format(count, bytes2human(size)))

    LOG.info('Size: {0}'.format(bytes2human(size)))

def copy_measurement_set(measurement_set, directory_out, bucket_name):
    LOG.info('measurement_set: {0}, bucket_name: {1}'.format(
        measurement_set, bucket_name))

    (measurement_set_directory,
     measurement_set_filename) = split(measurement_set)
    key = 'observation_data/{0}.tar'.format(measurement_set_filename)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)

    bucket = s3.Bucket(bucket_name)
    objs = list(bucket.objects.filter(Prefix=key))
    if len(objs) > 0 and objs[0].key == key:
        LOG.info('The measurement set {0} exists in {1}'.format(
            key, bucket_name))
    else:
        tar_filename = os.path.join(directory_out,
                                    '{0}.tar'.format(measurement_set_filename))
        bash = 'cd {0} && tar -cvf {1} {2}'.format(measurement_set_directory,
                                                   tar_filename,
                                                   measurement_set_filename)
        return_code = run_command(bash)
        path_exists = os.path.exists(tar_filename)
        if return_code != 0 or not path_exists:
            LOG.error('tar return_code: {0}, exists: {1}'.format(
                return_code, path_exists))
        else:
            tar_size = float(os.path.getsize(tar_filename))
            chunk_size = int(tar_size / 9999)
            LOG.info(
                'tar_filename: {0}, bucket: {1}, key: {2}, size: {3}, chunk_size: {4}'
                .format(tar_filename, bucket_name, key, bytes2human(tar_size),
                        chunk_size))
            transfer_config = TransferConfig(multipart_chunksize=chunk_size)
            s3_client = s3.meta.client
            transfer = S3Transfer(s3_client, transfer_config)
            transfer.upload_file(tar_filename,
                                 bucket_name,
                                 key,
                                 callback=ProgressPercentage(key, tar_size),
                                 extra_args={
                                     'StorageClass': 'REDUCED_REDUNDANCY',
                                 })

        # Clean up
        if path_exists:
            os.remove(tar_filename)

def retrieve_files(args):
    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)

    bucket = s3.Bucket(args.bucket)
    size = 0
    for key in bucket.objects.all():
        # Only count the tarred, calibrated measurement sets; fetch each object's
        # storage class and Glacier restore status for reporting
        if key.key.endswith('_calibrated_deepfield.ms.tar'):
            obj = s3.Object(key.bucket_name, key.key)
            storage_class = obj.storage_class
            restore = obj.restore
            size += key.size
            if args.verbosity >= 1:
                LOG.info('{0}, {1}, {2}, {3}'.format(key.key, storage_class, restore, size))

    LOG.info('Size = {0}'.format(bytes2human(size)))
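This last retrieve_files only inspects storage_class and restore (restore stays None until a Glacier restore has been requested, after which it reports the request status). If the archived measurement sets actually needed to be brought back online, a minimal sketch using the standard boto3 restore call might look like this (not part of the original code; obj, storage_class and restore come from the loop above):

if storage_class == 'GLACIER' and restore is None:
    obj.restore_object(RestoreRequest={
        'Days': 7,                                 # keep the restored copy available for a week
        'GlacierJobParameters': {'Tier': 'Bulk'},  # cheapest, slowest retrieval tier
    })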