def main():
    parser = argparse.ArgumentParser('Check the multipart upload status')
    parser.add_argument('-c', '--cancel', action='store_true', help='cancel all the outstanding multipart uploads older than 24 hours')
    parser.add_argument('-f', '--force', action='store_true', help='force cancellation of all the outstanding multipart uploads')
    parser.add_argument('-v', '--verbosity', action='count', default=0, help='increase output verbosity')
    parser.add_argument('bucket', help='the bucket to check')
    args = parser.parse_args()

    set_logging_level(args.verbosity)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)
    bucket = s3.Bucket(args.bucket)

    # Multipart uploads started more than 24 hours ago are candidates for cancellation
    now = datetime.datetime.now(utc)
    one_day_ago = now - datetime.timedelta(hours=24)

    for item in bucket.multipart_uploads.all():
        LOG.info('key_name: {0}, initiated: {1}'.format(item.key, item.initiated))
        if (item.initiated < one_day_ago and args.cancel) or args.force:
            LOG.info('Cancelling {0}'.format(item.key))
            item.abort()
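
# This main() relies on module-level names that are not shown above: argparse, boto3,
# datetime, LOG, utc and set_logging_level. A minimal sketch of such a preamble follows;
# the utc value and the verbosity-to-level mapping in set_logging_level are assumptions,
# not the script's actual definitions.
import argparse
import datetime
import logging

import boto3

utc = datetime.timezone.utc  # assumption: any UTC tzinfo would do here
LOG = logging.getLogger(__name__)


def set_logging_level(verbosity):
    # Hypothetical helper: map the -v count onto a logging level.
    if verbosity >= 2:
        level = logging.DEBUG
    elif verbosity == 1:
        level = logging.INFO
    else:
        level = logging.WARNING
    logging.basicConfig(level=level, format='%(asctime)s %(levelname)s %(message)s')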
def main():
    arguments = parse_arguments()
    set_logging_level(arguments.verbosity)

    work_to_do = WorkToDo(
        arguments.width,
        arguments.bucket,
        get_s3_uvsub_name(arguments.width),
        get_s3_split_name(arguments.width))
    work_to_do.calculate_work_to_do()

    for work_item in work_to_do.work_to_do:
        LOG.info(work_item)
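
# parse_arguments() is defined elsewhere in the script; only the attributes used above
# (bucket, width, verbosity) are known. A hypothetical reconstruction, with the help
# text and types guessed rather than taken from the source:
import argparse


def parse_arguments():
    parser = argparse.ArgumentParser('List the work still to be done')
    parser.add_argument('bucket', help='the bucket to check')
    parser.add_argument('width', type=int, help='the frequency width to check')
    parser.add_argument('-v', '--verbosity', action='count', default=0, help='increase output verbosity')
    return parser.parse_args()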
def main():
    arguments = parse_arguments()
    set_logging_level(arguments.verbosity)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)
    bucket = s3.Bucket(arguments.bucket)

    # Get the data we need
    clean_entries = get_clean(bucket, arguments.width, arguments.iterations, arguments.arcsec)
    analyse_data(clean_entries, arguments.width)
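
# get_clean() and analyse_data() are defined elsewhere. A hedged sketch of how such a
# collector could scan the bucket, assuming the clean outputs sit under a prefix derived
# from the width, iterations and arcsec arguments; the real key layout is not shown here.
def get_clean(bucket, width, iterations, arcsec):
    prefix = 'clean_{0}_{1}_{2}'.format(width, iterations, arcsec)  # hypothetical prefix
    entries = []
    for s3_object in bucket.objects.filter(Prefix=prefix):
        entries.append((s3_object.key, s3_object.size))
    return entries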
def main():
    arguments = parse_arguments()
    set_logging_level(arguments.verbosity)

    session = boto3.Session(profile_name='aws-chiles02')
    s3 = session.resource('s3', use_ssl=False)
    bucket = s3.Bucket(arguments.bucket)

    # Get the data we need
    measurement_sets = get_measurement_sets(bucket)
    split_entries = get_split(bucket, arguments.width)
    analyse_data(measurement_sets, split_entries, arguments.width)
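
# analyse_data() is defined elsewhere. A hedged sketch of the kind of cross-check it
# presumably performs, assuming it reports measurement sets with no matching split entry;
# the actual comparison and reporting format are not shown in the source.
def analyse_data(measurement_sets, split_entries, width):
    split_set = set(split_entries)
    for measurement_set in measurement_sets:
        if measurement_set not in split_set:
            LOG.info('No split found for {0} at width {1}'.format(measurement_set, width))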