def execute(destination):
    # 'in' step: download the requested archive version from S3 and unpack it.
    valid, payload = load_and_validate_payload(schemas, Request.IN)
    if not valid:
        return -1
    s3client = S3Client(get_source_value(payload, ACCESS_KEY),
                        get_source_value(payload, SECRET_KEY),
                        get_source_value(payload, REGION_NAME))
    if not s3client.does_bucket_exist(get_source_value(payload, BUCKET)):
        return -1
    archive_filename = join_paths(destination, get_version(payload, VERSION_KEY_NAME))
    s3client.download_file(get_source_value(payload, BUCKET),
                           get_version(payload, VERSION_KEY_NAME),
                           archive_filename)
    uncompress_file(archive_filename, destination)
    os.remove(archive_filename)
    print(get_version_output(get_version(payload, VERSION_KEY_NAME), VERSION_KEY_NAME))
    return 0
def execute():
    # 'check' step: list the bucket and report versions matching the configured file name.
    valid, payload = load_and_validate_payload(schemas, Request.CHECK)
    if not valid:
        return -1
    s3client = S3Client(get_source_value(payload, ACCESS_KEY),
                        get_source_value(payload, SECRET_KEY),
                        get_source_value(payload, REGION_NAME))
    if not s3client.does_bucket_exist(get_source_value(payload, BUCKET)):
        return -1
    files = s3client.list_files(get_source_value(payload, BUCKET))
    version = get_version(payload, VERSION_KEY_NAME)
    if version is None or version == "":
        # No prior version supplied: report no versions.
        versions = []
    else:
        regexp = '{}(.*).tar.gz'.format(get_source_value(payload, FILE_NAME))
        versions = match_versions(regexp, files, version)
    print(versions_as_list(versions, VERSION_KEY_NAME))
    return 0
def execute(sources_directory):
    # 'out' step: compress the artifacts folder and upload the archive to S3.
    valid, payload = load_and_validate_payload(schemas, Request.OUT)
    if not valid:
        return -1
    s3client = S3Client(get_source_value(payload, ACCESS_KEY),
                        get_source_value(payload, SECRET_KEY),
                        get_source_value(payload, REGION_NAME))
    if not s3client.does_bucket_exist(get_source_value(payload, BUCKET)):
        return -1
    version_file_path = join_paths(sources_directory, get_params_value(payload, VERSION_FILE))
    artifacts_folder_path = os.path.join(sources_directory, get_params_value(payload, FOLDER_PATH))
    version = read_file(version_file_path)
    archive_filename = get_source_value(payload, FILE_NAME) + version + '.tar.gz'
    compress_folder(archive_filename, artifacts_folder_path)
    archive_file_path = join_paths(artifacts_folder_path, archive_filename)
    s3client.upload_file(get_source_value(payload, BUCKET), archive_filename, archive_file_path)
    os.remove(archive_file_path)
    print(get_version_output(archive_filename, VERSION_KEY_NAME))
    return 0
def testShouldCreateExpectedPublicURL(self):
    client = S3Client(
        Credentials("0PN5J17HBGZHT7JJ3X82", "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o"))
    actual = client.publicURL('johnsmith', 'cute/puppy.jpg',
                              datetime.datetime.fromtimestamp(1234))
    expected = "https://johnsmith.s3.amazonaws.com/cute/puppy.jpg" + \
               "?AWSAccessKeyId=0PN5J17HBGZHT7JJ3X82" + \
               "&Expires=1234" + \
               "&Signature=HWHkXHVSQazVDcxkZaCkVlGz7vg%3D"
    self.assertEqual(expected, actual)
def testShouldCreateAndDeleteBucket(self):
    client = S3Client(loadcredentials())
    bucket_name = "test-" + str(uuid.uuid4())
    client.createBucket(bucket_name)
    self.failUnless(client.bucketExists(bucket_name))
    bucket_list = client.listBuckets()
    self.failUnless(len(bucket_list) > 0)
    self.failUnless(bucket_name in bucket_list)
    client.deleteBucket(bucket_name)
    self.failIf(client.bucketExists(bucket_name))
def testShouldCreateAndDeleteObjectInBucket(self):
    client = S3Client(loadcredentials())
    bucket_name = "test-" + str(uuid.uuid4())
    client.createBucket(bucket_name)
    object_key = 's3client.py'
    file_path = os.path.join(os.getcwd(), object_key)
    client.createObject(bucket_name, object_key, file_path)
    self.failUnless(client.objectExists(bucket_name, object_key))
    client.deleteObject(bucket_name, object_key)
    self.failIf(client.objectExists(bucket_name, object_key))
    client.deleteBucket(bucket_name)
def testCannotDeleteBucketWithObject(self):
    client = S3Client(loadcredentials())
    bucket_name = "test-" + str(uuid.uuid4())
    client.createBucket(bucket_name)
    object_key = 's3client.py'
    file_path = os.path.join(os.getcwd(), object_key)
    client.createObject(bucket_name, object_key, file_path)
    try:
        client.deleteBucket(bucket_name)
        self.fail('Should have failed to delete a bucket with an object')
    except BadHttpResponse:
        pass
    client.deleteObject(bucket_name, object_key)
    client.deleteBucket(bucket_name)
def testCanDownloadUploadedFile(self):
    client = S3Client(loadcredentials())
    bucket_name = "test-" + str(uuid.uuid4())
    client.createBucket(bucket_name)
    object_key = 's3client.py'
    file_path = os.path.join(os.getcwd(), object_key)
    client.createObject(bucket_name, object_key, file_path)
    temp_file_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    client.downloadObject(bucket_name, object_key, temp_file_path)
    self.assertEqual(client.computeMD5(file_path), client.computeMD5(temp_file_path))
    os.remove(temp_file_path)
    client.deleteObject(bucket_name, object_key)
    client.deleteBucket(bucket_name)
def testCanDownloadFileUsingPublicURL(self):
    client = S3Client(loadcredentials(), True)
    bucket_name = "test-" + str(uuid.uuid4())
    client.createBucket(bucket_name)
    object_key = 's3client.py'
    file_path = os.path.join(os.getcwd(), object_key)
    client.createObject(bucket_name, object_key, file_path)
    temp_file_path = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    expires = datetime.datetime.now() + datetime.timedelta(days=1)
    urllib.urlretrieve(client.publicURL(bucket_name, object_key, expires), temp_file_path)
    self.assertEqual(client.computeMD5(file_path), client.computeMD5(temp_file_path))
    os.remove(temp_file_path)
    client.deleteObject(bucket_name, object_key)
    client.deleteBucket(bucket_name)
def testShouldReturnOctetStreamForUnknownFileType(self):
    client = S3Client(loadcredentials())
    self.assertEqual('application/octet-stream', client.getMimeType('/foo/file.foo'))
def testShouldFindPublicBuckets(self):
    client = S3Client(loadcredentials())
    self.failUnless(client.bucketExists('public'))
    self.failUnless(client.bucketExists('Public'))
def testShouldReturnPdfMimeTypeForPdfFile(self):
    client = S3Client(loadcredentials())
    self.assertEqual('application/pdf', client.getMimeType('/foo/file.pdf'))
from s3client import S3Client
import pickle
import random

client = S3Client()

# This will help read the files at the given prefix 1/06232020.
total = 0
objects = []
for file in client.get_all_s3_objects(Bucket='gap-warehouse', Prefix="1/06232020"):
    objects.append(file['Key'])
    total += 1
print(total)

# Fetch each object and unpickle its body.
for key in objects:
    print(key)
    s3_object = client.client.get_object(Bucket='gap-warehouse', Key=key)
    data = s3_object['Body'].read()
    print(pickle.loads(data))

# def ret():
#     data = {
#         'timestamp': 1592952651,
#         'label': 'person',
#         'trackid': random.randrange(100),
#         'left': random.randrange(100),
#         'right': random.randrange(100),
#         'top': random.randrange(100),
#         'bottom': random.randrange(100),
#     }