def test_s3_delete(self):
    """Delete three S3 keys (two objects plus a directory key) and
    verify the bucket ends up empty.

    The keys are the same ones created by filegenerators_test.py,
    including the create-s3-file fixture.
    """
    keys = [
        self.bucket + "/another_directory/text2.txt",
        self.bucket + "/text1.txt",
        self.bucket + "/another_directory/",
    ]
    # One delete task per key, built as a comprehension.
    tasks = [
        FileInfo(
            src=key,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            size=0,
            service=self.service,
            endpoint=self.endpoint,
        )
        for key in keys
    ]
    self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
    self.s3_handler.call(tasks)
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_upload_fail(self):
    """
    One of the uploads will fail to upload in this test as
    the second s3 destination's bucket does not exist.
    """
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    fail_s3_files = [
        self.bucket + '/text1.txt',
        self.bucket[:-1] + '/another_directory/text2.txt'
    ]
    files = [self.loc_files[0], self.loc_files[1]]
    # Pair each local file with its destination directly instead of
    # indexing parallel lists with range(len(...)).
    tasks = []
    for src, dest in zip(files, fail_s3_files):
        tasks.append(
            FileInfo(src=src, dest=dest,
                     compare_key=None, src_type='local',
                     dest_type='s3', operation_name='upload',
                     size=0, last_update=None,
                     service=self.service,
                     endpoint=self.endpoint))
    self.s3_handler.call(tasks)
    # Confirm only one of the files was uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
def test_delete_url_encode(self):
    """Delete an S3 key containing a '+' and confirm it is removed."""
    key = self.bucket + '/a+b/foo'
    delete_task = FileInfo(src=key, src_type='s3',
                           dest_type='local',
                           operation='delete', size=0)
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.s3_handler.call([delete_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_delete_url_encode(self):
    """Delete an S3 key containing a '+' and confirm it is removed."""
    key = self.bucket + '/a+b/foo'
    delete_task = FileInfo(
        src=key,
        src_type='s3',
        dest_type='local',
        operation_name='delete',
        size=0,
        service=self.service,
        endpoint=self.endpoint,
    )
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.s3_handler.call([delete_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_s3_delete_url_encode(self):
    """Delete an S3 key containing a '+' and confirm the bucket empties.

    The key is one generated by filegenerators_test.py (the create
    s3 file fixture).
    """
    target = self.bucket + '/a+b/foo'
    delete_task = FileInfo(src=target, src_type='s3',
                           dest_type='local',
                           operation='delete', size=0)
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.s3_handler.call([delete_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_move_unicode(self):
    """Move objects between buckets and verify one lands in bucket2."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
    # Create file info objects to perform move; zip the parallel
    # source/destination lists instead of indexing with range(len(...)).
    tasks = []
    for src, dest in zip(self.s3_files, self.s3_files2):
        tasks.append(FileInfo(src=src, src_type='s3',
                              dest=dest, dest_type='s3',
                              operation='move', size=0))
    # Perform the move.
    self.s3_handler.call(tasks)
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 1)
def test_s3_delete_url_encode(self):
    """Delete an S3 key containing a '+' and confirm the bucket empties.

    The key is one generated by filegenerators_test.py (the create
    s3 file fixture).
    """
    target = self.bucket + '/a+b/foo'
    delete_task = FileInfo(src=target,
                           src_type='s3',
                           dest_type='local',
                           operation_name='delete',
                           size=0,
                           service=self.service,
                           endpoint=self.endpoint)
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.s3_handler.call([delete_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_upload(self):
    """Upload two local files and confirm both appear in the bucket."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    # Create file info objects to perform upload; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(FileInfo(src=src, dest=dest,
                              operation='upload', size=0))
    # Perform the upload.
    self.s3_handler.call(tasks)
    # Confirm despite the exceptions, the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
def test_move_unicode(self):
    """Move objects between buckets and verify one lands in bucket2."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
    # Create file info objects to perform move; zip the parallel
    # source/destination lists instead of indexing with range(len(...)).
    tasks = []
    for src, dest in zip(self.s3_files, self.s3_files2):
        tasks.append(FileInfo(
            src=src, src_type='s3',
            dest=dest, dest_type='s3',
            operation_name='move', size=0,
            service=self.service,
            endpoint=self.endpoint
        ))
    # Perform the move.
    self.s3_handler.call(tasks)
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 1)
def test_delete_url_encode(self):
    """Delete an S3 key containing a '+' and confirm it is removed."""
    key = self.bucket + "/a+b/foo"
    delete_task = FileInfo(
        src=key,
        src_type="s3",
        dest_type="local",
        operation_name="delete",
        size=0,
        service=self.service,
        endpoint=self.endpoint,
    )
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.s3_handler.call([delete_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_s3_delete(self):
    """Delete three S3 keys (two objects plus a directory key) and
    verify the bucket ends up empty.

    The keys are the same ones created by filegenerators_test.py,
    including the create-s3-file fixture.
    """
    keys = [self.bucket + '/another_directory/text2.txt',
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/']
    # Build one delete task per key as a comprehension.
    tasks = [
        FileInfo(src=key, src_type='s3', dest_type='local',
                 operation='delete', size=0)
        for key in keys
    ]
    self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
    self.s3_handler.call(tasks)
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_move(self):
    """Move objects from S3 to local disk and verify contents."""
    # Create file info objects to perform move; zip the parallel
    # source/destination lists instead of indexing with range(len(...)).
    tasks = []
    time = datetime.datetime.now()
    for src, dest in zip(self.s3_files, self.loc_files):
        tasks.append(
            FileInfo(src=src, src_type='s3',
                     dest=dest, dest_type='local',
                     last_update=time, operation_name='move',
                     size=0, service=self.service,
                     endpoint=self.endpoint))
    # Perform the move.
    self.s3_handler.call(tasks)
    # Confirm that the files now exist.
    for filename in self.loc_files:
        self.assertTrue(os.path.exists(filename))
    # Ensure the contents are as expected.
    with open(self.loc_files[0], 'rb') as filename:
        self.assertEqual(filename.read(), b'This is a test.')
    with open(self.loc_files[1], 'rb') as filename:
        self.assertEqual(filename.read(), b'This is another test.')
    # Ensure the objects are no longer in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
def test_upload(self):
    """Upload two local files and confirm both appear in the bucket."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    # Create file info objects to perform upload; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(FileInfo(src=src, dest=dest,
                              operation_name='upload', size=0,
                              service=self.service,
                              endpoint=self.endpoint))
    # Perform the upload.
    self.s3_handler.call(tasks)
    # Confirm despite the exceptions, the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
def test_upload_fail(self):
    """
    One of the uploads will fail to upload in this test as
    the second s3 destination's bucket does not exist.
    """
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    fail_s3_files = [self.bucket + '/text1.txt',
                     self.bucket[:-1] + '/another_directory/text2.txt']
    files = [self.loc_files[0], self.loc_files[1]]
    # Pair each local file with its destination directly instead of
    # indexing parallel lists with range(len(...)).
    tasks = []
    for src, dest in zip(files, fail_s3_files):
        tasks.append(FileInfo(src=src, dest=dest,
                              operation='upload', size=0))
    self.s3_handler.call(tasks)
    # Confirm only one of the files was uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
def test_upload(self):
    """
    Test the ability to upload a file without the use of threads.
    """
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    # Create file info objects to perform upload; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(FileInfo(src=src, dest=dest,
                              operation='upload', size=0))
    # Perform the upload.
    self.s3_handler.call(tasks)
    # Confirm the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
def test_move(self):
    """Move two objects from bucket to bucket2 and verify counts."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
    # Create file info objects to perform move; zip the parallel
    # source/destination lists instead of indexing with range(len(...)).
    tasks = []
    for src, dest in zip(self.s3_files, self.s3_files2):
        tasks.append(FileInfo(
            src=src, src_type='s3',
            dest=dest, dest_type='s3',
            operation_name='move', size=0,
            service=self.service,
            endpoint=self.endpoint))
    # Perform the move.
    self.s3_handler.call(tasks)
    # Confirm the files were moved. The original bucket had three
    # objects. Only two were moved.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 2)
def test_move_unicode(self):
    """Move an object whose key is a unicode check mark between buckets."""
    self.bucket2 = make_s3_files(self.session, key1=u'\u2713')
    move_task = FileInfo(src=self.bucket2 + '/' + u'\u2713',
                         src_type='s3',
                         dest=self.bucket + '/' + u'\u2713',
                         dest_type='s3',
                         operation='move', size=0)
    self.s3_handler.call([move_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
def test_upload(self):
    """Upload two local files and verify content-type guessing."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    # Create file info objects to perform upload; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(FileInfo(src=src, dest=dest,
                              operation='upload', size=0))
    # Perform the upload.
    self.s3_handler.call(tasks)
    # Confirm the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
    # Verify the guessed content type.
    self.assertEqual(
        self.session.s3[self.bucket][
            'another_directory/text2.txt']['ContentType'],
        'text/plain')
def test_upload(self):
    """
    Test the ability to upload a file without the use of threads.
    """
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    # Create file info objects to perform upload; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(
            FileInfo(src=src, dest=dest,
                     operation='upload', size=0))
    # Perform the upload.
    self.s3_handler.call(tasks)
    # Confirm the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
def test_s3_delete(self):
    """Delete three S3 keys (two objects plus a directory key) and
    verify the bucket ends up empty.

    The keys are the same ones created by filegenerators_test.py,
    including the create-s3-file fixture.
    """
    keys = [self.bucket + '/another_directory/text2.txt',
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/']
    # Build one delete task per key as a comprehension.
    tasks = [
        FileInfo(src=key, src_type='s3',
                 dest_type='local',
                 operation_name='delete', size=0,
                 service=self.service,
                 endpoint=self.endpoint)
        for key in keys
    ]
    self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
    self.s3_handler.call(tasks)
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
def test_move(self):
    """Move two objects from bucket to bucket2 and verify counts."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
    # Create file info objects to perform move; zip the parallel
    # source/destination lists instead of indexing with range(len(...)).
    tasks = []
    for src, dest in zip(self.s3_files, self.s3_files2):
        tasks.append(
            FileInfo(src=src, src_type='s3',
                     dest=dest, dest_type='s3',
                     operation='move', size=0))
    # Perform the move.
    self.s3_handler.call(tasks)
    # Confirm the files were moved. The original bucket had three
    # objects. Only two were moved.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
    self.assertEqual(len(list_contents(self.bucket2, self.session)), 2)
def test_upload(self):
    """Upload two local files and verify content-type guessing."""
    # Confirm there are no objects in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
    # Create file info objects to perform upload; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(
            FileInfo(src=src, dest=dest,
                     operation='upload', size=0))
    # Perform the upload.
    self.s3_handler.call(tasks)
    # Confirm the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
    # Verify the guessed content type.
    self.assertEqual(
        self.session.s3[self.bucket]['another_directory/text2.txt']
        ['ContentType'],
        'text/plain')
def test_move_unicode(self):
    """Move an object whose key is a unicode check mark between buckets."""
    self.bucket2 = make_s3_files(self.session, key1=u'\u2713')
    move_task = FileInfo(
        src=self.bucket2 + '/' + u'\u2713',
        src_type='s3',
        dest=self.bucket + '/' + u'\u2713',
        dest_type='s3',
        operation_name='move',
        size=0,
        service=self.service,
        endpoint=self.endpoint,
    )
    self.s3_handler.call([move_task])
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
def test_move(self):
    """Move two local files up to S3 and verify the sources are gone."""
    # Create file info objects to perform move; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(FileInfo(src=src, src_type='local',
                              dest=dest, dest_type='s3',
                              operation='move', size=0))
    # Perform the move.
    self.s3_handler.call(tasks)
    # Confirm the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
    # Confirm local files do not exist.
    for filename in files:
        self.assertFalse(os.path.exists(filename))
def test_move(self):
    """Move two local files up to S3 and verify the sources are gone."""
    # Create file info objects to perform move; pair sources with
    # destinations via zip rather than indexing with range(len(...)).
    files = [self.loc_files[0], self.loc_files[1]]
    tasks = []
    for src, dest in zip(files, self.s3_files):
        tasks.append(FileInfo(
            src=src, src_type='local',
            dest=dest, dest_type='s3',
            operation_name='move', size=0,
            service=self.service,
            endpoint=self.endpoint))
    # Perform the move.
    self.s3_handler.call(tasks)
    # Confirm the files were uploaded.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
    # Confirm local files do not exist.
    for filename in files:
        self.assertFalse(os.path.exists(filename))
def test_move(self):
    """Move objects from S3 to local disk and verify contents."""
    # Create file info objects to perform move; zip the parallel
    # source/destination lists instead of indexing with range(len(...)).
    tasks = []
    time = datetime.datetime.now()
    for src, dest in zip(self.s3_files, self.loc_files):
        tasks.append(FileInfo(src=src, src_type='s3',
                              dest=dest, dest_type='local',
                              last_update=time,
                              operation='move', size=0))
    # Perform the move.
    self.s3_handler.call(tasks)
    # Confirm that the files now exist.
    for filename in self.loc_files:
        self.assertTrue(os.path.exists(filename))
    # Ensure the contents are as expected.
    with open(self.loc_files[0], 'rb') as filename:
        self.assertEqual(filename.read(), b'This is a test.')
    with open(self.loc_files[1], 'rb') as filename:
        self.assertEqual(filename.read(), b'This is another test.')
    # Ensure the objects are no longer in the bucket.
    self.assertEqual(len(list_contents(self.bucket, self.session)), 1)