Example #1
    def test_key_with_files(self):
        """
        test simple key 'from_file' and 'to_file' functions
        """
        log = logging.getLogger("test_key_with_files")
        bucket_name = "com-dougfort-test-key-with-files"
        key_name = "A" * 1024
        test_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-original"
        )
        test_file_size = 1024 ** 2
        buffer_size = 1024

        log.debug("writing %s bytes to %s" % (
            test_file_size, test_file_path, 
        ))
        bytes_written = 0
        with open(test_file_path, "w") as output_file:
            while bytes_written < test_file_size:
                output_file.write(_random_string(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "r") as archive_file:
            write_key.set_contents_from_file(archive_file)        
        self.assertTrue(write_key.exists())

        # create another key with the same name 
        read_key = Key(bucket, key_name)

        # read back the data
        retrieve_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-retrieved"
        )
        with open(retrieve_file_path, "w") as retrieve_file:
            read_key.get_contents_to_file(retrieve_file)      
        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False)
        )

        # delete the key
        read_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
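
Several of these examples call a _random_string helper that is not shown on this page. A minimal sketch, assuming the helper only needs to produce printable filler of a given length:

import random
import string

def _random_string(size):
    # hypothetical reconstruction of the helper used by these tests;
    # any source of `size` printable characters would do
    return "".join(random.choice(string.ascii_letters) for _ in range(size))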
Example #4
    def _bucket_with_unauth_locations(self, access_control):
        log = logging.getLogger("_bucket_with_unauth_locations")

        access_control_json = json.dumps(access_control)

        s3_connection = motoboto.S3Emulator()

        # create the bucket
        bucket = s3_connection.create_unique_bucket(
            access_control=access_control_json)

        self.assertTrue(bucket is not None)

        # the bucket's authenticated connection should be able to list keys
        _ = bucket.get_all_keys()

        # an unauthenticated connection should still be denied list access
        with self.assertRaises(LumberyardHTTPError) as context_manager:
            _ = _list_keys(bucket.name)
        self.assertEqual(context_manager.exception.status, 401)

        # the bucket's authenticated connection should be able to write
        auth_key_name = "authenticated_key"
        auth_test_string = "authenticated test string"
        write_key = Key(bucket)
        write_key.name = auth_key_name
        write_key.set_contents_from_string(auth_test_string)
        self.assertTrue(write_key.exists())

        # an unauthenticated connection should also be able to write
        unauth_key_name = "unauthenticated_key"
        unauth_test_string = "unauth test string"
        archive_result = _archive_key_from_string(bucket.name, unauth_key_name,
                                                  unauth_test_string)
        self.assertTrue("version_identifier" in archive_result)
        head_result = _head_key(bucket.name, unauth_key_name)
        log.info("head_result = {0}".format(head_result))

        # the bucket's authenticated connection should be able to read
        read_key = Key(bucket, auth_key_name)
        returned_string = read_key.get_contents_as_string()
        self.assertEqual(returned_string.decode("utf-8"), auth_test_string)

        # an unauthenticated connection should also be able to read
        returned_string = _retrieve_key_to_string(bucket.name, unauth_key_name)
        self.assertEqual(returned_string.decode("utf-8"), unauth_test_string)

        # the bucket's authenticated connection should be able to delete
        read_key.delete()

        # an unauthenticated connection should also be able to delete
        delete_result = _delete_key(bucket.name, unauth_key_name)
        self.assertTrue(delete_result["success"])

        # delete the bucket
        s3_connection.delete_bucket(bucket.name)
        s3_connection.close()
Example #5
    def _bucket_without_unauth_access(self, access_control):
        if access_control is None:
            access_control_json = None
        else:
            access_control_json = json.dumps(access_control)

        s3_connection = motoboto.S3Emulator()

        # create the bucket
        bucket = s3_connection.create_unique_bucket(
            access_control=access_control_json)

        # the bucket's authenticated connection should be able to list keys
        _ = bucket.get_all_keys()

        # an unauthenticated connection should be denied list_access
        with self.assertRaises(LumberyardHTTPError) as context_manager:
            _ = _list_keys(bucket.name)
        self.assertEqual(context_manager.exception.status, 401)

        # the bucket's authenticated connection should be able to write
        auth_key_name = "authenticated_key"
        auth_test_string = "authenticated test string"
        write_key = Key(bucket)
        write_key.name = auth_key_name
        write_key.set_contents_from_string(auth_test_string)
        self.assertTrue(write_key.exists())

        # an unauthenticated connection should be denied write_access
        unauth_key_name = "unauthenticated_key"
        unauth_test_string = "unauth test string"
        with self.assertRaises(LumberyardHTTPError) as context_manager:
            _ = _archive_key_from_string(bucket.name, unauth_key_name,
                                         unauth_test_string)
        self.assertEqual(context_manager.exception.status, 401)

        # the bucket's authenticated connection should be able to read
        read_key = Key(bucket, auth_key_name)
        returned_string = read_key.get_contents_as_string()
        self.assertEqual(returned_string.decode("utf-8"), auth_test_string)

        # an unauthenticated connection should be denied read_access
        with self.assertRaises(LumberyardHTTPError) as context_manager:
            _ = _retrieve_key_to_string(bucket.name, unauth_key_name)
        self.assertEqual(context_manager.exception.status, 401)

        # the bucket's authenticated connection should be able to delete
        read_key.delete()

        # an unauthenticated connection should be denied delete_access
        with self.assertRaises(LumberyardHTTPError) as context_manager:
            _ = _delete_key(bucket.name, unauth_key_name)
        self.assertEqual(context_manager.exception.status, 401)

        # delete the bucket
        s3_connection.delete_bucket(bucket.name)
        s3_connection.close()
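
Both helpers above serialize an access_control mapping with json.dumps before creating the bucket. The schema the emulator accepts is not shown on this page, so the key names below are hypothetical placeholders; the sketch only illustrates how a test might drive the two helpers:

    def test_bucket_access(self):
        # hypothetical payloads: the real access-control key names used by
        # motoboto/lumberyard are not shown on this page
        self._bucket_with_unauth_locations({"allow_unauth_read": True,
                                            "allow_unauth_write": True})
        self._bucket_without_unauth_access(None)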
Example #6
    def test_uninterrupted_resumable(self):
        """
        test get_contents_to_file without any interruption. 
        """
        log = logging.getLogger("test_uninterrupted_resumable")
        key_name = "test-key"
        test_file_path = os.path.join(test_dir_path, "test-original")
        test_file_size = 1024**2
        buffer_size = 1024

        log.debug("writing {0} bytes to {1}".format(test_file_size,
                                                    test_file_path))
        bytes_written = 0
        with open(test_file_path, "wb") as output_file:
            while bytes_written < test_file_size:
                output_file.write(os.urandom(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_unique_bucket()
        self.assertTrue(bucket is not None)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "rb") as archive_file:
            write_key.set_contents_from_file(archive_file)
        self.assertTrue(write_key.exists())

        # create a ResumableDownloadHandler
        tracker_file_path = os.path.join(test_dir_path, "tracker-file")
        download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_path)

        # read back the data
        retrieve_file_path = os.path.join(test_dir_path,
                                          "test-retrieved")
        with open(retrieve_file_path, "wb") as retrieve_file:
            write_key.get_contents_to_file(
                retrieve_file, res_download_handler=download_handler)

        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False))

        # delete the key
        write_key.delete()
        self.assertFalse(write_key.exists())

        # delete the bucket
        self._s3_connection.delete_bucket(bucket.name)
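
ResumableDownloadHandler records its progress in the tracker file named above. Assuming boto removes the tracker file once a download completes cleanly (behavior this page does not confirm), the uninterrupted case could additionally assert, inside the test body:

        # assumption: the tracker file is removed after a completed
        # download; drop this check if the emulator behaves differently
        self.assertFalse(os.path.exists(tracker_file_path))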
Example #7
    def xxxtest_key_with_meta(self):
        """
        test simple key with metadata added
        """
        bucket_name = "com-dougfort-test-key-with-meta"
        key_name = u"test-key"
        test_string = _random_string(1024)
        meta_key = u"meta_key"
        meta_value = "pork"

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        # self.assertFalse(write_key.exists())

        # set some metadata
        write_key.set_metadata(meta_key, meta_value)

        # upload some data
        write_key.set_contents_from_string(test_string)        
        self.assertTrue(write_key.exists())

        # create another key with the same name 
        read_key = Key(bucket, key_name)

        # read back the data
        returned_string = read_key.get_contents_as_string()      
        self.assertEqual(returned_string, test_string)

        # get the metadata
        returned_meta_value = read_key.get_metadata(meta_key)
        self.assertEqual(returned_meta_value, meta_value)

        # delete the key
        read_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
Example #8
    def test_key_with_strings(self):
        """
        test simple key 'from_string' and 'as_string' functions
        """
        bucket_name = "com-dougfort-test-key-with-strings"
        key_name = u"test-key"
        test_string = _random_string(1024)

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        # self.assertFalse(write_key.exists())

        # upload some data
        write_key.set_contents_from_string(test_string)        
        self.assertTrue(write_key.exists())

        # create another key with the same name 
        read_key = Key(bucket, key_name)

        # read back the data
        returned_string = read_key.get_contents_as_string()      
        self.assertEqual(returned_string, test_string)

        # delete the key
        read_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
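
Example #8 compares the returned value to test_string directly, while examples #4 and #5 call .decode("utf-8") first; under Python 2 both work because get_contents_as_string returns a byte string. A version-tolerant sketch of the comparison:

        returned_string = read_key.get_contents_as_string()
        # bytes come back under Python 3; a Python 2 str passes through
        if isinstance(returned_string, bytes):
            returned_string = returned_string.decode("utf-8")
        self.assertEqual(returned_string, test_string)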
Example #9
    def test_uninterrupted_resumable(self):
        """
        test get_contents_to_file without any interruption. 
        """
        log = logging.getLogger("test_uninterrupted_resumable")
        key_name = "test-key"
        test_file_path = os.path.join(
            test_dir_path, "test-original"
        )
        test_file_size = 1024 ** 2
        buffer_size = 1024

        log.debug("writing {0} bytes to {1}".format(test_file_size, 
                                                    test_file_path))
        bytes_written = 0
        with open(test_file_path, "wb") as output_file:
            while bytes_written < test_file_size:
                output_file.write(os.urandom(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_unique_bucket()
        self.assertTrue(bucket is not None)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "rb") as archive_file:
            write_key.set_contents_from_file(archive_file)        
        self.assertTrue(write_key.exists())

        # create a ResumableDownloadHandler
        tracker_file_path = os.path.join(
            test_dir_path, "tracker-file"
        )
        download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_path
        )

        # read back the data
        retrieve_file_path = os.path.join(
            test_dir_path, "test-retrieved"
        )
        with open(retrieve_file_path, "wb") as retrieve_file:
            write_key.get_contents_to_file(
                retrieve_file, res_download_handler=download_handler)

        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False)
        )

        # delete the key
        write_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket.name)
Example #10
    def test_interrupted_resumable(self):
        """
        test get_contents_to_file with a simulated interruption. 
        """
        log = logging.getLogger("test_interrupted_resumable")
        key_name = "test-key"
        test_file_path = os.path.join(
            test_dir_path, "test-original"
        )
        test_file_size = 1024 ** 2
        interrupted_size = 1024 * 42

        test_data = os.urandom(test_file_size)

        log.debug("writing {0} bytes to {1}".format(test_file_size, 
                                                    test_file_path))
        with open(test_file_path, "wb") as output_file:
            output_file.write(test_data)

        # create the bucket
        bucket = self._s3_connection.create_unique_bucket()
        self.assertTrue(bucket is not None)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        # self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "rb") as archive_file:
            write_key.set_contents_from_file(archive_file)        
        self.assertTrue(write_key.exists())

        # create a ResumableDownloadHandler
        tracker_file_path = os.path.join(
            test_dir_path, "tracker-file"
        )
        download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_path
        )

        retrieve_file_path = os.path.join(
            test_dir_path, "test-retrieved"
        )

        # copy some of the data to the retrieve file to simulate an
        # interrupted retrieve
        with open(retrieve_file_path, "wb") as output_file:
            output_file.write(test_data[:interrupted_size])

        # spoof the resumable handler into thinking it has a retrieve
        # in progress
        download_handler._save_tracker_info(write_key)

        # resume the retrieve
        with open(retrieve_file_path, "wb") as retrieve_file:
            write_key.get_contents_to_file(
                retrieve_file, res_download_handler=download_handler)

        # read back the retrieved data
        with open(retrieve_file_path, "rb") as retrieve_file:
            retrieved_data = retrieve_file.read()

        self.assertEqual(len(retrieved_data), len(test_data))
        self.assertTrue(retrieved_data == test_data)

        # delete the key
        write_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket.name)
Example #11
    def xxxtest_simple_multipart(self):
        """
        test a simple multipart upload
        """
        log = logging.getLogger("test_simple_multipart")
        bucket_name = "com-dougfort-test-simple-multipart"
        key_name = "test_key"
        test_file_path = os.path.join(
            _test_dir_path, "test_simple_multipart-original"
        )
        part_count = 2
        # 5 MB is the minimum part size S3 will accept
        test_file_size = 1024 ** 2 * 5 * part_count
        buffer_size = 1024

        log.debug("writing %s bytes to %s" % (
            test_file_size, test_file_path, 
        ))
        bytes_written = 0
        with open(test_file_path, "w") as output_file:
            while bytes_written < test_file_size:
                output_file.write(_random_string(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # assert that we have no uploads in progress
        upload_list = bucket.get_all_multipart_uploads()
        self.assertEqual(len(upload_list), 0)

        # start the multipart upload
        multipart_upload = bucket.initiate_multipart_upload(key_name)

        # assert that our upload is in progress
        upload_list = bucket.get_all_multipart_uploads()
        self.assertEqual(len(upload_list), 1)
        self.assertEqual(upload_list[0].id, multipart_upload.id)

        # upload the file in pieces
        current_pos = 0
        part_size = int(test_file_size / part_count)
        for index in range(part_count):
            with open(test_file_path, "r") as input_file:
                input_file.seek(current_pos)
                data = input_file.read(part_size)
            current_pos += part_size
            upload_file = StringIO(data)
            multipart_upload.upload_part_from_file(upload_file, index+1)

        # complete the upload
        completed_upload = multipart_upload.complete_upload()
        print >> sys.stderr, dir(completed_upload)

        # delete the key
        key = Key(bucket, key_name)
        key.delete()
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
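
part_size = int(test_file_size / part_count) only divides cleanly because the test file size is an exact multiple of the part count; in general only the final part may fall below S3's 5 MB minimum, so the last part has to absorb any remainder. A sketch of the boundary arithmetic:

def _part_ranges(total_size, part_count):
    # yield an (offset, length) pair per part; the last part absorbs
    # any remainder so earlier parts stay at the full base size
    base = total_size // part_count
    for index in range(part_count):
        offset = index * base
        length = base if index < part_count - 1 else total_size - offset
        yield offset, length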
Example #12
    def test_key_with_files_and_callback(self):
        """
        test simple key 'from_file' and 'to_file' functions
        """
        def _archive_callback(bytes_sent, total_bytes):
            print >> sys.stderr, "archived", str(bytes_sent), "out of", \
                    str(total_bytes)

        def _retrieve_callback(bytes_sent, total_bytes):
            print >> sys.stderr, "retrieved", str(bytes_sent), "out of", \
                    str(total_bytes)

        log = logging.getLogger("test_key_with_files_and_callback")
        bucket_name = "com-dougfort-test-key-with-files-and-callback"
        key_name = "A" * 1024
        test_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-original"
        )
        test_file_size = 1024 ** 2
        buffer_size = 1024

        log.debug("writing %s bytes to %s" % (
            test_file_size, test_file_path, 
        ))
        bytes_written = 0
        with open(test_file_path, "w") as output_file:
            while bytes_written < test_file_size:
                output_file.write(_random_string(buffer_size))
                bytes_written += buffer_size

        # create the bucket
        bucket = self._s3_connection.create_bucket(bucket_name)
        self.assertTrue(bucket is not None)
        self.assertEqual(bucket.name, bucket_name)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "r") as archive_file:
            write_key.set_contents_from_file(
                archive_file, cb=_archive_callback
            )        
        self.assertTrue(write_key.exists())

        # create another key with the same name 
        read_key = Key(bucket, key_name)

        # read back the data
        retrieve_file_path = os.path.join(
            _test_dir_path, "test_key_with_files-retrieved"
        )
        # 2011-08-08 dougfort boto aborts if you don't tell it the size
        read_key.size = test_file_size
        with open(retrieve_file_path, "w") as retrieve_file:
            read_key.get_contents_to_file(
                retrieve_file, cb=_retrieve_callback
            )      
        self.assertTrue(
            filecmp.cmp(test_file_path, retrieve_file_path, shallow=False)
        )

        # delete the key
        read_key.delete()
        self.assertFalse(write_key.exists())
        
        # delete the bucket
        self._s3_connection.delete_bucket(bucket_name)
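
boto calls the cb callback with the bytes transferred so far and the expected total. A callback that reports a percentage instead of raw byte counts, as a sketch (guarding against a zero total in case the size is not yet known):

        def _progress_callback(bytes_sent, total_bytes):
            # total_bytes may be 0 before the transfer size is known
            if total_bytes:
                print >> sys.stderr, "%.1f%% complete" % (
                    100.0 * bytes_sent / total_bytes)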
Example #13
    def test_interrupted_resumable(self):
        """
        test get_contents_to_file with a simulated interruption. 
        """
        log = logging.getLogger("test_interrupted_resumable")
        key_name = "test-key"
        test_file_path = os.path.join(test_dir_path, "test-original")
        test_file_size = 1024**2
        interrupted_size = 1024 * 42

        test_data = os.urandom(test_file_size)

        log.debug("writing {0} bytes to {1}".format(test_file_size,
                                                    test_file_path))
        with open(test_file_path, "wb") as output_file:
            output_file.write(test_data)

        # create the bucket
        bucket = self._s3_connection.create_unique_bucket()
        self.assertTrue(bucket is not None)

        # create an empty key
        write_key = Key(bucket)

        # set the name
        write_key.name = key_name
        # self.assertFalse(write_key.exists())

        # upload some data
        with open(test_file_path, "rb") as archive_file:
            write_key.set_contents_from_file(archive_file)
        self.assertTrue(write_key.exists())

        # create a ResumableDownloadHandler
        tracker_file_path = os.path.join(test_dir_path, "tracker-file")
        download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_path)

        retrieve_file_path = os.path.join(test_dir_path,
                                          "test-retrieved")

        # copy some of the data to the retrieve file to simulate an
        # interrupted retrieve
        with open(retrieve_file_path, "wb") as output_file:
            output_file.write(test_data[:interrupted_size])

        # spoof the resumable handler into thinking it has a retrieve
        # in progress
        download_handler._save_tracker_info(write_key)

        # resume the retrieve
        with open(retrieve_file_path, "wb") as retrieve_file:
            write_key.get_contents_to_file(
                retrieve_file, res_download_handler=download_handler)

        # read back the retrieved data
        with open(retrieve_file_path, "rb") as retrieve_file:
            retrieved_data = retrieve_file.read()

        self.assertEqual(len(retrieved_data), len(test_data))
        self.assertTrue(retrieved_data == test_data)

        # delete the key
        write_key.delete()
        self.assertFalse(write_key.exists())

        # delete the bucket
        self._s3_connection.delete_bucket(bucket.name)
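
One caveat on examples #10 and #13: the resume step reopens the retrieve file with mode "wb", which truncates the simulated partial data, so the handler most likely fetches the whole object again rather than just the missing tail. Assuming the handler derives its resume offset from the partial file's size, the outstanding byte count could be checked before that truncating open, inside the test body:

        # assumption: the handler resumes from the partial file's byte count
        remaining = test_file_size - os.path.getsize(retrieve_file_path)
        log.debug("a true resume would fetch {0} more bytes".format(remaining))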