Example #1
class S3HandlerTestURLEncodeDeletes(S3HandlerBaseTest):
    def setUp(self):
        super(S3HandlerTestURLEncodeDeletes, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session, key1='a+b/foo', key2=None)

    def tearDown(self):
        super(S3HandlerTestURLEncodeDeletes, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_s3_delete_url_encode(self):
        """
        Tests S3 deletes. The files used are the same as those generated
        by filegenerators_test.py, including the created s3 file.
        """
        key = self.bucket + '/a+b/foo'
        tasks = [
            FileInfo(src=key,
                     src_type='s3',
                     dest_type='local',
                     operation_name='delete',
                     size=0,
                     service=self.service,
                     endpoint=self.endpoint)
        ]
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
Example #2
class S3HandlerTestURLEncodeDeletes(S3HandlerBaseTest):
    def setUp(self):
        super(S3HandlerTestURLEncodeDeletes, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session, key1='a+b/foo', key2=None)

    def tearDown(self):
        super(S3HandlerTestURLEncodeDeletes, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_s3_delete_url_encode(self):
        """
        Tests S3 deletes. The files used are the same as those generated
        by filegenerators_test.py, including the created s3 file.
        """
        key = self.bucket + '/a+b/foo'
        tasks = [FileInfo(
            src=key, src_type='s3', dest_type='local',
            operation_name='delete', size=0,
            service=self.service, endpoint=self.endpoint)]
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
Example #3
class TestThrowsWarning(unittest.TestCase):
    def setUp(self):
        self.files = FileCreator()
        self.root = self.files.rootdir
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        self.files.remove_all()

    def test_no_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        return_val = file_gen.triggers_warning(full_path)
        self.assertFalse(return_val)
        self.assertTrue(file_gen.result_queue.empty())

    def test_no_exists(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        filename = os.path.join(self.root, 'file')
        return_val = file_gen.triggers_warning(filename)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(
            warning_message.message,
            ("warning: Skipping file %s. File does not exist." % filename))

    def test_no_read_access(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        open_function = 'awscli.customizations.s3.filegenerator._open'
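        # Simulate an unreadable file by patching the module's _open
        # helper to raise OSError.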
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            return_val = file_gen.triggers_warning(full_path)
            self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File/Directory is "
                          "not readable." % full_path))

    @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
                     'Special files only supported on mac/linux')
    def test_is_special_file_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        file_path = os.path.join(self.files.rootdir, 'foo')
        # Use socket for special file.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File is character "
                          "special device, block special device, FIFO, or "
                          "socket." % file_path))
Example #4
class TestThrowsWarning(unittest.TestCase):
    def setUp(self):
        self.files = FileCreator()
        self.root = self.files.rootdir
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        self.files.remove_all()

    def test_no_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        return_val = file_gen.triggers_warning(full_path)
        self.assertFalse(return_val)
        self.assertTrue(file_gen.result_queue.empty())

    def test_no_exists(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        filename = os.path.join(self.root, 'file')
        return_val = file_gen.triggers_warning(filename)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File does not exist." %
                          filename))

    def test_no_read_access(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        open_function = 'awscli.customizations.s3.filegenerator._open'
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            return_val = file_gen.triggers_warning(full_path)
            self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File/Directory is "
                          "not readable." % full_path))

    @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
                     'Special files only supported on mac/linux')
    def test_is_special_file_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        file_path = os.path.join(self.files.rootdir, 'foo')
        # Use socket for special file.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File is character "
                          "special device, block special device, FIFO, or "
                          "socket." % file_path))
Example #5
class S3HandlerTestMvS3Local(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a download followed by a delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3Local, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

    def tearDown(self):
        super(S3HandlerTestMvS3Local, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_move(self):
        # Create file info objects to perform move.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=self.s3_files[i],
                         src_type='s3',
                         dest=self.loc_files[i],
                         dest_type='local',
                         last_update=time,
                         operation_name='move',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')
        # Ensure the objects are no longer in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Example #6
class TestIgnoreFilesLocally(unittest.TestCase):
    """
    This class tests the ability to ignore particular files.  This includes
    skipping symlinks when desired.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_bad_symlink(self):
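        # Create a symlink whose target does not exist.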
        path = os.path.join(self.files.rootdir, 'badsymlink')
        os.symlink('non-existent-file', path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_skip_symlink(self):
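        # The final FileGenerator argument presumably toggles symlink
        # following; with False, the symlink should be ignored.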
        filename = 'foo.txt'
        self.files.create_file(os.path.join(self.files.rootdir,
                               filename),
                               contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(filename, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', False)
        self.assertTrue(filegenerator.should_ignore_file(sym_path))

    def test_no_skip_symlink(self):
        filename = 'foo.txt'
        path = self.files.create_file(os.path.join(self.files.rootdir,
                                                   filename),
                                      contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_no_skip_symlink_dir(self):
        path = os.path.join(self.files.rootdir, 'dir/')
        os.mkdir(path)
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))
Example #7
class TestIgnoreFilesLocally(unittest.TestCase):
    """
    This class tests the ability to ignore particular files.  This includes
    skipping symlinks when desired.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_warning(self):
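        # In this version, a symlink with a missing target should be
        # reported and ignored.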
        path = os.path.join(self.files.rootdir, 'badsymlink')
        os.symlink('non-existent-file', path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertTrue(filegenerator.should_ignore_file(path))

    def test_skip_symlink(self):
        filename = 'foo.txt'
        self.files.create_file(os.path.join(self.files.rootdir,
                               filename),
                               contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(filename, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', False)
        self.assertTrue(filegenerator.should_ignore_file(sym_path))

    def test_no_skip_symlink(self):
        filename = 'foo.txt'
        path = self.files.create_file(os.path.join(self.files.rootdir,
                                                   filename),
                                      contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_no_skip_symlink_dir(self):
        path = os.path.join(self.files.rootdir, 'dir/')
        os.mkdir(path)
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))
Example #8
class S3HandlerTestMvLocalS3(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses an upload followed by a delete.
    """
    def setUp(self):
        super(S3HandlerTestMvLocalS3, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerTestMvLocalS3, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_move_unicode(self):
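        # The check mark character (u'\u2713') exercises non-ASCII key
        # handling during the move.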
        self.bucket2 = make_s3_files(self.session, key1=u'\u2713')
        tasks = [FileInfo(
            src=self.bucket2 + '/' + u'\u2713',
            src_type='s3',
            dest=self.bucket + '/' + u'\u2713',
            dest_type='s3', operation_name='move',
            size=0,
            service=self.service,
            endpoint=self.endpoint,
        )]
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)

    def test_move(self):
        # Create file info objects to perform move.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i], src_type='local',
                dest=self.s3_files[i], dest_type='s3',
                operation_name='move', size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
        # Confirm local files do not exist.
        for filename in files:
            self.assertFalse(os.path.exists(filename))
Example #9
class S3HandlerTestMvS3Local(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a download followed by a delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3Local, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

    def tearDown(self):
        super(S3HandlerTestMvS3Local, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_move(self):
        # Create file info objects to perform move.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='move',
                size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')
        # Ensure the objects are no longer in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Example #10
class S3HandlerTestMvS3S3(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a copy followed by a delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3S3, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.bucket2 = create_bucket(self.session)
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        self.s3_files2 = [
            self.bucket2 + '/text1.txt',
            self.bucket2 + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerTestMvS3S3, self).tearDown()
        s3_cleanup(self.bucket, self.session)
        s3_cleanup(self.bucket2, self.session)

    def test_move(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
        # Create file info objects to perform move.
        tasks = []
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=self.s3_files[i],
                         src_type='s3',
                         dest=self.s3_files2[i],
                         dest_type='s3',
                         operation_name='move',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm the files were moved.  The original bucket had three
        # objects. Only two were moved.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 2)
Example #11
class S3HandlerExceptionMultiTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle multipart upload exceptions.
    This includes a standard error stemming from an operation on
    a nonexistent bucket, a connection error, and an md5 error.
    """
    def setUp(self):
        super(S3HandlerExceptionMultiTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler_multi = S3Handler(self.session,
                                          params,
                                          multi_threshold=10,
                                          chunksize=2)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerExceptionMultiTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
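        # The truncated bucket name below targets a nonexistent bucket,
        # triggering the error path described in the class docstring.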
        fail_s3_files = [
            self.bucket + '/text1.txt',
            self.bucket[:-1] + '/another_directory/text2.txt'
        ]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=fail_s3_files[i],
                         size=15,
                         operation_name='upload',
                         service=self.service,
                         endpoint=self.endpoint))
        self.s3_handler_multi.call(tasks)
Example #12
class S3HandlerTestBucket(S3HandlerBaseTest):
    """
    Test the ability to make a bucket then remove it.
    """
    def setUp(self):
        super(S3HandlerTestBucket, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = None

    def tearDown(self):
        super(S3HandlerTestBucket, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_bucket(self):
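        # Build a pseudo-random bucket name to avoid collisions between
        # test runs.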
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'
        orig_number_buckets = len(list_buckets(self.session))

        file_info = FileInfo(
            src=self.bucket,
            operation_name='make_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        self.s3_handler.call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets + 1, number_buckets)

        file_info = FileInfo(
            src=self.bucket,
            operation_name='remove_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        self.s3_handler.call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets, number_buckets)
Example #13
class S3HandlerExceptionSingleTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle connection and md5 exceptions.
    The command used in this general test is a put command.
    """
    def setUp(self):
        super(S3HandlerExceptionSingleTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerExceptionSingleTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=self.s3_files[i],
                         operation_name='upload',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm despite the exceptions, the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
Example #14
class S3HandlerTestMvS3S3(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a copy followed by a delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3S3, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.bucket2 = create_bucket(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        self.s3_files2 = [self.bucket2 + '/text1.txt',
                          self.bucket2 + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerTestMvS3S3, self).tearDown()
        s3_cleanup(self.bucket, self.session)
        s3_cleanup(self.bucket2, self.session)

    def test_move(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
        # Create file info objects to perform move.
        tasks = []
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.s3_files2[i], dest_type='s3',
                operation_name='move', size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm the files were moved.  The original bucket had three
        # objects. Only two were moved.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 2)
Example #15
class S3HandlerTestBucket(S3HandlerBaseTest):
    """
    Test the ability to make a bucket then remove it.
    """
    def setUp(self):
        super(S3HandlerTestBucket, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.params = {'region': 'us-east-1'}
        self.bucket = None

    def tearDown(self):
        super(S3HandlerTestBucket, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'
        orig_number_buckets = len(list_buckets(self.session))

        file_info = FileInfo(
            src=self.bucket,
            operation_name='make_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets + 1, number_buckets)

        file_info = FileInfo(
            src=self.bucket,
            operation_name='remove_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets, number_buckets)
Example #16
class S3HandlerExceptionMultiTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle multipart upload exceptions.
    This includes a standard error stemming from an operation on
    a nonexistent bucket, a connection error, and an md5 error.
    """
    def setUp(self):
        super(S3HandlerExceptionMultiTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'quiet': True}
        self.s3_handler_multi = S3Handler(
            self.session, params,
            runtime_config=runtime_config(
                multipart_threshold=10, multipart_chunksize=2))
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerExceptionMultiTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
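        # The truncated bucket name targets a nonexistent bucket to force
        # a multipart upload failure.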
        fail_s3_files = [self.bucket + '/text1.txt',
                         self.bucket[:-1] + '/another_directory/text2.txt']
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=fail_s3_files[i], size=15,
                operation_name='upload',
                service=self.service,
                endpoint=self.endpoint))
        self.s3_handler_multi.call(tasks)
Example #17
class S3HandlerExceptionSingleTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle connection and md5 exceptions.
    The command used in this general test is a put command.
    """
    def setUp(self):
        super(S3HandlerExceptionSingleTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerExceptionSingleTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(src=self.loc_files[i],
                                  dest=self.s3_files[i],
                                  operation_name='upload', size=0,
                                  service=self.service,
                                  endpoint=self.endpoint))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm despite the exceptions, the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
Example #18
class S3FileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.file1 = self.bucket + '/' + 'text1.txt'
        self.file2 = self.bucket + '/' + 'another_directory/text2.txt'
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        s3_cleanup(self.bucket, self.session)

    def test_s3_file(self):
        """
        Generate a single s3 file.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '', params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '', params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file2,
                             dest='another_directory' + os.sep + 'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='',
                              service=None, endpoint=None)

        ref_list = [file_info, file_info2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              'delete', params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #19
class S3FileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.file1 = self.bucket + '/' + 'text1.txt'
        self.file2 = self.bucket + '/' + 'another_directory/text2.txt'
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        s3_cleanup(self.bucket, self.session)

    def test_s3_file(self):
        """
        Generate a single s3 file.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file2,
                             dest='another_directory' + os.sep +
                             'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='',
                              service=None, endpoint=None)

        ref_list = [file_info, file_info2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        files = FileGenerator(self.service, self.endpoint,
                              'delete').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #20
class S3HandlerTestDeleteList(S3HandlerBaseTest):
    """
    This tests the ability to delete both files locally and in s3.
    """
    def setUp(self):
        super(S3HandlerTestDeleteList, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()

    def tearDown(self):
        super(S3HandlerTestDeleteList, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_loc_delete(self):
        """
        Tests local file delete tasks.  The local files are the same as
        those generated by filegenerator_test.py.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for filename in files:
            self.assertTrue(os.path.exists(filename))
            tasks.append(
                FileInfo(src=filename,
                         src_type='local',
                         dest_type='s3',
                         operation_name='delete',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        self.s3_handler.call(tasks)
        for filename in files:
            self.assertFalse(os.path.exists(filename))

    def test_s3_delete(self):
        """
        Tests S3 deletes. The files used are the same as those generated
        by filegenerators_test.py, including the created s3 file.
        """
        keys = [
            self.bucket + '/another_directory/text2.txt',
            self.bucket + '/text1.txt', self.bucket + '/another_directory/'
        ]
        tasks = []
        for key in keys:
            tasks.append(
                FileInfo(src=key,
                         src_type='s3',
                         dest_type='local',
                         operation_name='delete',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)

    def test_list_objects(self):
        """
        Tests the ability to list objects, common prefixes, and buckets.
        If an error occurs, the test fails, since this is only a printing
        operation.
        """
        prefix_name = self.bucket + '/'
        file_info = FileInfo(src=prefix_name,
                             operation_name='list_objects',
                             size=0,
                             service=self.service,
                             endpoint=self.endpoint)
        params = {'region': 'us-east-1'}
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
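        # An empty source path exercises the bucket-listing case noted in
        # the docstring.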
        file_info = FileInfo(src='',
                             operation_name='list_objects',
                             size=0,
                             service=self.service,
                             endpoint=self.endpoint)
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
Example #21
class S3HandlerTestUpload(S3HandlerBaseTest):
    """
    This class tests the ability to upload objects into an S3 bucket, as
    well as to perform multipart uploads.
    """
    def setUp(self):
        super(S3HandlerTestUpload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session, multi_threshold=10,
                                          chunksize=2,
                                          params=params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerTestUpload, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=self.s3_files[i],
                operation_name='upload', size=0,
                service=self.service, endpoint=self.endpoint))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
        # Verify the guessed content type.
        self.assertEqual(
            self.session.s3[self.bucket][
                'another_directory/text2.txt']['ContentType'],
            'text/plain')

    def test_upload_fail(self):
        """
        One of the uploads in this test will fail because the second
        s3 destination's bucket does not exist.
        """
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
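        # Truncate the bucket name so the second upload targets a
        # nonexistent bucket.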
        fail_s3_files = [self.bucket + '/text1.txt',
                         self.bucket[:-1] + '/another_directory/text2.txt']
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=fail_s3_files[i],
                compare_key=None,
                src_type='local',
                dest_type='s3',
                operation_name='upload', size=0,
                last_update=None,
                service=self.service,
                endpoint=self.endpoint))
        self.s3_handler.call(tasks)
        # Confirm only one of the files was uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)

    def test_multi_upload(self):
        """
        This test only checks that the multipart upload process works.
        It confirms that the parts are properly formatted but performs
        no checks beyond verifying that the parts are uploaded correctly.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=self.s3_files[i], size=15,
                operation_name='upload',
                service=self.service,
                endpoint=self.endpoint))
        self.s3_handler_multi.call(tasks)
        self.assertEqual(
            self.session.s3[self.bucket][
                'another_directory/text2.txt']['ContentType'],
            'text/plain')
Example #22
class S3HandlerTestUpload(S3HandlerBaseTest):
    """
    This class tests the ability to upload objects into an S3 bucket, as
    well as to perform multipart uploads.
    """
    def setUp(self):
        super(S3HandlerTestUpload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session,
                                          multi_threshold=10,
                                          chunksize=2,
                                          params=params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerTestUpload, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=self.s3_files[i],
                         operation_name='upload',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
        # Verify the guessed content type.
        self.assertEqual(
            self.session.s3[self.bucket]['another_directory/text2.txt']
            ['ContentType'], 'text/plain')

    def test_upload_fail(self):
        """
        One of the uploads in this test will fail because the second
        s3 destination's bucket does not exist.
        """
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
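        # The second destination's bucket name is truncated so that it
        # does not exist.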
        fail_s3_files = [
            self.bucket + '/text1.txt',
            self.bucket[:-1] + '/another_directory/text2.txt'
        ]
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=fail_s3_files[i],
                         compare_key=None,
                         src_type='local',
                         dest_type='s3',
                         operation_name='upload',
                         size=0,
                         last_update=None,
                         service=self.service,
                         endpoint=self.endpoint))
        self.s3_handler.call(tasks)
        # Confirm only one of the files was uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)

    def test_multi_upload(self):
        """
        This test only checks that the multipart upload process works.
        It confirms that the parts are properly formatted but performs
        no checks beyond verifying that the parts are uploaded correctly.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=self.s3_files[i],
                         size=15,
                         operation_name='upload',
                         service=self.service,
                         endpoint=self.endpoint))
        self.s3_handler_multi.call(tasks)
        self.assertEqual(
            self.session.s3[self.bucket]['another_directory/text2.txt']
            ['ContentType'], 'text/plain')
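
A note on the two handlers above: uploads are routed by comparing each task's size against the handler's multipart threshold. A minimal sketch of that decision, assuming a simple at-or-above-threshold rule (names here are illustrative, not awscli internals):

def choose_upload_strategy(file_size, multi_threshold):
    # Files at or above the threshold are split into parts;
    # smaller files go through a single standard upload call.
    if file_size >= multi_threshold:
        return 'multipart'
    return 'standard'

# With multi_threshold=10, the 15-byte files in test_multi_upload
# take the multipart path.
assert choose_upload_strategy(15, 10) == 'multipart'
assert choose_upload_strategy(0, 10) == 'standard'
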
Example #23
class LocalFileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.local_file = (os.path.abspath(".") + os.sep +
                           "some_directory" + os.sep + "text1.txt")
        self.local_dir = (os.path.abspath(".") + os.sep +
                          "some_directory" + os.sep)
        self.session = FakeSession()
        self.service = self.session.get_service("s3")
        self.endpoint = self.service.get_endpoint("us-east-1")
        self.files = make_loc_files()

    def tearDown(self):
        clean_loc_files(self.files)

    def test_local_file(self):
        """
        Generate a single local file.
        """
        input_local_file = {
            "src": {"path": self.local_file, "type": "local"},
            "dest": {"path": "bucket/text1.txt", "type": "s3"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(
            src=self.local_file,
            dest="bucket/text1.txt",
            compare_key="text1.txt",
            size=size,
            last_update=last_update,
            src_type="local",
            dest_type="s3",
            operation_name="",
            service=None,
            endpoint=None,
        )
        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_local_directory(self):
        """
        Generate an entire local directory.
        """
        input_local_dir = {
            "src": {"path": self.local_dir, "type": "local"},
            "dest": {"path": "bucket/", "type": "s3"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_dir)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(
            src=self.local_file,
            dest="bucket/text1.txt",
            compare_key="text1.txt",
            size=size,
            last_update=last_update,
            src_type="local",
            dest_type="s3",
            operation_name="",
            service=None,
            endpoint=None,
        )
        path = self.local_dir + "another_directory" + os.sep + "text2.txt"
        size, last_update = get_file_stat(path)
        file_info2 = FileInfo(
            src=path,
            dest="bucket/another_directory/text2.txt",
            compare_key="another_directory/text2.txt",
            size=size,
            last_update=last_update,
            src_type="local",
            dest_type="s3",
            operation_name="",
            service=None,
            endpoint=None,
        )
        ref_list = [file_info2, file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #24
class TestStreams(S3HandlerBaseTest):
    def setUp(self):
        super(TestStreams, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.params = {'is_stream': True, 'region': 'us-east-1'}

    def test_pull_from_stream(self):
        s3handler = S3StreamHandler(self.session, self.params, chunksize=2)
        input_to_stdin = b'This is a test'
        size = len(input_to_stdin)
        # Retrieve the entire string.
        with MockStdIn(input_to_stdin):
            payload, is_amount_requested = s3handler._pull_from_stream(size)
            data = payload.read()
            self.assertTrue(is_amount_requested)
            self.assertEqual(data, input_to_stdin)
        # Ensure the function exits when there is nothing to read.
        with MockStdIn():
            payload, is_amount_requested = s3handler._pull_from_stream(size)
            data = payload.read()
            self.assertFalse(is_amount_requested)
            self.assertEqual(data, b'')
        # Ensure the function does not grab too much out of stdin.
        with MockStdIn(input_to_stdin):
            payload, is_amount_requested = s3handler._pull_from_stream(size-2)
            data = payload.read()
            self.assertTrue(is_amount_requested)
            self.assertEqual(data, input_to_stdin[:-2])
            # Retrieve the rest of standard input.
            payload, is_amount_requested = s3handler._pull_from_stream(size)
            data = payload.read()
            self.assertFalse(is_amount_requested)
            self.assertEqual(data, input_to_stdin[-2:])

    def test_upload_stream_not_multipart_task(self):
        s3handler = S3StreamHandler(self.session, self.params)
        s3handler.executor = mock.Mock()
        fileinfos = [FileInfo('filename', operation_name='upload',
                              is_stream=True, size=0)]
        with MockStdIn(b'bar'):
            s3handler._enqueue_tasks(fileinfos)
        submitted_tasks = s3handler.executor.submit.call_args_list
        # No multipart upload should have been submitted.
        self.assertEqual(len(submitted_tasks), 1)
        self.assertEqual(submitted_tasks[0][0][0].payload.read(),
                         b'bar')

    def test_upload_stream_is_multipart_task(self):
        s3handler = S3StreamHandler(self.session, self.params,
                                    multi_threshold=1)
        s3handler.executor = mock.Mock()
        fileinfos = [FileInfo('filename', operation_name='upload',
                              is_stream=True, size=0)]
        with MockStdIn(b'bar'):
            s3handler._enqueue_tasks(fileinfos)
        submitted_tasks = s3handler.executor.submit.call_args_list
        # This should be a multipart upload so multiple tasks
        # should have been submitted.
        self.assertEqual(len(submitted_tasks), 4)
        self.assertEqual(submitted_tasks[1][0][0]._payload.read(),
                         b'b')
        self.assertEqual(submitted_tasks[2][0][0]._payload.read(),
                         b'ar')

    def test_upload_stream_with_expected_size(self):
        self.params['expected_size'] = 100000
        # With an expected size this large, the chunksize of 2 will
        # have to change.
        s3handler = S3StreamHandler(self.session, self.params, chunksize=2)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='upload',
                            is_stream=True)
        with MockStdIn(b'bar'):
            s3handler._enqueue_multipart_upload_tasks(fileinfo, b'')
        submitted_tasks = s3handler.executor.submit.call_args_list
        # Determine what the chunksize was changed to from one of the
        # UploadPartTasks.
        changed_chunk_size = submitted_tasks[1][0][0]._chunk_size
        # The new chunksize should keep the total number of parts
        # under 1000.
        self.assertTrue(100000/changed_chunk_size < 1000)

    def test_upload_stream_enqueue_upload_task(self):
        s3handler = S3StreamHandler(self.session, self.params)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='upload',
                            is_stream=True)
        stdin_input = b'This is a test'
        with MockStdIn(stdin_input):
            num_parts = s3handler._enqueue_upload_tasks(None, 2, mock.Mock(),
                                                        fileinfo,
                                                        UploadPartTask)
        submitted_tasks = s3handler.executor.submit.call_args_list
        # Ensure the returned number of parts is correct.
        self.assertEqual(num_parts, len(submitted_tasks) + 1)
        # Ensure the number of tasks submitted is as expected.
        self.assertEqual(len(submitted_tasks), 8)
        index = 0
        for i in range(len(submitted_tasks)-1):
            self.assertEqual(submitted_tasks[i][0][0]._payload.read(),
                             stdin_input[index:index+2])
            index += 2
        # Ensure that the last part is an empty string as expected.
        self.assertEqual(submitted_tasks[7][0][0]._payload.read(), b'')

    def test_enqueue_upload_single_part_task_stream(self):
        """
        This test ensures that a payload gets attached to a task when
        it is submitted to the executor.
        """
        s3handler = S3StreamHandler(self.session, self.params)
        s3handler.executor = mock.Mock()
        mock_task_class = mock.Mock()
        s3handler._enqueue_upload_single_part_task(
            part_number=1, chunk_size=2, upload_context=None,
            filename=None, task_class=mock_task_class,
            payload=b'This is a test'
        )
        args, kwargs = mock_task_class.call_args
        self.assertIn('payload', kwargs.keys())
        self.assertEqual(kwargs['payload'], b'This is a test')

    def test_enqueue_multipart_download_stream(self):
        """
        This test ensures the right calls are made in ``_enqueue_tasks()``
        if the file should be a multipart download.
        """
        s3handler = S3StreamHandler(self.session, self.params,
                                    multi_threshold=5)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='download',
                            is_stream=True)
        with mock.patch('awscli.customizations.s3.s3handler'
                        '.S3StreamHandler._enqueue_range_download_tasks') as \
                mock_enqueue_range_tasks:
            with mock.patch('awscli.customizations.s3.fileinfo.FileInfo'
                            '.set_size_from_s3') as mock_set_size_from_s3:
                # Set the file size to something larger than the multipart
                # threshold.
                fileinfo.size = 100
                # Run the main enqueue function.
                s3handler._enqueue_tasks([fileinfo])
                # Assert that the size of the ``FileInfo`` object was set
                # if we are downloading a stream.
                self.assertTrue(mock_set_size_from_s3.called)
                # Ensure that this download would have been a multipart
                # download.
                self.assertTrue(mock_enqueue_range_tasks.called)

    def test_enqueue_range_download_tasks_stream(self):
        s3handler = S3StreamHandler(self.session, self.params, chunksize=100)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='download',
                            is_stream=True, size=100)
        s3handler._enqueue_range_download_tasks(fileinfo)
        # Ensure that no request was sent to make a file locally.
        submitted_tasks = s3handler.executor.submit.call_args_list
        self.assertNotEqual(type(submitted_tasks[0][0][0]),
                            CreateLocalFileTask)
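
For context on test_pull_from_stream above: the method reads at most the requested number of bytes from stdin and reports whether the full amount was available. A minimal sketch of that contract, under those assumptions (a simplification, not the actual awscli implementation):

import sys
from io import BytesIO

def pull_from_stream(amount_requested):
    # Read at most amount_requested bytes from stdin's byte buffer.
    stream = getattr(sys.stdin, 'buffer', sys.stdin)
    chunk = stream.read(amount_requested)
    # Report whether the full amount was read, i.e. whether another
    # chunk may still be waiting on the stream.
    return BytesIO(chunk), len(chunk) == amount_requested
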
Example #25
class S3FileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.file1 = self.bucket + "/" + "text1.txt"
        self.file2 = self.bucket + "/" + "another_directory/text2.txt"
        self.service = self.session.get_service("s3")
        self.endpoint = self.service.get_endpoint("us-east-1")

    def tearDown(self):
        s3_cleanup(self.bucket, self.session)

    def test_nonexist_s3_file(self):
        """
        This tests to make sure that files are not improperly yielded
        by ensuring the file prefix exactly matches the input.
        """
        input_s3_file = {
            "src": {"path": self.file1[:-1], "type": "s3"},
            "dest": {"path": "text1.txt", "type": "local"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        self.assertEqual(len(list(files)), 0)

    def test_s3_file(self):
        """
        Generate a single s3 file.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.file1, "type": "s3"},
            "dest": {"path": "text1.txt", "type": "local"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures
        that zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.bucket + "/", "type": "s3"},
            "dest": {"path": "", "type": "local"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(
            src=self.file2,
            dest="another_directory" + os.sep + "text2.txt",
            compare_key="another_directory/text2.txt",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )
        file_info2 = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[1].size,
            last_update=result_list[1].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info, file_info2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.bucket + "/", "type": "s3"},
            "dest": {"path": "", "type": "local"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "delete", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(
            src=self.bucket + "/another_directory/",
            dest="another_directory" + os.sep,
            compare_key="another_directory/",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )
        file_info2 = FileInfo(
            src=self.file2,
            dest="another_directory" + os.sep + "text2.txt",
            compare_key="another_directory/text2.txt",
            size=result_list[1].size,
            last_update=result_list[1].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )
        file_info3 = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[2].size,
            last_update=result_list[2].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
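
The compare_key and dest values asserted above follow one pattern: compare_key is the object key relative to the requested prefix, and local destinations swap '/' for the platform separator. A rough sketch of that relationship, assuming the generator simply strips the source prefix (illustrative only):

import os

def to_compare_key(src, prefix):
    # compare_key is the portion of the key after the requested prefix.
    return src[len(prefix):]

def to_local_dest(compare_key):
    # Local destinations use the platform path separator.
    return compare_key.replace('/', os.sep)

key = 'bucket/another_directory/text2.txt'
assert to_compare_key(key, 'bucket/') == 'another_directory/text2.txt'
# On POSIX to_local_dest is a no-op; on Windows it would yield
# 'another_directory\\text2.txt'.
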
Example #26
class LocalFileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.local_file = six.text_type(os.path.abspath('.') +
                                        os.sep + 'some_directory' +
                                        os.sep + 'text1.txt')
        self.local_dir = six.text_type(os.path.abspath('.') +
                                       os.sep + 'some_directory' +
                                       os.sep)
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = make_loc_files()

    def tearDown(self):
        clean_loc_files(self.files)

    def test_local_file(self):
        """
        Generate a single local file.
        """
        input_local_file = {'src': {'path': self.local_file,
                                    'type': 'local'},
                            'dest': {'path': 'bucket/text1.txt',
                                     'type': 's3'},
                            'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service,
                              self.endpoint, '', params).call(input_local_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                             compare_key='text1.txt', size=size,
                             last_update=last_update, src_type='local',
                             dest_type='s3', operation_name='',
                             service=None, endpoint=None)
        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_local_directory(self):
        """
        Generate an entire local directory.
        """
        input_local_dir = {'src': {'path': self.local_dir,
                                   'type': 'local'},
                           'dest': {'path': 'bucket/',
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '', params).call(input_local_dir)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                             compare_key='text1.txt', size=size,
                             last_update=last_update, src_type='local',
                             dest_type='s3', operation_name='',
                             service=None, endpoint=None)
        path = self.local_dir + 'another_directory' + os.sep \
            + 'text2.txt'
        size, last_update = get_file_stat(path)
        file_info2 = FileInfo(src=path,
                              dest='bucket/another_directory/text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=size, last_update=last_update,
                              src_type='local',
                              dest_type='s3', operation_name='',
                              service=None, endpoint=None)
        ref_list = [file_info2, file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
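
Both LocalFileGenerator variants lean on get_file_stat for the reference size and last_update values. A plausible minimal version, assuming it simply wraps os.stat (a sketch, not necessarily awscli's implementation):

import os
import datetime

def get_file_stat(path):
    # Return (size_in_bytes, last_modified_datetime) for a local file.
    stats = os.stat(path)
    return stats.st_size, datetime.datetime.fromtimestamp(stats.st_mtime)
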
Example #27
class S3HandlerTestDownload(S3HandlerBaseTest):
    """
    This class tests the ability to download s3 objects locally, as well
    as via multipart downloads.
    """
    def setUp(self):
        super(S3HandlerTestDownload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session, params,
                                          multi_threshold=10, chunksize=2)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

        self.fail_session = FakeSession(connection_error=True)
        self.fail_session.s3 = self.session.s3
        self.s3_handler_multi_except = S3Handler(self.fail_session, params,
                                                 multi_threshold=10,
                                                 chunksize=2)

    def tearDown(self):
        super(S3HandlerTestDownload, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_download(self):
        # Confirm that the files do not exist.
        for filename in self.loc_files:
            self.assertFalse(os.path.exists(filename))
        # Create file info objects to perform download.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='download',
                size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the download.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download(self):
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='download',
                size=15,
                service=self.service,
                endpoint=self.endpoint,
            ))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download_fail(self):
        """
        This test ensures that a multipart download can handle a
        standard error exception stemming from an operation
        being performed on a nonexistent bucket.  The existing file
        should be downloaded properly, but the other will not be.
        """
        tasks = []
        wrong_s3_files = [self.bucket + '/text1.txt',
                          self.bucket[:-1] + '/another_directory/text2.txt']
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=wrong_s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='download',
                size=15,
                service=self.service,
                endpoint=self.endpoint
            ))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        self.assertTrue(os.path.exists(self.loc_files[0]))
        # The second file should not exist.
        self.assertFalse(os.path.exists(self.loc_files[1]))
        # Ensure that contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
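
With multi_threshold=10 and chunksize=2, each 15-byte object in test_multi_download is fetched through a series of ranged GETs. The part arithmetic those sizes imply, as a small sketch (assumed ceiling division, not the handler's exact code):

import math

def num_download_parts(size, chunksize):
    # Each ranged GET covers chunksize bytes; the final part may be short.
    return int(math.ceil(size / float(chunksize)))

# A 15-byte object with a 2-byte chunksize splits into 8 ranged
# requests, the last covering a single byte.
assert num_download_parts(15, 2) == 8
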
Example #28
class TestSymlinksIgnoreFiles(unittest.TestCase):
    """
    This class tests the ability to list out the correct local files
    depending on whether symlinks are being followed.  It also tests
    the handling of broken symlinks.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()
        # List of local filenames.
        self.filenames = []
        self.root = self.files.rootdir
        self.bucket = 'bucket/'
        filename_1 = self.files.create_file('foo.txt',
                                            contents='foo.txt contents')
        self.filenames.append(filename_1)
        nested_dir = os.path.join(self.root, 'realfiles')
        os.mkdir(nested_dir)
        filename_2 = self.files.create_file(os.path.join(nested_dir,
                                                         'bar.txt'),
                                            contents='bar.txt contents')
        self.filenames.append(filename_2)
        # Names of symlinks.
        self.symlinks = []
        # Names of files if symlinks are followed.
        self.symlink_files = []
        # Create a symlink to the file foo.txt.
        symlink_1 = os.path.join(self.root, 'symlink_1')
        os.symlink(filename_1, symlink_1)
        self.symlinks.append(symlink_1)
        self.symlink_files.append(symlink_1)
        # Create a symlink to a file that does not exist.
        symlink_2 = os.path.join(self.root, 'symlink_2')
        os.symlink('non-existent-file', symlink_2)
        self.symlinks.append(symlink_2)
        # Create a symlink to the directory realfiles.
        symlink_3 = os.path.join(self.root, 'symlink_3')
        os.symlink(nested_dir, symlink_3)
        self.symlinks.append(symlink_3)
        self.symlink_files.append(os.path.join(symlink_3, 'bar.txt'))

    def tearDown(self):
        self.files.remove_all()

    def test_no_follow_symlink(self):
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        input_local_dir = {'src': {'path': abs_root,
                                   'type': 'local'},
                           'dest': {'path': self.bucket,
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        file_stats = FileGenerator(self.service, self.endpoint,
                                   '', False).call(input_local_dir)
        self.filenames.sort()
        result_list = []
        for file_stat in file_stats:
            result_list.append(getattr(file_stat, 'src'))
        self.assertEqual(len(result_list), len(self.filenames))
        # Just check to make sure the right local files are generated.
        for i in range(len(result_list)):
            filename = six.text_type(os.path.abspath(self.filenames[i]))
            self.assertEqual(result_list[i], filename)

    def test_warn_bad_symlink(self):
        """
        This tests to make sure a warning is queued when following
        bad symlinks.
        """
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        input_local_dir = {'src': {'path': abs_root,
                                   'type': 'local'},
                           'dest': {'path': self.bucket,
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        file_gen = FileGenerator(self.service, self.endpoint, '', True)
        file_stats = file_gen.call(input_local_dir)
        all_filenames = self.filenames + self.symlink_files
        all_filenames.sort()
        result_list = []
        for file_stat in file_stats:
            result_list.append(getattr(file_stat, 'src'))
        self.assertEqual(len(result_list), len(all_filenames))
        # Just check to make sure the right local files are generated.
        for i in range(len(result_list)):
            filename = six.text_type(os.path.abspath(all_filenames[i]))
            self.assertEqual(result_list[i], filename)
        self.assertFalse(file_gen.result_queue.empty())

    def test_follow_symlink(self):
        # First remove the bad symlink.
        os.remove(os.path.join(self.root, 'symlink_2'))
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        input_local_dir = {'src': {'path': abs_root,
                                   'type': 'local'},
                           'dest': {'path': self.bucket,
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        file_stats = FileGenerator(self.service, self.endpoint,
                                   '', True).call(input_local_dir)
        all_filenames = self.filenames + self.symlink_files
        all_filenames.sort()
        result_list = []
        for file_stat in file_stats:
            result_list.append(getattr(file_stat, 'src'))
        self.assertEqual(len(result_list), len(all_filenames))
        # Just check to make sure the right local files are generated.
        for i in range(len(result_list)):
            filename = six.text_type(os.path.abspath(all_filenames[i]))
            self.assertEqual(result_list[i], filename)
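
The three symlink tests above hinge on one decision per path: list it, skip it, or warn about it. A compact sketch of that triage, assuming a boolean follow flag like the one passed to FileGenerator (names are illustrative):

import os

def classify_path(path, follow_symlinks):
    # Regular files and directories are always listed.
    if not os.path.islink(path):
        return 'include'
    # With following disabled, every symlink is skipped outright.
    if not follow_symlinks:
        return 'skip'
    # A followed link whose target is missing should surface a warning.
    if not os.path.exists(path):
        return 'warn'
    return 'include'
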
Example #29
class LocalFileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.local_file = six.text_type(os.path.abspath('.') +
                                        os.sep + 'some_directory' +
                                        os.sep + 'text1.txt')
        self.local_dir = six.text_type(os.path.abspath('.') +
                                       os.sep + 'some_directory' +
                                       os.sep)
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = make_loc_files()

    def tearDown(self):
        clean_loc_files(self.files)

    def test_local_file(self):
        """
        Generate a single local file.
        """
        input_local_file = {'src': {'path': self.local_file,
                                    'type': 'local'},
                            'dest': {'path': 'bucket/text1.txt',
                                     'type': 's3'},
                            'dir_op': False, 'use_src_name': False}
        files = FileGenerator(self.service,
                              self.endpoint, '').call(input_local_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                             compare_key='text1.txt', size=size,
                             last_update=last_update, src_type='local',
                             dest_type='s3', operation_name='',
                             service=None, endpoint=None)
        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_local_directory(self):
        """
        Generate an entire local directory.
        """
        input_local_dir = {'src': {'path': self.local_dir,
                                   'type': 'local'},
                           'dest': {'path': 'bucket/',
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        files = FileGenerator(self.service,
                              self.endpoint, '').call(input_local_dir)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                             compare_key='text1.txt', size=size,
                             last_update=last_update, src_type='local',
                             dest_type='s3', operation_name='',
                             service=None, endpoint=None)
        path = self.local_dir + 'another_directory' + os.sep \
            + 'text2.txt'
        size, last_update = get_file_stat(path)
        file_info2 = FileInfo(src=path,
                              dest='bucket/another_directory/text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=size, last_update=last_update,
                              src_type='local',
                              dest_type='s3', operation_name='',
                              service=None, endpoint=None)
        ref_list = [file_info2, file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #30
class TestSymlinksIgnoreFiles(unittest.TestCase):
    """
    This class tests the ability to list out the correct local files
    depending on whether symlinks are being followed.  It also tests
    to ensure that broken symlinks fail.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()
        # List of local filenames.
        self.filenames = []
        self.root = self.files.rootdir
        self.bucket = 'bucket/'
        filename_1 = self.files.create_file('foo.txt',
                                            contents='foo.txt contents')
        self.filenames.append(filename_1)
        nested_dir = os.path.join(self.root, 'realfiles')
        os.mkdir(nested_dir)
        filename_2 = self.files.create_file(os.path.join(nested_dir,
                                                         'bar.txt'),
                                            contents='bar.txt contents')
        self.filenames.append(filename_2)
        # Names of symlinks.
        self.symlinks = []
        # Names of files if symlinks are followed.
        self.symlink_files = []
        # Create a symlink to the file foo.txt.
        symlink_1 = os.path.join(self.root, 'symlink_1')
        os.symlink(filename_1, symlink_1)
        self.symlinks.append(symlink_1)
        self.symlink_files.append(symlink_1)
        # Create a symlink to a file that does not exist.
        symlink_2 = os.path.join(self.root, 'symlink_2')
        os.symlink('non-existent-file', symlink_2)
        self.symlinks.append(symlink_2)
        # Create a symlink to the directory realfiles.
        symlink_3 = os.path.join(self.root, 'symlink_3')
        os.symlink(nested_dir, symlink_3)
        self.symlinks.append(symlink_3)
        self.symlink_files.append(os.path.join(symlink_3, 'bar.txt'))

    def tearDown(self):
        self.files.remove_all()

    def test_no_follow_symlink(self):
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        input_local_dir = {'src': {'path': abs_root,
                                   'type': 'local'},
                           'dest': {'path': self.bucket,
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        file_infos = FileGenerator(self.service, self.endpoint,
                                   '', False).call(input_local_dir)
        self.filenames.sort()
        result_list = []
        for file_info in file_infos:
            result_list.append(getattr(file_info, 'src'))
        self.assertEqual(len(result_list), len(self.filenames))
        # Just check to make sure the right local files are generated.
        for i in range(len(result_list)):
            filename = six.text_type(os.path.abspath(self.filenames[i]))
            self.assertEqual(result_list[i], filename)

    def test_follow_bad_symlink(self):
        """
        This tests to make sure it fails when following bad symlinks.
        """
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        input_local_dir = {'src': {'path': abs_root,
                                   'type': 'local'},
                           'dest': {'path': self.bucket,
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        file_infos = FileGenerator(self.service, self.endpoint,
                                   '', True).call(input_local_dir)
        result_list = []
        rc = 0
        try:
            for file_info in file_infos:
                result_list.append(getattr(file_info, 'src'))
            rc = 1
        except OSError:
            pass
        # The error shows up as a ValueError in Python 3.
        except ValueError:
            pass
        self.assertEqual(0, rc)

    def test_follow_symlink(self):
        # First remove the bad symlink.
        os.remove(os.path.join(self.root, 'symlink_2'))
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        input_local_dir = {'src': {'path': abs_root,
                                   'type': 'local'},
                           'dest': {'path': self.bucket,
                                    'type': 's3'},
                           'dir_op': True, 'use_src_name': True}
        file_infos = FileGenerator(self.service, self.endpoint,
                                   '', True).call(input_local_dir)
        all_filenames = self.filenames + self.symlink_files
        all_filenames.sort()
        result_list = []
        for file_info in file_infos:
            result_list.append(getattr(file_info, 'src'))
        self.assertEqual(len(result_list), len(all_filenames))
        # Just check to make sure the right local files are generated.
        for i in range(len(result_list)):
            filename = six.text_type(os.path.abspath(all_filenames[i]))
            self.assertEqual(result_list[i], filename)
Example #31
class S3HandlerTestDownload(S3HandlerBaseTest):
    """
    This class tests the ability to download s3 objects locally, as well
    as via multipart downloads.
    """
    def setUp(self):
        super(S3HandlerTestDownload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session,
                                          params,
                                          multi_threshold=10,
                                          chunksize=2)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

        self.fail_session = FakeSession(connection_error=True)
        self.fail_session.s3 = self.session.s3
        self.s3_handler_multi_except = S3Handler(self.fail_session,
                                                 params,
                                                 multi_threshold=10,
                                                 chunksize=2)

    def tearDown(self):
        super(S3HandlerTestDownload, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_download(self):
        # Confirm that the files do not exist.
        for filename in self.loc_files:
            self.assertFalse(os.path.exists(filename))
        # Create file info objects to perform download.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=self.s3_files[i],
                         src_type='s3',
                         dest=self.loc_files[i],
                         dest_type='local',
                         last_update=time,
                         operation_name='download',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the download.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download(self):
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(
                    src=self.s3_files[i],
                    src_type='s3',
                    dest=self.loc_files[i],
                    dest_type='local',
                    last_update=time,
                    operation_name='download',
                    size=15,
                    service=self.service,
                    endpoint=self.endpoint,
                ))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download_fail(self):
        """
        This test ensures that a multipart download can handle a
        standard error exception stemming from an operation
        being performed on a nonexistent bucket.  The existing file
        should be downloaded properly, but the other will not be.
        """
        tasks = []
        wrong_s3_files = [
            self.bucket + '/text1.txt',
            self.bucket[:-1] + '/another_directory/text2.txt'
        ]
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=wrong_s3_files[i],
                         src_type='s3',
                         dest=self.loc_files[i],
                         dest_type='local',
                         last_update=time,
                         operation_name='download',
                         size=15,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        self.assertTrue(os.path.exists(self.loc_files[0]))
        # The second file should not exist.
        self.assertFalse(os.path.exists(self.loc_files[1]))
        # Ensure that contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
Example #32
class S3HandlerTestDeleteList(S3HandlerBaseTest):
    """
    This tests the ability to delete files both locally and in s3.
    """
    def setUp(self):
        super(S3HandlerTestDeleteList, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()

    def tearDown(self):
        super(S3HandlerTestDeleteList, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_loc_delete(self):
        """
        Test delete local file tasks.  The local files are the same
        ones generated by filegenerator_test.py.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for filename in files:
            self.assertTrue(os.path.exists(filename))
            tasks.append(FileInfo(
                src=filename, src_type='local',
                dest_type='s3', operation_name='delete', size=0,
                service=self.service, endpoint=self.endpoint))
        self.s3_handler.call(tasks)
        for filename in files:
            self.assertFalse(os.path.exists(filename))

    def test_s3_delete(self):
        """
        Tests S3 deletes. The files used are the same ones generated by
        filegenerators_test.py.  This includes the created s3 file.
        """
        keys = [self.bucket + '/another_directory/text2.txt',
                self.bucket + '/text1.txt',
                self.bucket + '/another_directory/']
        tasks = []
        for key in keys:
            tasks.append(FileInfo(
                src=key, src_type='s3',
                dest_type='local', operation_name='delete',
                size=0,
                service=self.service,
                endpoint=self.endpoint))
        self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)

    def test_list_objects(self):
        """
        Tests the ability to list objects, common prefixes, and buckets.
        If an error occurs, the test fails, as this is only a printing
        operation.
        """
        prefix_name = self.bucket + '/'
        file_info = FileInfo(
            src=prefix_name, operation_name='list_objects', size=0,
            service=self.service, endpoint=self.endpoint)
        params = {'region': 'us-east-1'}
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
        file_info = FileInfo(
            src='', operation_name='list_objects', size=0,
            service=self.service, endpoint=self.endpoint)
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
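
Outside the test harness, the delete path above reduces to building one FileInfo per key and handing the batch to the handler. A hedged usage sketch reusing the wiring from these examples (the helper name delete_keys is hypothetical):

def delete_keys(session, service, endpoint, keys):
    # Build one delete task per key and run them through S3Handler.
    handler = S3Handler(session, {'region': 'us-east-1'})
    tasks = [FileInfo(src=key, src_type='s3', dest_type='local',
                      operation_name='delete', size=0,
                      service=service, endpoint=endpoint)
             for key in keys]
    handler.call(tasks)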